gcc/config/rs6000/rs6000.c
/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct GTY(()) machine_function
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch            name,           tune  arch */
  { (const char *)0,   "--with-cpu=",  1,    1 },
  { (const char *)0,   "-mcpu=",       1,    1 },
  { (const char *)0,   "-mtune=",      1,    0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero to use isel instructions.  */
int rs6000_isel;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool spe_abi;                 /* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;             /* True if -mabi=altivec/no-altivec used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;                  /* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;      /* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;      /* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,                  /* cache line size */
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,                  /* cache line size */
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  6,                    /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

RS
652/* Instruction costs on PPC8540 processors. */
653static const
654struct processor_costs ppc8540_cost = {
06a67bdd
RS
655 COSTS_N_INSNS (4), /* mulsi */
656 COSTS_N_INSNS (4), /* mulsi_const */
657 COSTS_N_INSNS (4), /* mulsi_const9 */
658 COSTS_N_INSNS (4), /* muldi */
659 COSTS_N_INSNS (19), /* divsi */
660 COSTS_N_INSNS (19), /* divdi */
661 COSTS_N_INSNS (4), /* fp */
662 COSTS_N_INSNS (4), /* dmul */
663 COSTS_N_INSNS (29), /* sdiv */
664 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 665 32, /* cache line size */
5f732aba
DE
666 32, /* l1 cache */
667 256, /* l2 cache */
0b11da67 668 1, /* prefetch streams /*/
8b897cfa
RS
669};
670
fa41c305
EW
671/* Instruction costs on E300C2 and E300C3 cores. */
672static const
673struct processor_costs ppce300c2c3_cost = {
674 COSTS_N_INSNS (4), /* mulsi */
675 COSTS_N_INSNS (4), /* mulsi_const */
676 COSTS_N_INSNS (4), /* mulsi_const9 */
677 COSTS_N_INSNS (4), /* muldi */
678 COSTS_N_INSNS (19), /* divsi */
679 COSTS_N_INSNS (19), /* divdi */
680 COSTS_N_INSNS (3), /* fp */
681 COSTS_N_INSNS (4), /* dmul */
682 COSTS_N_INSNS (18), /* sdiv */
683 COSTS_N_INSNS (33), /* ddiv */
642639ce 684 32,
a19b7d46
EW
685 16, /* l1 cache */
686 16, /* l2 cache */
642639ce 687 1, /* prefetch streams /*/
fa41c305
EW
688};
689
edae5fe3
DE
690/* Instruction costs on PPCE500MC processors. */
691static const
692struct processor_costs ppce500mc_cost = {
693 COSTS_N_INSNS (4), /* mulsi */
694 COSTS_N_INSNS (4), /* mulsi_const */
695 COSTS_N_INSNS (4), /* mulsi_const9 */
696 COSTS_N_INSNS (4), /* muldi */
697 COSTS_N_INSNS (14), /* divsi */
698 COSTS_N_INSNS (14), /* divdi */
699 COSTS_N_INSNS (8), /* fp */
700 COSTS_N_INSNS (10), /* dmul */
701 COSTS_N_INSNS (36), /* sdiv */
702 COSTS_N_INSNS (66), /* ddiv */
703 64, /* cache line size */
704 32, /* l1 cache */
705 128, /* l2 cache */
706 1, /* prefetch streams /*/
707};
708
8b897cfa
RS
709/* Instruction costs on POWER4 and POWER5 processors. */
710static const
711struct processor_costs power4_cost = {
06a67bdd
RS
712 COSTS_N_INSNS (3), /* mulsi */
713 COSTS_N_INSNS (2), /* mulsi_const */
714 COSTS_N_INSNS (2), /* mulsi_const9 */
715 COSTS_N_INSNS (4), /* muldi */
716 COSTS_N_INSNS (18), /* divsi */
717 COSTS_N_INSNS (34), /* divdi */
718 COSTS_N_INSNS (3), /* fp */
719 COSTS_N_INSNS (3), /* dmul */
720 COSTS_N_INSNS (17), /* sdiv */
721 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 722 128, /* cache line size */
5f732aba
DE
723 32, /* l1 cache */
724 1024, /* l2 cache */
0b11da67 725 8, /* prefetch streams /*/
8b897cfa
RS
726};
727
44cd321e
PS
728/* Instruction costs on POWER6 processors. */
729static const
730struct processor_costs power6_cost = {
731 COSTS_N_INSNS (8), /* mulsi */
732 COSTS_N_INSNS (8), /* mulsi_const */
733 COSTS_N_INSNS (8), /* mulsi_const9 */
734 COSTS_N_INSNS (8), /* muldi */
735 COSTS_N_INSNS (22), /* divsi */
736 COSTS_N_INSNS (28), /* divdi */
737 COSTS_N_INSNS (3), /* fp */
738 COSTS_N_INSNS (3), /* dmul */
739 COSTS_N_INSNS (13), /* sdiv */
740 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 741 128, /* cache line size */
5f732aba
DE
742 64, /* l1 cache */
743 2048, /* l2 cache */
0b11da67 744 16, /* prefetch streams */
44cd321e
PS
745};
746

static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static bool rs6000_legitimate_address_p (enum machine_mode, rtx, bool);
static rtx rs6000_generate_compare (rtx, enum machine_mode);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
EXPORTED_CONST struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static rtx rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
	(enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static void * rs6000_alloc_sched_context (void);
static void rs6000_init_sched_context (void *, bool);
static void rs6000_set_sched_context (void *);
static void rs6000_free_sched_context (void *);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (unsigned int, tree);
static tree rs6000_builtin_vec_perm (tree, tree *);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_address (rtx, rtx, enum machine_mode);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct GTY(()) toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "mq", "lr", "ctr", "ap",
  "0", "1", "2", "3", "4", "5", "6", "7",
  "xer",
  /* AltiVec registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
  "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
  "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
  "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
  "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
  "mq", "lr", "ctr", "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
  "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
  "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
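/* Worked example of the layout above: ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO)
   is 0x80000000 (the bit for %v0), and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO
   + 31) is 0x00000001 (the bit for %v31), matching the VRSAVE ordering.  */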

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS rs6000_legitimize_address

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
#undef TARGET_SCHED_INIT_SCHED_CONTEXT
#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
#undef TARGET_SCHED_SET_SCHED_CONTEXT
#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
#undef TARGET_SCHED_FREE_SCHED_CONTEXT
#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P rs6000_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
        && (mode != TDmode || (regno % 2) == 0)
        && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
           && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, (enum machine_mode) m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

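/* Usage note (illustrative; the macro itself lives in rs6000.h): the table
   filled in above is what the rest of the compiler is expected to consult,
   e.g. HARD_REGNO_MODE_OK indexing rs6000_hard_regno_mode_ok_p[mode][regno],
   so the full predicate rs6000_hard_regno_mode_ok is evaluated only once per
   register/mode pair at initialization rather than on every query.  */
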
e4cad568
GK
1356#if TARGET_MACHO
1357/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1358
1359static void
1360darwin_rs6000_override_options (void)
1361{
 1362	  /* The Darwin ABI always includes AltiVec; it can't be (validly) turned
 1363	     off.  */
1364 rs6000_altivec_abi = 1;
1365 TARGET_ALTIVEC_VRSAVE = 1;
1366 if (DEFAULT_ABI == ABI_DARWIN)
1367 {
1368 if (MACHO_DYNAMIC_NO_PIC_P)
1369 {
1370 if (flag_pic)
1371 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1372 flag_pic = 0;
1373 }
1374 else if (flag_pic == 1)
1375 {
1376 flag_pic = 2;
1377 }
1378 }
1379 if (TARGET_64BIT && ! TARGET_POWERPC64)
1380 {
1381 target_flags |= MASK_POWERPC64;
1382 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1383 }
1384 if (flag_mkernel)
1385 {
1386 rs6000_default_long_calls = 1;
1387 target_flags |= MASK_SOFT_FLOAT;
1388 }
1389
1390 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1391 Altivec. */
1392 if (!flag_mkernel && !flag_apple_kext
1393 && TARGET_64BIT
1394 && ! (target_flags_explicit & MASK_ALTIVEC))
1395 target_flags |= MASK_ALTIVEC;
1396
 1397	  /* Unless the user (not the configurer) has explicitly overridden
 1398	     it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to the
 1399	     G4 unless targeting the kernel.  */
1400 if (!flag_mkernel
1401 && !flag_apple_kext
1402 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1403 && ! (target_flags_explicit & MASK_ALTIVEC)
1404 && ! rs6000_select[1].string)
1405 {
1406 target_flags |= MASK_ALTIVEC;
1407 }
1408}
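/* Editor's note (illustrative, not part of the original source): with the
   hook above, a Darwin compile using -m64 and no explicit -maltivec or
   -mno-altivec (and neither -mkernel nor -fapple-kext) gets MASK_ALTIVEC
   turned on; -mdynamic-no-pic wins over -fpic/-fPIC with a warning; and
   -mkernel forces long calls and soft float.  */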
1409#endif
1410
c1e55850
GK
1411/* If not otherwise specified by a target, make 'long double' equivalent to
1412 'double'. */
1413
1414#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1415#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1416#endif
1417
5248c961
RK
1418/* Override command line options. Mostly we process the processor
1419 type and sometimes adjust other TARGET_ options. */
1420
1421void
d779d0dc 1422rs6000_override_options (const char *default_cpu)
5248c961 1423{
c4d38ccb 1424 size_t i, j;
8e3f41e7 1425 struct rs6000_cpu_select *ptr;
66188a7e 1426 int set_masks;
5248c961 1427
66188a7e 1428 /* Simplifications for entries below. */
85638c0d 1429
66188a7e
GK
1430 enum {
1431 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1432 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1433 };
85638c0d 1434
66188a7e
GK
1435 /* This table occasionally claims that a processor does not support
1436 a particular feature even though it does, but the feature is slower
1437 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1438 complete description of the processor's support.
66188a7e
GK
1439
1440 Please keep this list in order, and don't forget to update the
1441 documentation in invoke.texi when adding a new processor or
1442 flag. */
5248c961
RK
1443 static struct ptt
1444 {
8b60264b
KG
1445 const char *const name; /* Canonical processor name. */
1446 const enum processor_type processor; /* Processor type enum value. */
1447 const int target_enable; /* Target flags to enable. */
8b60264b 1448 } const processor_target_table[]
66188a7e 1449 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1450 {"403", PROCESSOR_PPC403,
66188a7e 1451 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1452 {"405", PROCESSOR_PPC405,
716019c0
JM
1453 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1454 {"405fp", PROCESSOR_PPC405,
1455 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1456 {"440", PROCESSOR_PPC440,
716019c0
JM
1457 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1458 {"440fp", PROCESSOR_PPC440,
1459 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1460 {"464", PROCESSOR_PPC440,
1461 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1462 {"464fp", PROCESSOR_PPC440,
1463 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1464 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1465 {"601", PROCESSOR_PPC601,
66188a7e
GK
1466 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1467 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1468 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1469 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1471 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1472 {"620", PROCESSOR_PPC620,
1473 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1474 {"630", PROCESSOR_PPC630,
1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1476 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1477 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1478 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1479 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1480 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1481 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1482 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1483 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1484 /* 8548 has a dummy entry for now. */
a45bce6e 1485 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1486 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1487 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1488 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1489 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1490 {"970", PROCESSOR_POWER4,
66188a7e 1491 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1492 {"cell", PROCESSOR_CELL,
1493 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1494 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1495 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1496 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1497 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1498 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1499 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1500 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1501 {"power2", PROCESSOR_POWER,
1502 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1503 {"power3", PROCESSOR_PPC630,
1504 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1505 {"power4", PROCESSOR_POWER4,
9a8d7941 1506 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1507 | MASK_MFCRF},
ec507f2d 1508 {"power5", PROCESSOR_POWER5,
9a8d7941 1509 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1510 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1511 {"power5+", PROCESSOR_POWER5,
9a8d7941 1512 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1513 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1514 {"power6", PROCESSOR_POWER6,
0783d48d
DE
1515 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1516 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1517 {"power6x", PROCESSOR_POWER6,
0783d48d
DE
1518 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1519 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1520 | MASK_MFPGPR},
d40c9e33
PB
1521 {"power7", PROCESSOR_POWER5,
1522 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1523 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
66188a7e
GK
1524 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1525 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1526 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1527 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1528 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1529 {"rios2", PROCESSOR_RIOS2,
1530 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1531 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1532 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1533 {"rs64", PROCESSOR_RS64A,
1534 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1535 };
5248c961 1536
ca7558fc 1537 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1538
66188a7e
GK
1539 /* Some OSs don't support saving the high part of 64-bit registers on
1540 context switch. Other OSs don't support saving Altivec registers.
1541 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1542 settings; if the user wants either, the user must explicitly specify
1543 them and we won't interfere with the user's specification. */
1544
1545 enum {
1546 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1547 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1548 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1549 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1550 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1551 };
0d1fbc8c 1552
c4ad648e 1553 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1554#ifdef OS_MISSING_POWERPC64
1555 if (OS_MISSING_POWERPC64)
1556 set_masks &= ~MASK_POWERPC64;
1557#endif
1558#ifdef OS_MISSING_ALTIVEC
1559 if (OS_MISSING_ALTIVEC)
1560 set_masks &= ~MASK_ALTIVEC;
1561#endif
1562
768875a8
AM
 1563  /* Don't let the processor default override flags that were given explicitly.  */
1564 set_masks &= ~target_flags_explicit;
957211c3 1565
a4f6c312 1566 /* Identify the processor type. */
8e3f41e7 1567 rs6000_select[0].string = default_cpu;
3cb999d8 1568 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1569
b6a1cbae 1570 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1571 {
8e3f41e7
MM
1572 ptr = &rs6000_select[i];
1573 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1574 {
8e3f41e7
MM
1575 for (j = 0; j < ptt_size; j++)
1576 if (! strcmp (ptr->string, processor_target_table[j].name))
1577 {
1578 if (ptr->set_tune_p)
1579 rs6000_cpu = processor_target_table[j].processor;
1580
1581 if (ptr->set_arch_p)
1582 {
66188a7e
GK
1583 target_flags &= ~set_masks;
1584 target_flags |= (processor_target_table[j].target_enable
1585 & set_masks);
8e3f41e7
MM
1586 }
1587 break;
1588 }
1589
4406229e 1590 if (j == ptt_size)
8e3f41e7 1591 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1592 }
1593 }
8a61d227 1594
edae5fe3
DE
1595 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1596 && !rs6000_explicit_options.isel)
a3170dc6
AH
1597 rs6000_isel = 1;
1598
edae5fe3
DE
1599 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1600 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1601 {
1602 if (TARGET_ALTIVEC)
1603 error ("AltiVec not supported in this target");
1604 if (TARGET_SPE)
 1605 	error ("SPE not supported in this target");
1606 }
1607
25696a75 1608 /* Disable Cell microcode if we are optimizing for the Cell
c921bad8
AP
1609 and not optimizing for size. */
1610 if (rs6000_gen_cell_microcode == -1)
1611 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1612 && !optimize_size);
1613
dff9f1b6 1614 /* If we are optimizing big endian systems for space, use the load/store
c921bad8
AP
 1615     multiple and string instructions when we are not generating
 1616     Cell microcode.  */
1617 if (BYTES_BIG_ENDIAN && optimize_size && !rs6000_gen_cell_microcode)
957211c3 1618 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1619
a4f6c312
SS
1620 /* Don't allow -mmultiple or -mstring on little endian systems
1621 unless the cpu is a 750, because the hardware doesn't support the
1622 instructions used in little endian mode, and causes an alignment
1623 trap. The 750 does not cause an alignment trap (except when the
1624 target is unaligned). */
bef84347 1625
b21fb038 1626 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1627 {
1628 if (TARGET_MULTIPLE)
1629 {
1630 target_flags &= ~MASK_MULTIPLE;
b21fb038 1631 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1632 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1633 }
1634
1635 if (TARGET_STRING)
1636 {
1637 target_flags &= ~MASK_STRING;
b21fb038 1638 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1639 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1640 }
1641 }
3933e0e1 1642
38c1f2d7
MM
1643 /* Set debug flags */
1644 if (rs6000_debug_name)
1645 {
bfc79d3b 1646 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1647 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1648 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1649 rs6000_debug_stack = 1;
bfc79d3b 1650 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1651 rs6000_debug_arg = 1;
1652 else
c725bd79 1653 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1654 }
1655
57ac7be9
AM
1656 if (rs6000_traceback_name)
1657 {
1658 if (! strncmp (rs6000_traceback_name, "full", 4))
1659 rs6000_traceback = traceback_full;
1660 else if (! strncmp (rs6000_traceback_name, "part", 4))
1661 rs6000_traceback = traceback_part;
1662 else if (! strncmp (rs6000_traceback_name, "no", 2))
1663 rs6000_traceback = traceback_none;
1664 else
9e637a26 1665 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1666 rs6000_traceback_name);
1667 }
1668
78f5898b
AH
1669 if (!rs6000_explicit_options.long_double)
1670 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1671
602ea4d3 1672#ifndef POWERPC_LINUX
d3603e8c 1673 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1674 rs6000_ieeequad = 1;
1675#endif
1676
0db747be
DE
1677 /* Enable Altivec ABI for AIX -maltivec. */
1678 if (TARGET_XCOFF && TARGET_ALTIVEC)
1679 rs6000_altivec_abi = 1;
1680
a2db2771
JJ
1681 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1682 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1683 be explicitly overridden in either case. */
1684 if (TARGET_ELF)
6d0ef01e 1685 {
a2db2771
JJ
1686 if (!rs6000_explicit_options.altivec_abi
1687 && (TARGET_64BIT || TARGET_ALTIVEC))
1688 rs6000_altivec_abi = 1;
1689
1690 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1691 if (!rs6000_explicit_options.vrsave)
1692 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1693 }
1694
594a51fe
SS
1695 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1696 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1697 {
1698 rs6000_darwin64_abi = 1;
9c7956fd 1699#if TARGET_MACHO
6ac49599 1700 darwin_one_byte_bool = 1;
9c7956fd 1701#endif
d9168963
SS
1702 /* Default to natural alignment, for better performance. */
1703 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1704 }
1705
194c524a
DE
1706 /* Place FP constants in the constant pool instead of TOC
 1707     if section anchors are enabled.  */
1708 if (flag_section_anchors)
1709 TARGET_NO_FP_IN_TOC = 1;
1710
c4501e62
JJ
1711 /* Handle -mtls-size option. */
1712 rs6000_parse_tls_size_option ();
1713
a7ae18e2
AH
1714#ifdef SUBTARGET_OVERRIDE_OPTIONS
1715 SUBTARGET_OVERRIDE_OPTIONS;
1716#endif
1717#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1718 SUBSUBTARGET_OVERRIDE_OPTIONS;
1719#endif
4d4cbc0e
AH
1720#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1721 SUB3TARGET_OVERRIDE_OPTIONS;
1722#endif
a7ae18e2 1723
edae5fe3 1724 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1725 {
edae5fe3 1726 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1727 MASK_STRING above when optimizing for size. */
1728 if ((target_flags & MASK_STRING) != 0)
1729 target_flags = target_flags & ~MASK_STRING;
1730 }
1731 else if (rs6000_select[1].string != NULL)
1732 {
1733 /* For the powerpc-eabispe configuration, we set all these by
1734 default, so let's unset them if we manually set another
1735 CPU that is not the E500. */
a2db2771 1736 if (!rs6000_explicit_options.spe_abi)
5da702b1 1737 rs6000_spe_abi = 0;
78f5898b 1738 if (!rs6000_explicit_options.spe)
5da702b1 1739 rs6000_spe = 0;
78f5898b 1740 if (!rs6000_explicit_options.float_gprs)
5da702b1 1741 rs6000_float_gprs = 0;
78f5898b 1742 if (!rs6000_explicit_options.isel)
5da702b1
AH
1743 rs6000_isel = 0;
1744 }
b5044283 1745
eca0d5e8
JM
1746 /* Detect invalid option combinations with E500. */
1747 CHECK_E500_OPTIONS;
1748
ec507f2d 1749 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1750 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1751 && rs6000_cpu != PROCESSOR_POWER6
1752 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1753 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1754 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1755 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1756 || rs6000_cpu == PROCESSOR_POWER5
1757 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1758
ec507f2d
DE
1759 rs6000_sched_restricted_insns_priority
1760 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1761
569fa502 1762 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1763 rs6000_sched_costly_dep
1764 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1765
569fa502
DN
1766 if (rs6000_sched_costly_dep_str)
1767 {
f676971a 1768 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1769 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1770 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1771 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1772 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1773 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1774 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1775 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1776 else
32e8bb8e
ILT
1777 rs6000_sched_costly_dep = ((enum rs6000_dependence_cost)
1778 atoi (rs6000_sched_costly_dep_str));
cbe26ab8
DN
1779 }
1780
1781 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1782 rs6000_sched_insert_nops
1783 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1784
cbe26ab8
DN
1785 if (rs6000_sched_insert_nops_str)
1786 {
1787 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1788 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1789 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1790 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1791 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1792 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1793 else
32e8bb8e
ILT
1794 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion)
1795 atoi (rs6000_sched_insert_nops_str));
569fa502
DN
1796 }
1797
c81bebd7 1798#ifdef TARGET_REGNAMES
a4f6c312
SS
1799 /* If the user desires alternate register names, copy in the
1800 alternate names now. */
c81bebd7 1801 if (TARGET_REGNAMES)
4e135bdd 1802 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1803#endif
1804
df01da37 1805 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1806 If -maix-struct-return or -msvr4-struct-return was explicitly
1807 used, don't override with the ABI default. */
df01da37
DE
1808 if (!rs6000_explicit_options.aix_struct_ret)
1809 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1810
602ea4d3 1811 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1812 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1813
f676971a 1814 if (TARGET_TOC)
9ebbca7d 1815 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1816
301d03af
RS
1817 /* We can only guarantee the availability of DI pseudo-ops when
1818 assembling for 64-bit targets. */
ae6c1efd 1819 if (!TARGET_64BIT)
301d03af
RS
1820 {
1821 targetm.asm_out.aligned_op.di = NULL;
1822 targetm.asm_out.unaligned_op.di = NULL;
1823 }
1824
1494c534
DE
1825 /* Set branch target alignment, if not optimizing for size. */
1826 if (!optimize_size)
1827 {
d296e02e
AP
 1828      /* Cell wants functions, jumps and loops aligned to 8 bytes for dual issue.  */
1829 if (rs6000_cpu == PROCESSOR_CELL)
1830 {
1831 if (align_functions <= 0)
1832 align_functions = 8;
1833 if (align_jumps <= 0)
1834 align_jumps = 8;
1835 if (align_loops <= 0)
1836 align_loops = 8;
1837 }
44cd321e 1838 if (rs6000_align_branch_targets)
1494c534
DE
1839 {
1840 if (align_functions <= 0)
1841 align_functions = 16;
1842 if (align_jumps <= 0)
1843 align_jumps = 16;
1844 if (align_loops <= 0)
1845 align_loops = 16;
1846 }
1847 if (align_jumps_max_skip <= 0)
1848 align_jumps_max_skip = 15;
1849 if (align_loops_max_skip <= 0)
1850 align_loops_max_skip = 15;
1851 }
2792d578 1852
71f123ca
FS
1853 /* Arrange to save and restore machine status around nested functions. */
1854 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1855
1856 /* We should always be splitting complex arguments, but we can't break
1857 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1858 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1859 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1860
1861 /* Initialize rs6000_cost with the appropriate target costs. */
1862 if (optimize_size)
1863 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1864 else
1865 switch (rs6000_cpu)
1866 {
1867 case PROCESSOR_RIOS1:
1868 rs6000_cost = &rios1_cost;
1869 break;
1870
1871 case PROCESSOR_RIOS2:
1872 rs6000_cost = &rios2_cost;
1873 break;
1874
1875 case PROCESSOR_RS64A:
1876 rs6000_cost = &rs64a_cost;
1877 break;
1878
1879 case PROCESSOR_MPCCORE:
1880 rs6000_cost = &mpccore_cost;
1881 break;
1882
1883 case PROCESSOR_PPC403:
1884 rs6000_cost = &ppc403_cost;
1885 break;
1886
1887 case PROCESSOR_PPC405:
1888 rs6000_cost = &ppc405_cost;
1889 break;
1890
1891 case PROCESSOR_PPC440:
1892 rs6000_cost = &ppc440_cost;
1893 break;
1894
1895 case PROCESSOR_PPC601:
1896 rs6000_cost = &ppc601_cost;
1897 break;
1898
1899 case PROCESSOR_PPC603:
1900 rs6000_cost = &ppc603_cost;
1901 break;
1902
1903 case PROCESSOR_PPC604:
1904 rs6000_cost = &ppc604_cost;
1905 break;
1906
1907 case PROCESSOR_PPC604e:
1908 rs6000_cost = &ppc604e_cost;
1909 break;
1910
1911 case PROCESSOR_PPC620:
8b897cfa
RS
1912 rs6000_cost = &ppc620_cost;
1913 break;
1914
f0517163
RS
1915 case PROCESSOR_PPC630:
1916 rs6000_cost = &ppc630_cost;
1917 break;
1918
982afe02 1919 case PROCESSOR_CELL:
d296e02e
AP
1920 rs6000_cost = &ppccell_cost;
1921 break;
1922
8b897cfa
RS
1923 case PROCESSOR_PPC750:
1924 case PROCESSOR_PPC7400:
1925 rs6000_cost = &ppc750_cost;
1926 break;
1927
1928 case PROCESSOR_PPC7450:
1929 rs6000_cost = &ppc7450_cost;
1930 break;
1931
1932 case PROCESSOR_PPC8540:
1933 rs6000_cost = &ppc8540_cost;
1934 break;
1935
fa41c305
EW
1936 case PROCESSOR_PPCE300C2:
1937 case PROCESSOR_PPCE300C3:
1938 rs6000_cost = &ppce300c2c3_cost;
1939 break;
1940
edae5fe3
DE
1941 case PROCESSOR_PPCE500MC:
1942 rs6000_cost = &ppce500mc_cost;
1943 break;
1944
8b897cfa
RS
1945 case PROCESSOR_POWER4:
1946 case PROCESSOR_POWER5:
1947 rs6000_cost = &power4_cost;
1948 break;
1949
44cd321e
PS
1950 case PROCESSOR_POWER6:
1951 rs6000_cost = &power6_cost;
1952 break;
1953
8b897cfa 1954 default:
37409796 1955 gcc_unreachable ();
8b897cfa 1956 }
0b11da67
DE
1957
1958 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1959 set_param_value ("simultaneous-prefetches",
1960 rs6000_cost->simultaneous_prefetches);
1961 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1962 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1963 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1964 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1965 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1966 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1967
1968 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1969 can be optimized to ap = __builtin_next_arg (0). */
1970 if (DEFAULT_ABI != ABI_V4)
1971 targetm.expand_builtin_va_start = NULL;
696e45ba
ME
1972
1973 /* Set up single/double float flags.
 1974     If TARGET_HARD_FLOAT is set, but neither single nor double is set,
1975 then set both flags. */
1976 if (TARGET_HARD_FLOAT && TARGET_FPRS
1977 && rs6000_single_float == 0 && rs6000_double_float == 0)
1978 rs6000_single_float = rs6000_double_float = 1;
1979
1980 /* Reset single and double FP flags if target is E500. */
1981 if (TARGET_E500)
1982 {
1983 rs6000_single_float = rs6000_double_float = 0;
1984 if (TARGET_E500_SINGLE)
1985 rs6000_single_float = 1;
1986 if (TARGET_E500_DOUBLE)
1987 rs6000_single_float = rs6000_double_float = 1;
1988 }
1989
001b9eb6
PH
1990 /* If not explicitly specified via option, decide whether to generate indexed
1991 load/store instructions. */
1992 if (TARGET_AVOID_XFORM == -1)
1993 /* Avoid indexed addressing when targeting Power6 in order to avoid
1994 the DERAT mispredict penalty. */
1995 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
1996
696e45ba 1997 rs6000_init_hard_regno_mode_ok ();
5248c961 1998}
5accd822 1999
7ccf35ed
DN
2000/* Implement targetm.vectorize.builtin_mask_for_load. */
2001static tree
2002rs6000_builtin_mask_for_load (void)
2003{
2004 if (TARGET_ALTIVEC)
2005 return altivec_builtin_mask_for_load;
2006 else
2007 return 0;
2008}
2009
7910ae0c
DN
2010/* Implement targetm.vectorize.builtin_conversion.
2011 Returns a decl of a function that implements conversion of an integer vector
2012 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2013 side of the conversion.
2014 Return NULL_TREE if it is not available. */
f57d17f1 2015static tree
744aa42f 2016rs6000_builtin_conversion (unsigned int tcode, tree type)
f57d17f1 2017{
744aa42f
ILT
2018 enum tree_code code = (enum tree_code) tcode;
2019
f57d17f1
TM
2020 if (!TARGET_ALTIVEC)
2021 return NULL_TREE;
982afe02 2022
f57d17f1
TM
2023 switch (code)
2024 {
7910ae0c
DN
2025 case FIX_TRUNC_EXPR:
2026 switch (TYPE_MODE (type))
2027 {
2028 case V4SImode:
2029 return TYPE_UNSIGNED (type)
2030 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2031 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2032 default:
2033 return NULL_TREE;
2034 }
2035
f57d17f1
TM
2036 case FLOAT_EXPR:
2037 switch (TYPE_MODE (type))
2038 {
2039 case V4SImode:
7910ae0c
DN
2040 return TYPE_UNSIGNED (type)
2041 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2042 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2043 default:
2044 return NULL_TREE;
2045 }
7910ae0c 2046
f57d17f1
TM
2047 default:
2048 return NULL_TREE;
2049 }
2050}
2051
89d67cca
DN
2052/* Implement targetm.vectorize.builtin_mul_widen_even. */
2053static tree
2054rs6000_builtin_mul_widen_even (tree type)
2055{
2056 if (!TARGET_ALTIVEC)
2057 return NULL_TREE;
2058
2059 switch (TYPE_MODE (type))
2060 {
2061 case V8HImode:
7910ae0c
DN
2062 return TYPE_UNSIGNED (type)
2063 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2064 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2065
2066 case V16QImode:
7910ae0c
DN
2067 return TYPE_UNSIGNED (type)
2068 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2069 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2070 default:
2071 return NULL_TREE;
2072 }
2073}
2074
2075/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2076static tree
2077rs6000_builtin_mul_widen_odd (tree type)
2078{
2079 if (!TARGET_ALTIVEC)
2080 return NULL_TREE;
2081
2082 switch (TYPE_MODE (type))
2083 {
2084 case V8HImode:
7910ae0c
DN
2085 return TYPE_UNSIGNED (type)
2086 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2087 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2088
2089 case V16QImode:
7910ae0c
DN
2090 return TYPE_UNSIGNED (type)
2091 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2092 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2093 default:
2094 return NULL_TREE;
2095 }
2096}
2097
5b900a4c
DN
2098
2099/* Return true iff a data reference of type TYPE can reach vector alignment (16)
2100   after applying N iterations.  This routine does not determine how many
2101   iterations are required to reach the desired alignment.  */
2102
2103static bool
3101faab 2104rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2105{
2106 if (is_packed)
2107 return false;
2108
2109 if (TARGET_32BIT)
2110 {
2111 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2112 return true;
2113
2114 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2115 return true;
2116
2117 return false;
2118 }
2119 else
2120 {
2121 if (TARGET_MACHO)
2122 return false;
2123
2124 /* Assuming that all other types are naturally aligned. CHECKME! */
2125 return true;
2126 }
2127}
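/* Editor's note (illustrative, not part of the original source): in practice
   the hook above reports a non-packed reference as able to reach 16-byte
   alignment on 32-bit targets when natural or power alignment is in effect,
   and on 64-bit non-Darwin targets; 64-bit Darwin reports false.  */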
2128
0fca40f5
IR
2129/* Implement targetm.vectorize.builtin_vec_perm. */
2130tree
2131rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2132{
2133 tree d;
2134
2135 *mask_element_type = unsigned_char_type_node;
2136
2137 switch (TYPE_MODE (type))
2138 {
2139 case V16QImode:
2140 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2141 break;
2142
2143 case V8HImode:
2144 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2145 break;
2146
2147 case V4SImode:
2148 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2149 break;
2150
2151 case V4SFmode:
2152 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2153 break;
2154
2155 default:
2156 return NULL_TREE;
2157 }
2158
2159 gcc_assert (d);
2160 return d;
2161}
2162
5da702b1
AH
2163/* Handle generic options of the form -mfoo=yes/no.
2164 NAME is the option name.
2165 VALUE is the option value.
2166   FLAG points to the flag in which to store 1 or 0, depending on
2167   whether the option value is 'yes' or 'no' respectively.  */
993f19a8 2168static void
5da702b1 2169rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2170{
5da702b1 2171 if (value == 0)
993f19a8 2172 return;
5da702b1
AH
2173 else if (!strcmp (value, "yes"))
2174 *flag = 1;
2175 else if (!strcmp (value, "no"))
2176 *flag = 0;
08b57fb3 2177 else
5da702b1 2178 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2179}
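/* Editor's note (illustrative, not part of the original source): this helper
   is used below for -mvrsave=, -misel= and -mspe=.  For example:

     rs6000_parse_yes_no_option ("isel", "yes", &rs6000_isel);   sets rs6000_isel to 1
     rs6000_parse_yes_no_option ("isel", "no",  &rs6000_isel);   sets rs6000_isel to 0

   Any other value is reported with the unknown -m<name>= option error.  */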
2180
c4501e62
JJ
2181/* Validate and record the size specified with the -mtls-size option. */
2182
2183static void
863d938c 2184rs6000_parse_tls_size_option (void)
c4501e62
JJ
2185{
2186 if (rs6000_tls_size_string == 0)
2187 return;
2188 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2189 rs6000_tls_size = 16;
2190 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2191 rs6000_tls_size = 32;
2192 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2193 rs6000_tls_size = 64;
2194 else
9e637a26 2195 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2196}
2197
5accd822 2198void
a2369ed3 2199optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2200{
2e3f0db6
DJ
2201 if (DEFAULT_ABI == ABI_DARWIN)
2202 /* The Darwin libraries never set errno, so we might as well
2203 avoid calling them when that's the only reason we would. */
2204 flag_errno_math = 0;
59d6560b
DE
2205
2206 /* Double growth factor to counter reduced min jump length. */
2207 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2208
2209 /* Enable section anchors by default.
2210 Skip section anchors for Objective C and Objective C++
2f3b0d4a
ST
2211 until front-ends fixed. */
2212 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2213 flag_section_anchors = 2;
5accd822 2214}
78f5898b 2215
0bb7b92e
ME
2216static enum fpu_type_t
2217rs6000_parse_fpu_option (const char *option)
2218{
2219 if (!strcmp("none", option)) return FPU_NONE;
2220 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2221 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2222 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2223 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2224 error("unknown value %s for -mfpu", option);
2225 return FPU_NONE;
2226}
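/* Editor's note (illustrative, not part of the original source): for example,
   -mfpu=sp_lite maps to FPU_SF_LITE; in rs6000_handle_option below that clears
   MASK_SOFT_FLOAT and sets rs6000_xilinx_fpu, rs6000_single_float and
   rs6000_simple_fpu, while -mfpu=none falls back to soft float.  */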
2227
78f5898b
AH
2228/* Implement TARGET_HANDLE_OPTION. */
2229
2230static bool
2231rs6000_handle_option (size_t code, const char *arg, int value)
2232{
0bb7b92e
ME
2233 enum fpu_type_t fpu_type = FPU_NONE;
2234
78f5898b
AH
2235 switch (code)
2236 {
2237 case OPT_mno_power:
2238 target_flags &= ~(MASK_POWER | MASK_POWER2
2239 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2240 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2241 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2242 break;
2243 case OPT_mno_powerpc:
2244 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2245 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2246 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2247 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2248 break;
2249 case OPT_mfull_toc:
d2894ab5
DE
2250 target_flags &= ~MASK_MINIMAL_TOC;
2251 TARGET_NO_FP_IN_TOC = 0;
2252 TARGET_NO_SUM_IN_TOC = 0;
2253 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2254#ifdef TARGET_USES_SYSV4_OPT
 2255      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc behave
 2256	 just the same as -mminimal-toc.  */
2257 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2258 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2259#endif
2260 break;
2261
2262#ifdef TARGET_USES_SYSV4_OPT
2263 case OPT_mtoc:
2264 /* Make -mtoc behave like -mminimal-toc. */
2265 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2266 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2267 break;
2268#endif
2269
2270#ifdef TARGET_USES_AIX64_OPT
2271 case OPT_maix64:
2272#else
2273 case OPT_m64:
2274#endif
2c9c9afd
AM
2275 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2276 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2277 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2278 break;
2279
2280#ifdef TARGET_USES_AIX64_OPT
2281 case OPT_maix32:
2282#else
2283 case OPT_m32:
2284#endif
2285 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2286 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2287 break;
2288
2289 case OPT_minsert_sched_nops_:
2290 rs6000_sched_insert_nops_str = arg;
2291 break;
2292
2293 case OPT_mminimal_toc:
2294 if (value == 1)
2295 {
d2894ab5
DE
2296 TARGET_NO_FP_IN_TOC = 0;
2297 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2298 }
2299 break;
2300
2301 case OPT_mpower:
2302 if (value == 1)
c2dba4ab
AH
2303 {
2304 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2305 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2306 }
78f5898b
AH
2307 break;
2308
2309 case OPT_mpower2:
2310 if (value == 1)
c2dba4ab
AH
2311 {
2312 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2313 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2314 }
78f5898b
AH
2315 break;
2316
2317 case OPT_mpowerpc_gpopt:
2318 case OPT_mpowerpc_gfxopt:
2319 if (value == 1)
c2dba4ab
AH
2320 {
2321 target_flags |= MASK_POWERPC;
2322 target_flags_explicit |= MASK_POWERPC;
2323 }
78f5898b
AH
2324 break;
2325
df01da37
DE
2326 case OPT_maix_struct_return:
2327 case OPT_msvr4_struct_return:
2328 rs6000_explicit_options.aix_struct_ret = true;
2329 break;
2330
b5e3caf2
BE
2331 case OPT_mvrsave:
2332 rs6000_explicit_options.vrsave = true;
2333 TARGET_ALTIVEC_VRSAVE = value;
2334 break;
2335
78f5898b 2336 case OPT_mvrsave_:
a2db2771 2337 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2338 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2339 break;
78f5898b 2340
94f4765c
NF
2341 case OPT_misel:
2342 rs6000_explicit_options.isel = true;
2343 rs6000_isel = value;
2344 break;
2345
78f5898b
AH
2346 case OPT_misel_:
2347 rs6000_explicit_options.isel = true;
2348 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2349 break;
2350
94f4765c
NF
2351 case OPT_mspe:
2352 rs6000_explicit_options.spe = true;
2353 rs6000_spe = value;
2354 break;
2355
78f5898b
AH
2356 case OPT_mspe_:
2357 rs6000_explicit_options.spe = true;
2358 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2359 break;
2360
2361 case OPT_mdebug_:
2362 rs6000_debug_name = arg;
2363 break;
2364
2365#ifdef TARGET_USES_SYSV4_OPT
2366 case OPT_mcall_:
2367 rs6000_abi_name = arg;
2368 break;
2369
2370 case OPT_msdata_:
2371 rs6000_sdata_name = arg;
2372 break;
2373
2374 case OPT_mtls_size_:
2375 rs6000_tls_size_string = arg;
2376 break;
2377
2378 case OPT_mrelocatable:
2379 if (value == 1)
c2dba4ab 2380 {
e0bf274f
AM
2381 target_flags |= MASK_MINIMAL_TOC;
2382 target_flags_explicit |= MASK_MINIMAL_TOC;
2383 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2384 }
78f5898b
AH
2385 break;
2386
2387 case OPT_mrelocatable_lib:
2388 if (value == 1)
c2dba4ab 2389 {
e0bf274f
AM
2390 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2391 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2392 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2393 }
78f5898b 2394 else
c2dba4ab
AH
2395 {
2396 target_flags &= ~MASK_RELOCATABLE;
2397 target_flags_explicit |= MASK_RELOCATABLE;
2398 }
78f5898b
AH
2399 break;
2400#endif
2401
2402 case OPT_mabi_:
78f5898b
AH
2403 if (!strcmp (arg, "altivec"))
2404 {
a2db2771 2405 rs6000_explicit_options.altivec_abi = true;
78f5898b 2406 rs6000_altivec_abi = 1;
a2db2771
JJ
2407
2408 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2409 rs6000_spe_abi = 0;
2410 }
2411 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2412 {
a2db2771 2413 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2414 rs6000_altivec_abi = 0;
2415 }
78f5898b
AH
2416 else if (! strcmp (arg, "spe"))
2417 {
a2db2771 2418 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2419 rs6000_spe_abi = 1;
2420 rs6000_altivec_abi = 0;
2421 if (!TARGET_SPE_ABI)
2422 error ("not configured for ABI: '%s'", arg);
2423 }
2424 else if (! strcmp (arg, "no-spe"))
d3603e8c 2425 {
a2db2771 2426 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2427 rs6000_spe_abi = 0;
2428 }
78f5898b
AH
2429
 2430      /* These are here for testing during development only; please do
 2431	 not document them in the manual.  */
2432 else if (! strcmp (arg, "d64"))
2433 {
2434 rs6000_darwin64_abi = 1;
2435 warning (0, "Using darwin64 ABI");
2436 }
2437 else if (! strcmp (arg, "d32"))
2438 {
2439 rs6000_darwin64_abi = 0;
2440 warning (0, "Using old darwin ABI");
2441 }
2442
602ea4d3
JJ
2443 else if (! strcmp (arg, "ibmlongdouble"))
2444 {
d3603e8c 2445 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2446 rs6000_ieeequad = 0;
2447 warning (0, "Using IBM extended precision long double");
2448 }
2449 else if (! strcmp (arg, "ieeelongdouble"))
2450 {
d3603e8c 2451 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2452 rs6000_ieeequad = 1;
2453 warning (0, "Using IEEE extended precision long double");
2454 }
2455
78f5898b
AH
2456 else
2457 {
2458 error ("unknown ABI specified: '%s'", arg);
2459 return false;
2460 }
2461 break;
2462
2463 case OPT_mcpu_:
2464 rs6000_select[1].string = arg;
2465 break;
2466
2467 case OPT_mtune_:
2468 rs6000_select[2].string = arg;
2469 break;
2470
2471 case OPT_mtraceback_:
2472 rs6000_traceback_name = arg;
2473 break;
2474
2475 case OPT_mfloat_gprs_:
2476 rs6000_explicit_options.float_gprs = true;
2477 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2478 rs6000_float_gprs = 1;
2479 else if (! strcmp (arg, "double"))
2480 rs6000_float_gprs = 2;
2481 else if (! strcmp (arg, "no"))
2482 rs6000_float_gprs = 0;
2483 else
2484 {
2485 error ("invalid option for -mfloat-gprs: '%s'", arg);
2486 return false;
2487 }
2488 break;
2489
2490 case OPT_mlong_double_:
2491 rs6000_explicit_options.long_double = true;
2492 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2493 if (value != 64 && value != 128)
2494 {
2495 error ("Unknown switch -mlong-double-%s", arg);
2496 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2497 return false;
2498 }
2499 else
2500 rs6000_long_double_type_size = value;
2501 break;
2502
2503 case OPT_msched_costly_dep_:
2504 rs6000_sched_costly_dep_str = arg;
2505 break;
2506
2507 case OPT_malign_:
2508 rs6000_explicit_options.alignment = true;
2509 if (! strcmp (arg, "power"))
2510 {
2511 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2512 some C library functions, so warn about it. The flag may be
2513 useful for performance studies from time to time though, so
2514 don't disable it entirely. */
2515 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2516 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2517 " it is incompatible with the installed C and C++ libraries");
2518 rs6000_alignment_flags = MASK_ALIGN_POWER;
2519 }
2520 else if (! strcmp (arg, "natural"))
2521 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2522 else
2523 {
2524 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2525 return false;
2526 }
2527 break;
696e45ba
ME
2528
2529 case OPT_msingle_float:
2530 if (!TARGET_SINGLE_FPU)
2531 warning (0, "-msingle-float option equivalent to -mhard-float");
2532 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2533 rs6000_double_float = 0;
2534 target_flags &= ~MASK_SOFT_FLOAT;
2535 target_flags_explicit |= MASK_SOFT_FLOAT;
2536 break;
2537
2538 case OPT_mdouble_float:
2539 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2540 rs6000_single_float = 1;
2541 target_flags &= ~MASK_SOFT_FLOAT;
2542 target_flags_explicit |= MASK_SOFT_FLOAT;
2543 break;
2544
2545 case OPT_msimple_fpu:
2546 if (!TARGET_SINGLE_FPU)
2547 warning (0, "-msimple-fpu option ignored");
2548 break;
2549
2550 case OPT_mhard_float:
 2551      /* -mhard-float implies -msingle-float and -mdouble-float.  */
2552 rs6000_single_float = rs6000_double_float = 1;
2553 break;
2554
2555 case OPT_msoft_float:
 2556      /* -msoft-float implies -mno-single-float and -mno-double-float.  */
2557 rs6000_single_float = rs6000_double_float = 0;
2558 break;
0bb7b92e
ME
2559
2560 case OPT_mfpu_:
2561 fpu_type = rs6000_parse_fpu_option(arg);
2562 if (fpu_type != FPU_NONE)
2563 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2564 {
2565 target_flags &= ~MASK_SOFT_FLOAT;
2566 target_flags_explicit |= MASK_SOFT_FLOAT;
2567 rs6000_xilinx_fpu = 1;
2568 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2569 rs6000_single_float = 1;
2570 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2571 rs6000_single_float = rs6000_double_float = 1;
2572 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2573 rs6000_simple_fpu = 1;
2574 }
2575 else
2576 {
2577 /* -mfpu=none is equivalent to -msoft-float */
2578 target_flags |= MASK_SOFT_FLOAT;
2579 target_flags_explicit |= MASK_SOFT_FLOAT;
2580 rs6000_single_float = rs6000_double_float = 0;
2581 }
2582 break;
78f5898b
AH
2583 }
2584 return true;
2585}
3cfa4909
MM
2586\f
2587/* Do anything needed at the start of the asm file. */
2588
1bc7c5b6 2589static void
863d938c 2590rs6000_file_start (void)
3cfa4909 2591{
c4d38ccb 2592 size_t i;
3cfa4909 2593 char buffer[80];
d330fd93 2594 const char *start = buffer;
3cfa4909 2595 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2596 const char *default_cpu = TARGET_CPU_DEFAULT;
2597 FILE *file = asm_out_file;
2598
2599 default_file_start ();
2600
2601#ifdef TARGET_BI_ARCH
2602 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2603 default_cpu = 0;
2604#endif
3cfa4909
MM
2605
2606 if (flag_verbose_asm)
2607 {
2608 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2609 rs6000_select[0].string = default_cpu;
2610
b6a1cbae 2611 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2612 {
2613 ptr = &rs6000_select[i];
2614 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2615 {
2616 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2617 start = "";
2618 }
2619 }
2620
9c6b4ed9 2621 if (PPC405_ERRATUM77)
b0bfee6e 2622 {
9c6b4ed9 2623 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2624 start = "";
2625 }
b0bfee6e 2626
b91da81f 2627#ifdef USING_ELFOS_H
3cfa4909
MM
2628 switch (rs6000_sdata)
2629 {
2630 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2631 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2632 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2633 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2634 }
2635
2636 if (rs6000_sdata && g_switch_value)
2637 {
307b599c
MK
2638 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2639 g_switch_value);
3cfa4909
MM
2640 start = "";
2641 }
2642#endif
2643
2644 if (*start == '\0')
949ea356 2645 putc ('\n', file);
3cfa4909 2646 }
b723e82f 2647
e51917ae
JM
2648#ifdef HAVE_AS_GNU_ATTRIBUTE
2649 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2650 {
2651 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
2652 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2653 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2654 : 2));
aaa42494
DJ
2655 fprintf (file, "\t.gnu_attribute 8, %d\n",
2656 (TARGET_ALTIVEC_ABI ? 2
2657 : TARGET_SPE_ABI ? 3
2658 : 1));
f9fd1e77
NF
2659 fprintf (file, "\t.gnu_attribute 12, %d\n",
2660 aix_struct_return ? 2 : 1);
2661
aaa42494 2662 }
e51917ae
JM
2663#endif
2664
b723e82f
JJ
2665 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2666 {
d6b5193b
RS
2667 switch_to_section (toc_section);
2668 switch_to_section (text_section);
b723e82f 2669 }
3cfa4909 2670}
c4e18b1c 2671
5248c961 2672\f
a0ab749a 2673/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2674
2675int
863d938c 2676direct_return (void)
9878760c 2677{
4697a36c
MM
2678 if (reload_completed)
2679 {
2680 rs6000_stack_t *info = rs6000_stack_info ();
2681
2682 if (info->first_gp_reg_save == 32
2683 && info->first_fp_reg_save == 64
00b960c7 2684 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2685 && ! info->lr_save_p
2686 && ! info->cr_save_p
00b960c7 2687 && info->vrsave_mask == 0
c81fc13e 2688 && ! info->push_p)
4697a36c
MM
2689 return 1;
2690 }
2691
2692 return 0;
9878760c
RK
2693}
2694
4e74d8ec
MM
2695/* Return the number of instructions it takes to form a constant in an
2696 integer register. */
2697
48d72335 2698int
a2369ed3 2699num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2700{
2701 /* signed constant loadable with {cal|addi} */
547b216d 2702 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2703 return 1;
2704
4e74d8ec 2705 /* constant loadable with {cau|addis} */
547b216d
DE
2706 else if ((value & 0xffff) == 0
2707 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2708 return 1;
2709
5f59ecb7 2710#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2711 else if (TARGET_POWERPC64)
4e74d8ec 2712 {
a65c591c
DE
2713 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2714 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2715
a65c591c 2716 if (high == 0 || high == -1)
4e74d8ec
MM
2717 return 2;
2718
a65c591c 2719 high >>= 1;
4e74d8ec 2720
a65c591c 2721 if (low == 0)
4e74d8ec 2722 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2723 else
2724 return (num_insns_constant_wide (high)
e396202a 2725 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2726 }
2727#endif
2728
2729 else
2730 return 2;
2731}
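/* Editor's note (illustrative, not part of the original source): worked
   examples of the counting above, assuming the usual lis/ori style expansion:

     num_insns_constant_wide (0x5678)      -> 1   (single li/addi)
     num_insns_constant_wide (0x12340000)  -> 1   (single lis/addis)
     num_insns_constant_wide (0x12345678)  -> 2   (lis/addis then ori/addi)

   On 64-bit targets the high and low 32-bit halves are counted recursively,
   plus one instruction to combine them.  */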
2732
2733int
a2369ed3 2734num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2735{
37409796 2736 HOST_WIDE_INT low, high;
bb8df8a6 2737
37409796 2738 switch (GET_CODE (op))
0d30d435 2739 {
37409796 2740 case CONST_INT:
0d30d435 2741#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2742 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2743 && mask64_operand (op, mode))
c4ad648e 2744 return 2;
0d30d435
DE
2745 else
2746#endif
2747 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2748
37409796 2749 case CONST_DOUBLE:
e41b2a33 2750 if (mode == SFmode || mode == SDmode)
37409796
NS
2751 {
2752 long l;
2753 REAL_VALUE_TYPE rv;
bb8df8a6 2754
37409796 2755 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2756 if (DECIMAL_FLOAT_MODE_P (mode))
2757 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2758 else
2759 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2760 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2761 }
a260abc9 2762
37409796
NS
2763 if (mode == VOIDmode || mode == DImode)
2764 {
2765 high = CONST_DOUBLE_HIGH (op);
2766 low = CONST_DOUBLE_LOW (op);
2767 }
2768 else
2769 {
2770 long l[2];
2771 REAL_VALUE_TYPE rv;
bb8df8a6 2772
37409796 2773 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2774 if (DECIMAL_FLOAT_MODE_P (mode))
2775 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2776 else
2777 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2778 high = l[WORDS_BIG_ENDIAN == 0];
2779 low = l[WORDS_BIG_ENDIAN != 0];
2780 }
47ad8c61 2781
37409796
NS
2782 if (TARGET_32BIT)
2783 return (num_insns_constant_wide (low)
2784 + num_insns_constant_wide (high));
2785 else
2786 {
2787 if ((high == 0 && low >= 0)
2788 || (high == -1 && low < 0))
2789 return num_insns_constant_wide (low);
bb8df8a6 2790
1990cd79 2791 else if (mask64_operand (op, mode))
37409796 2792 return 2;
bb8df8a6 2793
37409796
NS
2794 else if (low == 0)
2795 return num_insns_constant_wide (high) + 1;
bb8df8a6 2796
37409796
NS
2797 else
2798 return (num_insns_constant_wide (high)
2799 + num_insns_constant_wide (low) + 1);
2800 }
bb8df8a6 2801
37409796
NS
2802 default:
2803 gcc_unreachable ();
4e74d8ec 2804 }
4e74d8ec
MM
2805}
2806
0972012c
RS
2807/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2808 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2809 corresponding element of the vector, but for V4SFmode and V2SFmode,
2810 the corresponding "float" is interpreted as an SImode integer. */
2811
847535b6 2812HOST_WIDE_INT
0972012c
RS
2813const_vector_elt_as_int (rtx op, unsigned int elt)
2814{
2815 rtx tmp = CONST_VECTOR_ELT (op, elt);
2816 if (GET_MODE (op) == V4SFmode
2817 || GET_MODE (op) == V2SFmode)
2818 tmp = gen_lowpart (SImode, tmp);
2819 return INTVAL (tmp);
2820}
452a7d36 2821
77ccdfed 2822/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2823 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2824 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2825 all items are set to the same value and contain COPIES replicas of the
2826 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2827 operand and the others are set to the value of the operand's msb. */
2828
2829static bool
2830vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2831{
66180ff3
PB
2832 enum machine_mode mode = GET_MODE (op);
2833 enum machine_mode inner = GET_MODE_INNER (mode);
2834
2835 unsigned i;
2836 unsigned nunits = GET_MODE_NUNITS (mode);
2837 unsigned bitsize = GET_MODE_BITSIZE (inner);
2838 unsigned mask = GET_MODE_MASK (inner);
2839
0972012c 2840 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2841 HOST_WIDE_INT splat_val = val;
2842 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2843
2844 /* Construct the value to be splatted, if possible. If not, return 0. */
2845 for (i = 2; i <= copies; i *= 2)
452a7d36 2846 {
66180ff3
PB
2847 HOST_WIDE_INT small_val;
2848 bitsize /= 2;
2849 small_val = splat_val >> bitsize;
2850 mask >>= bitsize;
2851 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2852 return false;
2853 splat_val = small_val;
2854 }
c4ad648e 2855
66180ff3
PB
2856 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2857 if (EASY_VECTOR_15 (splat_val))
2858 ;
2859
2860 /* Also check if we can splat, and then add the result to itself. Do so if
 2861     the value is positive, or if the splat instruction is using OP's mode;
2862 for splat_val < 0, the splat and the add should use the same mode. */
2863 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2864 && (splat_val >= 0 || (step == 1 && copies == 1)))
2865 ;
2866
2867 else
2868 return false;
2869
2870 /* Check if VAL is present in every STEP-th element, and the
2871 other elements are filled with its most significant bit. */
2872 for (i = 0; i < nunits - 1; ++i)
2873 {
2874 HOST_WIDE_INT desired_val;
2875 if (((i + 1) & (step - 1)) == 0)
2876 desired_val = val;
2877 else
2878 desired_val = msb_val;
2879
0972012c 2880 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2881 return false;
452a7d36 2882 }
66180ff3
PB
2883
2884 return true;
452a7d36
HP
2885}
2886
69ef87e2 2887
77ccdfed 2888/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2889 with a vspltisb, vspltish or vspltisw. */
2890
2891bool
2892easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2893{
66180ff3 2894 unsigned step, copies;
d744e06e 2895
66180ff3
PB
2896 if (mode == VOIDmode)
2897 mode = GET_MODE (op);
2898 else if (mode != GET_MODE (op))
2899 return false;
d744e06e 2900
66180ff3
PB
2901 /* Start with a vspltisw. */
2902 step = GET_MODE_NUNITS (mode) / 4;
2903 copies = 1;
2904
2905 if (vspltis_constant (op, step, copies))
2906 return true;
2907
2908 /* Then try with a vspltish. */
2909 if (step == 1)
2910 copies <<= 1;
2911 else
2912 step >>= 1;
2913
2914 if (vspltis_constant (op, step, copies))
2915 return true;
2916
2917 /* And finally a vspltisb. */
2918 if (step == 1)
2919 copies <<= 1;
2920 else
2921 step >>= 1;
2922
2923 if (vspltis_constant (op, step, copies))
2924 return true;
2925
2926 return false;
d744e06e
AH
2927}
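/* Editor's note (illustrative, not part of the original source), assuming the
   step/copies walk above is read correctly:

     V4SI  {5, 5, 5, 5}                     -> easy, via vspltisw 5
     V16QI {5, 5, ..., 5}                   -> easy, via vspltisb 5
     V4SI  {0x01010101, ...}                -> easy, via vspltisb 1
     V4SI  {5, 6, 7, 8}                     -> not easy (no single splat works)  */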
2928
66180ff3
PB
2929/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2930 result is OP. Abort if it is not possible. */
d744e06e 2931
f676971a 2932rtx
66180ff3 2933gen_easy_altivec_constant (rtx op)
452a7d36 2934{
66180ff3
PB
2935 enum machine_mode mode = GET_MODE (op);
2936 int nunits = GET_MODE_NUNITS (mode);
2937 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2938 unsigned step = nunits / 4;
2939 unsigned copies = 1;
2940
2941 /* Start with a vspltisw. */
2942 if (vspltis_constant (op, step, copies))
2943 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2944
2945 /* Then try with a vspltish. */
2946 if (step == 1)
2947 copies <<= 1;
2948 else
2949 step >>= 1;
2950
2951 if (vspltis_constant (op, step, copies))
2952 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2953
2954 /* And finally a vspltisb. */
2955 if (step == 1)
2956 copies <<= 1;
2957 else
2958 step >>= 1;
2959
2960 if (vspltis_constant (op, step, copies))
2961 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2962
2963 gcc_unreachable ();
d744e06e
AH
2964}
2965
2966const char *
a2369ed3 2967output_vec_const_move (rtx *operands)
d744e06e
AH
2968{
2969 int cst, cst2;
2970 enum machine_mode mode;
2971 rtx dest, vec;
2972
2973 dest = operands[0];
2974 vec = operands[1];
d744e06e 2975 mode = GET_MODE (dest);
69ef87e2 2976
d744e06e
AH
2977 if (TARGET_ALTIVEC)
2978 {
66180ff3 2979 rtx splat_vec;
d744e06e
AH
2980 if (zero_constant (vec, mode))
2981 return "vxor %0,%0,%0";
37409796 2982
66180ff3
PB
2983 splat_vec = gen_easy_altivec_constant (vec);
2984 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2985 operands[1] = XEXP (splat_vec, 0);
2986 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2987 return "#";
bb8df8a6 2988
66180ff3 2989 switch (GET_MODE (splat_vec))
98ef3137 2990 {
37409796 2991 case V4SImode:
66180ff3 2992 return "vspltisw %0,%1";
c4ad648e 2993
37409796 2994 case V8HImode:
66180ff3 2995 return "vspltish %0,%1";
c4ad648e 2996
37409796 2997 case V16QImode:
66180ff3 2998 return "vspltisb %0,%1";
bb8df8a6 2999
37409796
NS
3000 default:
3001 gcc_unreachable ();
98ef3137 3002 }
69ef87e2
AH
3003 }
3004
37409796 3005 gcc_assert (TARGET_SPE);
bb8df8a6 3006
37409796
NS
3007 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3008 pattern of V1DI, V4HI, and V2SF.
3009
3010 FIXME: We should probably return # and add post reload
3011 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
3012 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3013 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3014 operands[1] = CONST_VECTOR_ELT (vec, 0);
3015 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
3016 if (cst == cst2)
3017 return "li %0,%1\n\tevmergelo %0,%0,%0";
3018 else
3019 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
3020}
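/* Editor's note (illustrative, not part of the original source): typical
   outputs of the routine above:

     AltiVec, all-zero vector     ->  "vxor %0,%0,%0"
     AltiVec, V4SI {5,5,5,5}      ->  "vspltisw %0,5"
     SPE,     V2SI {3,3}          ->  "li %0,3\n\tevmergelo %0,%0,%0"  */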
3021
f5027409
RE
3022/* Initialize the paired-float vector TARGET to VALS.  */
3023
3024void
3025paired_expand_vector_init (rtx target, rtx vals)
3026{
3027 enum machine_mode mode = GET_MODE (target);
3028 int n_elts = GET_MODE_NUNITS (mode);
3029 int n_var = 0;
0a2aaacc 3030 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3031 int i;
3032
3033 for (i = 0; i < n_elts; ++i)
3034 {
3035 x = XVECEXP (vals, 0, i);
3036 if (!CONSTANT_P (x))
3037 ++n_var;
3038 }
3039 if (n_var == 0)
3040 {
3041 /* Load from constant pool. */
3042 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3043 return;
3044 }
3045
3046 if (n_var == 2)
3047 {
3048 /* The vector is initialized only with non-constants. */
0a2aaacc 3049 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3050 XVECEXP (vals, 0, 1));
3051
0a2aaacc 3052 emit_move_insn (target, new_rtx);
f5027409
RE
3053 return;
3054 }
3055
3056 /* One field is non-constant and the other one is a constant. Load the
 3057 constant from the constant pool and use the ps_merge instruction to
3058 construct the whole vector. */
3059 op1 = XVECEXP (vals, 0, 0);
3060 op2 = XVECEXP (vals, 0, 1);
3061
3062 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3063
3064 tmp = gen_reg_rtx (GET_MODE (constant_op));
3065 emit_move_insn (tmp, constant_op);
3066
3067 if (CONSTANT_P (op1))
0a2aaacc 3068 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3069 else
0a2aaacc 3070 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3071
0a2aaacc 3072 emit_move_insn (target, new_rtx);
3073}
3074
3075void
3076paired_expand_vector_move (rtx operands[])
3077{
3078 rtx op0 = operands[0], op1 = operands[1];
3079
3080 emit_move_insn (op0, op1);
3081}
3082
3083/* Emit vector compare for code RCODE. DEST is destination, OP1 and
3084 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3085 operands for the relation operation COND. This is a recursive
3086 function. */
3087
3088static void
3089paired_emit_vector_compare (enum rtx_code rcode,
3090 rtx dest, rtx op0, rtx op1,
3091 rtx cc_op0, rtx cc_op1)
3092{
3093 rtx tmp = gen_reg_rtx (V2SFmode);
3094 rtx tmp1, max, min, equal_zero;
3095
3096 gcc_assert (TARGET_PAIRED_FLOAT);
3097 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3098
3099 switch (rcode)
3100 {
3101 case LT:
3102 case LTU:
3103 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3104 return;
3105 case GE:
3106 case GEU:
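      /* GE is the base case: subtract and let the paired-single select
	 pick OP0 where CC_OP0 - CC_OP1 is non-negative, OP1 otherwise.
	 Most of the other relations below are reduced to this by swapping
	 operands.  */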
3107 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3108 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3109 return;
3110 case LE:
3111 case LEU:
3112 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3113 return;
3114 case GT:
3115 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3116 return;
3117 case EQ:
3118 tmp1 = gen_reg_rtx (V2SFmode);
3119 max = gen_reg_rtx (V2SFmode);
3120 min = gen_reg_rtx (V2SFmode);
3121 equal_zero = gen_reg_rtx (V2SFmode);
3122
3123 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3124 emit_insn (gen_selv2sf4
3125 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3126 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3127 emit_insn (gen_selv2sf4
3128 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3129 emit_insn (gen_subv2sf3 (tmp1, min, max));
3130 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3131 return;
3132 case NE:
3133 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3134 return;
3135 case UNLE:
3136 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3137 return;
3138 case UNLT:
3139 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3140 return;
3141 case UNGE:
3142 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3143 return;
3144 case UNGT:
3145 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3146 return;
3147 default:
3148 gcc_unreachable ();
3149 }
3150
3151 return;
3152}
3153
3154/* Emit vector conditional expression.
3155 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3156 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3157
3158int
3159paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3160 rtx cond, rtx cc_op0, rtx cc_op1)
3161{
3162 enum rtx_code rcode = GET_CODE (cond);
3163
3164 if (!TARGET_PAIRED_FLOAT)
3165 return 0;
3166
3167 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3168
3169 return 1;
3170}
3171
3172/* Initialize vector TARGET to VALS. */
3173
3174void
3175rs6000_expand_vector_init (rtx target, rtx vals)
3176{
3177 enum machine_mode mode = GET_MODE (target);
3178 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3179 int n_elts = GET_MODE_NUNITS (mode);
3180 int n_var = 0, one_var = -1;
3181 bool all_same = true, all_const_zero = true;
3182 rtx x, mem;
3183 int i;
3184
3185 for (i = 0; i < n_elts; ++i)
3186 {
3187 x = XVECEXP (vals, 0, i);
3188 if (!CONSTANT_P (x))
3189 ++n_var, one_var = i;
3190 else if (x != CONST0_RTX (inner_mode))
3191 all_const_zero = false;
3192
3193 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3194 all_same = false;
3195 }
3196
3197 if (n_var == 0)
3198 {
501fb355 3199 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
3200 if (mode != V4SFmode && all_const_zero)
3201 {
3202 /* Zero register. */
3203 emit_insn (gen_rtx_SET (VOIDmode, target,
3204 gen_rtx_XOR (mode, target, target)));
3205 return;
3206 }
501fb355 3207 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
3208 {
3209 /* Splat immediate. */
501fb355 3210 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
3211 return;
3212 }
3213 else if (all_same)
3214 ; /* Splat vector element. */
3215 else
3216 {
3217 /* Load from constant pool. */
501fb355 3218 emit_move_insn (target, const_vec);
3219 return;
3220 }
3221 }
3222
3223 /* Store value to stack temp. Load vector element. Splat. */
3224 if (all_same)
3225 {
3226 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3227 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3228 XVECEXP (vals, 0, 0));
3229 x = gen_rtx_UNSPEC (VOIDmode,
3230 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3231 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3232 gen_rtvec (2,
3233 gen_rtx_SET (VOIDmode,
3234 target, mem),
3235 x)));
3236 x = gen_rtx_VEC_SELECT (inner_mode, target,
3237 gen_rtx_PARALLEL (VOIDmode,
3238 gen_rtvec (1, const0_rtx)));
3239 emit_insn (gen_rtx_SET (VOIDmode, target,
3240 gen_rtx_VEC_DUPLICATE (mode, x)));
3241 return;
3242 }
3243
3244 /* One field is non-constant. Load constant then overwrite
3245 varying field. */
3246 if (n_var == 1)
3247 {
3248 rtx copy = copy_rtx (vals);
3249
57b51d4d 3250 /* Load constant part of vector, substitute neighboring value for
3251 varying element. */
3252 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3253 rs6000_expand_vector_init (target, copy);
3254
3255 /* Insert variable. */
3256 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3257 return;
3258 }
3259
3260 /* Construct the vector in memory one field at a time
3261 and load the whole vector. */
3262 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3263 for (i = 0; i < n_elts; i++)
3264 emit_move_insn (adjust_address_nv (mem, inner_mode,
3265 i * GET_MODE_SIZE (inner_mode)),
3266 XVECEXP (vals, 0, i));
3267 emit_move_insn (target, mem);
3268}
3269
3270/* Set field ELT of TARGET to VAL. */
3271
3272void
3273rs6000_expand_vector_set (rtx target, rtx val, int elt)
3274{
3275 enum machine_mode mode = GET_MODE (target);
3276 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3277 rtx reg = gen_reg_rtx (mode);
3278 rtx mask, mem, x;
3279 int width = GET_MODE_SIZE (inner_mode);
3280 int i;
3281
3282 /* Load single variable value. */
3283 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3284 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3285 x = gen_rtx_UNSPEC (VOIDmode,
3286 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3287 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3288 gen_rtvec (2,
3289 gen_rtx_SET (VOIDmode,
3290 reg, mem),
3291 x)));
3292
3293 /* Linear sequence. */
3294 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3295 for (i = 0; i < 16; ++i)
3296 XVECEXP (mask, 0, i) = GEN_INT (i);
3297
3298 /* Set permute mask to insert element into target. */
3299 for (i = 0; i < width; ++i)
3300 XVECEXP (mask, 0, elt*width + i)
3301 = GEN_INT (i + 0x10);
3302 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3303 x = gen_rtx_UNSPEC (mode,
3304 gen_rtvec (3, target, reg,
3305 force_reg (V16QImode, x)),
3306 UNSPEC_VPERM);
3307 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3308}
3309
3310/* Extract field ELT from VEC into TARGET. */
3311
3312void
3313rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3314{
3315 enum machine_mode mode = GET_MODE (vec);
3316 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3317 rtx mem, x;
3318
3319 /* Allocate mode-sized buffer. */
3320 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3321
3322 /* Add offset to field within buffer matching vector element. */
3323 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3324
3325 /* Store single field into mode-sized buffer. */
3326 x = gen_rtx_UNSPEC (VOIDmode,
3327 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3328 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3329 gen_rtvec (2,
3330 gen_rtx_SET (VOIDmode,
3331 mem, vec),
3332 x)));
3333 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3334}
3335
3336/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3337 implement ANDing by the mask IN. */
3338void
a2369ed3 3339build_mask64_2_operands (rtx in, rtx *out)
3340{
3341#if HOST_BITS_PER_WIDE_INT >= 64
3342 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3343 int shift;
3344
37409796 3345 gcc_assert (GET_CODE (in) == CONST_INT);
3346
3347 c = INTVAL (in);
3348 if (c & 1)
3349 {
3350 /* Assume c initially something like 0x00fff000000fffff. The idea
3351 is to rotate the word so that the middle ^^^^^^ group of zeros
3352 is at the MS end and can be cleared with an rldicl mask. We then
3353 rotate back and clear off the MS ^^ group of zeros with a
3354 second rldicl. */
3355 c = ~c; /* c == 0xff000ffffff00000 */
3356 lsb = c & -c; /* lsb == 0x0000000000100000 */
3357 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3358 c = ~c; /* c == 0x00fff000000fffff */
3359 c &= -lsb; /* c == 0x00fff00000000000 */
3360 lsb = c & -c; /* lsb == 0x0000100000000000 */
3361 c = ~c; /* c == 0xff000fffffffffff */
3362 c &= -lsb; /* c == 0xff00000000000000 */
3363 shift = 0;
3364 while ((lsb >>= 1) != 0)
3365 shift++; /* shift == 44 on exit from loop */
3366 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3367 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3368 m2 = ~c; /* m2 == 0x00ffffffffffffff */
3369 }
3370 else
3371 {
3372 /* Assume c initially something like 0xff000f0000000000. The idea
3373 is to rotate the word so that the ^^^ middle group of zeros
3374 is at the LS end and can be cleared with an rldicr mask. We then
3375 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3376 a second rldicr. */
3377 lsb = c & -c; /* lsb == 0x0000010000000000 */
3378 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3379 c = ~c; /* c == 0x00fff0ffffffffff */
3380 c &= -lsb; /* c == 0x00fff00000000000 */
3381 lsb = c & -c; /* lsb == 0x0000100000000000 */
3382 c = ~c; /* c == 0xff000fffffffffff */
3383 c &= -lsb; /* c == 0xff00000000000000 */
3384 shift = 0;
3385 while ((lsb >>= 1) != 0)
3386 shift++; /* shift == 44 on exit from loop */
3387 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3388 m1 >>= shift; /* m1 == 0x0000000000000fff */
3389 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3390 }
3391
3392 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3393 masks will be all 1's. We are guaranteed more than one transition. */
3394 out[0] = GEN_INT (64 - shift);
3395 out[1] = GEN_INT (m1);
3396 out[2] = GEN_INT (shift);
3397 out[3] = GEN_INT (m2);
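  /* The consumer rotates IN left by out[0] and ANDs with mask out[1], then
     rotates the result left by out[2] (undoing the first rotate) and ANDs
     with mask out[3].  */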
3398#else
3399 (void)in;
3400 (void)out;
37409796 3401 gcc_unreachable ();
0ba1b2ff 3402#endif
3403}
3404
54b695e7 3405/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
3406
3407bool
3408invalid_e500_subreg (rtx op, enum machine_mode mode)
3409{
3410 if (TARGET_E500_DOUBLE)
3411 {
17caeff2 3412 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3413 subreg:TI and reg:TF. Decimal float modes are like integer
3414 modes (only low part of each register used) for this
3415 purpose. */
61c76239 3416 if (GET_CODE (op) == SUBREG
3417 && (mode == SImode || mode == DImode || mode == TImode
3418 || mode == DDmode || mode == TDmode)
61c76239 3419 && REG_P (SUBREG_REG (op))
17caeff2 3420 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3421 || GET_MODE (SUBREG_REG (op)) == TFmode))
3422 return true;
3423
3424 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3425 reg:TI. */
61c76239 3426 if (GET_CODE (op) == SUBREG
4f011e1e 3427 && (mode == DFmode || mode == TFmode)
61c76239 3428 && REG_P (SUBREG_REG (op))
17caeff2 3429 && (GET_MODE (SUBREG_REG (op)) == DImode
3430 || GET_MODE (SUBREG_REG (op)) == TImode
3431 || GET_MODE (SUBREG_REG (op)) == DDmode
3432 || GET_MODE (SUBREG_REG (op)) == TDmode))
3433 return true;
3434 }
54b695e7 3435
3436 if (TARGET_SPE
3437 && GET_CODE (op) == SUBREG
3438 && mode == SImode
54b695e7 3439 && REG_P (SUBREG_REG (op))
14502dad 3440 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
3441 return true;
3442
3443 return false;
3444}
3445
58182de3 3446/* AIX increases natural record alignment to doubleword if the first
3447 field is an FP double while the FP fields remain word aligned. */
3448
19d66194 3449unsigned int
3450rs6000_special_round_type_align (tree type, unsigned int computed,
3451 unsigned int specified)
95727fb8 3452{
fa5b0972 3453 unsigned int align = MAX (computed, specified);
95727fb8 3454 tree field = TYPE_FIELDS (type);
95727fb8 3455
bb8df8a6 3456 /* Skip all non-field decls.  */
85962ac8 3457 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3458 field = TREE_CHAIN (field);
3459
3460 if (field != NULL && field != type)
3461 {
3462 type = TREE_TYPE (field);
3463 while (TREE_CODE (type) == ARRAY_TYPE)
3464 type = TREE_TYPE (type);
3465
3466 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3467 align = MAX (align, 64);
3468 }
95727fb8 3469
fa5b0972 3470 return align;
3471}
3472
3473/* Darwin increases record alignment to the natural alignment of
3474 the first field. */
3475
3476unsigned int
3477darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3478 unsigned int specified)
3479{
3480 unsigned int align = MAX (computed, specified);
3481
3482 if (TYPE_PACKED (type))
3483 return align;
3484
3485 /* Find the first field, looking down into aggregates. */
3486 do {
3487 tree field = TYPE_FIELDS (type);
 3488 /* Skip all non-field decls.  */
3489 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3490 field = TREE_CHAIN (field);
3491 if (! field)
3492 break;
3493 type = TREE_TYPE (field);
3494 while (TREE_CODE (type) == ARRAY_TYPE)
3495 type = TREE_TYPE (type);
3496 } while (AGGREGATE_TYPE_P (type));
3497
3498 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3499 align = MAX (align, TYPE_ALIGN (type));
3500
3501 return align;
3502}
3503
a4f6c312 3504/* Return 1 for an operand in small memory on V.4/eabi. */
3505
3506int
f676971a 3507small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3508 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3509{
38c1f2d7 3510#if TARGET_ELF
5f59ecb7 3511 rtx sym_ref;
7509c759 3512
d9407988 3513 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3514 return 0;
a54d04b7 3515
f607bc57 3516 if (DEFAULT_ABI != ABI_V4)
3517 return 0;
3518
3519 /* Vector and float memory instructions have a limited offset on the
3520 SPE, so using a vector or float variable directly as an operand is
3521 not useful. */
3522 if (TARGET_SPE
3523 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3524 return 0;
3525
3526 if (GET_CODE (op) == SYMBOL_REF)
3527 sym_ref = op;
3528
3529 else if (GET_CODE (op) != CONST
3530 || GET_CODE (XEXP (op, 0)) != PLUS
3531 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3532 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
3533 return 0;
3534
88228c4b 3535 else
3536 {
3537 rtx sum = XEXP (op, 0);
3538 HOST_WIDE_INT summand;
3539
3540 /* We have to be careful here, because it is the referenced address
c4ad648e 3541 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3542 summand = INTVAL (XEXP (sum, 1));
307b599c 3543 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3544 return 0;
3545
3546 sym_ref = XEXP (sum, 0);
3547 }
88228c4b 3548
20bfcd69 3549 return SYMBOL_REF_SMALL_P (sym_ref);
3550#else
3551 return 0;
3552#endif
7509c759 3553}
46c07df8 3554
3a1f863f 3555/* Return true if either operand is a general purpose register. */
46c07df8 3556
3557bool
3558gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3559{
3560 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3561 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
3562}
3563
9ebbca7d 3564\f
c6c3dba9 3565/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address_p. */
4d588c14 3566
4d588c14 3567static bool
a2369ed3 3568constant_pool_expr_p (rtx op)
9ebbca7d 3569{
3570 rtx base, offset;
3571
3572 split_const (op, &base, &offset);
3573 return (GET_CODE (base) == SYMBOL_REF
3574 && CONSTANT_POOL_ADDRESS_P (base)
3575 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
3576}
3577
48d72335 3578bool
a2369ed3 3579toc_relative_expr_p (rtx op)
9ebbca7d 3580{
3581 rtx base, offset;
3582
3583 if (GET_CODE (op) != CONST)
3584 return false;
3585
3586 split_const (op, &base, &offset);
3587 return (GET_CODE (base) == UNSPEC
3588 && XINT (base, 1) == UNSPEC_TOCREL);
3589}
3590
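/* Return true if X is a TOC-relative reference to the constant pool,
   i.e. roughly (plus (reg) (const (unspec [...] UNSPEC_TOCREL))).  */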
4d588c14 3591bool
a2369ed3 3592legitimate_constant_pool_address_p (rtx x)
3593{
3594 return (TARGET_TOC
3595 && GET_CODE (x) == PLUS
3596 && GET_CODE (XEXP (x, 0)) == REG
3597 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 3598 && toc_relative_expr_p (XEXP (x, 1)));
3599}
3600
3601static bool
3602legitimate_small_data_p (enum machine_mode mode, rtx x)
3603{
3604 return (DEFAULT_ABI == ABI_V4
3605 && !flag_pic && !TARGET_TOC
3606 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3607 && small_data_operand (x, mode));
3608}
3609
3610/* SPE offset addressing is limited to 5-bits worth of double words. */
3611#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
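/* That is, the offset must be a multiple of 8 no larger than 248
   (0x00, 0x08, ..., 0xf8).  */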
3612
3613bool
3614rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3615{
3616 unsigned HOST_WIDE_INT offset, extra;
3617
3618 if (GET_CODE (x) != PLUS)
3619 return false;
3620 if (GET_CODE (XEXP (x, 0)) != REG)
3621 return false;
3622 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3623 return false;
3624 if (legitimate_constant_pool_address_p (x))
3625 return true;
3626 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3627 return false;
3628
3629 offset = INTVAL (XEXP (x, 1));
3630 extra = 0;
3631 switch (mode)
3632 {
3633 case V16QImode:
3634 case V8HImode:
3635 case V4SFmode:
3636 case V4SImode:
7a4eca66 3637 /* AltiVec vector modes. Only reg+reg addressing is valid and
3638 constant offset zero should not occur due to canonicalization. */
3639 return false;
3640
3641 case V4HImode:
3642 case V2SImode:
3643 case V1DImode:
3644 case V2SFmode:
d42a3bae 3645 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3646 constant offset zero should not occur due to canonicalization. */
d42a3bae 3647 if (TARGET_PAIRED_FLOAT)
1a23970d 3648 return false;
3649 /* SPE vector modes. */
3650 return SPE_CONST_OFFSET_OK (offset);
3651
3652 case DFmode:
3653 if (TARGET_E500_DOUBLE)
3654 return SPE_CONST_OFFSET_OK (offset);
3655
4f011e1e 3656 case DDmode:
4d588c14 3657 case DImode:
3658 /* On e500v2, we may have:
3659
3660 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3661
3662 Which gets addressed with evldd instructions. */
3663 if (TARGET_E500_DOUBLE)
3664 return SPE_CONST_OFFSET_OK (offset);
3665
7393f7f8 3666 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
3667 extra = 4;
3668 else if (offset & 3)
3669 return false;
3670 break;
3671
3672 case TFmode:
3673 if (TARGET_E500_DOUBLE)
3674 return (SPE_CONST_OFFSET_OK (offset)
3675 && SPE_CONST_OFFSET_OK (offset + 8));
3676
4f011e1e 3677 case TDmode:
4d588c14 3678 case TImode:
7393f7f8 3679 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
3680 extra = 12;
3681 else if (offset & 3)
3682 return false;
3683 else
3684 extra = 8;
3685 break;
3686
3687 default:
3688 break;
3689 }
3690
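  /* Bias the signed 16-bit displacement so a single unsigned comparison
     checks it, and require that the last word touched (EXTRA bytes further
     on) is reachable as well.  */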
3691 offset += 0x8000;
3692 return (offset < 0x10000) && (offset + extra < 0x10000);
3693}
3694
6fb5fa3c 3695bool
a2369ed3 3696legitimate_indexed_address_p (rtx x, int strict)
3697{
3698 rtx op0, op1;
3699
3700 if (GET_CODE (x) != PLUS)
3701 return false;
850e8d3d 3702
3703 op0 = XEXP (x, 0);
3704 op1 = XEXP (x, 1);
3705
bf00cc0f 3706 /* Recognize the rtl generated by reload which we know will later be
3707 replaced with proper base and index regs. */
3708 if (!strict
3709 && reload_in_progress
3710 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3711 && REG_P (op1))
3712 return true;
3713
3714 return (REG_P (op0) && REG_P (op1)
3715 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3716 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3717 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3718 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
3719}
3720
3721bool
3722avoiding_indexed_address_p (enum machine_mode mode)
3723{
3724 /* Avoid indexed addressing for modes that have non-indexed
3725 load/store instruction forms. */
3726 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
3727}
3728
48d72335 3729inline bool
a2369ed3 3730legitimate_indirect_address_p (rtx x, int strict)
3731{
3732 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3733}
3734
48d72335 3735bool
3736macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3737{
c4ad648e 3738 if (!TARGET_MACHO || !flag_pic
9390387d 3739 || mode != SImode || GET_CODE (x) != MEM)
3740 return false;
3741 x = XEXP (x, 0);
3742
3743 if (GET_CODE (x) != LO_SUM)
3744 return false;
3745 if (GET_CODE (XEXP (x, 0)) != REG)
3746 return false;
3747 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3748 return false;
3749 x = XEXP (x, 1);
3750
3751 return CONSTANT_P (x);
3752}
3753
4d588c14 3754static bool
a2369ed3 3755legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3756{
3757 if (GET_CODE (x) != LO_SUM)
3758 return false;
3759 if (GET_CODE (XEXP (x, 0)) != REG)
3760 return false;
3761 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3762 return false;
54b695e7 3763 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3764 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3765 || mode == DDmode || mode == TDmode
17caeff2 3766 || mode == DImode))
f82f556d 3767 return false;
3768 x = XEXP (x, 1);
3769
8622e235 3770 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3771 {
a29077da 3772 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3773 return false;
3774 if (TARGET_TOC)
3775 return false;
3776 if (GET_MODE_NUNITS (mode) != 1)
3777 return false;
5e5f01b9 3778 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3779 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 3780 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 3781 && (mode == DFmode || mode == DDmode))))
3782 return false;
3783
3784 return CONSTANT_P (x);
3785 }
3786
3787 return false;
3788}
3789
3790
3791/* Try machine-dependent ways of modifying an illegitimate address
3792 to be legitimate. If we find one, return the new, valid address.
3793 This is used from only one place: `memory_address' in explow.c.
3794
3795 OLDX is the address as it was before break_out_memory_refs was
3796 called. In some cases it is useful to look at this to decide what
3797 needs to be done.
9ebbca7d 3798
3799 It is always safe for this function to do nothing. It exists to
3800 recognize opportunities to optimize the output.
9ebbca7d
GK
3801
3802 On RS/6000, first check for the sum of a register with a constant
3803 integer that is out of range. If so, generate code to add the
3804 constant with the low-order 16 bits masked to the register and force
3805 this result into another register (this can be done with `cau').
3806 Then generate an address of REG+(CONST&0xffff), allowing for the
3807 possibility of bit 16 being a one.
3808
3809 Then check for the sum of a register and something not constant, try to
3810 load the other things into a register and return the sum. */
4d588c14 3811
9ebbca7d 3812rtx
3813rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3814 enum machine_mode mode)
0ac081f6 3815{
3816 if (GET_CODE (x) == SYMBOL_REF)
3817 {
3818 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3819 if (model != 0)
3820 return rs6000_legitimize_tls_address (x, model);
3821 }
3822
f676971a 3823 if (GET_CODE (x) == PLUS
3824 && GET_CODE (XEXP (x, 0)) == REG
3825 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb 3826 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3827 && !((TARGET_POWERPC64
3828 && (mode == DImode || mode == TImode)
3829 && (INTVAL (XEXP (x, 1)) & 3) != 0)
3830 || SPE_VECTOR_MODE (mode)
efc05e3c 3831 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3832 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3833 || mode == DImode || mode == DDmode
3834 || mode == TDmode))))
f676971a 3835 {
3836 HOST_WIDE_INT high_int, low_int;
3837 rtx sum;
3838 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3839 high_int = INTVAL (XEXP (x, 1)) - low_int;
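      /* Split the displacement into a sign-extended low 16 bits and a
	 high part suitable for addis; e.g. reg + 0x12348000 becomes
	 (reg + 0x12350000) + (-0x8000).  */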
3840 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3841 GEN_INT (high_int)), 0);
3842 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3843 }
f676971a 3844 else if (GET_CODE (x) == PLUS
3845 && GET_CODE (XEXP (x, 0)) == REG
3846 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3847 && GET_MODE_NUNITS (mode) == 1
696e45ba 3848 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 3849 || TARGET_POWERPC64
efc05e3c 3850 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3851 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3852 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 3853 && !avoiding_indexed_address_p (mode)
3854 && mode != TImode
3855 && mode != TFmode
3856 && mode != TDmode)
3857 {
3858 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3859 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3860 }
3861 else if (ALTIVEC_VECTOR_MODE (mode))
3862 {
3863 rtx reg;
3864
3865 /* Make sure both operands are registers. */
3866 if (GET_CODE (x) == PLUS)
9f85ed45 3867 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3868 force_reg (Pmode, XEXP (x, 1)));
3869
3870 reg = force_reg (Pmode, x);
3871 return reg;
3872 }
4d4cbc0e 3873 else if (SPE_VECTOR_MODE (mode)
17caeff2 3874 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3875 || mode == DDmode || mode == TDmode
54b695e7 3876 || mode == DImode)))
a3170dc6 3877 {
54b695e7 3878 if (mode == DImode)
506d7b68 3879 return x;
3880 /* We accept [reg + reg] and [reg + OFFSET]. */
3881
3882 if (GET_CODE (x) == PLUS)
3883 {
3884 rtx op1 = XEXP (x, 0);
3885 rtx op2 = XEXP (x, 1);
3886 rtx y;
3887
3888 op1 = force_reg (Pmode, op1);
3889
3890 if (GET_CODE (op2) != REG
3891 && (GET_CODE (op2) != CONST_INT
3892 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3893 || (GET_MODE_SIZE (mode) > 8
3894 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3895 op2 = force_reg (Pmode, op2);
3896
3897 /* We can't always do [reg + reg] for these, because [reg +
3898 reg + offset] is not a legitimate addressing mode. */
3899 y = gen_rtx_PLUS (Pmode, op1, op2);
3900
4f011e1e 3901 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
3902 return force_reg (Pmode, y);
3903 else
3904 return y;
3905 }
3906
3907 return force_reg (Pmode, x);
3908 }
3909 else if (TARGET_ELF
3910 && TARGET_32BIT
3911 && TARGET_NO_TOC
3912 && ! flag_pic
9ebbca7d 3913 && GET_CODE (x) != CONST_INT
f676971a 3914 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3915 && CONSTANT_P (x)
3916 && GET_MODE_NUNITS (mode) == 1
3917 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 3918 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3919 && (mode == DFmode || mode == DDmode))))
3920 {
3921 rtx reg = gen_reg_rtx (Pmode);
3922 emit_insn (gen_elf_high (reg, x));
3923 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3924 }
3925 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3926 && ! flag_pic
3927#if TARGET_MACHO
3928 && ! MACHO_DYNAMIC_NO_PIC_P
3929#endif
ee890fe2 3930 && GET_CODE (x) != CONST_INT
f676971a 3931 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3932 && CONSTANT_P (x)
506a7bc8 3933 && GET_MODE_NUNITS (mode) == 1
696e45ba 3934 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3935 || (mode != DFmode && mode != DDmode))
f676971a 3936 && mode != DImode
3937 && mode != TImode)
3938 {
3939 rtx reg = gen_reg_rtx (Pmode);
3940 emit_insn (gen_macho_high (reg, x));
3941 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3942 }
f676971a 3943 else if (TARGET_TOC
0cdc04e8 3944 && GET_CODE (x) == SYMBOL_REF
4d588c14 3945 && constant_pool_expr_p (x)
a9098fd0 3946 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3947 {
3948 return create_TOC_reference (x);
3949 }
3950 else
506d7b68 3951 return x;
9ebbca7d 3952}
258bfae2 3953
fdbe66f2 3954/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3955 We need to emit DTP-relative relocations. */
3956
fdbe66f2 3957static void
3958rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3959{
3960 switch (size)
3961 {
3962 case 4:
3963 fputs ("\t.long\t", file);
3964 break;
3965 case 8:
3966 fputs (DOUBLE_INT_ASM_OP, file);
3967 break;
3968 default:
37409796 3969 gcc_unreachable ();
3970 }
3971 output_addr_const (file, x);
3972 fputs ("@dtprel+0x8000", file);
3973}
3974
3975/* Construct the SYMBOL_REF for the tls_get_addr function. */
3976
3977static GTY(()) rtx rs6000_tls_symbol;
3978static rtx
863d938c 3979rs6000_tls_get_addr (void)
3980{
3981 if (!rs6000_tls_symbol)
3982 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3983
3984 return rs6000_tls_symbol;
3985}
3986
3987/* Construct the SYMBOL_REF for TLS GOT references. */
3988
3989static GTY(()) rtx rs6000_got_symbol;
3990static rtx
863d938c 3991rs6000_got_sym (void)
3992{
3993 if (!rs6000_got_symbol)
3994 {
3995 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3996 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3997 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3998 }
3999
4000 return rs6000_got_symbol;
4001}
4002
4003/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4004 this (thread-local) address. */
4005
4006static rtx
a2369ed3 4007rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
4008{
4009 rtx dest, insn;
4010
4011 dest = gen_reg_rtx (Pmode);
4012 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4013 {
4014 rtx tlsreg;
4015
4016 if (TARGET_64BIT)
4017 {
4018 tlsreg = gen_rtx_REG (Pmode, 13);
4019 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4020 }
4021 else
4022 {
4023 tlsreg = gen_rtx_REG (Pmode, 2);
4024 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4025 }
4026 emit_insn (insn);
4027 }
4028 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4029 {
4030 rtx tlsreg, tmp;
4031
4032 tmp = gen_reg_rtx (Pmode);
4033 if (TARGET_64BIT)
4034 {
4035 tlsreg = gen_rtx_REG (Pmode, 13);
4036 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4037 }
4038 else
4039 {
4040 tlsreg = gen_rtx_REG (Pmode, 2);
4041 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4042 }
4043 emit_insn (insn);
4044 if (TARGET_64BIT)
4045 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4046 else
4047 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4048 emit_insn (insn);
4049 }
4050 else
4051 {
4052 rtx r3, got, tga, tmp1, tmp2, eqv;
4053
4054 /* We currently use relocations like @got@tlsgd for tls, which
4055 means the linker will handle allocation of tls entries, placing
4056 them in the .got section. So use a pointer to the .got section,
4057 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4058 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4059 if (TARGET_64BIT)
972f427b 4060 got = gen_rtx_REG (Pmode, 2);
4061 else
4062 {
4063 if (flag_pic == 1)
4064 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4065 else
4066 {
4067 rtx gsym = rs6000_got_sym ();
4068 got = gen_reg_rtx (Pmode);
4069 if (flag_pic == 0)
4070 rs6000_emit_move (got, gsym, Pmode);
4071 else
4072 {
e65a3857 4073 rtx tmp3, mem;
4074 rtx first, last;
4075
4076 tmp1 = gen_reg_rtx (Pmode);
4077 tmp2 = gen_reg_rtx (Pmode);
4078 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4079 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4080
4081 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4082 emit_move_insn (tmp1,
1de43f85 4083 gen_rtx_REG (Pmode, LR_REGNO));
4084 emit_move_insn (tmp2, mem);
4085 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4086 last = emit_move_insn (got, tmp3);
bd94cb6e 4087 set_unique_reg_note (last, REG_EQUAL, gsym);
4088 }
4089 }
4090 }
4091
4092 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4093 {
4094 r3 = gen_rtx_REG (Pmode, 3);
4095 tga = rs6000_tls_get_addr ();
4096
4097 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4098 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4099 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4100 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4101 else if (DEFAULT_ABI == ABI_V4)
4102 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4103 else
4104 gcc_unreachable ();
4105
c4501e62 4106 start_sequence ();
c4501e62 4107 insn = emit_call_insn (insn);
becfd6e5 4108 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4109 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
4110 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4111 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
4112 insn = get_insns ();
4113 end_sequence ();
4114 emit_libcall_block (insn, dest, r3, addr);
4115 }
4116 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4117 {
4118 r3 = gen_rtx_REG (Pmode, 3);
4119 tga = rs6000_tls_get_addr ();
4120
4121 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4122 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4123 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4124 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4125 else if (DEFAULT_ABI == ABI_V4)
4126 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4127 else
4128 gcc_unreachable ();
4129
c4501e62 4130 start_sequence ();
c4501e62 4131 insn = emit_call_insn (insn);
becfd6e5 4132 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4133 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
4134 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4135 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
4136 insn = get_insns ();
4137 end_sequence ();
4138 tmp1 = gen_reg_rtx (Pmode);
4139 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4140 UNSPEC_TLSLD);
4141 emit_libcall_block (insn, tmp1, r3, eqv);
4142 if (rs6000_tls_size == 16)
4143 {
4144 if (TARGET_64BIT)
4145 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4146 else
4147 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4148 }
4149 else if (rs6000_tls_size == 32)
4150 {
4151 tmp2 = gen_reg_rtx (Pmode);
4152 if (TARGET_64BIT)
4153 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4154 else
4155 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4156 emit_insn (insn);
4157 if (TARGET_64BIT)
4158 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4159 else
4160 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4161 }
4162 else
4163 {
4164 tmp2 = gen_reg_rtx (Pmode);
4165 if (TARGET_64BIT)
4166 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4167 else
4168 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4169 emit_insn (insn);
4170 insn = gen_rtx_SET (Pmode, dest,
4171 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4172 }
4173 emit_insn (insn);
4174 }
4175 else
4176 {
a7b376ee 4177 /* IE, or 64-bit offset LE. */
4178 tmp2 = gen_reg_rtx (Pmode);
4179 if (TARGET_64BIT)
4180 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4181 else
4182 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4183 emit_insn (insn);
4184 if (TARGET_64BIT)
4185 insn = gen_tls_tls_64 (dest, tmp2, addr);
4186 else
4187 insn = gen_tls_tls_32 (dest, tmp2, addr);
4188 emit_insn (insn);
4189 }
4190 }
4191
4192 return dest;
4193}
4194
4195/* Return 1 if X contains a thread-local symbol. */
4196
4197bool
a2369ed3 4198rs6000_tls_referenced_p (rtx x)
c4501e62 4199{
4200 if (! TARGET_HAVE_TLS)
4201 return false;
4202
4203 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4204}
4205
4206/* Return 1 if *X is a thread-local symbol. This is the same as
4207 rs6000_tls_symbol_ref except for the type of the unused argument. */
4208
9390387d 4209static int
a2369ed3 4210rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
4211{
4212 return RS6000_SYMBOL_REF_TLS_P (*x);
4213}
4214
4215/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4216 replace the input X, or the original X if no replacement is called for.
4217 The output parameter *WIN is 1 if the calling macro should goto WIN,
4218 0 if it should not.
4219
4220 For RS/6000, we wish to handle large displacements off a base
4221 register by splitting the addend across an addiu/addis and the mem insn.
4222 This cuts number of extra insns needed from 3 to 1.
4223
4224 On Darwin, we use this to generate code for floating point constants.
4225 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4226 The Darwin code is inside #if TARGET_MACHO because only then are the
4227 machopic_* functions defined. */
24ea750e 4228rtx
f676971a 4229rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
4230 int opnum, int type,
4231 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4232{
f676971a 4233 /* We must recognize output that we have already generated ourselves. */
4234 if (GET_CODE (x) == PLUS
4235 && GET_CODE (XEXP (x, 0)) == PLUS
4236 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4237 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4238 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4239 {
4240 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4241 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4242 opnum, (enum reload_type)type);
4243 *win = 1;
4244 return x;
4245 }
3deb2758 4246
24ea750e
DJ
4247#if TARGET_MACHO
4248 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4249 && GET_CODE (x) == LO_SUM
4250 && GET_CODE (XEXP (x, 0)) == PLUS
4251 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4252 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4253 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4254 && machopic_operand_p (XEXP (x, 1)))
4255 {
4256 /* Result of previous invocation of this function on Darwin
6f317ef3 4257 floating point constant. */
24ea750e 4258 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4259 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4260 opnum, (enum reload_type)type);
4261 *win = 1;
4262 return x;
4263 }
4264#endif
4265
4266 /* Force ld/std non-word aligned offset into base register by wrapping
4267 in offset 0. */
4268 if (GET_CODE (x) == PLUS
4269 && GET_CODE (XEXP (x, 0)) == REG
4270 && REGNO (XEXP (x, 0)) < 32
c6c3dba9 4271 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
4272 && GET_CODE (XEXP (x, 1)) == CONST_INT
4273 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4274 && !ALTIVEC_VECTOR_MODE (mode)
4275 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4276 && TARGET_POWERPC64)
4277 {
4278 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4279 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4280 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4281 opnum, (enum reload_type) type);
4282 *win = 1;
4283 return x;
4284 }
4285
4286 if (GET_CODE (x) == PLUS
4287 && GET_CODE (XEXP (x, 0)) == REG
4288 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
c6c3dba9 4289 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
78c875e8 4290 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4291 && !SPE_VECTOR_MODE (mode)
17caeff2 4292 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4293 || mode == DDmode || mode == TDmode
54b695e7 4294 || mode == DImode))
78c875e8 4295 && !ALTIVEC_VECTOR_MODE (mode))
4296 {
4297 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4298 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4299 HOST_WIDE_INT high
c4ad648e 4300 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
4301
4302 /* Check for 32-bit overflow. */
4303 if (high + low != val)
c4ad648e 4304 {
4305 *win = 0;
4306 return x;
4307 }
4308
4309 /* Reload the high part into a base reg; leave the low part
c4ad648e 4310 in the mem directly. */
24ea750e
DJ
4311
4312 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4313 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4314 GEN_INT (high)),
4315 GEN_INT (low));
24ea750e
DJ
4316
4317 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4318 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4319 opnum, (enum reload_type)type);
24ea750e
DJ
4320 *win = 1;
4321 return x;
4322 }
4937d02d 4323
24ea750e 4324 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4325 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4326 && !SPE_VECTOR_MODE (mode)
4327#if TARGET_MACHO
4328 && DEFAULT_ABI == ABI_DARWIN
a29077da 4329 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
4330#else
4331 && DEFAULT_ABI == ABI_V4
4332 && !flag_pic
4333#endif
7393f7f8 4334 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4335 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4336 without fprs. */
0d8c1c97 4337 && mode != TFmode
7393f7f8 4338 && mode != TDmode
7b5d92b2 4339 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4340 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4341 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4342 {
8308679f 4343#if TARGET_MACHO
4344 if (flag_pic)
4345 {
08a6a74b 4346 rtx offset = machopic_gen_offset (x);
4347 x = gen_rtx_LO_SUM (GET_MODE (x),
4348 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4349 gen_rtx_HIGH (Pmode, offset)), offset);
4350 }
4351 else
8308679f 4352#endif
a29077da 4353 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4354 gen_rtx_HIGH (Pmode, x), x);
a29077da 4355
24ea750e 4356 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4357 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4358 opnum, (enum reload_type)type);
4359 *win = 1;
4360 return x;
4361 }
4937d02d 4362
4363 /* Reload an offset address wrapped by an AND that represents the
4364 masking of the lower bits. Strip the outer AND and let reload
4365 convert the offset address into an indirect address. */
4366 if (TARGET_ALTIVEC
4367 && ALTIVEC_VECTOR_MODE (mode)
4368 && GET_CODE (x) == AND
4369 && GET_CODE (XEXP (x, 0)) == PLUS
4370 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4371 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4372 && GET_CODE (XEXP (x, 1)) == CONST_INT
4373 && INTVAL (XEXP (x, 1)) == -16)
4374 {
4375 x = XEXP (x, 0);
4376 *win = 1;
4377 return x;
4378 }
4379
24ea750e 4380 if (TARGET_TOC
0cdc04e8 4381 && GET_CODE (x) == SYMBOL_REF
4d588c14 4382 && constant_pool_expr_p (x)
c1f11548 4383 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4384 {
194c524a 4385 x = create_TOC_reference (x);
4386 *win = 1;
4387 return x;
4388 }
4389 *win = 0;
4390 return x;
f676971a 4391}
24ea750e 4392
331d9186 4393/* TARGET_LEGITIMATE_ADDRESS_P recognizes an RTL expression
4394 that is a valid memory address for an instruction.
4395 The MODE argument is the machine mode for the MEM expression
4396 that wants to use this address.
4397
 4398 On the RS/6000, there are four valid address forms: a SYMBOL_REF that
4399 refers to a constant pool entry of an address (or the sum of it
4400 plus a constant), a short (16-bit signed) constant plus a register,
4401 the sum of two registers, or a register indirect, possibly with an
4402 auto-increment. For DFmode, DDmode and DImode with a constant plus
4403 register, we must ensure that both words are addressable or PowerPC64
4404 with offset word aligned.
258bfae2 4405
4d4447b5 4406 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
4407 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4408 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2 4409 during assembly output. */
4410bool
4411rs6000_legitimate_address_p (enum machine_mode mode, rtx x, bool reg_ok_strict)
258bfae2 4412{
4413 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4414 if (TARGET_ALTIVEC
4415 && ALTIVEC_VECTOR_MODE (mode)
4416 && GET_CODE (x) == AND
4417 && GET_CODE (XEXP (x, 1)) == CONST_INT
4418 && INTVAL (XEXP (x, 1)) == -16)
4419 x = XEXP (x, 0);
4420
4421 if (RS6000_SYMBOL_REF_TLS_P (x))
4422 return 0;
4d588c14 4423 if (legitimate_indirect_address_p (x, reg_ok_strict))
4424 return 1;
4425 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4426 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4427 && !SPE_VECTOR_MODE (mode)
429ec7dc 4428 && mode != TFmode
7393f7f8 4429 && mode != TDmode
54b695e7 4430 /* Restrict addressing for DI because of our SUBREG hackery. */
4431 && !(TARGET_E500_DOUBLE
4432 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4433 && TARGET_UPDATE
4d588c14 4434 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4435 return 1;
d04b6e6e 4436 if (legitimate_small_data_p (mode, x))
258bfae2 4437 return 1;
4d588c14 4438 if (legitimate_constant_pool_address_p (x))
4439 return 1;
4440 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4441 if (! reg_ok_strict
4442 && GET_CODE (x) == PLUS
4443 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4444 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4445 || XEXP (x, 0) == arg_pointer_rtx)
4446 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4447 return 1;
76d2b81d 4448 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
4449 return 1;
4450 if (mode != TImode
76d2b81d 4451 && mode != TFmode
7393f7f8 4452 && mode != TDmode
960c5c79 4453 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 4454 || TARGET_POWERPC64
4455 || (mode != DFmode && mode != DDmode)
4456 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4457 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4458 && !avoiding_indexed_address_p (mode)
4d588c14 4459 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4460 return 1;
4461 if (GET_CODE (x) == PRE_MODIFY
4462 && mode != TImode
4463 && mode != TFmode
4464 && mode != TDmode
696e45ba 4465 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4466 || TARGET_POWERPC64
4d4447b5 4467 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
4468 && (TARGET_POWERPC64 || mode != DImode)
4469 && !ALTIVEC_VECTOR_MODE (mode)
4470 && !SPE_VECTOR_MODE (mode)
4471 /* Restrict addressing for DI because of our SUBREG hackery. */
4472 && !(TARGET_E500_DOUBLE
4473 && (mode == DFmode || mode == DDmode || mode == DImode))
4474 && TARGET_UPDATE
4475 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4476 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4477 || (!avoiding_indexed_address_p (mode)
4478 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
4479 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4480 return 1;
4d588c14 4481 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
4482 return 1;
4483 return 0;
4484}
4485
4486/* Go to LABEL if ADDR (a legitimate address expression)
4487 has an effect that depends on the machine mode it is used for.
4488
4489 On the RS/6000 this is true of all integral offsets (since AltiVec
4490 modes don't allow them) or is a pre-increment or decrement.
4491
4492 ??? Except that due to conceptual problems in offsettable_address_p
4493 we can't really report the problems of integral offsets. So leave
f676971a 4494 this assuming that the adjustable offset must be valid for the
4495 sub-words of a TFmode operand, which is what we had before. */
4496
4497bool
a2369ed3 4498rs6000_mode_dependent_address (rtx addr)
4499{
4500 switch (GET_CODE (addr))
4501 {
4502 case PLUS:
4503 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4504 {
4505 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4506 return val + 12 + 0x8000 >= 0x10000;
4507 }
4508 break;
4509
4510 case LO_SUM:
4511 return true;
4512
619fe064 4513 /* Auto-increment cases are now treated generically in recog.c. */
4514 case PRE_MODIFY:
4515 return TARGET_UPDATE;
4516
4517 default:
4518 break;
4519 }
4520
4521 return false;
4522}
d8ecbcdb 4523
4524/* Implement FIND_BASE_TERM. */
4525
4526rtx
4527rs6000_find_base_term (rtx op)
4528{
4529 rtx base, offset;
4530
4531 split_const (op, &base, &offset);
4532 if (GET_CODE (base) == UNSPEC)
4533 switch (XINT (base, 1))
4534 {
4535 case UNSPEC_TOCREL:
4536 case UNSPEC_MACHOPIC_OFFSET:
4537 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4538 for aliasing purposes. */
4539 return XVECEXP (base, 0, 0);
4540 }
4541
4542 return op;
4543}
4544
4545/* More elaborate version of recog's offsettable_memref_p predicate
4546 that works around the ??? note of rs6000_mode_dependent_address.
4547 In particular it accepts
4548
4549 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4550
4551 in 32-bit mode, that the recog predicate rejects. */
4552
4553bool
4554rs6000_offsettable_memref_p (rtx op)
4555{
4556 if (!MEM_P (op))
4557 return false;
4558
4559 /* First mimic offsettable_memref_p. */
4560 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4561 return true;
4562
4563 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4564 the latter predicate knows nothing about the mode of the memory
4565 reference and, therefore, assumes that it is the largest supported
4566 mode (TFmode). As a consequence, legitimate offsettable memory
4567 references are rejected. rs6000_legitimate_offset_address_p contains
4568 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4569 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4570}
4571
4572/* Return number of consecutive hard regs needed starting at reg REGNO
4573 to hold something of mode MODE.
4574 This is ordinarily the length in words of a value of mode MODE
4575 but can be less for certain modes in special long registers.
4576
4577 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4578 scalar instructions. The upper 32 bits are only available to the
4579 SIMD instructions.
4580
4581 POWER and PowerPC GPRs hold 32 bits worth;
 4582 PowerPC64 GPRs and FPRs hold 64 bits worth. */
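/* For example, a DFmode value occupies a single FPR but two 32-bit GPRs.  */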
4583
4584int
4585rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4586{
4587 if (FP_REGNO_P (regno))
4588 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4589
4590 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4591 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4592
4593 if (ALTIVEC_REGNO_P (regno))
4594 return
4595 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4596
4597 /* The value returned for SCmode in the E500 double case is 2 for
4598 ABI compatibility; storing an SCmode value in a single register
4599 would require function_arg and rs6000_spe_function_arg to handle
4600 SCmode so as to pass the value correctly in a pair of
4601 registers. */
4602 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4603 && !DECIMAL_FLOAT_MODE_P (mode))
4604 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4605
4606 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4607}
4608
4609/* Change register usage conditional on target flags. */
4610void
4611rs6000_conditional_register_usage (void)
4612{
4613 int i;
4614
4615 /* Set MQ register fixed (already call_used) if not POWER
4616 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4617 be allocated. */
4618 if (! TARGET_POWER)
4619 fixed_regs[64] = 1;
4620
7c9ac5c0 4621 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
4622 if (TARGET_64BIT)
4623 fixed_regs[13] = call_used_regs[13]
4624 = call_really_used_regs[13] = 1;
4625
4626 /* Conditionally disable FPRs. */
4627 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4628 for (i = 32; i < 64; i++)
4629 fixed_regs[i] = call_used_regs[i]
c4ad648e 4630 = call_really_used_regs[i] = 1;
2aa4498c 4631
4632 /* The TOC register is not killed across calls in a way that is
4633 visible to the compiler. */
4634 if (DEFAULT_ABI == ABI_AIX)
4635 call_really_used_regs[2] = 0;
4636
4637 if (DEFAULT_ABI == ABI_V4
4638 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4639 && flag_pic == 2)
4640 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4641
4642 if (DEFAULT_ABI == ABI_V4
4643 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4644 && flag_pic == 1)
4645 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4646 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4647 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4648
4649 if (DEFAULT_ABI == ABI_DARWIN
4650 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4651 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4652 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4653 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4654
4655 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4656 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4657 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4658
4659 if (TARGET_SPE)
4660 {
4661 global_regs[SPEFSCR_REGNO] = 1;
4662 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4663 registers in prologues and epilogues. We no longer use r14
4664 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4665 pool for link-compatibility with older versions of GCC. Once
4666 "old" code has died out, we can return r14 to the allocation
4667 pool. */
4668 fixed_regs[14]
4669 = call_used_regs[14]
4670 = call_really_used_regs[14] = 1;
4671 }
4672
0db747be 4673 if (!TARGET_ALTIVEC)
4674 {
4675 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4676 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4677 call_really_used_regs[VRSAVE_REGNO] = 1;
4678 }
4679
4680 if (TARGET_ALTIVEC)
4681 global_regs[VSCR_REGNO] = 1;
4682
2aa4498c 4683 if (TARGET_ALTIVEC_ABI)
4684 {
4685 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4686 call_used_regs[i] = call_really_used_regs[i] = 1;
4687
4688 /* AIX reserves VR20:31 in non-extended ABI mode. */
4689 if (TARGET_XCOFF)
4690 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4691 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4692 }
2aa4498c 4693}
fb4d4348 4694\f
4695/* Try to output insns to set TARGET equal to the constant C if it can
4696 be done in less than N insns. Do all computations in MODE.
4697 Returns the place where the output has been placed if it can be
4698 done and the insns have been emitted. If it would take more than N
 4699 insns, zero is returned and no insns are emitted. */
4700
4701rtx
f676971a 4702rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4703 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4704{
af8cb5c5 4705 rtx result, insn, set;
2bfcf297
DB
4706 HOST_WIDE_INT c0, c1;
4707
37409796 4708 switch (mode)
2bfcf297 4709 {
37409796
NS
4710 case QImode:
4711 case HImode:
2bfcf297 4712 if (dest == NULL)
c4ad648e 4713 dest = gen_reg_rtx (mode);
2bfcf297
DB
4714 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4715 return dest;
bb8df8a6 4716
37409796 4717 case SImode:
b3a13419 4718 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4719
d448860e 4720 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4721 GEN_INT (INTVAL (source)
4722 & (~ (HOST_WIDE_INT) 0xffff))));
4723 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4724 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4725 GEN_INT (INTVAL (source) & 0xffff))));
4726 result = dest;
37409796
NS
4727 break;
4728
4729 case DImode:
4730 switch (GET_CODE (source))
af8cb5c5 4731 {
37409796 4732 case CONST_INT:
af8cb5c5
DE
4733 c0 = INTVAL (source);
4734 c1 = -(c0 < 0);
37409796 4735 break;
bb8df8a6 4736
37409796 4737 case CONST_DOUBLE:
2bfcf297 4738#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4739 c0 = CONST_DOUBLE_LOW (source);
4740 c1 = -(c0 < 0);
2bfcf297 4741#else
af8cb5c5
DE
4742 c0 = CONST_DOUBLE_LOW (source);
4743 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4744#endif
37409796
NS
4745 break;
4746
4747 default:
4748 gcc_unreachable ();
af8cb5c5 4749 }
af8cb5c5
DE
4750
4751 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4752 break;
4753
4754 default:
4755 gcc_unreachable ();
2bfcf297 4756 }
2bfcf297 4757
af8cb5c5
DE
4758 insn = get_last_insn ();
4759 set = single_set (insn);
4760 if (! CONSTANT_P (SET_SRC (set)))
4761 set_unique_reg_note (insn, REG_EQUAL, source);
4762
4763 return result;
2bfcf297
DB
4764}
4765
4766/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4767 fall back to a straightforward decomposition. We do this to avoid
4768 exponential run times encountered when looking for longer sequences
4769 with rs6000_emit_set_const. */
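/* Worked example (editorial addition, not in the original source): for
   c = 0x0000000123456789 the halfwords below are ud1 = 0x6789,
   ud2 = 0x2345, ud3 = 0x0001 and ud4 = 0x0000, so the TARGET_POWERPC64
   path loads ud3 << 16, IORs in ud2, shifts left by 16 and finally IORs
   in ud1, i.e. four insns for the full 64-bit value.  */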
4770static rtx
a2369ed3 4771rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4772{
4773 if (!TARGET_POWERPC64)
4774 {
4775 rtx operand1, operand2;
4776
4777 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4778 DImode);
d448860e 4779 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4780 DImode);
4781 emit_move_insn (operand1, GEN_INT (c1));
4782 emit_move_insn (operand2, GEN_INT (c2));
4783 }
4784 else
4785 {
bc06712d 4786 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4787
bc06712d 4788 ud1 = c1 & 0xffff;
f921c9c9 4789 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4790#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4791 c2 = c1 >> 32;
2bfcf297 4792#endif
bc06712d 4793 ud3 = c2 & 0xffff;
f921c9c9 4794 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4795
f676971a 4796 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4797 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4798 {
bc06712d 4799 if (ud1 & 0x8000)
b78d48dd 4800 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4801 else
4802 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4803 }
2bfcf297 4804
f676971a 4805 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4806 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4807 {
bc06712d 4808 if (ud2 & 0x8000)
f676971a 4809 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4810 - 0x80000000));
252b88f7 4811 else
bc06712d
TR
4812 emit_move_insn (dest, GEN_INT (ud2 << 16));
4813 if (ud1 != 0)
d448860e
JH
4814 emit_move_insn (copy_rtx (dest),
4815 gen_rtx_IOR (DImode, copy_rtx (dest),
4816 GEN_INT (ud1)));
252b88f7 4817 }
f676971a 4818 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4819 || (ud4 == 0 && ! (ud3 & 0x8000)))
4820 {
4821 if (ud3 & 0x8000)
f676971a 4822 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4823 - 0x80000000));
4824 else
4825 emit_move_insn (dest, GEN_INT (ud3 << 16));
4826
4827 if (ud2 != 0)
d448860e
JH
4828 emit_move_insn (copy_rtx (dest),
4829 gen_rtx_IOR (DImode, copy_rtx (dest),
4830 GEN_INT (ud2)));
4831 emit_move_insn (copy_rtx (dest),
4832 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4833 GEN_INT (16)));
bc06712d 4834 if (ud1 != 0)
d448860e
JH
4835 emit_move_insn (copy_rtx (dest),
4836 gen_rtx_IOR (DImode, copy_rtx (dest),
4837 GEN_INT (ud1)));
bc06712d 4838 }
f676971a 4839 else
bc06712d
TR
4840 {
4841 if (ud4 & 0x8000)
f676971a 4842 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4843 - 0x80000000));
4844 else
4845 emit_move_insn (dest, GEN_INT (ud4 << 16));
4846
4847 if (ud3 != 0)
d448860e
JH
4848 emit_move_insn (copy_rtx (dest),
4849 gen_rtx_IOR (DImode, copy_rtx (dest),
4850 GEN_INT (ud3)));
2bfcf297 4851
d448860e
JH
4852 emit_move_insn (copy_rtx (dest),
4853 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4854 GEN_INT (32)));
bc06712d 4855 if (ud2 != 0)
d448860e
JH
4856 emit_move_insn (copy_rtx (dest),
4857 gen_rtx_IOR (DImode, copy_rtx (dest),
4858 GEN_INT (ud2 << 16)));
bc06712d 4859 if (ud1 != 0)
d448860e
JH
4860 emit_move_insn (copy_rtx (dest),
4861 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4862 }
4863 }
2bfcf297
DB
4864 return dest;
4865}
4866
76d2b81d 4867/* Helper for rs6000_emit_move below. Get rid of [r+r] memory refs
7393f7f8 4868 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4869
4870static void
4871rs6000_eliminate_indexed_memrefs (rtx operands[2])
4872{
4873 if (GET_CODE (operands[0]) == MEM
4874 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4875 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4876 && ! reload_in_progress)
4877 operands[0]
4878 = replace_equiv_address (operands[0],
4879 copy_addr_to_reg (XEXP (operands[0], 0)));
4880
4881 if (GET_CODE (operands[1]) == MEM
4882 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4883 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4884 && ! reload_in_progress)
4885 operands[1]
4886 = replace_equiv_address (operands[1],
4887 copy_addr_to_reg (XEXP (operands[1], 0)));
4888}
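/* Editorial note (not in the original source): the net effect above is that
   a memory operand whose address is, say, (plus (reg) (reg)) has the sum
   copied into a fresh register first, so the MEM ends up with a single
   base register that the TImode/TFmode/TDmode move patterns can handle.  */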
4889
fb4d4348
GK
4890/* Emit a move from SOURCE to DEST in mode MODE. */
4891void
a2369ed3 4892rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4893{
4894 rtx operands[2];
4895 operands[0] = dest;
4896 operands[1] = source;
f676971a 4897
fb4d4348
GK
4898 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4899 if (GET_CODE (operands[1]) == CONST_DOUBLE
4900 && ! FLOAT_MODE_P (mode)
4901 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4902 {
4903 /* FIXME. This should never happen. */
4904 /* Since it seems that it does, do the safe thing and convert
4905 to a CONST_INT. */
2496c7bd 4906 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4907 }
37409796
NS
4908 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4909 || FLOAT_MODE_P (mode)
4910 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4911 || CONST_DOUBLE_LOW (operands[1]) < 0)
4912 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4913 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4914
c9e8cb32
DD
4915 /* Check if GCC is setting up a block move that will end up using FP
4916 registers as temporaries. We must make sure this is acceptable. */
4917 if (GET_CODE (operands[0]) == MEM
4918 && GET_CODE (operands[1]) == MEM
4919 && mode == DImode
41543739
GK
4920 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4921 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4922 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4923 ? 32 : MEM_ALIGN (operands[0])))
4924 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4925 ? 32
41543739
GK
4926 : MEM_ALIGN (operands[1]))))
4927 && ! MEM_VOLATILE_P (operands [0])
4928 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4929 {
41543739
GK
4930 emit_move_insn (adjust_address (operands[0], SImode, 0),
4931 adjust_address (operands[1], SImode, 0));
d448860e
JH
4932 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4933 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4934 return;
4935 }
630d42a0 4936
b3a13419 4937 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4938 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4939 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4940
a3170dc6 4941 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 4942 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 4943 && GET_CODE (operands[0]) == MEM)
fb4d4348 4944 {
ffc14f31
GK
4945 int regnum;
4946
4947 if (reload_in_progress || reload_completed)
4948 regnum = true_regnum (operands[1]);
4949 else if (GET_CODE (operands[1]) == REG)
4950 regnum = REGNO (operands[1]);
4951 else
4952 regnum = -1;
f676971a 4953
fb4d4348
GK
4954 /* If operands[1] is a register, on POWER it may have
4955 double-precision data in it, so truncate it to single
4956 precision. */
4957 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4958 {
4959 rtx newreg;
b3a13419 4960 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4961 : gen_reg_rtx (mode));
fb4d4348
GK
4962 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4963 operands[1] = newreg;
4964 }
4965 }
4966
c4501e62
JJ
4967 /* Recognize the case where operands[1] is a reference to thread-local
4968 data and load its address to a register. */
84f52ebd 4969 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4970 {
84f52ebd
RH
4971 enum tls_model model;
4972 rtx tmp = operands[1];
4973 rtx addend = NULL;
4974
4975 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4976 {
4977 addend = XEXP (XEXP (tmp, 0), 1);
4978 tmp = XEXP (XEXP (tmp, 0), 0);
4979 }
4980
4981 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4982 model = SYMBOL_REF_TLS_MODEL (tmp);
4983 gcc_assert (model != 0);
4984
4985 tmp = rs6000_legitimize_tls_address (tmp, model);
4986 if (addend)
4987 {
4988 tmp = gen_rtx_PLUS (mode, tmp, addend);
4989 tmp = force_operand (tmp, operands[0]);
4990 }
4991 operands[1] = tmp;
c4501e62
JJ
4992 }
4993
8f4e6caf
RH
4994 /* Handle the case where reload calls us with an invalid address. */
4995 if (reload_in_progress && mode == Pmode
69ef87e2 4996 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4997 || ! nonimmediate_operand (operands[0], mode)))
4998 goto emit_set;
4999
a9baceb1
GK
5000 /* 128-bit constant floating-point values on Darwin should really be
5001 loaded as two parts. */
8521c414 5002 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
5003 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5004 {
5005 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5006 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
5007 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5008 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5009 simplify_gen_subreg (imode, operands[1], mode, 0),
5010 imode);
5011 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5012 GET_MODE_SIZE (imode)),
5013 simplify_gen_subreg (imode, operands[1], mode,
5014 GET_MODE_SIZE (imode)),
5015 imode);
a9baceb1
GK
5016 return;
5017 }
5018
e41b2a33
PB
5019 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5020 cfun->machine->sdmode_stack_slot =
5021 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5022
5023 if (reload_in_progress
5024 && mode == SDmode
5025 && MEM_P (operands[0])
5026 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5027 && REG_P (operands[1]))
5028 {
5029 if (FP_REGNO_P (REGNO (operands[1])))
5030 {
5031 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5032 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5033 emit_insn (gen_movsd_store (mem, operands[1]));
5034 }
5035 else if (INT_REGNO_P (REGNO (operands[1])))
5036 {
5037 rtx mem = adjust_address_nv (operands[0], mode, 4);
5038 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5039 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5040 }
5041 else
5042 gcc_unreachable();
5043 return;
5044 }
5045 if (reload_in_progress
5046 && mode == SDmode
5047 && REG_P (operands[0])
5048 && MEM_P (operands[1])
5049 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5050 {
5051 if (FP_REGNO_P (REGNO (operands[0])))
5052 {
5053 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5054 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5055 emit_insn (gen_movsd_load (operands[0], mem));
5056 }
5057 else if (INT_REGNO_P (REGNO (operands[0])))
5058 {
5059 rtx mem = adjust_address_nv (operands[1], mode, 4);
5060 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5061 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5062 }
5063 else
5064 gcc_unreachable();
5065 return;
5066 }
5067
fb4d4348
GK
5068 /* FIXME: In the long term, this switch statement should go away
5069 and be replaced by a sequence of tests based on things like
5070 mode == Pmode. */
5071 switch (mode)
5072 {
5073 case HImode:
5074 case QImode:
5075 if (CONSTANT_P (operands[1])
5076 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5077 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5078 break;
5079
06f4e019 5080 case TFmode:
7393f7f8 5081 case TDmode:
76d2b81d
DJ
5082 rs6000_eliminate_indexed_memrefs (operands);
5083 /* fall through */
5084
fb4d4348 5085 case DFmode:
7393f7f8 5086 case DDmode:
fb4d4348 5087 case SFmode:
e41b2a33 5088 case SDmode:
f676971a 5089 if (CONSTANT_P (operands[1])
fb4d4348 5090 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5091 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5092 break;
f676971a 5093
0ac081f6
AH
5094 case V16QImode:
5095 case V8HImode:
5096 case V4SFmode:
5097 case V4SImode:
a3170dc6
AH
5098 case V4HImode:
5099 case V2SFmode:
5100 case V2SImode:
00a892b8 5101 case V1DImode:
69ef87e2 5102 if (CONSTANT_P (operands[1])
d744e06e 5103 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5104 operands[1] = force_const_mem (mode, operands[1]);
5105 break;
f676971a 5106
fb4d4348 5107 case SImode:
a9098fd0 5108 case DImode:
fb4d4348
GK
5109 /* Use default pattern for address of ELF small data. */
5110 if (TARGET_ELF
a9098fd0 5111 && mode == Pmode
f607bc57 5112 && DEFAULT_ABI == ABI_V4
f676971a 5113 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5114 || GET_CODE (operands[1]) == CONST)
5115 && small_data_operand (operands[1], mode))
fb4d4348
GK
5116 {
5117 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5118 return;
5119 }
5120
f607bc57 5121 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5122 && mode == Pmode && mode == SImode
5123 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5124 {
5125 emit_insn (gen_movsi_got (operands[0], operands[1]));
5126 return;
5127 }
5128
ee890fe2 5129 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5130 && TARGET_NO_TOC
5131 && ! flag_pic
a9098fd0 5132 && mode == Pmode
fb4d4348
GK
5133 && CONSTANT_P (operands[1])
5134 && GET_CODE (operands[1]) != HIGH
5135 && GET_CODE (operands[1]) != CONST_INT)
5136 {
b3a13419
ILT
5137 rtx target = (!can_create_pseudo_p ()
5138 ? operands[0]
5139 : gen_reg_rtx (mode));
fb4d4348
GK
5140
5141 /* If this is a function address on -mcall-aixdesc,
5142 convert it to the address of the descriptor. */
5143 if (DEFAULT_ABI == ABI_AIX
5144 && GET_CODE (operands[1]) == SYMBOL_REF
5145 && XSTR (operands[1], 0)[0] == '.')
5146 {
5147 const char *name = XSTR (operands[1], 0);
5148 rtx new_ref;
5149 while (*name == '.')
5150 name++;
5151 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5152 CONSTANT_POOL_ADDRESS_P (new_ref)
5153 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5154 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5155 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5156 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5157 operands[1] = new_ref;
5158 }
7509c759 5159
ee890fe2
SS
5160 if (DEFAULT_ABI == ABI_DARWIN)
5161 {
ab82a49f
AP
5162#if TARGET_MACHO
5163 if (MACHO_DYNAMIC_NO_PIC_P)
5164 {
5165 /* Take care of any required data indirection. */
5166 operands[1] = rs6000_machopic_legitimize_pic_address (
5167 operands[1], mode, operands[0]);
5168 if (operands[0] != operands[1])
5169 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5170 operands[0], operands[1]));
ab82a49f
AP
5171 return;
5172 }
5173#endif
b8a55285
AP
5174 emit_insn (gen_macho_high (target, operands[1]));
5175 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5176 return;
5177 }
5178
fb4d4348
GK
5179 emit_insn (gen_elf_high (target, operands[1]));
5180 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5181 return;
5182 }
5183
a9098fd0
GK
5184 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5185 and we have put it in the TOC, we just need to make a TOC-relative
5186 reference to it. */
5187 if (TARGET_TOC
5188 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5189 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5190 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5191 get_pool_mode (operands[1])))
fb4d4348 5192 {
a9098fd0 5193 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5194 }
a9098fd0
GK
5195 else if (mode == Pmode
5196 && CONSTANT_P (operands[1])
38886f37
AO
5197 && ((GET_CODE (operands[1]) != CONST_INT
5198 && ! easy_fp_constant (operands[1], mode))
5199 || (GET_CODE (operands[1]) == CONST_INT
5200 && num_insns_constant (operands[1], mode) > 2)
5201 || (GET_CODE (operands[0]) == REG
5202 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5203 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5204 && ! legitimate_constant_pool_address_p (operands[1])
5205 && ! toc_relative_expr_p (operands[1]))
fb4d4348 5206 {
fb4d4348 5207
c859cda6 5208#if TARGET_MACHO
ee890fe2 5209 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5210 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5211 {
ee890fe2
SS
5212 operands[1] =
5213 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5214 operands[0]);
5215 if (operands[0] != operands[1])
5216 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5217 return;
5218 }
c859cda6 5219#endif
ee890fe2 5220
fb4d4348
GK
5221 /* If we are to limit the number of things we put in the TOC and
5222 this is a symbol plus a constant we can add in one insn,
5223 just put the symbol in the TOC and add the constant. Don't do
5224 this if reload is in progress. */
5225 if (GET_CODE (operands[1]) == CONST
5226 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5227 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5228 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5229 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5230 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5231 && ! side_effects_p (operands[0]))
5232 {
a4f6c312
SS
5233 rtx sym =
5234 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5235 rtx other = XEXP (XEXP (operands[1], 0), 1);
5236
a9098fd0
GK
5237 sym = force_reg (mode, sym);
5238 if (mode == SImode)
5239 emit_insn (gen_addsi3 (operands[0], sym, other));
5240 else
5241 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5242 return;
5243 }
5244
a9098fd0 5245 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5246
f676971a 5247 if (TARGET_TOC
0cdc04e8 5248 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5249 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5250 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5251 get_pool_constant (XEXP (operands[1], 0)),
5252 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5253 {
ba4828e0 5254 operands[1]
542a8afa 5255 = gen_const_mem (mode,
c4ad648e 5256 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5257 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5258 }
fb4d4348
GK
5259 }
5260 break;
a9098fd0 5261
fb4d4348 5262 case TImode:
76d2b81d
DJ
5263 rs6000_eliminate_indexed_memrefs (operands);
5264
27dc0551
DE
5265 if (TARGET_POWER)
5266 {
5267 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5268 gen_rtvec (2,
5269 gen_rtx_SET (VOIDmode,
5270 operands[0], operands[1]),
5271 gen_rtx_CLOBBER (VOIDmode,
5272 gen_rtx_SCRATCH (SImode)))));
5273 return;
5274 }
fb4d4348
GK
5275 break;
5276
5277 default:
37409796 5278 gcc_unreachable ();
fb4d4348
GK
5279 }
5280
a9098fd0
GK
5281 /* Above, we may have called force_const_mem which may have returned
5282 an invalid address. If we can, fix this up; otherwise, reload will
5283 have to deal with it. */
8f4e6caf
RH
5284 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5285 operands[1] = validize_mem (operands[1]);
a9098fd0 5286
8f4e6caf 5287 emit_set:
fb4d4348
GK
5288 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5289}
4697a36c 5290\f
2858f73a
GK
5291/* Nonzero if we can use a floating-point register to pass this arg. */
5292#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5293 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5294 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5295 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5296
5297/* Nonzero if we can use an AltiVec register to pass this arg. */
5298#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5299 (ALTIVEC_VECTOR_MODE (MODE) \
5300 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5301 && TARGET_ALTIVEC_ABI \
83953138 5302 && (NAMED))
2858f73a 5303
c6e8c921
GK
5304/* Return a nonzero value to say to return the function value in
5305 memory, just as large structures are always returned. TYPE will be
5306 the data type of the value, and FNTYPE will be the type of the
5307 function doing the returning, or @code{NULL} for libcalls.
5308
5309 The AIX ABI for the RS/6000 specifies that all structures are
5310 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5311 specifies that structures <= 8 bytes are returned in r3/r4, but a
5312 draft put them in memory, and GCC used to implement the draft
df01da37 5313 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5314 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5315 compatibility can change DRAFT_V4_STRUCT_RET to override the
5316 default, and -m switches get the final word. See
5317 rs6000_override_options for more details.
5318
5319 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5320 long double support is enabled. These values are returned in memory.
5321
5322 int_size_in_bytes returns -1 for variable size objects, which go in
5323 memory always. The cast to unsigned makes -1 > 8. */
5324
5325static bool
586de218 5326rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5327{
594a51fe
SS
5328 /* In the darwin64 abi, try to use registers for larger structs
5329 if possible. */
0b5383eb 5330 if (rs6000_darwin64_abi
594a51fe 5331 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5332 && int_size_in_bytes (type) > 0)
5333 {
5334 CUMULATIVE_ARGS valcum;
5335 rtx valret;
5336
5337 valcum.words = 0;
5338 valcum.fregno = FP_ARG_MIN_REG;
5339 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5340 /* Do a trial code generation as if this were going to be passed
5341 as an argument; if any part goes in memory, we return NULL. */
5342 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5343 if (valret)
5344 return false;
5345 /* Otherwise fall through to more conventional ABI rules. */
5346 }
594a51fe 5347
c6e8c921 5348 if (AGGREGATE_TYPE_P (type)
df01da37 5349 && (aix_struct_return
c6e8c921
GK
5350 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5351 return true;
b693336b 5352
bada2eb8
DE
5353 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5354 modes only exist for GCC vector types if -maltivec. */
5355 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5356 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5357 return false;
5358
b693336b
PB
5359 /* Return synthetic vectors in memory. */
5360 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5361 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5362 {
5363 static bool warned_for_return_big_vectors = false;
5364 if (!warned_for_return_big_vectors)
5365 {
d4ee4d25 5366 warning (0, "GCC vector returned by reference: "
b693336b
PB
5367 "non-standard ABI extension with no compatibility guarantee");
5368 warned_for_return_big_vectors = true;
5369 }
5370 return true;
5371 }
5372
602ea4d3 5373 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5374 return true;
ad630bef 5375
c6e8c921
GK
5376 return false;
5377}
5378
4697a36c
MM
5379/* Initialize a variable CUM of type CUMULATIVE_ARGS
5380 for a call to a function whose data type is FNTYPE.
5381 For a library call, FNTYPE is 0.
5382
5383 For incoming args we set the number of arguments in the prototype large
1c20ae99 5384 so we never return a PARALLEL. */
4697a36c
MM
5385
5386void
f676971a 5387init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5388 rtx libname ATTRIBUTE_UNUSED, int incoming,
5389 int libcall, int n_named_args)
4697a36c
MM
5390{
5391 static CUMULATIVE_ARGS zero_cumulative;
5392
5393 *cum = zero_cumulative;
5394 cum->words = 0;
5395 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5396 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5397 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5398 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5399 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5400 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5401 cum->stdarg = fntype
5402 && (TYPE_ARG_TYPES (fntype) != 0
5403 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5404 != void_type_node));
4697a36c 5405
0f6937fe
AM
5406 cum->nargs_prototype = 0;
5407 if (incoming || cum->prototype)
5408 cum->nargs_prototype = n_named_args;
4697a36c 5409
a5c76ee6 5410 /* Check for a longcall attribute. */
3eb4e360
AM
5411 if ((!fntype && rs6000_default_long_calls)
5412 || (fntype
5413 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5414 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5415 cum->call_cookie |= CALL_LONG;
6a4cee5f 5416
4697a36c
MM
5417 if (TARGET_DEBUG_ARG)
5418 {
5419 fprintf (stderr, "\ninit_cumulative_args:");
5420 if (fntype)
5421 {
5422 tree ret_type = TREE_TYPE (fntype);
5423 fprintf (stderr, " ret code = %s,",
5424 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5425 }
5426
6a4cee5f
MM
5427 if (cum->call_cookie & CALL_LONG)
5428 fprintf (stderr, " longcall,");
5429
4697a36c
MM
5430 fprintf (stderr, " proto = %d, nargs = %d\n",
5431 cum->prototype, cum->nargs_prototype);
5432 }
f676971a 5433
c4ad648e
AM
5434 if (fntype
5435 && !TARGET_ALTIVEC
5436 && TARGET_ALTIVEC_ABI
5437 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5438 {
c85ce869 5439 error ("cannot return value in vector register because"
c4ad648e 5440 " altivec instructions are disabled, use -maltivec"
c85ce869 5441 " to enable them");
c4ad648e 5442 }
4697a36c
MM
5443}
5444\f
fe984136
RH
5445/* Return true if TYPE must be passed on the stack and not in registers. */
5446
5447static bool
586de218 5448rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5449{
5450 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5451 return must_pass_in_stack_var_size (mode, type);
5452 else
5453 return must_pass_in_stack_var_size_or_pad (mode, type);
5454}
5455
c229cba9
DE
5456/* If defined, a C expression which determines whether, and in which
5457 direction, to pad out an argument with extra space. The value
5458 should be of type `enum direction': either `upward' to pad above
5459 the argument, `downward' to pad below, or `none' to inhibit
5460 padding.
5461
5462 For the AIX ABI, structs are always stored left-shifted in their
5463 argument slot. */
5464
9ebbca7d 5465enum direction
586de218 5466function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5467{
6e985040
AM
5468#ifndef AGGREGATE_PADDING_FIXED
5469#define AGGREGATE_PADDING_FIXED 0
5470#endif
5471#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5472#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5473#endif
5474
5475 if (!AGGREGATE_PADDING_FIXED)
5476 {
5477 /* GCC used to pass structures of the same size as integer types as
5478 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5479 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5480 passed padded downward, except that -mstrict-align further
5481 muddied the water in that multi-component structures of 2 and 4
5482 bytes in size were passed padded upward.
5483
5484 The following arranges for best compatibility with previous
5485 versions of gcc, but removes the -mstrict-align dependency. */
5486 if (BYTES_BIG_ENDIAN)
5487 {
5488 HOST_WIDE_INT size = 0;
5489
5490 if (mode == BLKmode)
5491 {
5492 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5493 size = int_size_in_bytes (type);
5494 }
5495 else
5496 size = GET_MODE_SIZE (mode);
5497
5498 if (size == 1 || size == 2 || size == 4)
5499 return downward;
5500 }
5501 return upward;
5502 }
5503
5504 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5505 {
5506 if (type != 0 && AGGREGATE_TYPE_P (type))
5507 return upward;
5508 }
c229cba9 5509
d3704c46
KH
5510 /* Fall back to the default. */
5511 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5512}
5513
b6c9286a 5514/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5515 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5516 PARM_BOUNDARY is used for all arguments.
f676971a 5517
84e9ad15
AM
5518 V.4 wants long longs and doubles to be double word aligned. Just
5519 testing the mode size is a boneheaded way to do this as it means
5520 that other types such as complex int are also double word aligned.
5521 However, we're stuck with this because changing the ABI might break
5522 existing library interfaces.
5523
b693336b
PB
5524 Doubleword align SPE vectors.
5525 Quadword align Altivec vectors.
5526 Quadword align large synthetic vector types. */
b6c9286a
MM
5527
5528int
b693336b 5529function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5530{
84e9ad15
AM
5531 if (DEFAULT_ABI == ABI_V4
5532 && (GET_MODE_SIZE (mode) == 8
5533 || (TARGET_HARD_FLOAT
5534 && TARGET_FPRS
7393f7f8 5535 && (mode == TFmode || mode == TDmode))))
4ed78545 5536 return 64;
ad630bef
DE
5537 else if (SPE_VECTOR_MODE (mode)
5538 || (type && TREE_CODE (type) == VECTOR_TYPE
5539 && int_size_in_bytes (type) >= 8
5540 && int_size_in_bytes (type) < 16))
e1f83b4d 5541 return 64;
ad630bef
DE
5542 else if (ALTIVEC_VECTOR_MODE (mode)
5543 || (type && TREE_CODE (type) == VECTOR_TYPE
5544 && int_size_in_bytes (type) >= 16))
0ac081f6 5545 return 128;
0b5383eb
DJ
5546 else if (rs6000_darwin64_abi && mode == BLKmode
5547 && type && TYPE_ALIGN (type) > 64)
5548 return 128;
9ebbca7d 5549 else
b6c9286a 5550 return PARM_BOUNDARY;
b6c9286a 5551}
c53bdcf5 5552
294bd182
AM
5553/* For a function parm of MODE and TYPE, return the starting word in
5554 the parameter area. NWORDS of the parameter area are already used. */
5555
5556static unsigned int
5557rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5558{
5559 unsigned int align;
5560 unsigned int parm_offset;
5561
5562 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5563 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5564 return nwords + (-(parm_offset + nwords) & align);
5565}
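/* Worked example (editorial addition, assuming 32-bit AIX, where
   PARM_BOUNDARY is 32 and the parameter save area begins 6 words into the
   frame): a 16-byte-aligned vector argument with NWORDS == 1 gives
   align = 128/32 - 1 = 3 and a result of 1 + (-(6 + 1) & 3) = 2, i.e. the
   argument starts at word 2, matching the "2 mod 4" placement described
   for vector parameters elsewhere in this file.  */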
5566
c53bdcf5
AM
5567/* Compute the size (in words) of a function argument. */
5568
5569static unsigned long
5570rs6000_arg_size (enum machine_mode mode, tree type)
5571{
5572 unsigned long size;
5573
5574 if (mode != BLKmode)
5575 size = GET_MODE_SIZE (mode);
5576 else
5577 size = int_size_in_bytes (type);
5578
5579 if (TARGET_32BIT)
5580 return (size + 3) >> 2;
5581 else
5582 return (size + 7) >> 3;
5583}
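/* Editorial example (not in the original source): a 10-byte BLKmode
   argument occupies (10 + 3) >> 2 = 3 words under TARGET_32BIT and
   (10 + 7) >> 3 = 2 doublewords otherwise.  */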
b6c9286a 5584\f
0b5383eb 5585/* Use this to flush pending int fields. */
594a51fe
SS
5586
5587static void
0b5383eb
DJ
5588rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5589 HOST_WIDE_INT bitpos)
594a51fe 5590{
0b5383eb
DJ
5591 unsigned int startbit, endbit;
5592 int intregs, intoffset;
5593 enum machine_mode mode;
594a51fe 5594
0b5383eb
DJ
5595 if (cum->intoffset == -1)
5596 return;
594a51fe 5597
0b5383eb
DJ
5598 intoffset = cum->intoffset;
5599 cum->intoffset = -1;
5600
5601 if (intoffset % BITS_PER_WORD != 0)
5602 {
5603 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5604 MODE_INT, 0);
5605 if (mode == BLKmode)
594a51fe 5606 {
0b5383eb
DJ
5607 /* We couldn't find an appropriate mode, which happens,
5608 e.g., in packed structs when there are 3 bytes to load.
5609 Move intoffset back to the beginning of the word in this
5610 case. */
5611 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5612 }
594a51fe 5613 }
0b5383eb
DJ
5614
5615 startbit = intoffset & -BITS_PER_WORD;
5616 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5617 intregs = (endbit - startbit) / BITS_PER_WORD;
5618 cum->words += intregs;
5619}
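/* Worked example (editorial addition, assuming 64-bit words): with
   cum->intoffset == 32 and BITPOS == 160, startbit = 0 and endbit = 192,
   so intregs = 3 and three word-sized GPR slots are charged to
   cum->words.  */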
5620
5621/* The darwin64 ABI calls for us to recurse down through structs,
5622 looking for elements passed in registers. Unfortunately, we have
5623 to track int register count here also because of misalignments
5624 in powerpc alignment mode. */
5625
5626static void
5627rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5628 tree type,
5629 HOST_WIDE_INT startbitpos)
5630{
5631 tree f;
5632
5633 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5634 if (TREE_CODE (f) == FIELD_DECL)
5635 {
5636 HOST_WIDE_INT bitpos = startbitpos;
5637 tree ftype = TREE_TYPE (f);
70fb00df
AP
5638 enum machine_mode mode;
5639 if (ftype == error_mark_node)
5640 continue;
5641 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5642
5643 if (DECL_SIZE (f) != 0
5644 && host_integerp (bit_position (f), 1))
5645 bitpos += int_bit_position (f);
5646
5647 /* ??? FIXME: else assume zero offset. */
5648
5649 if (TREE_CODE (ftype) == RECORD_TYPE)
5650 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5651 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5652 {
5653 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5654 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5655 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5656 }
5657 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5658 {
5659 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5660 cum->vregno++;
5661 cum->words += 2;
5662 }
5663 else if (cum->intoffset == -1)
5664 cum->intoffset = bitpos;
5665 }
594a51fe
SS
5666}
5667
4697a36c
MM
5668/* Update the data in CUM to advance over an argument
5669 of mode MODE and data type TYPE.
b2d04ecf
AM
5670 (TYPE is null for libcalls where that information may not be available.)
5671
5672 Note that for args passed by reference, function_arg will be called
5673 with MODE and TYPE set to that of the pointer to the arg, not the arg
5674 itself. */
4697a36c
MM
5675
5676void
f676971a 5677function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5678 tree type, int named, int depth)
4697a36c 5679{
0b5383eb
DJ
5680 int size;
5681
594a51fe
SS
5682 /* Only tick off an argument if we're not recursing. */
5683 if (depth == 0)
5684 cum->nargs_prototype--;
4697a36c 5685
ad630bef
DE
5686 if (TARGET_ALTIVEC_ABI
5687 && (ALTIVEC_VECTOR_MODE (mode)
5688 || (type && TREE_CODE (type) == VECTOR_TYPE
5689 && int_size_in_bytes (type) == 16)))
0ac081f6 5690 {
4ed78545
AM
5691 bool stack = false;
5692
2858f73a 5693 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5694 {
6d0ef01e
HP
5695 cum->vregno++;
5696 if (!TARGET_ALTIVEC)
c85ce869 5697 error ("cannot pass argument in vector register because"
6d0ef01e 5698 " altivec instructions are disabled, use -maltivec"
c85ce869 5699 " to enable them");
4ed78545
AM
5700
5701 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5702 even if it is going to be passed in a vector register.
4ed78545
AM
5703 Darwin does the same for variable-argument functions. */
5704 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5705 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5706 stack = true;
6d0ef01e 5707 }
4ed78545
AM
5708 else
5709 stack = true;
5710
5711 if (stack)
c4ad648e 5712 {
a594a19c 5713 int align;
f676971a 5714
2858f73a
GK
5715 /* Vector parameters must be 16-byte aligned. This places
5716 them at 2 mod 4 in terms of words in 32-bit mode, since
5717 the parameter save area starts at offset 24 from the
5718 stack. In 64-bit mode, they just have to start on an
5719 even word, since the parameter save area is 16-byte
5720 aligned. Space for GPRs is reserved even if the argument
5721 will be passed in memory. */
5722 if (TARGET_32BIT)
4ed78545 5723 align = (2 - cum->words) & 3;
2858f73a
GK
5724 else
5725 align = cum->words & 1;
c53bdcf5 5726 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5727
a594a19c
GK
5728 if (TARGET_DEBUG_ARG)
5729 {
f676971a 5730 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5731 cum->words, align);
5732 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5733 cum->nargs_prototype, cum->prototype,
2858f73a 5734 GET_MODE_NAME (mode));
a594a19c
GK
5735 }
5736 }
0ac081f6 5737 }
a4b0320c 5738 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5739 && !cum->stdarg
5740 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5741 cum->sysv_gregno++;
594a51fe
SS
5742
5743 else if (rs6000_darwin64_abi
5744 && mode == BLKmode
0b5383eb
DJ
5745 && TREE_CODE (type) == RECORD_TYPE
5746 && (size = int_size_in_bytes (type)) > 0)
5747 {
5748 /* Variable sized types have size == -1 and are
5749 treated as if consisting entirely of ints.
5750 Pad to 16 byte boundary if needed. */
5751 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5752 && (cum->words % 2) != 0)
5753 cum->words++;
5754 /* For varargs, we can just go up by the size of the struct. */
5755 if (!named)
5756 cum->words += (size + 7) / 8;
5757 else
5758 {
5759 /* It is tempting to say int register count just goes up by
5760 sizeof(type)/8, but this is wrong in a case such as
5761 { int; double; int; } [powerpc alignment]. We have to
5762 grovel through the fields for these too. */
5763 cum->intoffset = 0;
5764 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5765 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5766 size * BITS_PER_UNIT);
5767 }
5768 }
f607bc57 5769 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5770 {
a3170dc6 5771 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
5772 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5773 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5774 || (mode == TFmode && !TARGET_IEEEQUAD)
5775 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 5776 {
2d83f070
JJ
5777 /* _Decimal128 must use an even/odd register pair. This assumes
5778 that the register number is odd when fregno is odd. */
5779 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5780 cum->fregno++;
5781
5782 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5783 <= FP_ARG_V4_MAX_REG)
602ea4d3 5784 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5785 else
5786 {
602ea4d3 5787 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5788 if (mode == DFmode || mode == TFmode
5789 || mode == DDmode || mode == TDmode)
c4ad648e 5790 cum->words += cum->words & 1;
c53bdcf5 5791 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5792 }
4697a36c 5793 }
4cc833b7
RH
5794 else
5795 {
b2d04ecf 5796 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5797 int gregno = cum->sysv_gregno;
5798
4ed78545
AM
5799 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5800 (r7,r8) or (r9,r10). As does any other 2 word item such
5801 as complex int due to a historical mistake. */
5802 if (n_words == 2)
5803 gregno += (1 - gregno) & 1;
4cc833b7 5804
4ed78545 5805 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5806 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5807 {
4ed78545
AM
5808 /* Long long and SPE vectors are aligned on the stack.
5809 So are other 2 word items such as complex int due to
5810 a historical mistake. */
4cc833b7
RH
5811 if (n_words == 2)
5812 cum->words += cum->words & 1;
5813 cum->words += n_words;
5814 }
4697a36c 5815
4cc833b7
RH
5816 /* Note: gregno continues to accumulate even after we have started
5817 spilling to the stack; this is how expand_builtin_saveregs learns
5818 that spilling to the stack has begun. */
5819 cum->sysv_gregno = gregno + n_words;
5820 }
4697a36c 5821
4cc833b7
RH
5822 if (TARGET_DEBUG_ARG)
5823 {
5824 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5825 cum->words, cum->fregno);
5826 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5827 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5828 fprintf (stderr, "mode = %4s, named = %d\n",
5829 GET_MODE_NAME (mode), named);
5830 }
4697a36c
MM
5831 }
5832 else
4cc833b7 5833 {
b2d04ecf 5834 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5835 int start_words = cum->words;
5836 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5837
294bd182 5838 cum->words = align_words + n_words;
4697a36c 5839
ebb109ad 5840 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 5841 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5842 {
5843 /* _Decimal128 must be passed in an even/odd float register pair.
5844 This assumes that the register number is odd when fregno is
5845 odd. */
5846 if (mode == TDmode && (cum->fregno % 2) == 1)
5847 cum->fregno++;
5848 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5849 }
4cc833b7
RH
5850
5851 if (TARGET_DEBUG_ARG)
5852 {
5853 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5854 cum->words, cum->fregno);
5855 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5856 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5857 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5858 named, align_words - start_words, depth);
4cc833b7
RH
5859 }
5860 }
4697a36c 5861}
a6c9bed4 5862
f82f556d
AH
5863static rtx
5864spe_build_register_parallel (enum machine_mode mode, int gregno)
5865{
17caeff2 5866 rtx r1, r3, r5, r7;
f82f556d 5867
37409796 5868 switch (mode)
f82f556d 5869 {
37409796 5870 case DFmode:
54b695e7
AH
5871 r1 = gen_rtx_REG (DImode, gregno);
5872 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5873 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5874
5875 case DCmode:
17caeff2 5876 case TFmode:
54b695e7
AH
5877 r1 = gen_rtx_REG (DImode, gregno);
5878 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5879 r3 = gen_rtx_REG (DImode, gregno + 2);
5880 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5881 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5882
17caeff2
JM
5883 case TCmode:
5884 r1 = gen_rtx_REG (DImode, gregno);
5885 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5886 r3 = gen_rtx_REG (DImode, gregno + 2);
5887 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5888 r5 = gen_rtx_REG (DImode, gregno + 4);
5889 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5890 r7 = gen_rtx_REG (DImode, gregno + 6);
5891 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5892 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5893
37409796
NS
5894 default:
5895 gcc_unreachable ();
f82f556d 5896 }
f82f556d 5897}
b78d48dd 5898
f82f556d 5899/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5900static rtx
f676971a 5901rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5902 tree type)
a6c9bed4 5903{
f82f556d
AH
5904 int gregno = cum->sysv_gregno;
5905
5906 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5907 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5908 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5909 || mode == DCmode || mode == TCmode))
f82f556d 5910 {
b5870bee
AH
5911 int n_words = rs6000_arg_size (mode, type);
5912
f82f556d 5913 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5914 if (mode == DFmode)
b5870bee 5915 gregno += (1 - gregno) & 1;
f82f556d 5916
b5870bee
AH
5917 /* Multi-reg args are not split between registers and stack. */
5918 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5919 return NULL_RTX;
5920
5921 return spe_build_register_parallel (mode, gregno);
5922 }
a6c9bed4
AH
5923 if (cum->stdarg)
5924 {
c53bdcf5 5925 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5926
5927 /* SPE vectors are put in odd registers. */
5928 if (n_words == 2 && (gregno & 1) == 0)
5929 gregno += 1;
5930
5931 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5932 {
5933 rtx r1, r2;
5934 enum machine_mode m = SImode;
5935
5936 r1 = gen_rtx_REG (m, gregno);
5937 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5938 r2 = gen_rtx_REG (m, gregno + 1);
5939 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5940 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5941 }
5942 else
b78d48dd 5943 return NULL_RTX;
a6c9bed4
AH
5944 }
5945 else
5946 {
f82f556d
AH
5947 if (gregno <= GP_ARG_MAX_REG)
5948 return gen_rtx_REG (mode, gregno);
a6c9bed4 5949 else
b78d48dd 5950 return NULL_RTX;
a6c9bed4
AH
5951 }
5952}
5953
0b5383eb
DJ
5954/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5955 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5956
0b5383eb 5957static void
bb8df8a6 5958rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5959 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5960{
0b5383eb
DJ
5961 enum machine_mode mode;
5962 unsigned int regno;
5963 unsigned int startbit, endbit;
5964 int this_regno, intregs, intoffset;
5965 rtx reg;
594a51fe 5966
0b5383eb
DJ
5967 if (cum->intoffset == -1)
5968 return;
5969
5970 intoffset = cum->intoffset;
5971 cum->intoffset = -1;
5972
5973 /* If this is the trailing part of a word, try to only load that
5974 much into the register. Otherwise load the whole register. Note
5975 that in the latter case we may pick up unwanted bits. It's not a
5976 problem at the moment, but we may wish to revisit this. */
5977
5978 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5979 {
0b5383eb
DJ
5980 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5981 MODE_INT, 0);
5982 if (mode == BLKmode)
5983 {
5984 /* We couldn't find an appropriate mode, which happens,
5985 e.g., in packed structs when there are 3 bytes to load.
5986 Move intoffset back to the beginning of the word in this
5987 case. */
5988 intoffset = intoffset & -BITS_PER_WORD;
5989 mode = word_mode;
5990 }
5991 }
5992 else
5993 mode = word_mode;
5994
5995 startbit = intoffset & -BITS_PER_WORD;
5996 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5997 intregs = (endbit - startbit) / BITS_PER_WORD;
5998 this_regno = cum->words + intoffset / BITS_PER_WORD;
5999
6000 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6001 cum->use_stack = 1;
bb8df8a6 6002
0b5383eb
DJ
6003 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6004 if (intregs <= 0)
6005 return;
6006
6007 intoffset /= BITS_PER_UNIT;
6008 do
6009 {
6010 regno = GP_ARG_MIN_REG + this_regno;
6011 reg = gen_rtx_REG (mode, regno);
6012 rvec[(*k)++] =
6013 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6014
6015 this_regno += 1;
6016 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6017 mode = word_mode;
6018 intregs -= 1;
6019 }
6020 while (intregs > 0);
6021}
6022
6023/* Recursive workhorse for rs6000_darwin64_record_arg below. */
6024
6025static void
586de218 6026rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6027 HOST_WIDE_INT startbitpos, rtx rvec[],
6028 int *k)
6029{
6030 tree f;
6031
6032 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6033 if (TREE_CODE (f) == FIELD_DECL)
6034 {
6035 HOST_WIDE_INT bitpos = startbitpos;
6036 tree ftype = TREE_TYPE (f);
70fb00df
AP
6037 enum machine_mode mode;
6038 if (ftype == error_mark_node)
6039 continue;
6040 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6041
6042 if (DECL_SIZE (f) != 0
6043 && host_integerp (bit_position (f), 1))
6044 bitpos += int_bit_position (f);
6045
6046 /* ??? FIXME: else assume zero offset. */
6047
6048 if (TREE_CODE (ftype) == RECORD_TYPE)
6049 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6050 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6051 {
0b5383eb
DJ
6052#if 0
6053 switch (mode)
594a51fe 6054 {
0b5383eb
DJ
6055 case SCmode: mode = SFmode; break;
6056 case DCmode: mode = DFmode; break;
6057 case TCmode: mode = TFmode; break;
6058 default: break;
594a51fe 6059 }
0b5383eb
DJ
6060#endif
6061 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6062 rvec[(*k)++]
bb8df8a6 6063 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6064 gen_rtx_REG (mode, cum->fregno++),
6065 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6066 if (mode == TFmode || mode == TDmode)
0b5383eb 6067 cum->fregno++;
594a51fe 6068 }
0b5383eb
DJ
6069 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6070 {
6071 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6072 rvec[(*k)++]
bb8df8a6
EC
6073 = gen_rtx_EXPR_LIST (VOIDmode,
6074 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6075 GEN_INT (bitpos / BITS_PER_UNIT));
6076 }
6077 else if (cum->intoffset == -1)
6078 cum->intoffset = bitpos;
6079 }
6080}
594a51fe 6081
0b5383eb
DJ
6082/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6083 the register(s) to be used for each field and subfield of a struct
6084 being passed by value, along with the offset of where the
6085 register's value may be found in the block. FP fields go in FP
6086 register, vector fields go in vector registers, and everything
bb8df8a6 6087 else goes in int registers, packed as in memory.
8ff40a74 6088
0b5383eb
DJ
6089 This code is also used for function return values. RETVAL indicates
6090 whether this is the case.
8ff40a74 6091
a4d05547 6092 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6093 calling convention. */
594a51fe 6094
0b5383eb 6095static rtx
586de218 6096rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6097 int named, bool retval)
6098{
6099 rtx rvec[FIRST_PSEUDO_REGISTER];
6100 int k = 1, kbase = 1;
6101 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6102 /* This is a copy; modifications are not visible to our caller. */
6103 CUMULATIVE_ARGS copy_cum = *orig_cum;
6104 CUMULATIVE_ARGS *cum = &copy_cum;
6105
6106 /* Pad to 16 byte boundary if needed. */
6107 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6108 && (cum->words % 2) != 0)
6109 cum->words++;
6110
6111 cum->intoffset = 0;
6112 cum->use_stack = 0;
6113 cum->named = named;
6114
6115 /* Put entries into rvec[] for individual FP and vector fields, and
6116 for the chunks of memory that go in int regs. Note we start at
6117 element 1; 0 is reserved for an indication of using memory, and
6118 may or may not be filled in below. */
6119 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6120 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6121
6122 /* If any part of the struct went on the stack, put all of it there.
6123 This hack is because the generic code for
6124 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6125 parts of the struct are not at the beginning. */
6126 if (cum->use_stack)
6127 {
6128 if (retval)
6129 return NULL_RTX; /* doesn't go in registers at all */
6130 kbase = 0;
6131 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6132 }
6133 if (k > 1 || cum->use_stack)
6134 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6135 else
6136 return NULL_RTX;
6137}
6138
b78d48dd
FJ
6139/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6140
6141static rtx
ec6376ab 6142rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6143{
ec6376ab
AM
6144 int n_units;
6145 int i, k;
6146 rtx rvec[GP_ARG_NUM_REG + 1];
6147
6148 if (align_words >= GP_ARG_NUM_REG)
6149 return NULL_RTX;
6150
6151 n_units = rs6000_arg_size (mode, type);
6152
6153 /* Optimize the simple case where the arg fits in one gpr, except in
6154 the case of BLKmode due to assign_parms assuming that registers are
6155 BITS_PER_WORD wide. */
6156 if (n_units == 0
6157 || (n_units == 1 && mode != BLKmode))
6158 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6159
6160 k = 0;
6161 if (align_words + n_units > GP_ARG_NUM_REG)
6162 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6163 using a magic NULL_RTX component.
79773478
AM
6164 This is not strictly correct. Only some of the arg belongs in
6165 memory, not all of it. However, the normal scheme using
6166 function_arg_partial_nregs can result in unusual subregs, eg.
6167 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6168 store the whole arg to memory is often more efficient than code
6169 to store pieces, and we know that space is available in the right
6170 place for the whole arg. */
ec6376ab
AM
6171 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6172
6173 i = 0;
6174 do
36a454e1 6175 {
ec6376ab
AM
6176 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6177 rtx off = GEN_INT (i++ * 4);
6178 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6179 }
ec6376ab
AM
6180 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6181
6182 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6183}
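/* Editorial example (not in the original source, assuming the usual
   r3..r10 argument GPRs): a DFmode argument reaching this code at
   align_words == 7 has n_units == 2, so the PARALLEL built above contains
   a NULL_RTX entry for the part passed in memory plus one SImode piece in
   r10 at offset 0; the argument is split between the last GPR and the
   stack.  */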
6184
4697a36c
MM
6185/* Determine where to put an argument to a function.
6186 Value is zero to push the argument on the stack,
6187 or a hard register in which to store the argument.
6188
6189 MODE is the argument's machine mode.
6190 TYPE is the data type of the argument (as a tree).
6191 This is null for libcalls where that information may
6192 not be available.
6193 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6194 the preceding args and about the function being called. It is
6195 not modified in this routine.
4697a36c
MM
6196 NAMED is nonzero if this argument is a named parameter
6197 (otherwise it is an extra parameter matching an ellipsis).
6198
6199 On RS/6000 the first eight words of non-FP are normally in registers
6200 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6201 Under V.4, the first 8 FP args are in registers.
6202
6203 If this is floating-point and no prototype is specified, we use
6204 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6205 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6206 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6207 doesn't support PARALLEL anyway.
6208
6209 Note that for args passed by reference, function_arg will be called
6210 with MODE and TYPE set to that of the pointer to the arg, not the arg
6211 itself. */
4697a36c 6212
9390387d 6213rtx
f676971a 6214function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6215 tree type, int named)
4697a36c 6216{
4cc833b7 6217 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6218
a4f6c312
SS
6219 /* Return a marker to indicate whether CR1 needs to set or clear the
6220 bit that V.4 uses to say fp args were passed in registers.
6221 Assume that we don't need the marker for software floating point,
6222 or compiler generated library calls. */
4697a36c
MM
6223 if (mode == VOIDmode)
6224 {
f607bc57 6225 if (abi == ABI_V4
b9599e46 6226 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6227 && (cum->stdarg
6228 || (cum->nargs_prototype < 0
6229 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6230 {
a3170dc6
AH
6231 /* For the SPE, we need to crxor CR6 always. */
6232 if (TARGET_SPE_ABI)
6233 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6234 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6235 return GEN_INT (cum->call_cookie
6236 | ((cum->fregno == FP_ARG_MIN_REG)
6237 ? CALL_V4_SET_FP_ARGS
6238 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6239 }
4697a36c 6240
7509c759 6241 return GEN_INT (cum->call_cookie);
4697a36c
MM
6242 }
6243
0b5383eb
DJ
6244 if (rs6000_darwin64_abi && mode == BLKmode
6245 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6246 {
0b5383eb 6247 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6248 if (rslt != NULL_RTX)
6249 return rslt;
6250 /* Else fall through to usual handling. */
6251 }
6252
2858f73a 6253 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6254 if (TARGET_64BIT && ! cum->prototype)
6255 {
c4ad648e
AM
6256 /* Vector parameters get passed in a vector register
6257 and also in GPRs or memory, in the absence of a prototype. */
6258 int align_words;
6259 rtx slot;
6260 align_words = (cum->words + 1) & ~1;
6261
6262 if (align_words >= GP_ARG_NUM_REG)
6263 {
6264 slot = NULL_RTX;
6265 }
6266 else
6267 {
6268 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6269 }
6270 return gen_rtx_PARALLEL (mode,
6271 gen_rtvec (2,
6272 gen_rtx_EXPR_LIST (VOIDmode,
6273 slot, const0_rtx),
6274 gen_rtx_EXPR_LIST (VOIDmode,
6275 gen_rtx_REG (mode, cum->vregno),
6276 const0_rtx)));
c72d6c26
HP
6277 }
6278 else
6279 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6280 else if (TARGET_ALTIVEC_ABI
6281 && (ALTIVEC_VECTOR_MODE (mode)
6282 || (type && TREE_CODE (type) == VECTOR_TYPE
6283 && int_size_in_bytes (type) == 16)))
0ac081f6 6284 {
2858f73a 6285 if (named || abi == ABI_V4)
a594a19c 6286 return NULL_RTX;
0ac081f6 6287 else
a594a19c
GK
6288 {
6289 /* Vector parameters to varargs functions under AIX or Darwin
6290 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6291 int align, align_words, n_words;
6292 enum machine_mode part_mode;
a594a19c
GK
6293
6294 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6295 2 mod 4 in terms of words in 32-bit mode, since the parameter
6296 save area starts at offset 24 from the stack. In 64-bit mode,
6297 they just have to start on an even word, since the parameter
6298 save area is 16-byte aligned. */
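	 /* For illustration: with cum->words == 5 in 32-bit mode,
	    (2 - 5) & 3 == 1, so one padding word is inserted and the
	    vector starts at byte 24 + 6*4 == 48 from the stack pointer,
	    a multiple of 16.  In 64-bit mode, cum->words == 3 gives
	    align == 1, so the vector starts on an even (16-byte
	    aligned) doubleword of the parameter save area.  */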
6299 if (TARGET_32BIT)
4ed78545 6300 align = (2 - cum->words) & 3;
2858f73a
GK
6301 else
6302 align = cum->words & 1;
a594a19c
GK
6303 align_words = cum->words + align;
6304
6305 /* Out of registers? Memory, then. */
6306 if (align_words >= GP_ARG_NUM_REG)
6307 return NULL_RTX;
ec6376ab
AM
6308
6309 if (TARGET_32BIT && TARGET_POWERPC64)
6310 return rs6000_mixed_function_arg (mode, type, align_words);
6311
2858f73a
GK
6312 /* The vector value goes in GPRs. Only the part of the
6313 value in GPRs is reported here. */
ec6376ab
AM
6314 part_mode = mode;
6315 n_words = rs6000_arg_size (mode, type);
6316 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6317 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6318 is either wholly in GPRs or half in GPRs and half not. */
6319 part_mode = DImode;
ec6376ab
AM
6320
6321 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6322 }
0ac081f6 6323 }
f82f556d
AH
6324 else if (TARGET_SPE_ABI && TARGET_SPE
6325 && (SPE_VECTOR_MODE (mode)
18f63bfa 6326 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6327 || mode == DCmode
6328 || mode == TFmode
6329 || mode == TCmode))))
a6c9bed4 6330 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6331
f607bc57 6332 else if (abi == ABI_V4)
4697a36c 6333 {
a3170dc6 6334 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6335 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6336 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
cf8e1455
DE
6337 || (mode == TFmode && !TARGET_IEEEQUAD)
6338 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6339 {
2d83f070
JJ
6340 /* _Decimal128 must use an even/odd register pair. This assumes
6341 that the register number is odd when fregno is odd. */
6342 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6343 cum->fregno++;
6344
6345 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6346 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6347 return gen_rtx_REG (mode, cum->fregno);
6348 else
b78d48dd 6349 return NULL_RTX;
4cc833b7
RH
6350 }
6351 else
6352 {
b2d04ecf 6353 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6354 int gregno = cum->sysv_gregno;
6355
4ed78545
AM
6356 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6357 (r7,r8) or (r9,r10), as is any other 2-word item such
6358 as complex int, due to a historical mistake. */
6359 if (n_words == 2)
6360 gregno += (1 - gregno) & 1;
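	  /* For illustration: GP_ARG_MIN_REG is r3, so gregno is odd for
	     r3, r5, r7 and r9.  When gregno is even, (1 - gregno) & 1 is 1
	     and bumps gregno to the next odd register, so a 2-word item
	     always starts one of the pairs listed above.  */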
4cc833b7 6361
4ed78545 6362 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6363 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6364 return NULL_RTX;
ec6376ab
AM
6365
6366 if (TARGET_32BIT && TARGET_POWERPC64)
6367 return rs6000_mixed_function_arg (mode, type,
6368 gregno - GP_ARG_MIN_REG);
6369 return gen_rtx_REG (mode, gregno);
4cc833b7 6370 }
4697a36c 6371 }
4cc833b7
RH
6372 else
6373 {
294bd182 6374 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6375
2d83f070
JJ
6376 /* _Decimal128 must be passed in an even/odd float register pair.
6377 This assumes that the register number is odd when fregno is odd. */
6378 if (mode == TDmode && (cum->fregno % 2) == 1)
6379 cum->fregno++;
6380
2858f73a 6381 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6382 {
ec6376ab
AM
6383 rtx rvec[GP_ARG_NUM_REG + 1];
6384 rtx r;
6385 int k;
c53bdcf5
AM
6386 bool needs_psave;
6387 enum machine_mode fmode = mode;
c53bdcf5
AM
6388 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6389
6390 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6391 {
c53bdcf5
AM
6392 /* Currently, we only ever need one reg here because complex
6393 doubles are split. */
7393f7f8
BE
6394 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6395 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6396
7393f7f8
BE
6397 /* Long double or _Decimal128 split over regs and memory. */
6398 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6399 }
c53bdcf5
AM
6400
6401 /* Do we also need to pass this arg in the parameter save
6402 area? */
6403 needs_psave = (type
6404 && (cum->nargs_prototype <= 0
6405 || (DEFAULT_ABI == ABI_AIX
de17c25f 6406 && TARGET_XL_COMPAT
c53bdcf5
AM
6407 && align_words >= GP_ARG_NUM_REG)));
6408
6409 if (!needs_psave && mode == fmode)
ec6376ab 6410 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6411
ec6376ab 6412 k = 0;
c53bdcf5
AM
6413 if (needs_psave)
6414 {
ec6376ab 6415 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6416 This piece must come first, before the fprs. */
c53bdcf5
AM
6417 if (align_words < GP_ARG_NUM_REG)
6418 {
6419 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6420
6421 if (align_words + n_words > GP_ARG_NUM_REG
6422 || (TARGET_32BIT && TARGET_POWERPC64))
6423 {
6424 /* If this is partially on the stack, then we only
6425 include the portion actually in registers here. */
6426 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6427 rtx off;
79773478
AM
6428 int i = 0;
6429 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6430 /* Not all of the arg fits in gprs. Say that it
6431 goes in memory too, using a magic NULL_RTX
6432 component. Also see comment in
6433 rs6000_mixed_function_arg for why the normal
6434 function_arg_partial_nregs scheme doesn't work
6435 in this case. */
6436 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6437 const0_rtx);
ec6376ab
AM
6438 do
6439 {
6440 r = gen_rtx_REG (rmode,
6441 GP_ARG_MIN_REG + align_words);
2e6c9641 6442 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6443 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6444 }
6445 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6446 }
6447 else
6448 {
6449 /* The whole arg fits in gprs. */
6450 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6451 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6452 }
c53bdcf5 6453 }
ec6376ab
AM
6454 else
6455 /* It's entirely in memory. */
6456 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6457 }
6458
ec6376ab
AM
6459 /* Describe where this piece goes in the fprs. */
6460 r = gen_rtx_REG (fmode, cum->fregno);
6461 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6462
6463 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6464 }
6465 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6466 {
ec6376ab
AM
6467 if (TARGET_32BIT && TARGET_POWERPC64)
6468 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6469
4eeca74f
AM
6470 if (mode == BLKmode)
6471 mode = Pmode;
6472
b2d04ecf
AM
6473 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6474 }
4cc833b7
RH
6475 else
6476 return NULL_RTX;
4697a36c 6477 }
4697a36c
MM
6478}
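/* Roughly, then, function_arg yields one of four things: a CONST_INT
   call cookie for the mode == VOIDmode query above, NULL_RTX when the
   argument lives entirely on the stack, a single REG when it fits in
   one kind of register, or a PARALLEL whose EXPR_LIST elements pair a
   register with its byte offset, with a leading (NULL_RTX, 0) element
   when part of the argument must also be passed in memory.  */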
6479\f
ec6376ab 6480/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6481 the number of bytes passed in registers. For args passed entirely in
6482 registers or entirely in memory, zero. When an arg is described by a
6483 PARALLEL, perhaps using more than one register type, this function
6484 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6485
78a52f11
RH
6486static int
6487rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6488 tree type, bool named)
4697a36c 6489{
c53bdcf5 6490 int ret = 0;
ec6376ab 6491 int align_words;
c53bdcf5 6492
f607bc57 6493 if (DEFAULT_ABI == ABI_V4)
4697a36c 6494 return 0;
4697a36c 6495
c53bdcf5
AM
6496 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6497 && cum->nargs_prototype >= 0)
6498 return 0;
6499
0b5383eb
DJ
6500 /* In this complicated case we just disable the partial_nregs code. */
6501 if (rs6000_darwin64_abi && mode == BLKmode
6502 && TREE_CODE (type) == RECORD_TYPE
6503 && int_size_in_bytes (type) > 0)
6504 return 0;
6505
294bd182 6506 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6507
79773478
AM
6508 if (USE_FP_FOR_ARG_P (cum, mode, type))
6509 {
fb63c729
AM
6510 /* If we are passing this arg in the fixed parameter save area
6511 (gprs or memory) as well as fprs, then this function should
79773478
AM
6512 return the number of partial bytes passed in the parameter
6513 save area rather than partial bytes passed in fprs. */
6514 if (type
6515 && (cum->nargs_prototype <= 0
6516 || (DEFAULT_ABI == ABI_AIX
6517 && TARGET_XL_COMPAT
6518 && align_words >= GP_ARG_NUM_REG)))
6519 return 0;
6520 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6521 > FP_ARG_MAX_REG + 1)
ac7e839c 6522 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6523 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6524 return 0;
6525 }
6526
ec6376ab
AM
6527 if (align_words < GP_ARG_NUM_REG
6528 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6529 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6530
c53bdcf5 6531 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6532 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6533
c53bdcf5 6534 return ret;
4697a36c
MM
6535}
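/* A worked example of the function above: on a 64-bit AIX target, a
   16-byte struct of two 8-byte fields whose first doubleword lands in
   the last GPR has align_words == GP_ARG_NUM_REG - 1, so it straddles
   the register boundary and (GP_ARG_NUM_REG - align_words) * 8 == 8
   bytes are reported as passed in registers; the remaining 8 bytes go
   to the parameter save area.  */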
6536\f
6537/* A C expression that indicates when an argument must be passed by
6538 reference. If nonzero for an argument, a copy of that argument is
6539 made in memory and a pointer to the argument is passed instead of
6540 the argument itself. The pointer is passed in whatever way is
6541 appropriate for passing a pointer to that type.
6542
b2d04ecf
AM
6543 Under V.4, aggregates and long double are passed by reference.
6544
6545 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6546 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6547
6548 As an extension to all ABIs, variable sized types are passed by
6549 reference. */
4697a36c 6550
8cd5a4e0 6551static bool
f676971a 6552rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6553 enum machine_mode mode, const_tree type,
bada2eb8 6554 bool named ATTRIBUTE_UNUSED)
4697a36c 6555{
602ea4d3 6556 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6557 {
6558 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6559 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6560 return 1;
6561 }
6562
6563 if (!type)
6564 return 0;
4697a36c 6565
bada2eb8
DE
6566 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6567 {
6568 if (TARGET_DEBUG_ARG)
6569 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6570 return 1;
6571 }
6572
6573 if (int_size_in_bytes (type) < 0)
6574 {
6575 if (TARGET_DEBUG_ARG)
6576 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6577 return 1;
6578 }
6579
6580 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6581 modes only exist for GCC vector types if -maltivec. */
6582 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6583 {
6584 if (TARGET_DEBUG_ARG)
6585 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6586 return 1;
6587 }
b693336b
PB
6588
6589 /* Pass synthetic vectors in memory. */
bada2eb8 6590 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6591 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6592 {
6593 static bool warned_for_pass_big_vectors = false;
6594 if (TARGET_DEBUG_ARG)
6595 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6596 if (!warned_for_pass_big_vectors)
6597 {
d4ee4d25 6598 warning (0, "GCC vector passed by reference: "
b693336b
PB
6599 "non-standard ABI extension with no compatibility guarantee");
6600 warned_for_pass_big_vectors = true;
6601 }
6602 return 1;
6603 }
6604
b2d04ecf 6605 return 0;
4697a36c 6606}
5985c7a6
FJ
6607
6608static void
2d9db8eb 6609rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6610{
6611 int i;
6612 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6613
6614 if (nregs == 0)
6615 return;
6616
c4ad648e 6617 for (i = 0; i < nregs; i++)
5985c7a6 6618 {
9390387d 6619 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6620 if (reload_completed)
c4ad648e
AM
6621 {
6622 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6623 tem = NULL_RTX;
6624 else
6625 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6626 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6627 }
5985c7a6
FJ
6628 else
6629 tem = replace_equiv_address (tem, XEXP (tem, 0));
6630
37409796 6631 gcc_assert (tem);
5985c7a6
FJ
6632
6633 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6634 }
6635}
4697a36c
MM
6636\f
6637/* Perform any actions needed for a function that is receiving a
f676971a 6638 variable number of arguments.
4697a36c
MM
6639
6640 CUM is as above.
6641
6642 MODE and TYPE are the mode and type of the current parameter.
6643
6644 PRETEND_SIZE is a variable that should be set to the amount of stack
6645 that must be pushed by the prolog to pretend that our caller pushed
6646 it.
6647
6648 Normally, this macro will push all remaining incoming registers on the
6649 stack and set PRETEND_SIZE to the length of the registers pushed. */
6650
c6e8c921 6651static void
f676971a 6652setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6653 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6654 int no_rtl)
4697a36c 6655{
4cc833b7
RH
6656 CUMULATIVE_ARGS next_cum;
6657 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6658 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6659 int first_reg_offset;
6660 alias_set_type set;
4697a36c 6661
f31bf321 6662 /* Skip the last named argument. */
d34c5b80 6663 next_cum = *cum;
594a51fe 6664 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6665
f607bc57 6666 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6667 {
5b667039
JJ
6668 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6669
60e2d0ca 6670 if (! no_rtl)
5b667039
JJ
6671 {
6672 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6673 HOST_WIDE_INT offset = 0;
6674
6675 /* Try to optimize the size of the varargs save area.
6676 The ABI requires that ap.reg_save_area is doubleword
6677 aligned, but we don't need to allocate space for all
6678 the bytes, only those into which we will actually save
6679 anything. */
6680 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6681 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6682 if (TARGET_HARD_FLOAT && TARGET_FPRS
6683 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6684 && cfun->va_list_fpr_size)
6685 {
6686 if (gpr_reg_num)
6687 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6688 * UNITS_PER_FP_WORD;
6689 if (cfun->va_list_fpr_size
6690 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6691 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6692 else
6693 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6694 * UNITS_PER_FP_WORD;
6695 }
6696 if (gpr_reg_num)
6697 {
6698 offset = -((first_reg_offset * reg_size) & ~7);
6699 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6700 {
6701 gpr_reg_num = cfun->va_list_gpr_size;
6702 if (reg_size == 4 && (first_reg_offset & 1))
6703 gpr_reg_num++;
6704 }
6705 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6706 }
6707 else if (fpr_size)
6708 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6709 * UNITS_PER_FP_WORD
6710 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6711
5b667039
JJ
6712 if (gpr_size + fpr_size)
6713 {
6714 rtx reg_save_area
6715 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6716 gcc_assert (GET_CODE (reg_save_area) == MEM);
6717 reg_save_area = XEXP (reg_save_area, 0);
6718 if (GET_CODE (reg_save_area) == PLUS)
6719 {
6720 gcc_assert (XEXP (reg_save_area, 0)
6721 == virtual_stack_vars_rtx);
6722 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6723 offset += INTVAL (XEXP (reg_save_area, 1));
6724 }
6725 else
6726 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6727 }
6728
6729 cfun->machine->varargs_save_offset = offset;
6730 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6731 }
4697a36c 6732 }
60e2d0ca 6733 else
4697a36c 6734 {
d34c5b80 6735 first_reg_offset = next_cum.words;
4cc833b7 6736 save_area = virtual_incoming_args_rtx;
4697a36c 6737
fe984136 6738 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6739 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6740 }
4697a36c 6741
dfafc897 6742 set = get_varargs_alias_set ();
9d30f3c1
JJ
6743 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6744 && cfun->va_list_gpr_size)
4cc833b7 6745 {
9d30f3c1
JJ
6746 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6747
6748 if (va_list_gpr_counter_field)
6749 {
6750 /* V4 va_list_gpr_size counts number of registers needed. */
6751 if (nregs > cfun->va_list_gpr_size)
6752 nregs = cfun->va_list_gpr_size;
6753 }
6754 else
6755 {
6756 /* char * va_list instead counts number of bytes needed. */
6757 if (nregs > cfun->va_list_gpr_size / reg_size)
6758 nregs = cfun->va_list_gpr_size / reg_size;
6759 }
6760
dfafc897 6761 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6762 plus_constant (save_area,
13e2e16e
DE
6763 first_reg_offset * reg_size));
6764 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6765 set_mem_alias_set (mem, set);
8ac61af7 6766 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6767
f676971a 6768 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6769 nregs);
4697a36c
MM
6770 }
6771
4697a36c 6772 /* Save FP registers if needed. */
f607bc57 6773 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6774 && TARGET_HARD_FLOAT && TARGET_FPRS
6775 && ! no_rtl
9d30f3c1
JJ
6776 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6777 && cfun->va_list_fpr_size)
4697a36c 6778 {
9d30f3c1 6779 int fregno = next_cum.fregno, nregs;
9ebbca7d 6780 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6781 rtx lab = gen_label_rtx ();
5b667039
JJ
6782 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6783 * UNITS_PER_FP_WORD);
4697a36c 6784
c4ad648e
AM
6785 emit_jump_insn
6786 (gen_rtx_SET (VOIDmode,
6787 pc_rtx,
6788 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6789 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6790 const0_rtx),
39403d82 6791 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6792 pc_rtx)));
6793
9d30f3c1
JJ
6794 for (nregs = 0;
6795 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6796 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6797 {
696e45ba
ME
6798 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6799 ? DFmode : SFmode,
6800 plus_constant (save_area, off));
6801 MEM_NOTRAP_P (mem) = 1;
6802 set_mem_alias_set (mem, set);
6803 set_mem_align (mem, GET_MODE_ALIGNMENT (
6804 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6805 ? DFmode : SFmode));
6806 emit_move_insn (mem, gen_rtx_REG (
6807 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6808 ? DFmode : SFmode, fregno));
4697a36c 6809 }
4cc833b7
RH
6810
6811 emit_label (lab);
4697a36c 6812 }
4697a36c 6813}
4697a36c 6814
dfafc897 6815/* Create the va_list data type. */
2c4974b7 6816
c35d187f
RH
6817static tree
6818rs6000_build_builtin_va_list (void)
dfafc897 6819{
64c2816f 6820 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6821
9ebbca7d
GK
6822 /* For AIX, prefer 'char *' because that's what the system
6823 header files like. */
f607bc57 6824 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6825 return build_pointer_type (char_type_node);
dfafc897 6826
f1e639b1 6827 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
71856685
AH
6828 type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
6829 get_identifier ("__va_list_tag"), record);
0f4fd75d 6830
71856685 6831 f_gpr = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6832 unsigned_char_type_node);
71856685 6833 f_fpr = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6834 unsigned_char_type_node);
64c2816f
DT
6835 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6836 every user file. */
71856685
AH
6837 f_res = build_decl (BUILTINS_LOCATION, FIELD_DECL,
6838 get_identifier ("reserved"), short_unsigned_type_node);
6839 f_ovf = build_decl (BUILTINS_LOCATION, FIELD_DECL,
6840 get_identifier ("overflow_arg_area"),
dfafc897 6841 ptr_type_node);
71856685
AH
6842 f_sav = build_decl (BUILTINS_LOCATION, FIELD_DECL,
6843 get_identifier ("reg_save_area"),
dfafc897
FS
6844 ptr_type_node);
6845
9d30f3c1
JJ
6846 va_list_gpr_counter_field = f_gpr;
6847 va_list_fpr_counter_field = f_fpr;
6848
dfafc897
FS
6849 DECL_FIELD_CONTEXT (f_gpr) = record;
6850 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6851 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6852 DECL_FIELD_CONTEXT (f_ovf) = record;
6853 DECL_FIELD_CONTEXT (f_sav) = record;
6854
bab45a51
FS
6855 TREE_CHAIN (record) = type_decl;
6856 TYPE_NAME (record) = type_decl;
dfafc897
FS
6857 TYPE_FIELDS (record) = f_gpr;
6858 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6859 TREE_CHAIN (f_fpr) = f_res;
6860 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6861 TREE_CHAIN (f_ovf) = f_sav;
6862
6863 layout_type (record);
6864
6865 /* The correct type is an array type of one element. */
6866 return build_array_type (record, build_index_type (size_zero_node));
6867}
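/* As a rough C-level sketch (not part of rs6000.c; the typedef name
   "__rs6000_va_list_sketch" below is made up for illustration), the
   V.4 record built above corresponds to:  */

typedef struct __va_list_tag
{
  unsigned char gpr;		/* count of GP arg registers used by named args */
  unsigned char fpr;		/* count of FP arg registers used by named args */
  unsigned short reserved;	/* padding, named to keep -Wpadded quiet */
  void *overflow_arg_area;	/* next argument in the caller's overflow area */
  void *reg_save_area;		/* block where the prologue saved arg registers */
} __va_list_tag;

typedef __va_list_tag __rs6000_va_list_sketch[1];	/* array of one element */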
6868
6869/* Implement va_start. */
6870
d7bd8aeb 6871static void
a2369ed3 6872rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6873{
dfafc897 6874 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6875 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6876 tree gpr, fpr, ovf, sav, t;
2c4974b7 6877
dfafc897 6878 /* Only SVR4 needs something special. */
f607bc57 6879 if (DEFAULT_ABI != ABI_V4)
dfafc897 6880 {
e5faf155 6881 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6882 return;
6883 }
6884
973a648b 6885 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6886 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6887 f_res = TREE_CHAIN (f_fpr);
6888 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6889 f_sav = TREE_CHAIN (f_ovf);
6890
872a65b5 6891 valist = build_va_arg_indirect_ref (valist);
47a25a46 6892 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6893 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6894 f_fpr, NULL_TREE);
6895 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6896 f_ovf, NULL_TREE);
6897 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6898 f_sav, NULL_TREE);
dfafc897
FS
6899
6900 /* Count number of gp and fp argument registers used. */
38173d38
JH
6901 words = crtl->args.info.words;
6902 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6903 GP_ARG_NUM_REG);
38173d38 6904 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6905 FP_ARG_NUM_REG);
dfafc897
FS
6906
6907 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6908 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6909 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6910 words, n_gpr, n_fpr);
dfafc897 6911
9d30f3c1
JJ
6912 if (cfun->va_list_gpr_size)
6913 {
726a989a 6914 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6915 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6916 TREE_SIDE_EFFECTS (t) = 1;
6917 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6918 }
58c8adc1 6919
9d30f3c1
JJ
6920 if (cfun->va_list_fpr_size)
6921 {
726a989a 6922 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6923 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6924 TREE_SIDE_EFFECTS (t) = 1;
6925 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6926 }
dfafc897
FS
6927
6928 /* Find the overflow area. */
6929 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6930 if (words != 0)
5be014d5
AP
6931 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6932 size_int (words * UNITS_PER_WORD));
726a989a 6933 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6934 TREE_SIDE_EFFECTS (t) = 1;
6935 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6936
9d30f3c1
JJ
6937 /* If there were no va_arg invocations, don't set up the register
6938 save area. */
6939 if (!cfun->va_list_gpr_size
6940 && !cfun->va_list_fpr_size
6941 && n_gpr < GP_ARG_NUM_REG
6942 && n_fpr < FP_ARG_V4_MAX_REG)
6943 return;
6944
dfafc897
FS
6945 /* Find the register save area. */
6946 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6947 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6948 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6949 size_int (cfun->machine->varargs_save_offset));
726a989a 6950 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6951 TREE_SIDE_EFFECTS (t) = 1;
6952 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6953}
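/* In effect, for V.4 the expansion above initializes the sketch given
   after rs6000_build_builtin_va_list: gpr and fpr get the number of GP
   and FP argument registers already consumed by named parameters,
   overflow_arg_area points just past any named arguments that arrived
   on the stack, and reg_save_area points at the block reserved by
   setup_incoming_varargs (virtual_stack_vars_rtx plus
   cfun->machine->varargs_save_offset).  */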
6954
6955/* Implement va_arg. */
6956
23a60a04 6957tree
726a989a
RB
6958rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6959 gimple_seq *post_p)
cd3ce9b4 6960{
cd3ce9b4
JM
6961 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6962 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6963 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6964 tree lab_false, lab_over, addr;
6965 int align;
6966 tree ptrtype = build_pointer_type (type);
7393f7f8 6967 int regalign = 0;
726a989a 6968 gimple stmt;
cd3ce9b4 6969
08b0dc1b
RH
6970 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6971 {
6972 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6973 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6974 }
6975
cd3ce9b4
JM
6976 if (DEFAULT_ABI != ABI_V4)
6977 {
08b0dc1b 6978 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6979 {
6980 tree elem_type = TREE_TYPE (type);
6981 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6982 int elem_size = GET_MODE_SIZE (elem_mode);
6983
6984 if (elem_size < UNITS_PER_WORD)
6985 {
23a60a04 6986 tree real_part, imag_part;
726a989a 6987 gimple_seq post = NULL;
cd3ce9b4 6988
23a60a04
JM
6989 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6990 &post);
6991 /* Copy the value into a temporary, lest the formal temporary
6992 be reused out from under us. */
6993 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 6994 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 6995
23a60a04
JM
6996 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6997 post_p);
cd3ce9b4 6998
47a25a46 6999 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
7000 }
7001 }
7002
23a60a04 7003 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
7004 }
7005
7006 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7007 f_fpr = TREE_CHAIN (f_gpr);
7008 f_res = TREE_CHAIN (f_fpr);
7009 f_ovf = TREE_CHAIN (f_res);
7010 f_sav = TREE_CHAIN (f_ovf);
7011
872a65b5 7012 valist = build_va_arg_indirect_ref (valist);
47a25a46 7013 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7014 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7015 f_fpr, NULL_TREE);
7016 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7017 f_ovf, NULL_TREE);
7018 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7019 f_sav, NULL_TREE);
cd3ce9b4
JM
7020
7021 size = int_size_in_bytes (type);
7022 rsize = (size + 3) / 4;
7023 align = 1;
7024
08b0dc1b 7025 if (TARGET_HARD_FLOAT && TARGET_FPRS
696e45ba
ME
7026 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7027 || (TARGET_DOUBLE_FLOAT
7028 && (TYPE_MODE (type) == DFmode
7029 || TYPE_MODE (type) == TFmode
7030 || TYPE_MODE (type) == SDmode
7031 || TYPE_MODE (type) == DDmode
7032 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7033 {
7034 /* FP args go in FP registers, if present. */
cd3ce9b4 7035 reg = fpr;
602ea4d3 7036 n_reg = (size + 7) / 8;
696e45ba
ME
7037 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7038 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7039 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7040 align = 8;
7041 }
7042 else
7043 {
7044 /* Otherwise into GP registers. */
cd3ce9b4
JM
7045 reg = gpr;
7046 n_reg = rsize;
7047 sav_ofs = 0;
7048 sav_scale = 4;
7049 if (n_reg == 2)
7050 align = 8;
7051 }
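  /* For illustration, assuming hardware double-float on 32-bit V.4:
     the register save area laid down by setup_incoming_varargs holds
     the eight 4-byte GPR slots first and the FPR slots after them,
     each FPR saved as an 8-byte double.  Hence FP values use
     sav_ofs == 8 * 4 and sav_scale == 8, while GP values use
     sav_ofs == 0 and sav_scale == 4.  */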
7052
7053 /* Pull the value out of the saved registers.... */
7054
7055 lab_over = NULL;
7056 addr = create_tmp_var (ptr_type_node, "addr");
7057 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7058
7059 /* AltiVec vectors never go in registers when -mabi=altivec. */
7060 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7061 align = 16;
7062 else
7063 {
71856685
AH
7064 lab_false = create_artificial_label (input_location);
7065 lab_over = create_artificial_label (input_location);
cd3ce9b4
JM
7066
7067 /* Long long and SPE vectors are aligned in the registers,
7068 as is any other 2-gpr item such as complex int, due to a
7069 historical mistake. */
7070 u = reg;
602ea4d3 7071 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7072 {
7393f7f8 7073 regalign = 1;
726a989a 7074 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7075 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7076 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7077 unshare_expr (reg), u);
cd3ce9b4 7078 }
7393f7f8
BE
7079 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7080 reg number is 0 for f1, so we want to make it odd. */
7081 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7082 {
726a989a 7083 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7084 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7085 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7086 }
cd3ce9b4 7087
95674810 7088 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7089 t = build2 (GE_EXPR, boolean_type_node, u, t);
7090 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7091 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7092 gimplify_and_add (t, pre_p);
7093
7094 t = sav;
7095 if (sav_ofs)
5be014d5 7096 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7097
726a989a 7098 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7099 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7100 u = fold_convert (sizetype, u);
7101 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7102 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7103
e41b2a33
PB
7104 /* _Decimal32 varargs are located in the second word of the 64-bit
7105 FP register for 32-bit binaries. */
4f011e1e
JM
7106 if (!TARGET_POWERPC64
7107 && TARGET_HARD_FLOAT && TARGET_FPRS
7108 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7109 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7110
726a989a 7111 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7112
726a989a 7113 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7114
726a989a
RB
7115 stmt = gimple_build_label (lab_false);
7116 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7117
7393f7f8 7118 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7119 {
7120 /* Ensure that we don't find any more args in regs.
7393f7f8 7121 Alignment has already taken care of the special cases. */
726a989a 7122 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7123 }
7124 }
7125
7126 /* ... otherwise out of the overflow area. */
7127
7128 /* Care for on-stack alignment if needed. */
7129 t = ovf;
7130 if (align != 1)
7131 {
5be014d5
AP
7132 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7133 t = fold_convert (sizetype, t);
4a90aeeb 7134 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7135 size_int (-align));
7136 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7137 }
7138 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7139
726a989a 7140 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7141
5be014d5 7142 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7143 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7144
7145 if (lab_over)
7146 {
726a989a
RB
7147 stmt = gimple_build_label (lab_over);
7148 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7149 }
7150
0cfbc62b
JM
7151 if (STRICT_ALIGNMENT
7152 && (TYPE_ALIGN (type)
7153 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7154 {
7155 /* The value (of type complex double, for example) may not be
7156 aligned in memory in the saved registers, so copy via a
7157 temporary. (This is the same code as used for SPARC.) */
7158 tree tmp = create_tmp_var (type, "va_arg_tmp");
7159 tree dest_addr = build_fold_addr_expr (tmp);
7160
5039610b
SL
7161 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7162 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7163
7164 gimplify_and_add (copy, pre_p);
7165 addr = dest_addr;
7166 }
7167
08b0dc1b 7168 addr = fold_convert (ptrtype, addr);
872a65b5 7169 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7170}
7171
0ac081f6
AH
7172/* Builtins. */
7173
58646b77
PB
7174static void
7175def_builtin (int mask, const char *name, tree type, int code)
7176{
96038623 7177 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7178 {
7179 if (rs6000_builtin_decls[code])
7180 abort ();
7181
7182 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7183 add_builtin_function (name, type, code, BUILT_IN_MD,
7184 NULL, NULL_TREE);
58646b77
PB
7185 }
7186}
0ac081f6 7187
24408032
AH
7188/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7189
2212663f 7190static const struct builtin_description bdesc_3arg[] =
24408032
AH
7191{
7192 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7193 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7194 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7195 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7196 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7197 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7198 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7199 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7200 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7201 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7202 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7203 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7207 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7210 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7211 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7212 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7213 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7214 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7215
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7231
7232 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7233 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7234 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7235 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7236 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7237 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7238 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7239 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7240 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7241};
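/* A small usage sketch (not from this file; it assumes <altivec.h> and
   compilation with -maltivec): for vector float operands the generic
   vec_madd overload resolves to the vmaddfp builtin listed first above.  */

#include <altivec.h>

/* Computes a*b + c elementwise using the AltiVec vmaddfp instruction.  */
vector float
vmaddfp_example (vector float a, vector float b, vector float c)
{
  return vec_madd (a, b, c);
}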
2212663f 7242
95385cbb
AH
7243/* DST operations: void foo (void *, const int, const char). */
7244
7245static const struct builtin_description bdesc_dst[] =
7246{
7247 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7248 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7249 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7250 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7251
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7254 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7255 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7256};
7257
2212663f 7258/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7259
a3170dc6 7260static struct builtin_description bdesc_2arg[] =
0ac081f6 7261{
f18c054f
DB
7262 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7263 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7264 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7265 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7266 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7267 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7268 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7269 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7270 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7271 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7272 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7273 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7274 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7275 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7277 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7278 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7279 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7280 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7281 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7282 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7283 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7285 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7286 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7290 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7291 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7296 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7298 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7299 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7300 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7301 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7302 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7303 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7304 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7305 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7306 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7307 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7308 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7309 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7310 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7311 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7312 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7313 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7314 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7315 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7316 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7317 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7318 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7319 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7320 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7321 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7322 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7323 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7324 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7325 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7326 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7327 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7328 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7329 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7330 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7331 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7332 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7335 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7336 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7337 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7338 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7339 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7340 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7341 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7342 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7343 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7345 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7347 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7348 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7349 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7350 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7351 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7352 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7353 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7354 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7355 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7356 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7357 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7358 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7359 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7360 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7361 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7362 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7363 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7364 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7365 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7366 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7367 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7368 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7369 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7370 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7371 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7372 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7373
58646b77
PB
7374 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7375 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7376 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7377 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7378 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7482 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7483 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7484 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7485 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7486 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7487 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7501
7502 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7503 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7504 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7505 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7506 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7507 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7508 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7509 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7510 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7511 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7512
7513 /* Place-holder. Leave as first SPE builtin. */
7514 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7515 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7516 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7517 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7518 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7519 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7520 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7521 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7522 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7523 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7524 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7525 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7526 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7527 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7528 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7529 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7530 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7531 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7532 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7533 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7534 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7535 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7536 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7537 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7538 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7539 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7540 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7541 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7542 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7543 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7544 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7545 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7546 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7547 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7548 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7549 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7550 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7551 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7552 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7553 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7554 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7555 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7556 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7557 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7558 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7559 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7560 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7561 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7562 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7563 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7564 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7565 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7566 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7567 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7568 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7569 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7570 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7571 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7572 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7573 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7574 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7575 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7576 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7577 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7578 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7579 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7580 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7581 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7582 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7583 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7584 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7585 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7586 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7587 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
7588 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7589 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
7590 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7591 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7592 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7593 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7594 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7595 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7596 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7597 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7598 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7599 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7600 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7601 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7602 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7603 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7604 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7605 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7606 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7607 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7608 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7609 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7610 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7611 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7612 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7613 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7614 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7615 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7616 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7617 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7618 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7619 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7620 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7621 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7622 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7623
7624 /* SPE binary operations expecting a 5-bit unsigned literal. */
7625 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7626
7627 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7628 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7629 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7630 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7631 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7632 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7633 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7634 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7635 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7636 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7637 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7638 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7639 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7640 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7641 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7642 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7643 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7644 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7645 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7646 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7647 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7648 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7649 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7650 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7651 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7652 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7653
7654 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7655 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
7656};
7657
7658/* AltiVec predicates. */
7659
7660struct builtin_description_predicates
7661{
7662 const unsigned int mask;
7663 const enum insn_code icode;
7664 const char *opcode;
7665 const char *const name;
7666 const enum rs6000_builtins code;
7667};
7668
7669static const struct builtin_description_predicates bdesc_altivec_preds[] =
7670{
7671 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7672 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7673 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7674 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7675 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7676 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7677 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7678 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7679 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7680 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7681 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7682 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
7683 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7684
7685 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7686 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7687 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7688};
24408032 7689
7690/* SPE predicates. */
7691static struct builtin_description bdesc_spe_predicates[] =
7692{
7693 /* Place-holder. Leave as first. */
7694 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7695 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7696 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7697 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7698 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7699 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7700 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7701 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7702 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7703 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7704 /* Place-holder. Leave as last. */
7705 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7706};
7707
7708/* SPE evsel predicates. */
7709static struct builtin_description bdesc_spe_evsel[] =
7710{
7711 /* Place-holder. Leave as first. */
7712 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7713 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7714 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7715 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7716 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7717 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7718 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7719 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7720 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7721 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7722 /* Place-holder. Leave as last. */
7723 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7724};
7725
7726/* PAIRED predicates. */
7727static const struct builtin_description bdesc_paired_preds[] =
7728{
7729 /* Place-holder. Leave as first. */
7730 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7731 /* Place-holder. Leave as last. */
7732 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7733};
7734
b6d08ca1 7735/* ABS* operations. */
7736
7737static const struct builtin_description bdesc_abs[] =
7738{
7739 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7740 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7741 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7742 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7743 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7744 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7745 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7746};
7747
7748/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7749 foo (VECa). */
24408032 7750
a3170dc6 7751static struct builtin_description bdesc_1arg[] =
2212663f 7752{
7753 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7754 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7755 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7756 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7757 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7758 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7759 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7760 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
7761 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7762 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7763 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
7764 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7765 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7766 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7767 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7768 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7769 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7770
7771 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7772 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7773 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7774 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7775 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7776 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7777 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7778 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7790
7791 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7792 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7793 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7794 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7795 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7796 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7797 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7798 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7799 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7800 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7801 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7802 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7803 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7804 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7805 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7806 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7807 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7808 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7809 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7810 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7811 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7812 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7813 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7814 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7815 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7816 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
7817 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7818 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7819 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7820 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
7821
7822 /* Place-holder. Leave as last unary SPE builtin. */
7823 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7824
7825 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7826 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7827 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7828 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7829 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
7830};
7831
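/* Expand a unary builtin: expand its single argument, force it into a
   register of the mode the insn expects (the splat-immediate patterns
   instead require a 5-bit signed literal), and emit ICODE into TARGET. */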
7832static rtx
5039610b 7833rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
7834{
7835 rtx pat;
5039610b 7836 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7837 rtx op0 = expand_normal (arg0);
7838 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7839 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7840
7841 if (icode == CODE_FOR_nothing)
7842 /* Builtin not supported on this processor. */
7843 return 0;
7844
7845 /* If we got invalid arguments bail out before generating bad rtl. */
7846 if (arg0 == error_mark_node)
9a171fcd 7847 return const0_rtx;
20e26713 7848
7849 if (icode == CODE_FOR_altivec_vspltisb
7850 || icode == CODE_FOR_altivec_vspltish
7851 || icode == CODE_FOR_altivec_vspltisw
7852 || icode == CODE_FOR_spe_evsplatfi
7853 || icode == CODE_FOR_spe_evsplati)
7854 {
7855 /* Only allow 5-bit *signed* literals. */
b44140e7 7856 if (GET_CODE (op0) != CONST_INT
7857 || INTVAL (op0) > 15
7858 || INTVAL (op0) < -16)
7859 {
7860 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7861 return const0_rtx;
b44140e7 7862 }
7863 }
7864
c62f2db5 7865 if (target == 0
7866 || GET_MODE (target) != tmode
7867 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7868 target = gen_reg_rtx (tmode);
7869
7870 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7871 op0 = copy_to_mode_reg (mode0, op0);
7872
7873 pat = GEN_FCN (icode) (target, op0);
7874 if (! pat)
7875 return 0;
7876 emit_insn (pat);
0ac081f6 7877
7878 return target;
7879}
ae4b4a02 7880
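/* Expand an AltiVec ABS builtin; like the unary case above, but the
   insn also needs two scratch registers in the input mode. */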
100c4561 7881static rtx
5039610b 7882altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
7883{
7884 rtx pat, scratch1, scratch2;
5039610b 7885 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7886 rtx op0 = expand_normal (arg0);
7887 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7888 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7889
7890 /* If we have invalid arguments, bail out before generating bad rtl. */
7891 if (arg0 == error_mark_node)
9a171fcd 7892 return const0_rtx;
7893
7894 if (target == 0
7895 || GET_MODE (target) != tmode
7896 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7897 target = gen_reg_rtx (tmode);
7898
7899 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7900 op0 = copy_to_mode_reg (mode0, op0);
7901
7902 scratch1 = gen_reg_rtx (mode0);
7903 scratch2 = gen_reg_rtx (mode0);
7904
7905 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7906 if (! pat)
7907 return 0;
7908 emit_insn (pat);
7909
7910 return target;
7911}
7912
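/* Expand a binary builtin: expand both arguments, enforce the 5-bit
   unsigned literal restriction for the patterns whose second operand
   is an immediate, and emit ICODE into TARGET. */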
0ac081f6 7913static rtx
5039610b 7914rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7915{
7916 rtx pat;
7917 tree arg0 = CALL_EXPR_ARG (exp, 0);
7918 tree arg1 = CALL_EXPR_ARG (exp, 1);
7919 rtx op0 = expand_normal (arg0);
7920 rtx op1 = expand_normal (arg1);
7921 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7922 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7923 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7924
7925 if (icode == CODE_FOR_nothing)
7926 /* Builtin not supported on this processor. */
7927 return 0;
7928
7929 /* If we got invalid arguments bail out before generating bad rtl. */
7930 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7931 return const0_rtx;
20e26713 7932
7933 if (icode == CODE_FOR_altivec_vcfux
7934 || icode == CODE_FOR_altivec_vcfsx
7935 || icode == CODE_FOR_altivec_vctsxs
7936 || icode == CODE_FOR_altivec_vctuxs
7937 || icode == CODE_FOR_altivec_vspltb
7938 || icode == CODE_FOR_altivec_vsplth
7939 || icode == CODE_FOR_altivec_vspltw
7940 || icode == CODE_FOR_spe_evaddiw
7941 || icode == CODE_FOR_spe_evldd
7942 || icode == CODE_FOR_spe_evldh
7943 || icode == CODE_FOR_spe_evldw
7944 || icode == CODE_FOR_spe_evlhhesplat
7945 || icode == CODE_FOR_spe_evlhhossplat
7946 || icode == CODE_FOR_spe_evlhhousplat
7947 || icode == CODE_FOR_spe_evlwhe
7948 || icode == CODE_FOR_spe_evlwhos
7949 || icode == CODE_FOR_spe_evlwhou
7950 || icode == CODE_FOR_spe_evlwhsplat
7951 || icode == CODE_FOR_spe_evlwwsplat
7952 || icode == CODE_FOR_spe_evrlwi
7953 || icode == CODE_FOR_spe_evslwi
7954 || icode == CODE_FOR_spe_evsrwis
f5119d10 7955 || icode == CODE_FOR_spe_evsubifw
0559cc77 7956 || icode == CODE_FOR_spe_evsrwiu)
7957 {
7958 /* Only allow 5-bit unsigned literals. */
8bb418a3 7959 STRIP_NOPS (arg1);
7960 if (TREE_CODE (arg1) != INTEGER_CST
7961 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7962 {
7963 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7964 return const0_rtx;
b44140e7 7965 }
7966 }
7967
c62f2db5 7968 if (target == 0
7969 || GET_MODE (target) != tmode
7970 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7971 target = gen_reg_rtx (tmode);
7972
7973 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7974 op0 = copy_to_mode_reg (mode0, op0);
7975 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7976 op1 = copy_to_mode_reg (mode1, op1);
7977
7978 pat = GEN_FCN (icode) (target, op0, op1);
7979 if (! pat)
7980 return 0;
7981 emit_insn (pat);
7982
7983 return target;
7984}
6525c0e7 7985
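/* Expand an AltiVec predicate builtin.  Argument 0 selects which CR6
   bit to test (see the switch at the end); the remaining two arguments
   are the vectors to compare. */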
ae4b4a02 7986static rtx
f676971a 7987altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7988 tree exp, rtx target)
7989{
7990 rtx pat, scratch;
7991 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7992 tree arg0 = CALL_EXPR_ARG (exp, 1);
7993 tree arg1 = CALL_EXPR_ARG (exp, 2);
7994 rtx op0 = expand_normal (arg0);
7995 rtx op1 = expand_normal (arg1);
7996 enum machine_mode tmode = SImode;
7997 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7998 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7999 int cr6_form_int;
8000
8001 if (TREE_CODE (cr6_form) != INTEGER_CST)
8002 {
8003 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 8004 return const0_rtx;
8005 }
8006 else
8007 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8008
37409796 8009 gcc_assert (mode0 == mode1);
8010
8011 /* If we have invalid arguments, bail out before generating bad rtl. */
8012 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8013 return const0_rtx;
8014
8015 if (target == 0
8016 || GET_MODE (target) != tmode
8017 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8018 target = gen_reg_rtx (tmode);
8019
8020 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8021 op0 = copy_to_mode_reg (mode0, op0);
8022 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8023 op1 = copy_to_mode_reg (mode1, op1);
8024
8025 scratch = gen_reg_rtx (mode0);
8026
8027 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8028 gen_rtx_SYMBOL_REF (Pmode, opcode));
8029 if (! pat)
8030 return 0;
8031 emit_insn (pat);
8032
8033 /* The vec_any* and vec_all* predicates use the same opcodes for two
8034 different operations, but the bits in CR6 will be different
8035 depending on what information we want. So we have to play tricks
8036 with CR6 to get the right bits out.
8037
8038 If you think this is disgusting, look at the specs for the
8039 AltiVec predicates. */
8040
8041 switch (cr6_form_int)
8042 {
8043 case 0:
8044 emit_insn (gen_cr6_test_for_zero (target));
8045 break;
8046 case 1:
8047 emit_insn (gen_cr6_test_for_zero_reverse (target));
8048 break;
8049 case 2:
8050 emit_insn (gen_cr6_test_for_lt (target));
8051 break;
8052 case 3:
8053 emit_insn (gen_cr6_test_for_lt_reverse (target));
8054 break;
8055 default:
8056 error ("argument 1 of __builtin_altivec_predicate is out of range");
8057 break;
8058 }
8059
8060 return target;
8061}
8062
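/* Expand a PAIRED load builtin: build a reg+reg (or single-register)
   memory address from the two arguments and load it into TARGET. */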
8063static rtx
8064paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8065{
8066 rtx pat, addr;
8067 tree arg0 = CALL_EXPR_ARG (exp, 0);
8068 tree arg1 = CALL_EXPR_ARG (exp, 1);
8069 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8070 enum machine_mode mode0 = Pmode;
8071 enum machine_mode mode1 = Pmode;
8072 rtx op0 = expand_normal (arg0);
8073 rtx op1 = expand_normal (arg1);
8074
8075 if (icode == CODE_FOR_nothing)
8076 /* Builtin not supported on this processor. */
8077 return 0;
8078
8079 /* If we got invalid arguments bail out before generating bad rtl. */
8080 if (arg0 == error_mark_node || arg1 == error_mark_node)
8081 return const0_rtx;
8082
8083 if (target == 0
8084 || GET_MODE (target) != tmode
8085 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8086 target = gen_reg_rtx (tmode);
8087
8088 op1 = copy_to_mode_reg (mode1, op1);
8089
8090 if (op0 == const0_rtx)
8091 {
8092 addr = gen_rtx_MEM (tmode, op1);
8093 }
8094 else
8095 {
8096 op0 = copy_to_mode_reg (mode0, op0);
8097 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8098 }
8099
8100 pat = GEN_FCN (icode) (target, addr);
8101
8102 if (! pat)
8103 return 0;
8104 emit_insn (pat);
8105
8106 return target;
8107}
8108
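/* Expand an AltiVec load builtin: build the memory address from the
   two arguments; if BLK, give the MEM BLKmode instead of the vector
   mode. */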
b4a62fa0 8109static rtx
0b61703c 8110altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
8111{
8112 rtx pat, addr;
8113 tree arg0 = CALL_EXPR_ARG (exp, 0);
8114 tree arg1 = CALL_EXPR_ARG (exp, 1);
8115 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8116 enum machine_mode mode0 = Pmode;
8117 enum machine_mode mode1 = Pmode;
8118 rtx op0 = expand_normal (arg0);
8119 rtx op1 = expand_normal (arg1);
8120
8121 if (icode == CODE_FOR_nothing)
8122 /* Builtin not supported on this processor. */
8123 return 0;
8124
8125 /* If we got invalid arguments bail out before generating bad rtl. */
8126 if (arg0 == error_mark_node || arg1 == error_mark_node)
8127 return const0_rtx;
8128
8129 if (target == 0
8130 || GET_MODE (target) != tmode
8131 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8132 target = gen_reg_rtx (tmode);
8133
f676971a 8134 op1 = copy_to_mode_reg (mode1, op1);
8135
8136 if (op0 == const0_rtx)
8137 {
0b61703c 8138 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
8139 }
8140 else
8141 {
8142 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8143 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
8144 }
8145
8146 pat = GEN_FCN (icode) (target, addr);
8147
8148 if (! pat)
8149 return 0;
8150 emit_insn (pat);
8151
8152 return target;
8153}
8154
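/* Expand an SPE store (stv) builtin: check the three arguments, force
   each into a register of the mode its insn operand expects, and emit
   the store.  These builtins produce no result. */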
61bea3b0 8155static rtx
5039610b 8156spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8157{
8158 tree arg0 = CALL_EXPR_ARG (exp, 0);
8159 tree arg1 = CALL_EXPR_ARG (exp, 1);
8160 tree arg2 = CALL_EXPR_ARG (exp, 2);
8161 rtx op0 = expand_normal (arg0);
8162 rtx op1 = expand_normal (arg1);
8163 rtx op2 = expand_normal (arg2);
8164 rtx pat;
8165 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8166 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8167 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8168
8169 /* Invalid arguments. Bail before doing anything stoopid! */
8170 if (arg0 == error_mark_node
8171 || arg1 == error_mark_node
8172 || arg2 == error_mark_node)
8173 return const0_rtx;
8174
8175 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8176 op0 = copy_to_mode_reg (mode2, op0);
8177 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8178 op1 = copy_to_mode_reg (mode0, op1);
8179 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8180 op2 = copy_to_mode_reg (mode1, op2);
8181
8182 pat = GEN_FCN (icode) (op1, op2, op0);
8183 if (pat)
8184 emit_insn (pat);
8185 return NULL_RTX;
8186}
8187
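/* Expand a PAIRED store builtin: argument 0 is the value to store,
   arguments 1 and 2 form the address (reg+reg, or a single register
   when argument 1 is zero). */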
8188static rtx
8189paired_expand_stv_builtin (enum insn_code icode, tree exp)
8190{
8191 tree arg0 = CALL_EXPR_ARG (exp, 0);
8192 tree arg1 = CALL_EXPR_ARG (exp, 1);
8193 tree arg2 = CALL_EXPR_ARG (exp, 2);
8194 rtx op0 = expand_normal (arg0);
8195 rtx op1 = expand_normal (arg1);
8196 rtx op2 = expand_normal (arg2);
8197 rtx pat, addr;
8198 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8199 enum machine_mode mode1 = Pmode;
8200 enum machine_mode mode2 = Pmode;
8201
8202 /* Invalid arguments. Bail before doing anything stoopid! */
8203 if (arg0 == error_mark_node
8204 || arg1 == error_mark_node
8205 || arg2 == error_mark_node)
8206 return const0_rtx;
8207
8208 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8209 op0 = copy_to_mode_reg (tmode, op0);
8210
8211 op2 = copy_to_mode_reg (mode2, op2);
8212
8213 if (op1 == const0_rtx)
8214 {
8215 addr = gen_rtx_MEM (tmode, op2);
8216 }
8217 else
8218 {
8219 op1 = copy_to_mode_reg (mode1, op1);
8220 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8221 }
8222
8223 pat = GEN_FCN (icode) (addr, op0);
8224 if (pat)
8225 emit_insn (pat);
8226 return NULL_RTX;
8227}
8228
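/* Expand an AltiVec store builtin; same shape as the PAIRED case
   above: value in argument 0, address formed from arguments 1 and 2. */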
6525c0e7 8229static rtx
5039610b 8230altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8231{
8232 tree arg0 = CALL_EXPR_ARG (exp, 0);
8233 tree arg1 = CALL_EXPR_ARG (exp, 1);
8234 tree arg2 = CALL_EXPR_ARG (exp, 2);
8235 rtx op0 = expand_normal (arg0);
8236 rtx op1 = expand_normal (arg1);
8237 rtx op2 = expand_normal (arg2);
8238 rtx pat, addr;
8239 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8240 enum machine_mode mode1 = Pmode;
8241 enum machine_mode mode2 = Pmode;
8242
8243 /* Invalid arguments. Bail before doing anything stoopid! */
8244 if (arg0 == error_mark_node
8245 || arg1 == error_mark_node
8246 || arg2 == error_mark_node)
9a171fcd 8247 return const0_rtx;
6525c0e7 8248
8249 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8250 op0 = copy_to_mode_reg (tmode, op0);
8251
f676971a 8252 op2 = copy_to_mode_reg (mode2, op2);
8253
8254 if (op1 == const0_rtx)
8255 {
8256 addr = gen_rtx_MEM (tmode, op2);
8257 }
8258 else
8259 {
8260 op1 = copy_to_mode_reg (mode1, op1);
8261 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8262 }
6525c0e7 8263
b4a62fa0 8264 pat = GEN_FCN (icode) (addr, op0);
8265 if (pat)
8266 emit_insn (pat);
8267 return NULL_RTX;
8268}
8269
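/* Expand a ternary builtin: expand the three arguments, enforce the
   4-bit unsigned literal restriction for the vsldoi patterns, and emit
   ICODE (selv2sf4 additionally takes a zero constant operand). */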
2212663f 8270static rtx
5039610b 8271rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
8272{
8273 rtx pat;
8274 tree arg0 = CALL_EXPR_ARG (exp, 0);
8275 tree arg1 = CALL_EXPR_ARG (exp, 1);
8276 tree arg2 = CALL_EXPR_ARG (exp, 2);
8277 rtx op0 = expand_normal (arg0);
8278 rtx op1 = expand_normal (arg1);
8279 rtx op2 = expand_normal (arg2);
8280 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8281 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8282 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8283 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8284
8285 if (icode == CODE_FOR_nothing)
8286 /* Builtin not supported on this processor. */
8287 return 0;
8288
8289 /* If we got invalid arguments bail out before generating bad rtl. */
8290 if (arg0 == error_mark_node
8291 || arg1 == error_mark_node
8292 || arg2 == error_mark_node)
9a171fcd 8293 return const0_rtx;
20e26713 8294
8295 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8296 || icode == CODE_FOR_altivec_vsldoi_v4si
8297 || icode == CODE_FOR_altivec_vsldoi_v8hi
8298 || icode == CODE_FOR_altivec_vsldoi_v16qi)
8299 {
8300 /* Only allow 4-bit unsigned literals. */
8bb418a3 8301 STRIP_NOPS (arg2);
8302 if (TREE_CODE (arg2) != INTEGER_CST
8303 || TREE_INT_CST_LOW (arg2) & ~0xf)
8304 {
8305 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8306 return const0_rtx;
b44140e7 8307 }
8308 }
8309
c62f2db5 8310 if (target == 0
8311 || GET_MODE (target) != tmode
8312 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8313 target = gen_reg_rtx (tmode);
8314
8315 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8316 op0 = copy_to_mode_reg (mode0, op0);
8317 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8318 op1 = copy_to_mode_reg (mode1, op1);
8319 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8320 op2 = copy_to_mode_reg (mode2, op2);
8321
8322 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8323 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8324 else
8325 pat = GEN_FCN (icode) (target, op0, op1, op2);
8326 if (! pat)
8327 return 0;
8328 emit_insn (pat);
8329
8330 return target;
8331}
92898235 8332
3a9b8c7e 8333/* Expand the lvx builtins. */
0ac081f6 8334static rtx
a2369ed3 8335altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8336{
5039610b 8337 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8338 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8339 tree arg0;
8340 enum machine_mode tmode, mode0;
7c3abc73 8341 rtx pat, op0;
3a9b8c7e 8342 enum insn_code icode;
92898235 8343
8344 switch (fcode)
8345 {
f18c054f 8346 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8347 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8348 break;
f18c054f 8349 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8350 icode = CODE_FOR_altivec_lvx_v8hi;
8351 break;
8352 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8353 icode = CODE_FOR_altivec_lvx_v4si;
8354 break;
8355 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8356 icode = CODE_FOR_altivec_lvx_v4sf;
8357 break;
8358 default:
8359 *expandedp = false;
8360 return NULL_RTX;
8361 }
0ac081f6 8362
3a9b8c7e 8363 *expandedp = true;
f18c054f 8364
5039610b 8365 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8366 op0 = expand_normal (arg0);
8367 tmode = insn_data[icode].operand[0].mode;
8368 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8369
8370 if (target == 0
8371 || GET_MODE (target) != tmode
8372 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8373 target = gen_reg_rtx (tmode);
24408032 8374
8375 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8376 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8377
8378 pat = GEN_FCN (icode) (target, op0);
8379 if (! pat)
8380 return 0;
8381 emit_insn (pat);
8382 return target;
8383}
f18c054f 8384
8385/* Expand the stvx builtins. */
8386static rtx
f676971a 8387altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8388 bool *expandedp)
3a9b8c7e 8389{
5039610b 8390 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8391 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8392 tree arg0, arg1;
8393 enum machine_mode mode0, mode1;
7c3abc73 8394 rtx pat, op0, op1;
3a9b8c7e 8395 enum insn_code icode;
f18c054f 8396
8397 switch (fcode)
8398 {
8399 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8400 icode = CODE_FOR_altivec_stvx_v16qi;
8401 break;
8402 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8403 icode = CODE_FOR_altivec_stvx_v8hi;
8404 break;
8405 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8406 icode = CODE_FOR_altivec_stvx_v4si;
8407 break;
8408 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8409 icode = CODE_FOR_altivec_stvx_v4sf;
8410 break;
8411 default:
8412 *expandedp = false;
8413 return NULL_RTX;
8414 }
24408032 8415
8416 arg0 = CALL_EXPR_ARG (exp, 0);
8417 arg1 = CALL_EXPR_ARG (exp, 1);
8418 op0 = expand_normal (arg0);
8419 op1 = expand_normal (arg1);
8420 mode0 = insn_data[icode].operand[0].mode;
8421 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8422
8423 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8424 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8425 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8426 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8427
8428 pat = GEN_FCN (icode) (op0, op1);
8429 if (pat)
8430 emit_insn (pat);
f18c054f 8431
8432 *expandedp = true;
8433 return NULL_RTX;
8434}
f18c054f 8435
8436/* Expand the dst builtins. */
8437static rtx
f676971a 8438altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8439 bool *expandedp)
3a9b8c7e 8440{
5039610b 8441 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8442 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8443 tree arg0, arg1, arg2;
8444 enum machine_mode mode0, mode1, mode2;
7c3abc73 8445 rtx pat, op0, op1, op2;
586de218 8446 const struct builtin_description *d;
a3170dc6 8447 size_t i;
f18c054f 8448
3a9b8c7e 8449 *expandedp = false;
f18c054f 8450
3a9b8c7e 8451 /* Handle DST variants. */
586de218 8452 d = bdesc_dst;
8453 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8454 if (d->code == fcode)
8455 {
8456 arg0 = CALL_EXPR_ARG (exp, 0);
8457 arg1 = CALL_EXPR_ARG (exp, 1);
8458 arg2 = CALL_EXPR_ARG (exp, 2);
8459 op0 = expand_normal (arg0);
8460 op1 = expand_normal (arg1);
8461 op2 = expand_normal (arg2);
8462 mode0 = insn_data[d->icode].operand[0].mode;
8463 mode1 = insn_data[d->icode].operand[1].mode;
8464 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8465
8466 /* Invalid arguments, bail out before generating bad rtl. */
8467 if (arg0 == error_mark_node
8468 || arg1 == error_mark_node
8469 || arg2 == error_mark_node)
8470 return const0_rtx;
f18c054f 8471
86e7df90 8472 *expandedp = true;
8bb418a3 8473 STRIP_NOPS (arg2);
8474 if (TREE_CODE (arg2) != INTEGER_CST
8475 || TREE_INT_CST_LOW (arg2) & ~0x3)
8476 {
9e637a26 8477 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
8478 return const0_rtx;
8479 }
f18c054f 8480
3a9b8c7e 8481 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8482 op0 = copy_to_mode_reg (Pmode, op0);
8483 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8484 op1 = copy_to_mode_reg (mode1, op1);
24408032 8485
8486 pat = GEN_FCN (d->icode) (op0, op1, op2);
8487 if (pat != 0)
8488 emit_insn (pat);
f18c054f 8489
8490 return NULL_RTX;
8491 }
f18c054f 8492
8493 return NULL_RTX;
8494}
24408032 8495
8496/* Expand vec_init builtin. */
8497static rtx
5039610b 8498altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
8499{
8500 enum machine_mode tmode = TYPE_MODE (type);
8501 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8502 int i, n_elt = GET_MODE_NUNITS (tmode);
8503 rtvec v = rtvec_alloc (n_elt);
8504
8505 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8506 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8507
5039610b 8508 for (i = 0; i < n_elt; ++i)
7a4eca66 8509 {
5039610b 8510 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
8511 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8512 }
8513
8514 if (!target || !register_operand (target, tmode))
8515 target = gen_reg_rtx (tmode);
8516
8517 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8518 return target;
8519}
8520
8521/* Return the integer constant in ARG. Constrain it to be in the range
8522 of the subparts of VEC_TYPE; issue an error if not. */
8523
8524static int
8525get_element_number (tree vec_type, tree arg)
8526{
8527 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8528
8529 if (!host_integerp (arg, 1)
8530 || (elt = tree_low_cst (arg, 1), elt > max))
8531 {
8532 error ("selector must be an integer constant in the range 0..%wi", max);
8533 return 0;
8534 }
8535
8536 return elt;
8537}
8538
8539/* Expand vec_set builtin. */
8540static rtx
5039610b 8541altivec_expand_vec_set_builtin (tree exp)
8542{
8543 enum machine_mode tmode, mode1;
8544 tree arg0, arg1, arg2;
8545 int elt;
8546 rtx op0, op1;
8547
8548 arg0 = CALL_EXPR_ARG (exp, 0);
8549 arg1 = CALL_EXPR_ARG (exp, 1);
8550 arg2 = CALL_EXPR_ARG (exp, 2);
8551
8552 tmode = TYPE_MODE (TREE_TYPE (arg0));
8553 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8554 gcc_assert (VECTOR_MODE_P (tmode));
8555
8556 op0 = expand_expr (arg0, NULL_RTX, tmode, EXPAND_NORMAL);
8557 op1 = expand_expr (arg1, NULL_RTX, mode1, EXPAND_NORMAL);
7a4eca66
DE
8558 elt = get_element_number (TREE_TYPE (arg0), arg2);
8559
8560 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8561 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8562
8563 op0 = force_reg (tmode, op0);
8564 op1 = force_reg (mode1, op1);
8565
8566 rs6000_expand_vector_set (op0, op1, elt);
8567
8568 return op0;
8569}
8570
8571/* Expand vec_ext builtin. */
8572static rtx
5039610b 8573altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8574{
8575 enum machine_mode tmode, mode0;
8576 tree arg0, arg1;
8577 int elt;
8578 rtx op0;
8579
5039610b
SL
8580 arg0 = CALL_EXPR_ARG (exp, 0);
8581 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8582
84217346 8583 op0 = expand_normal (arg0);
7a4eca66
DE
8584 elt = get_element_number (TREE_TYPE (arg0), arg1);
8585
8586 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8587 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8588 gcc_assert (VECTOR_MODE_P (mode0));
8589
8590 op0 = force_reg (mode0, op0);
8591
8592 if (optimize || !target || !register_operand (target, tmode))
8593 target = gen_reg_rtx (tmode);
8594
8595 rs6000_expand_vector_extract (target, op0, elt);
8596
8597 return target;
8598}
8599
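/* Usage sketch, not part of the original file: with the AltiVec language
   extensions enabled, code along the lines of

       vector int v = {1, 2, 3, 4};
       int x = vec_extract (v, 2);
       v = vec_insert (99, v, 0);

   is assumed to be resolved by the front-end overload support onto the
   ALTIVEC_BUILTIN_VEC_EXT_V4SI and ALTIVEC_BUILTIN_VEC_SET_V4SI codes and
   thus to reach altivec_expand_vec_ext_builtin and
   altivec_expand_vec_set_builtin above.  */
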
8600/* Expand the builtin in EXP and store the result in TARGET. Store
8601 true in *EXPANDEDP if we found a builtin to expand. */
8602static rtx
a2369ed3 8603altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8604{
8605 const struct builtin_description *d;
8606 const struct builtin_description_predicates *dp;
8607 size_t i;
8608 enum insn_code icode;
5039610b 8609 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8610 tree arg0;
8611 rtx op0, pat;
8612 enum machine_mode tmode, mode0;
3a9b8c7e 8613 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8614
8615 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8616 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8617 {
8618 *expandedp = true;
ea40ba9c 8619 error ("unresolved overload for Altivec builtin %qF", fndecl);
8620 return const0_rtx;
8621 }
8622
8623 target = altivec_expand_ld_builtin (exp, target, expandedp);
8624 if (*expandedp)
8625 return target;
0ac081f6 8626
8627 target = altivec_expand_st_builtin (exp, target, expandedp);
8628 if (*expandedp)
8629 return target;
8630
8631 target = altivec_expand_dst_builtin (exp, target, expandedp);
8632 if (*expandedp)
8633 return target;
8634
8635 *expandedp = true;
95385cbb 8636
8637 switch (fcode)
8638 {
6525c0e7 8639 case ALTIVEC_BUILTIN_STVX:
5039610b 8640 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8641 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8642 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8643 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8644 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8645 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8646 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8647 case ALTIVEC_BUILTIN_STVXL:
5039610b 8648 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8649
0b61703c
AP
8650 case ALTIVEC_BUILTIN_STVLX:
8651 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8652 case ALTIVEC_BUILTIN_STVLXL:
8653 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8654 case ALTIVEC_BUILTIN_STVRX:
8655 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8656 case ALTIVEC_BUILTIN_STVRXL:
8657 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8658
95385cbb
AH
8659 case ALTIVEC_BUILTIN_MFVSCR:
8660 icode = CODE_FOR_altivec_mfvscr;
8661 tmode = insn_data[icode].operand[0].mode;
8662
8663 if (target == 0
8664 || GET_MODE (target) != tmode
8665 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8666 target = gen_reg_rtx (tmode);
f676971a 8667
95385cbb 8668 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8669 if (! pat)
8670 return 0;
8671 emit_insn (pat);
95385cbb
AH
8672 return target;
8673
8674 case ALTIVEC_BUILTIN_MTVSCR:
8675 icode = CODE_FOR_altivec_mtvscr;
5039610b 8676 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8677 op0 = expand_normal (arg0);
95385cbb
AH
8678 mode0 = insn_data[icode].operand[0].mode;
8679
8680 /* If we got invalid arguments bail out before generating bad rtl. */
8681 if (arg0 == error_mark_node)
9a171fcd 8682 return const0_rtx;
95385cbb
AH
8683
8684 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8685 op0 = copy_to_mode_reg (mode0, op0);
8686
8687 pat = GEN_FCN (icode) (op0);
8688 if (pat)
8689 emit_insn (pat);
8690 return NULL_RTX;
3a9b8c7e 8691
95385cbb
AH
8692 case ALTIVEC_BUILTIN_DSSALL:
8693 emit_insn (gen_altivec_dssall ());
8694 return NULL_RTX;
8695
8696 case ALTIVEC_BUILTIN_DSS:
8697 icode = CODE_FOR_altivec_dss;
5039610b 8698 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8699 STRIP_NOPS (arg0);
84217346 8700 op0 = expand_normal (arg0);
95385cbb
AH
8701 mode0 = insn_data[icode].operand[0].mode;
8702
8703 /* If we got invalid arguments bail out before generating bad rtl. */
8704 if (arg0 == error_mark_node)
9a171fcd 8705 return const0_rtx;
95385cbb 8706
b44140e7
AH
8707 if (TREE_CODE (arg0) != INTEGER_CST
8708 || TREE_INT_CST_LOW (arg0) & ~0x3)
8709 {
8710 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8711 return const0_rtx;
b44140e7
AH
8712 }
8713
95385cbb
AH
8714 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8715 op0 = copy_to_mode_reg (mode0, op0);
8716
8717 emit_insn (gen_altivec_dss (op0));
0ac081f6 8718 return NULL_RTX;
7a4eca66
DE
8719
8720 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8721 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8722 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8723 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8724 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8725
8726 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8727 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8728 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8729 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8730 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8731
8732 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8733 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8734 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8735 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8736 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8737
8738 default:
8739 break;
8740 /* Fall through. */
0ac081f6 8741 }
24408032 8742
100c4561 8743 /* Expand abs* operations. */
586de218 8744 d = bdesc_abs;
ca7558fc 8745 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8746 if (d->code == fcode)
5039610b 8747 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8748
ae4b4a02 8749 /* Expand the AltiVec predicates. */
586de218 8750 dp = bdesc_altivec_preds;
ca7558fc 8751 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8752 if (dp->code == fcode)
c4ad648e 8753 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8754 exp, target);
ae4b4a02 8755
6525c0e7
AH
8756 /* LV* are funky. We initialized them differently. */
8757 switch (fcode)
8758 {
8759 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8760 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 8761 exp, target, false);
6525c0e7 8762 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8763 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 8764 exp, target, false);
6525c0e7 8765 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8766 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 8767 exp, target, false);
6525c0e7 8768 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8769 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 8770 exp, target, false);
6525c0e7 8771 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8772 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 8773 exp, target, false);
6525c0e7 8774 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8775 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 8776 exp, target, false);
6525c0e7 8777 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8778 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
8779 exp, target, false);
8780 case ALTIVEC_BUILTIN_LVLX:
8781 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8782 exp, target, true);
8783 case ALTIVEC_BUILTIN_LVLXL:
8784 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8785 exp, target, true);
8786 case ALTIVEC_BUILTIN_LVRX:
8787 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8788 exp, target, true);
8789 case ALTIVEC_BUILTIN_LVRXL:
8790 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8791 exp, target, true);
6525c0e7
AH
8792 default:
8793 break;
8794 /* Fall through. */
8795 }
95385cbb 8796
92898235 8797 *expandedp = false;
0ac081f6
AH
8798 return NULL_RTX;
8799}
8800
96038623
DE
8801/* Expand the builtin in EXP and store the result in TARGET. Store
8802 true in *EXPANDEDP if we found a builtin to expand. */
8803static rtx
8804paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8805{
8806 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8807 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8808 const struct builtin_description *d;
96038623
DE
8809 size_t i;
8810
8811 *expandedp = true;
8812
8813 switch (fcode)
8814 {
8815 case PAIRED_BUILTIN_STX:
8816 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8817 case PAIRED_BUILTIN_LX:
8818 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8819 default:
8820 break;
8821 /* Fall through. */
8822 }
8823
8824 /* Expand the paired predicates. */
23a651fc 8825 d = bdesc_paired_preds;
96038623
DE
8826 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8827 if (d->code == fcode)
8828 return paired_expand_predicate_builtin (d->icode, exp, target);
8829
8830 *expandedp = false;
8831 return NULL_RTX;
8832}
8833
a3170dc6
AH
8834/* Binops that need to be initialized manually, but can be expanded
8835 automagically by rs6000_expand_binop_builtin. */
8836static struct builtin_description bdesc_2arg_spe[] =
8837{
8838 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8839 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8840 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8841 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8842 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8843 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8844 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8845 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8846 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8847 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8848 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8849 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8850 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8851 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8852 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8853 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8854 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8855 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8856 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8857 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8858 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8859 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8860};
8861
8862/* Expand the builtin in EXP and store the result in TARGET. Store
8863 true in *EXPANDEDP if we found a builtin to expand.
8864
8865 This expands the SPE builtins that are not simple unary and binary
8866 operations. */
8867static rtx
a2369ed3 8868spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8869{
5039610b 8870 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8871 tree arg1, arg0;
8872 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8873 enum insn_code icode;
8874 enum machine_mode tmode, mode0;
8875 rtx pat, op0;
8876 struct builtin_description *d;
8877 size_t i;
8878
8879 *expandedp = true;
8880
8881 /* Syntax check for a 5-bit unsigned immediate. */
8882 switch (fcode)
8883 {
8884 case SPE_BUILTIN_EVSTDD:
8885 case SPE_BUILTIN_EVSTDH:
8886 case SPE_BUILTIN_EVSTDW:
8887 case SPE_BUILTIN_EVSTWHE:
8888 case SPE_BUILTIN_EVSTWHO:
8889 case SPE_BUILTIN_EVSTWWE:
8890 case SPE_BUILTIN_EVSTWWO:
5039610b 8891 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8892 if (TREE_CODE (arg1) != INTEGER_CST
8893 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8894 {
8895 error ("argument 2 must be a 5-bit unsigned literal");
8896 return const0_rtx;
8897 }
8898 break;
8899 default:
8900 break;
8901 }
8902
00332c9f
AH
8903 /* The evsplat*i instructions are not quite generic. */
8904 switch (fcode)
8905 {
8906 case SPE_BUILTIN_EVSPLATFI:
8907 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8908 exp, target);
00332c9f
AH
8909 case SPE_BUILTIN_EVSPLATI:
8910 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8911 exp, target);
00332c9f
AH
8912 default:
8913 break;
8914 }
8915
a3170dc6
AH
8916 d = (struct builtin_description *) bdesc_2arg_spe;
8917 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8918 if (d->code == fcode)
5039610b 8919 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8920
8921 d = (struct builtin_description *) bdesc_spe_predicates;
8922 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8923 if (d->code == fcode)
5039610b 8924 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8925
8926 d = (struct builtin_description *) bdesc_spe_evsel;
8927 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8928 if (d->code == fcode)
5039610b 8929 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8930
8931 switch (fcode)
8932 {
8933 case SPE_BUILTIN_EVSTDDX:
5039610b 8934 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8935 case SPE_BUILTIN_EVSTDHX:
5039610b 8936 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8937 case SPE_BUILTIN_EVSTDWX:
5039610b 8938 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8939 case SPE_BUILTIN_EVSTWHEX:
5039610b 8940 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8941 case SPE_BUILTIN_EVSTWHOX:
5039610b 8942 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8943 case SPE_BUILTIN_EVSTWWEX:
5039610b 8944 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8945 case SPE_BUILTIN_EVSTWWOX:
5039610b 8946 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8947 case SPE_BUILTIN_EVSTDD:
5039610b 8948 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8949 case SPE_BUILTIN_EVSTDH:
5039610b 8950 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8951 case SPE_BUILTIN_EVSTDW:
5039610b 8952 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8953 case SPE_BUILTIN_EVSTWHE:
5039610b 8954 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8955 case SPE_BUILTIN_EVSTWHO:
5039610b 8956 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8957 case SPE_BUILTIN_EVSTWWE:
5039610b 8958 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8959 case SPE_BUILTIN_EVSTWWO:
5039610b 8960 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8961 case SPE_BUILTIN_MFSPEFSCR:
8962 icode = CODE_FOR_spe_mfspefscr;
8963 tmode = insn_data[icode].operand[0].mode;
8964
8965 if (target == 0
8966 || GET_MODE (target) != tmode
8967 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8968 target = gen_reg_rtx (tmode);
f676971a 8969
a3170dc6
AH
8970 pat = GEN_FCN (icode) (target);
8971 if (! pat)
8972 return 0;
8973 emit_insn (pat);
8974 return target;
8975 case SPE_BUILTIN_MTSPEFSCR:
8976 icode = CODE_FOR_spe_mtspefscr;
5039610b 8977 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8978 op0 = expand_normal (arg0);
a3170dc6
AH
8979 mode0 = insn_data[icode].operand[0].mode;
8980
8981 if (arg0 == error_mark_node)
8982 return const0_rtx;
8983
8984 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8985 op0 = copy_to_mode_reg (mode0, op0);
8986
8987 pat = GEN_FCN (icode) (op0);
8988 if (pat)
8989 emit_insn (pat);
8990 return NULL_RTX;
8991 default:
8992 break;
8993 }
8994
8995 *expandedp = false;
8996 return NULL_RTX;
8997}
8998
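/* Expand one of the paired-single predicate builtins.  ICODE is the
   comparison pattern to use, EXP is the call expression and TARGET is a
   suggested place for the SImode result.  The first call argument selects
   which CR bit (LT, GT, EQ or UN) is copied into the result.  */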
8999static rtx
9000paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9001{
9002 rtx pat, scratch, tmp;
9003 tree form = CALL_EXPR_ARG (exp, 0);
9004 tree arg0 = CALL_EXPR_ARG (exp, 1);
9005 tree arg1 = CALL_EXPR_ARG (exp, 2);
9006 rtx op0 = expand_normal (arg0);
9007 rtx op1 = expand_normal (arg1);
9008 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9009 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9010 int form_int;
9011 enum rtx_code code;
9012
9013 if (TREE_CODE (form) != INTEGER_CST)
9014 {
9015 error ("argument 1 of __builtin_paired_predicate must be a constant");
9016 return const0_rtx;
9017 }
9018 else
9019 form_int = TREE_INT_CST_LOW (form);
9020
9021 gcc_assert (mode0 == mode1);
9022
9023 if (arg0 == error_mark_node || arg1 == error_mark_node)
9024 return const0_rtx;
9025
9026 if (target == 0
9027 || GET_MODE (target) != SImode
9028 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9029 target = gen_reg_rtx (SImode);
9030 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9031 op0 = copy_to_mode_reg (mode0, op0);
9032 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9033 op1 = copy_to_mode_reg (mode1, op1);
9034
9035 scratch = gen_reg_rtx (CCFPmode);
9036
9037 pat = GEN_FCN (icode) (scratch, op0, op1);
9038 if (!pat)
9039 return const0_rtx;
9040
9041 emit_insn (pat);
9042
9043 switch (form_int)
9044 {
9045 /* LT bit. */
9046 case 0:
9047 code = LT;
9048 break;
9049 /* GT bit. */
9050 case 1:
9051 code = GT;
9052 break;
9053 /* EQ bit. */
9054 case 2:
9055 code = EQ;
9056 break;
9057 /* UN bit. */
9058 case 3:
9059 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9060 return target;
9061 default:
9062 error ("argument 1 of __builtin_paired_predicate is out of range");
9063 return const0_rtx;
9064 }
9065
9066 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9067 emit_move_insn (target, tmp);
9068 return target;
9069}
9070
a3170dc6 9071static rtx
5039610b 9072spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9073{
9074 rtx pat, scratch, tmp;
5039610b
SL
9075 tree form = CALL_EXPR_ARG (exp, 0);
9076 tree arg0 = CALL_EXPR_ARG (exp, 1);
9077 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9078 rtx op0 = expand_normal (arg0);
9079 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9080 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9081 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9082 int form_int;
9083 enum rtx_code code;
9084
9085 if (TREE_CODE (form) != INTEGER_CST)
9086 {
9087 error ("argument 1 of __builtin_spe_predicate must be a constant");
9088 return const0_rtx;
9089 }
9090 else
9091 form_int = TREE_INT_CST_LOW (form);
9092
37409796 9093 gcc_assert (mode0 == mode1);
a3170dc6
AH
9094
9095 if (arg0 == error_mark_node || arg1 == error_mark_node)
9096 return const0_rtx;
9097
9098 if (target == 0
9099 || GET_MODE (target) != SImode
9100 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9101 target = gen_reg_rtx (SImode);
9102
9103 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9104 op0 = copy_to_mode_reg (mode0, op0);
9105 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9106 op1 = copy_to_mode_reg (mode1, op1);
9107
9108 scratch = gen_reg_rtx (CCmode);
9109
9110 pat = GEN_FCN (icode) (scratch, op0, op1);
9111 if (! pat)
9112 return const0_rtx;
9113 emit_insn (pat);
9114
9115 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9116 _lower_. We use one compare, but look in different bits of the
9117 CR for each variant.
9118
9119 There are 2 elements in each SPE simd type (upper/lower). The CR
9120 bits are set as follows:
9121
9122 BIT0 | BIT 1 | BIT 2 | BIT 3
9123 U | L | (U | L) | (U & L)
9124
9125 So, for an "all" relationship, BIT 3 would be set.
9126 For an "any" relationship, BIT 2 would be set. Etc.
9127
9128 Following traditional nomenclature, these bits map to:
9129
9130 BIT0 | BIT 1 | BIT 2 | BIT 3
9131 LT | GT | EQ | OV
9132
     Later, we will generate rtl to look in the LT/GT/EQ/OV bits.  */
9135
9136 switch (form_int)
9137 {
9138 /* All variant. OV bit. */
9139 case 0:
9140 /* We need to get to the OV bit, which is the ORDERED bit. We
9141 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9142 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9143 So let's just use another pattern. */
9144 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9145 return target;
9146 /* Any variant. EQ bit. */
9147 case 1:
9148 code = EQ;
9149 break;
9150 /* Upper variant. LT bit. */
9151 case 2:
9152 code = LT;
9153 break;
9154 /* Lower variant. GT bit. */
9155 case 3:
9156 code = GT;
9157 break;
9158 default:
9159 error ("argument 1 of __builtin_spe_predicate is out of range");
9160 return const0_rtx;
9161 }
9162
9163 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9164 emit_move_insn (target, tmp);
9165
9166 return target;
9167}
9168
9169/* The evsel builtins look like this:
9170
9171 e = __builtin_spe_evsel_OP (a, b, c, d);
9172
9173 and work like this:
9174
9175 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9176 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9177*/
9178
9179static rtx
5039610b 9180spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9181{
9182 rtx pat, scratch;
5039610b
SL
9183 tree arg0 = CALL_EXPR_ARG (exp, 0);
9184 tree arg1 = CALL_EXPR_ARG (exp, 1);
9185 tree arg2 = CALL_EXPR_ARG (exp, 2);
9186 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9187 rtx op0 = expand_normal (arg0);
9188 rtx op1 = expand_normal (arg1);
9189 rtx op2 = expand_normal (arg2);
9190 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9191 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9192 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9193
37409796 9194 gcc_assert (mode0 == mode1);
a3170dc6
AH
9195
9196 if (arg0 == error_mark_node || arg1 == error_mark_node
9197 || arg2 == error_mark_node || arg3 == error_mark_node)
9198 return const0_rtx;
9199
9200 if (target == 0
9201 || GET_MODE (target) != mode0
9202 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9203 target = gen_reg_rtx (mode0);
9204
9205 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9206 op0 = copy_to_mode_reg (mode0, op0);
9207 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9208 op1 = copy_to_mode_reg (mode0, op1);
9209 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9210 op2 = copy_to_mode_reg (mode0, op2);
9211 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9212 op3 = copy_to_mode_reg (mode0, op3);
9213
9214 /* Generate the compare. */
9215 scratch = gen_reg_rtx (CCmode);
9216 pat = GEN_FCN (icode) (scratch, op0, op1);
9217 if (! pat)
9218 return const0_rtx;
9219 emit_insn (pat);
9220
9221 if (mode0 == V2SImode)
9222 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9223 else
9224 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9225
9226 return target;
9227}
9228
0ac081f6
AH
9229/* Expand an expression EXP that calls a built-in function,
9230 with result going to TARGET if that's convenient
9231 (and in mode MODE if that's convenient).
9232 SUBTARGET may be used as the target for computing one of EXP's operands.
9233 IGNORE is nonzero if the value is to be ignored. */
9234
9235static rtx
a2369ed3 9236rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9237 enum machine_mode mode ATTRIBUTE_UNUSED,
9238 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9239{
5039610b 9240 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9241 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9242 const struct builtin_description *d;
92898235
AH
9243 size_t i;
9244 rtx ret;
9245 bool success;
f676971a 9246
9c78b944
DE
9247 if (fcode == RS6000_BUILTIN_RECIP)
9248 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9249
9250 if (fcode == RS6000_BUILTIN_RECIPF)
9251 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9252
9253 if (fcode == RS6000_BUILTIN_RSQRTF)
9254 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9255
7ccf35ed
DN
9256 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9257 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9258 {
9259 int icode = (int) CODE_FOR_altivec_lvsr;
9260 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9261 enum machine_mode mode = insn_data[icode].operand[1].mode;
9262 tree arg;
9263 rtx op, addr, pat;
9264
37409796 9265 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9266
5039610b 9267 arg = CALL_EXPR_ARG (exp, 0);
37409796 9268 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9269 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9270 addr = memory_address (mode, op);
9271 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9272 op = addr;
9273 else
9274 {
9275 /* For the load case need to negate the address. */
9276 op = gen_reg_rtx (GET_MODE (addr));
9277 emit_insn (gen_rtx_SET (VOIDmode, op,
9278 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9279 }
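      /* Added note (an assumption, not in the original code): lvsr of the
	 negated address is the usual AltiVec idiom for building the permute
	 control vector consumed by the realigned-load sequence, which is
	 presumably why only the load case negates ADDR here.  */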
7ccf35ed
DN
9280 op = gen_rtx_MEM (mode, op);
9281
9282 if (target == 0
9283 || GET_MODE (target) != tmode
9284 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9285 target = gen_reg_rtx (tmode);
9286
9287 /*pat = gen_altivec_lvsr (target, op);*/
9288 pat = GEN_FCN (icode) (target, op);
9289 if (!pat)
9290 return 0;
9291 emit_insn (pat);
9292
9293 return target;
9294 }
5039610b
SL
9295
9296 /* FIXME: There's got to be a nicer way to handle this case than
9297 constructing a new CALL_EXPR. */
f57d17f1 9298 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9299 || fcode == ALTIVEC_BUILTIN_VCFSX
9300 || fcode == ALTIVEC_BUILTIN_VCTUXS
9301 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9302 {
5039610b
SL
9303 if (call_expr_nargs (exp) == 1)
9304 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9305 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9306 }
7ccf35ed 9307
0ac081f6 9308 if (TARGET_ALTIVEC)
92898235
AH
9309 {
9310 ret = altivec_expand_builtin (exp, target, &success);
9311
a3170dc6
AH
9312 if (success)
9313 return ret;
9314 }
9315 if (TARGET_SPE)
9316 {
9317 ret = spe_expand_builtin (exp, target, &success);
9318
92898235
AH
9319 if (success)
9320 return ret;
9321 }
96038623
DE
9322 if (TARGET_PAIRED_FLOAT)
9323 {
9324 ret = paired_expand_builtin (exp, target, &success);
9325
9326 if (success)
9327 return ret;
9328 }
92898235 9329
96038623 9330 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9331
37409796
NS
9332 /* Handle simple unary operations. */
9333 d = (struct builtin_description *) bdesc_1arg;
9334 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9335 if (d->code == fcode)
5039610b 9336 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9337
37409796
NS
9338 /* Handle simple binary operations. */
9339 d = (struct builtin_description *) bdesc_2arg;
9340 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9341 if (d->code == fcode)
5039610b 9342 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9343
37409796 9344 /* Handle simple ternary operations. */
586de218 9345 d = bdesc_3arg;
37409796
NS
9346 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9347 if (d->code == fcode)
5039610b 9348 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9349
37409796 9350 gcc_unreachable ();
9351}
9352
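/* Set up the builtins for this target: create the distinct vector, bool and
   pixel type nodes, register the "__bool", "__pixel" and "__vector ..."
   type names with the front end, and define the SPE, paired-single and
   AltiVec builtin functions permitted by the selected target flags.  */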
9353static void
863d938c 9354rs6000_init_builtins (void)
0ac081f6 9355{
5afaa917
NS
9356 tree tdecl;
9357
4a5eab38
PB
9358 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9359 V2SF_type_node = build_vector_type (float_type_node, 2);
9360 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9361 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9362 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9363 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9364 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9365
9366 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9367 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9368 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9369
7c62e993
PB
9370 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9371 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9372 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
b6fc2cdb 9373 opaque_V4SI_type_node = build_opaque_vector_type (intSI_type_node, 4);
3fdaa45a 9374
8bb418a3
ZL
9375 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9376 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9377 'vector unsigned short'. */
9378
8dd16ecc
NS
9379 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9380 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9381 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9382 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9383
58646b77
PB
9384 long_integer_type_internal_node = long_integer_type_node;
9385 long_unsigned_type_internal_node = long_unsigned_type_node;
9386 intQI_type_internal_node = intQI_type_node;
9387 uintQI_type_internal_node = unsigned_intQI_type_node;
9388 intHI_type_internal_node = intHI_type_node;
9389 uintHI_type_internal_node = unsigned_intHI_type_node;
9390 intSI_type_internal_node = intSI_type_node;
9391 uintSI_type_internal_node = unsigned_intSI_type_node;
9392 float_type_internal_node = float_type_node;
9393 void_type_internal_node = void_type_node;
9394
71856685
AH
9395 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9396 get_identifier ("__bool char"),
5afaa917
NS
9397 bool_char_type_node);
9398 TYPE_NAME (bool_char_type_node) = tdecl;
9399 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9400 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9401 get_identifier ("__bool short"),
5afaa917
NS
9402 bool_short_type_node);
9403 TYPE_NAME (bool_short_type_node) = tdecl;
9404 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9405 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9406 get_identifier ("__bool int"),
5afaa917
NS
9407 bool_int_type_node);
9408 TYPE_NAME (bool_int_type_node) = tdecl;
9409 (*lang_hooks.decls.pushdecl) (tdecl);
71856685 9410 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("__pixel"),
5afaa917
NS
9411 pixel_type_node);
9412 TYPE_NAME (pixel_type_node) = tdecl;
9413 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9414
4a5eab38
PB
9415 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9416 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9417 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9418 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3 9419
71856685
AH
9420 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9421 get_identifier ("__vector unsigned char"),
5afaa917
NS
9422 unsigned_V16QI_type_node);
9423 TYPE_NAME (unsigned_V16QI_type_node) = tdecl;
9424 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9425 tdecl = build_decl (BUILTINS_LOCATION,
9426 TYPE_DECL, get_identifier ("__vector signed char"),
5afaa917
NS
9427 V16QI_type_node);
9428 TYPE_NAME (V16QI_type_node) = tdecl;
9429 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9430 tdecl = build_decl (BUILTINS_LOCATION,
9431 TYPE_DECL, get_identifier ("__vector __bool char"),
5afaa917
NS
9432 bool_V16QI_type_node);
9433 TYPE_NAME ( bool_V16QI_type_node) = tdecl;
9434 (*lang_hooks.decls.pushdecl) (tdecl);
9435
71856685
AH
9436 tdecl = build_decl (BUILTINS_LOCATION,
9437 TYPE_DECL, get_identifier ("__vector unsigned short"),
5afaa917
NS
9438 unsigned_V8HI_type_node);
9439 TYPE_NAME (unsigned_V8HI_type_node) = tdecl;
9440 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9441 tdecl = build_decl (BUILTINS_LOCATION,
9442 TYPE_DECL, get_identifier ("__vector signed short"),
5afaa917
NS
9443 V8HI_type_node);
9444 TYPE_NAME (V8HI_type_node) = tdecl;
9445 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9446 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9447 get_identifier ("__vector __bool short"),
5afaa917
NS
9448 bool_V8HI_type_node);
9449 TYPE_NAME (bool_V8HI_type_node) = tdecl;
9450 (*lang_hooks.decls.pushdecl) (tdecl);
9451
71856685
AH
9452 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9453 get_identifier ("__vector unsigned int"),
5afaa917
NS
9454 unsigned_V4SI_type_node);
9455 TYPE_NAME (unsigned_V4SI_type_node) = tdecl;
9456 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9457 tdecl = build_decl (BUILTINS_LOCATION,
9458 TYPE_DECL, get_identifier ("__vector signed int"),
5afaa917
NS
9459 V4SI_type_node);
9460 TYPE_NAME (V4SI_type_node) = tdecl;
9461 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9462 tdecl = build_decl (BUILTINS_LOCATION,
9463 TYPE_DECL, get_identifier ("__vector __bool int"),
5afaa917
NS
9464 bool_V4SI_type_node);
9465 TYPE_NAME (bool_V4SI_type_node) = tdecl;
9466 (*lang_hooks.decls.pushdecl) (tdecl);
9467
71856685
AH
9468 tdecl = build_decl (BUILTINS_LOCATION,
9469 TYPE_DECL, get_identifier ("__vector float"),
5afaa917
NS
9470 V4SF_type_node);
9471 TYPE_NAME (V4SF_type_node) = tdecl;
9472 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9473 tdecl = build_decl (BUILTINS_LOCATION,
9474 TYPE_DECL, get_identifier ("__vector __pixel"),
5afaa917
NS
9475 pixel_V8HI_type_node);
9476 TYPE_NAME (pixel_V8HI_type_node) = tdecl;
9477 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9478
96038623
DE
9479 if (TARGET_PAIRED_FLOAT)
9480 paired_init_builtins ();
a3170dc6 9481 if (TARGET_SPE)
3fdaa45a 9482 spe_init_builtins ();
0ac081f6
AH
9483 if (TARGET_ALTIVEC)
9484 altivec_init_builtins ();
96038623 9485 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9486 rs6000_common_init_builtins ();
9c78b944
DE
9487 if (TARGET_PPC_GFXOPT)
9488 {
9489 tree ftype = build_function_type_list (float_type_node,
9490 float_type_node,
9491 float_type_node,
9492 NULL_TREE);
9493 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9494 RS6000_BUILTIN_RECIPF);
9495
9496 ftype = build_function_type_list (float_type_node,
9497 float_type_node,
9498 NULL_TREE);
9499 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9500 RS6000_BUILTIN_RSQRTF);
9501 }
9502 if (TARGET_POPCNTB)
9503 {
9504 tree ftype = build_function_type_list (double_type_node,
9505 double_type_node,
9506 double_type_node,
9507 NULL_TREE);
9508 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9509 RS6000_BUILTIN_RECIP);
9510
9511 }
69ca3549
DE
9512
9513#if TARGET_XCOFF
9514 /* AIX libm provides clog as __clog. */
9515 if (built_in_decls [BUILT_IN_CLOG])
9516 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9517#endif
fb220235
FXC
9518
9519#ifdef SUBTARGET_INIT_BUILTINS
9520 SUBTARGET_INIT_BUILTINS;
9521#endif
0ac081f6
AH
9522}
9523
a3170dc6
AH
9524/* Search through a set of builtins and enable the mask bits.
9525 DESC is an array of builtins.
b6d08ca1 9526 SIZE is the total number of builtins.
a3170dc6
AH
9527 START is the builtin enum at which to start.
9528 END is the builtin enum at which to end. */
0ac081f6 9529static void
a2369ed3 9530enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9531 enum rs6000_builtins start,
a2369ed3 9532 enum rs6000_builtins end)
a3170dc6
AH
9533{
9534 int i;
9535
9536 for (i = 0; i < size; ++i)
9537 if (desc[i].code == start)
9538 break;
9539
9540 if (i == size)
9541 return;
9542
9543 for (; i < size; ++i)
9544 {
9545 /* Flip all the bits on. */
9546 desc[i].mask = target_flags;
9547 if (desc[i].code == end)
9548 break;
9549 }
9550}
9551
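/* Create the SPE builtin functions.  The irregular function types are built
   by hand here; the simple unary and binary operations are registered in
   rs6000_common_init_builtins, so this function only enables their mask
   bits before defining the irregular builtins (loads, stores, predicates,
   evsel and the SPEFSCR accessors).  */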
9552static void
863d938c 9553spe_init_builtins (void)
0ac081f6 9554{
a3170dc6
AH
9555 tree endlink = void_list_node;
9556 tree puint_type_node = build_pointer_type (unsigned_type_node);
9557 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9558 struct builtin_description *d;
0ac081f6
AH
9559 size_t i;
9560
a3170dc6
AH
9561 tree v2si_ftype_4_v2si
9562 = build_function_type
3fdaa45a
AH
9563 (opaque_V2SI_type_node,
9564 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9565 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9566 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9567 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9568 endlink)))));
9569
9570 tree v2sf_ftype_4_v2sf
9571 = build_function_type
3fdaa45a
AH
9572 (opaque_V2SF_type_node,
9573 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9574 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9575 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9576 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9577 endlink)))));
9578
9579 tree int_ftype_int_v2si_v2si
9580 = build_function_type
9581 (integer_type_node,
9582 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9583 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9584 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9585 endlink))));
9586
9587 tree int_ftype_int_v2sf_v2sf
9588 = build_function_type
9589 (integer_type_node,
9590 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9591 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9592 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9593 endlink))));
9594
9595 tree void_ftype_v2si_puint_int
9596 = build_function_type (void_type_node,
3fdaa45a 9597 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9598 tree_cons (NULL_TREE, puint_type_node,
9599 tree_cons (NULL_TREE,
9600 integer_type_node,
9601 endlink))));
9602
9603 tree void_ftype_v2si_puint_char
9604 = build_function_type (void_type_node,
3fdaa45a 9605 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9606 tree_cons (NULL_TREE, puint_type_node,
9607 tree_cons (NULL_TREE,
9608 char_type_node,
9609 endlink))));
9610
9611 tree void_ftype_v2si_pv2si_int
9612 = build_function_type (void_type_node,
3fdaa45a 9613 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9614 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9615 tree_cons (NULL_TREE,
9616 integer_type_node,
9617 endlink))));
9618
9619 tree void_ftype_v2si_pv2si_char
9620 = build_function_type (void_type_node,
3fdaa45a 9621 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9622 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9623 tree_cons (NULL_TREE,
9624 char_type_node,
9625 endlink))));
9626
9627 tree void_ftype_int
9628 = build_function_type (void_type_node,
9629 tree_cons (NULL_TREE, integer_type_node, endlink));
9630
9631 tree int_ftype_void
36e8d515 9632 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9633
9634 tree v2si_ftype_pv2si_int
3fdaa45a 9635 = build_function_type (opaque_V2SI_type_node,
6035d635 9636 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9637 tree_cons (NULL_TREE, integer_type_node,
9638 endlink)));
9639
9640 tree v2si_ftype_puint_int
3fdaa45a 9641 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9642 tree_cons (NULL_TREE, puint_type_node,
9643 tree_cons (NULL_TREE, integer_type_node,
9644 endlink)));
9645
9646 tree v2si_ftype_pushort_int
3fdaa45a 9647 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9648 tree_cons (NULL_TREE, pushort_type_node,
9649 tree_cons (NULL_TREE, integer_type_node,
9650 endlink)));
9651
00332c9f
AH
9652 tree v2si_ftype_signed_char
9653 = build_function_type (opaque_V2SI_type_node,
9654 tree_cons (NULL_TREE, signed_char_type_node,
9655 endlink));
9656
a3170dc6
AH
9657 /* The initialization of the simple binary and unary builtins is
9658 done in rs6000_common_init_builtins, but we have to enable the
9659 mask bits here manually because we have run out of `target_flags'
9660 bits. We really need to redesign this mask business. */
9661
9662 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9663 ARRAY_SIZE (bdesc_2arg),
9664 SPE_BUILTIN_EVADDW,
9665 SPE_BUILTIN_EVXOR);
9666 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9667 ARRAY_SIZE (bdesc_1arg),
9668 SPE_BUILTIN_EVABS,
9669 SPE_BUILTIN_EVSUBFUSIAAW);
9670 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9671 ARRAY_SIZE (bdesc_spe_predicates),
9672 SPE_BUILTIN_EVCMPEQ,
9673 SPE_BUILTIN_EVFSTSTLT);
9674 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9675 ARRAY_SIZE (bdesc_spe_evsel),
9676 SPE_BUILTIN_EVSEL_CMPGTS,
9677 SPE_BUILTIN_EVSEL_FSTSTEQ);
9678
36252949 9679 (*lang_hooks.decls.pushdecl)
71856685
AH
9680 (build_decl (BUILTINS_LOCATION, TYPE_DECL,
9681 get_identifier ("__ev64_opaque__"),
36252949
AH
9682 opaque_V2SI_type_node));
9683
a3170dc6 9684 /* Initialize irregular SPE builtins. */
f676971a 9685
a3170dc6
AH
9686 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9687 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9688 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9689 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9690 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9691 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9692 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9693 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9694 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9695 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9696 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9697 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9698 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9699 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9700 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9701 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9702 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9703 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9704
9705 /* Loads. */
9706 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9707 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9708 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9709 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9710 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9711 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9712 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9713 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9714 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9715 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9716 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9717 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9718 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9719 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9720 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9721 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9722 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9723 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9724 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9725 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9726 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9727 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9728
9729 /* Predicates. */
9730 d = (struct builtin_description *) bdesc_spe_predicates;
9731 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9732 {
9733 tree type;
9734
9735 switch (insn_data[d->icode].operand[1].mode)
9736 {
9737 case V2SImode:
9738 type = int_ftype_int_v2si_v2si;
9739 break;
9740 case V2SFmode:
9741 type = int_ftype_int_v2sf_v2sf;
9742 break;
9743 default:
37409796 9744 gcc_unreachable ();
a3170dc6
AH
9745 }
9746
9747 def_builtin (d->mask, d->name, type, d->code);
9748 }
9749
9750 /* Evsel predicates. */
9751 d = (struct builtin_description *) bdesc_spe_evsel;
9752 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9753 {
9754 tree type;
9755
9756 switch (insn_data[d->icode].operand[1].mode)
9757 {
9758 case V2SImode:
9759 type = v2si_ftype_4_v2si;
9760 break;
9761 case V2SFmode:
9762 type = v2sf_ftype_4_v2sf;
9763 break;
9764 default:
37409796 9765 gcc_unreachable ();
a3170dc6
AH
9766 }
9767
9768 def_builtin (d->mask, d->name, type, d->code);
9769 }
9770}
9771
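/* Create the paired-single builtin functions: the lx/stx load and store
   builtins and the comparison predicates listed in bdesc_paired_preds.  */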
9772static void
9773paired_init_builtins (void)
9774{
23a651fc 9775 const struct builtin_description *d;
96038623
DE
9776 size_t i;
9777 tree endlink = void_list_node;
9778
9779 tree int_ftype_int_v2sf_v2sf
9780 = build_function_type
9781 (integer_type_node,
9782 tree_cons (NULL_TREE, integer_type_node,
9783 tree_cons (NULL_TREE, V2SF_type_node,
9784 tree_cons (NULL_TREE, V2SF_type_node,
9785 endlink))));
9786 tree pcfloat_type_node =
9787 build_pointer_type (build_qualified_type
9788 (float_type_node, TYPE_QUAL_CONST));
9789
9790 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9791 long_integer_type_node,
9792 pcfloat_type_node,
9793 NULL_TREE);
9794 tree void_ftype_v2sf_long_pcfloat =
9795 build_function_type_list (void_type_node,
9796 V2SF_type_node,
9797 long_integer_type_node,
9798 pcfloat_type_node,
9799 NULL_TREE);
9800
9801
9802 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9803 PAIRED_BUILTIN_LX);
9804
9805
9806 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9807 PAIRED_BUILTIN_STX);
9808
9809 /* Predicates. */
23a651fc 9810 d = bdesc_paired_preds;
96038623
DE
9811 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9812 {
9813 tree type;
9814
9815 switch (insn_data[d->icode].operand[1].mode)
9816 {
9817 case V2SFmode:
9818 type = int_ftype_int_v2sf_v2sf;
9819 break;
9820 default:
9821 gcc_unreachable ();
9822 }
9823
9824 def_builtin (d->mask, d->name, type, d->code);
9825 }
9826}
9827
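/* Create the AltiVec builtin functions: the internal load/store helpers,
   the lvsl/lvsr and element load/store builtins, the overloaded
   "__builtin_vec_*" entry points, and, when targeting the Cell processor,
   the unaligned lvlx/lvrx and stvlx/stvrx variants.  */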
a3170dc6 9828static void
863d938c 9829altivec_init_builtins (void)
a3170dc6 9830{
586de218
KG
9831 const struct builtin_description *d;
9832 const struct builtin_description_predicates *dp;
a3170dc6 9833 size_t i;
7a4eca66
DE
9834 tree ftype;
9835
a3170dc6
AH
9836 tree pfloat_type_node = build_pointer_type (float_type_node);
9837 tree pint_type_node = build_pointer_type (integer_type_node);
9838 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9839 tree pchar_type_node = build_pointer_type (char_type_node);
9840
9841 tree pvoid_type_node = build_pointer_type (void_type_node);
9842
0dbc3651
ZW
9843 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9844 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9845 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9846 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9847
9848 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9849
58646b77
PB
9850 tree int_ftype_opaque
9851 = build_function_type_list (integer_type_node,
9852 opaque_V4SI_type_node, NULL_TREE);
266b4890
AP
9853 tree opaque_ftype_opaque
9854 = build_function_type (integer_type_node,
9855 NULL_TREE);
58646b77
PB
9856 tree opaque_ftype_opaque_int
9857 = build_function_type_list (opaque_V4SI_type_node,
9858 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9859 tree opaque_ftype_opaque_opaque_int
9860 = build_function_type_list (opaque_V4SI_type_node,
9861 opaque_V4SI_type_node, opaque_V4SI_type_node,
9862 integer_type_node, NULL_TREE);
9863 tree int_ftype_int_opaque_opaque
9864 = build_function_type_list (integer_type_node,
9865 integer_type_node, opaque_V4SI_type_node,
9866 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9867 tree int_ftype_int_v4si_v4si
9868 = build_function_type_list (integer_type_node,
9869 integer_type_node, V4SI_type_node,
9870 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9871 tree v4sf_ftype_pcfloat
9872 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9873 tree void_ftype_pfloat_v4sf
b4de2f7d 9874 = build_function_type_list (void_type_node,
a3170dc6 9875 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9876 tree v4si_ftype_pcint
9877 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9878 tree void_ftype_pint_v4si
b4de2f7d
AH
9879 = build_function_type_list (void_type_node,
9880 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9881 tree v8hi_ftype_pcshort
9882 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9883 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9884 = build_function_type_list (void_type_node,
9885 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9886 tree v16qi_ftype_pcchar
9887 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9888 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9889 = build_function_type_list (void_type_node,
9890 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9891 tree void_ftype_v4si
b4de2f7d 9892 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9893 tree v8hi_ftype_void
9894 = build_function_type (V8HI_type_node, void_list_node);
9895 tree void_ftype_void
9896 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9897 tree void_ftype_int
9898 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9899
58646b77
PB
9900 tree opaque_ftype_long_pcvoid
9901 = build_function_type_list (opaque_V4SI_type_node,
9902 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9903 tree v16qi_ftype_long_pcvoid
a3170dc6 9904 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9905 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9906 tree v8hi_ftype_long_pcvoid
a3170dc6 9907 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9908 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9909 tree v4si_ftype_long_pcvoid
a3170dc6 9910 = build_function_type_list (V4SI_type_node,
b4a62fa0 9911 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9912
58646b77
PB
9913 tree void_ftype_opaque_long_pvoid
9914 = build_function_type_list (void_type_node,
9915 opaque_V4SI_type_node, long_integer_type_node,
9916 pvoid_type_node, NULL_TREE);
b4a62fa0 9917 tree void_ftype_v4si_long_pvoid
b4de2f7d 9918 = build_function_type_list (void_type_node,
b4a62fa0 9919 V4SI_type_node, long_integer_type_node,
b4de2f7d 9920 pvoid_type_node, NULL_TREE);
b4a62fa0 9921 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9922 = build_function_type_list (void_type_node,
b4a62fa0 9923 V16QI_type_node, long_integer_type_node,
b4de2f7d 9924 pvoid_type_node, NULL_TREE);
b4a62fa0 9925 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9926 = build_function_type_list (void_type_node,
b4a62fa0 9927 V8HI_type_node, long_integer_type_node,
b4de2f7d 9928 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9929 tree int_ftype_int_v8hi_v8hi
9930 = build_function_type_list (integer_type_node,
9931 integer_type_node, V8HI_type_node,
9932 V8HI_type_node, NULL_TREE);
9933 tree int_ftype_int_v16qi_v16qi
9934 = build_function_type_list (integer_type_node,
9935 integer_type_node, V16QI_type_node,
9936 V16QI_type_node, NULL_TREE);
9937 tree int_ftype_int_v4sf_v4sf
9938 = build_function_type_list (integer_type_node,
9939 integer_type_node, V4SF_type_node,
9940 V4SF_type_node, NULL_TREE);
9941 tree v4si_ftype_v4si
9942 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9943 tree v8hi_ftype_v8hi
9944 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9945 tree v16qi_ftype_v16qi
9946 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9947 tree v4sf_ftype_v4sf
9948 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9949 tree void_ftype_pcvoid_int_int
a3170dc6 9950 = build_function_type_list (void_type_node,
0dbc3651 9951 pcvoid_type_node, integer_type_node,
8bb418a3 9952 integer_type_node, NULL_TREE);
8bb418a3 9953
0dbc3651
ZW
9954 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9955 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9956 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9957 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9958 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9959 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9961 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9963 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9965 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9967 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9969 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9971 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9973 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9974 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9975 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9976 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9977 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9978 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9979 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9980 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9981 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9982 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9983 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9984 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9985 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9986 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9987 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9988 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9989 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9990 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9991 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9992 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9993 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9994 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9995 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9996 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9997 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9998 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9999 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
10000
0b61703c
AP
10001 if (rs6000_cpu == PROCESSOR_CELL)
10002 {
10003 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
10004 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
10005 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
10006 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
10007
10008 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
10009 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
10010 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
10011 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
10012
10013 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
10014 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10015 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10016 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10017
10018 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10019 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10020 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10021 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10022 }
58646b77 10023 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
266b4890
AP
10024 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10025 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
58646b77
PB
10026
10027 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10028 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
266b4890
AP
10029 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10030 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
58646b77
PB
10031 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10032 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10033 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10034 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10035 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10036 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10037 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10038 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 10039
a3170dc6 10040 /* Add the DST variants. */
586de218 10041 d = bdesc_dst;
a3170dc6 10042 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 10043 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
10044
10045 /* Initialize the predicates. */
586de218 10046 dp = bdesc_altivec_preds;
a3170dc6
AH
10047 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10048 {
10049 enum machine_mode mode1;
10050 tree type;
58646b77
PB
10051 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10052 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10053
58646b77
PB
10054 if (is_overloaded)
10055 mode1 = VOIDmode;
10056 else
10057 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
10058
10059 switch (mode1)
10060 {
58646b77
PB
10061 case VOIDmode:
10062 type = int_ftype_int_opaque_opaque;
10063 break;
a3170dc6
AH
10064 case V4SImode:
10065 type = int_ftype_int_v4si_v4si;
10066 break;
10067 case V8HImode:
10068 type = int_ftype_int_v8hi_v8hi;
10069 break;
10070 case V16QImode:
10071 type = int_ftype_int_v16qi_v16qi;
10072 break;
10073 case V4SFmode:
10074 type = int_ftype_int_v4sf_v4sf;
10075 break;
10076 default:
37409796 10077 gcc_unreachable ();
a3170dc6 10078 }
f676971a 10079
a3170dc6
AH
10080 def_builtin (dp->mask, dp->name, type, dp->code);
10081 }
10082
10083 /* Initialize the abs* operators. */
586de218 10084 d = bdesc_abs;
a3170dc6
AH
10085 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10086 {
10087 enum machine_mode mode0;
10088 tree type;
10089
10090 mode0 = insn_data[d->icode].operand[0].mode;
10091
10092 switch (mode0)
10093 {
10094 case V4SImode:
10095 type = v4si_ftype_v4si;
10096 break;
10097 case V8HImode:
10098 type = v8hi_ftype_v8hi;
10099 break;
10100 case V16QImode:
10101 type = v16qi_ftype_v16qi;
10102 break;
10103 case V4SFmode:
10104 type = v4sf_ftype_v4sf;
10105 break;
10106 default:
37409796 10107 gcc_unreachable ();
a3170dc6 10108 }
f676971a 10109
a3170dc6
AH
10110 def_builtin (d->mask, d->name, type, d->code);
10111 }
7ccf35ed 10112
13c62176
DN
10113 if (TARGET_ALTIVEC)
10114 {
10115 tree decl;
10116
10117 /* Initialize target builtin that implements
10118 targetm.vectorize.builtin_mask_for_load. */
10119
c79efc4d
RÁE
10120 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10121 v16qi_ftype_long_pcvoid,
10122 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
10123 BUILT_IN_MD, NULL, NULL_TREE);
10124 TREE_READONLY (decl) = 1;
13c62176
DN
10125 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10126 altivec_builtin_mask_for_load = decl;
13c62176 10127 }
7a4eca66
DE
10128
10129 /* Access to the vec_init patterns. */
10130 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10131 integer_type_node, integer_type_node,
10132 integer_type_node, NULL_TREE);
10133 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10134 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10135
10136 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10137 short_integer_type_node,
10138 short_integer_type_node,
10139 short_integer_type_node,
10140 short_integer_type_node,
10141 short_integer_type_node,
10142 short_integer_type_node,
10143 short_integer_type_node, NULL_TREE);
10144 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10145 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10146
10147 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10148 char_type_node, char_type_node,
10149 char_type_node, char_type_node,
10150 char_type_node, char_type_node,
10151 char_type_node, char_type_node,
10152 char_type_node, char_type_node,
10153 char_type_node, char_type_node,
10154 char_type_node, char_type_node,
10155 char_type_node, NULL_TREE);
10156 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10157 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10158
10159 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10160 float_type_node, float_type_node,
10161 float_type_node, NULL_TREE);
10162 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10163 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10164
10165 /* Access to the vec_set patterns. */
10166 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10167 intSI_type_node,
10168 integer_type_node, NULL_TREE);
10169 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10170 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10171
10172 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10173 intHI_type_node,
10174 integer_type_node, NULL_TREE);
10175 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10176 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10177
10178 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
10179 intQI_type_node,
10180 integer_type_node, NULL_TREE);
10181 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10182 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10183
10184 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10185 float_type_node,
10186 integer_type_node, NULL_TREE);
10187 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10188 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10189
10190 /* Access to the vec_extract patterns. */
10191 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10192 integer_type_node, NULL_TREE);
10193 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10194 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10195
10196 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10197 integer_type_node, NULL_TREE);
10198 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10199 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10200
10201 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10202 integer_type_node, NULL_TREE);
10203 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10204 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10205
10206 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10207 integer_type_node, NULL_TREE);
10208 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10209 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
10210}
10211
10212static void
863d938c 10213rs6000_common_init_builtins (void)
a3170dc6 10214{
586de218 10215 const struct builtin_description *d;
a3170dc6
AH
10216 size_t i;
10217
96038623
DE
10218 tree v2sf_ftype_v2sf_v2sf_v2sf
10219 = build_function_type_list (V2SF_type_node,
10220 V2SF_type_node, V2SF_type_node,
10221 V2SF_type_node, NULL_TREE);
10222
a3170dc6
AH
10223 tree v4sf_ftype_v4sf_v4sf_v16qi
10224 = build_function_type_list (V4SF_type_node,
10225 V4SF_type_node, V4SF_type_node,
10226 V16QI_type_node, NULL_TREE);
10227 tree v4si_ftype_v4si_v4si_v16qi
10228 = build_function_type_list (V4SI_type_node,
10229 V4SI_type_node, V4SI_type_node,
10230 V16QI_type_node, NULL_TREE);
10231 tree v8hi_ftype_v8hi_v8hi_v16qi
10232 = build_function_type_list (V8HI_type_node,
10233 V8HI_type_node, V8HI_type_node,
10234 V16QI_type_node, NULL_TREE);
10235 tree v16qi_ftype_v16qi_v16qi_v16qi
10236 = build_function_type_list (V16QI_type_node,
10237 V16QI_type_node, V16QI_type_node,
10238 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10239 tree v4si_ftype_int
10240 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10241 tree v8hi_ftype_int
10242 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10243 tree v16qi_ftype_int
10244 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10245 tree v8hi_ftype_v16qi
10246 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10247 tree v4sf_ftype_v4sf
10248 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10249
10250 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10251 = build_function_type_list (opaque_V2SI_type_node,
10252 opaque_V2SI_type_node,
10253 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10254
96038623 10255 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10256 = build_function_type_list (opaque_V2SF_type_node,
10257 opaque_V2SF_type_node,
10258 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10259
96038623
DE
10260 tree v2sf_ftype_v2sf_v2sf
10261 = build_function_type_list (V2SF_type_node,
10262 V2SF_type_node,
10263 V2SF_type_node, NULL_TREE);
10264
10265
a3170dc6 10266 tree v2si_ftype_int_int
2abe3e28 10267 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10268 integer_type_node, integer_type_node,
10269 NULL_TREE);
10270
58646b77
PB
10271 tree opaque_ftype_opaque
10272 = build_function_type_list (opaque_V4SI_type_node,
10273 opaque_V4SI_type_node, NULL_TREE);
10274
a3170dc6 10275 tree v2si_ftype_v2si
2abe3e28
AH
10276 = build_function_type_list (opaque_V2SI_type_node,
10277 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10278
96038623 10279 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10280 = build_function_type_list (opaque_V2SF_type_node,
10281 opaque_V2SF_type_node, NULL_TREE);
f676971a 10282
96038623
DE
10283 tree v2sf_ftype_v2sf
10284 = build_function_type_list (V2SF_type_node,
10285 V2SF_type_node, NULL_TREE);
10286
a3170dc6 10287 tree v2sf_ftype_v2si
2abe3e28
AH
10288 = build_function_type_list (opaque_V2SF_type_node,
10289 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10290
10291 tree v2si_ftype_v2sf
2abe3e28
AH
10292 = build_function_type_list (opaque_V2SI_type_node,
10293 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10294
10295 tree v2si_ftype_v2si_char
2abe3e28
AH
10296 = build_function_type_list (opaque_V2SI_type_node,
10297 opaque_V2SI_type_node,
10298 char_type_node, NULL_TREE);
a3170dc6
AH
10299
10300 tree v2si_ftype_int_char
2abe3e28 10301 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10302 integer_type_node, char_type_node, NULL_TREE);
10303
10304 tree v2si_ftype_char
2abe3e28
AH
10305 = build_function_type_list (opaque_V2SI_type_node,
10306 char_type_node, NULL_TREE);
a3170dc6
AH
10307
10308 tree int_ftype_int_int
10309 = build_function_type_list (integer_type_node,
10310 integer_type_node, integer_type_node,
10311 NULL_TREE);
95385cbb 10312
58646b77
PB
10313 tree opaque_ftype_opaque_opaque
10314 = build_function_type_list (opaque_V4SI_type_node,
10315 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10316 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10317 = build_function_type_list (V4SI_type_node,
10318 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10319 tree v4sf_ftype_v4si_int
b4de2f7d 10320 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10321 V4SI_type_node, integer_type_node, NULL_TREE);
10322 tree v4si_ftype_v4sf_int
b4de2f7d 10323 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10324 V4SF_type_node, integer_type_node, NULL_TREE);
10325 tree v4si_ftype_v4si_int
b4de2f7d 10326 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10327 V4SI_type_node, integer_type_node, NULL_TREE);
10328 tree v8hi_ftype_v8hi_int
b4de2f7d 10329 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10330 V8HI_type_node, integer_type_node, NULL_TREE);
10331 tree v16qi_ftype_v16qi_int
b4de2f7d 10332 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10333 V16QI_type_node, integer_type_node, NULL_TREE);
10334 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10335 = build_function_type_list (V16QI_type_node,
10336 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10337 integer_type_node, NULL_TREE);
10338 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10339 = build_function_type_list (V8HI_type_node,
10340 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10341 integer_type_node, NULL_TREE);
10342 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10343 = build_function_type_list (V4SI_type_node,
10344 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10345 integer_type_node, NULL_TREE);
10346 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10347 = build_function_type_list (V4SF_type_node,
10348 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10349 integer_type_node, NULL_TREE);
0ac081f6 10350 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10351 = build_function_type_list (V4SF_type_node,
10352 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10353 tree opaque_ftype_opaque_opaque_opaque
10354 = build_function_type_list (opaque_V4SI_type_node,
10355 opaque_V4SI_type_node, opaque_V4SI_type_node,
10356 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10357 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10358 = build_function_type_list (V4SF_type_node,
10359 V4SF_type_node, V4SF_type_node,
10360 V4SI_type_node, NULL_TREE);
2212663f 10361 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10362 = build_function_type_list (V4SF_type_node,
10363 V4SF_type_node, V4SF_type_node,
10364 V4SF_type_node, NULL_TREE);
f676971a 10365 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10366 = build_function_type_list (V4SI_type_node,
10367 V4SI_type_node, V4SI_type_node,
10368 V4SI_type_node, NULL_TREE);
0ac081f6 10369 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10370 = build_function_type_list (V8HI_type_node,
10371 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10372 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10373 = build_function_type_list (V8HI_type_node,
10374 V8HI_type_node, V8HI_type_node,
10375 V8HI_type_node, NULL_TREE);
c4ad648e 10376 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10377 = build_function_type_list (V4SI_type_node,
10378 V8HI_type_node, V8HI_type_node,
10379 V4SI_type_node, NULL_TREE);
c4ad648e 10380 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10381 = build_function_type_list (V4SI_type_node,
10382 V16QI_type_node, V16QI_type_node,
10383 V4SI_type_node, NULL_TREE);
0ac081f6 10384 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10385 = build_function_type_list (V16QI_type_node,
10386 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10387 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10388 = build_function_type_list (V4SI_type_node,
10389 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10390 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10391 = build_function_type_list (V8HI_type_node,
10392 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10393 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10394 = build_function_type_list (V4SI_type_node,
10395 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10396 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10397 = build_function_type_list (V8HI_type_node,
10398 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10399 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10400 = build_function_type_list (V16QI_type_node,
10401 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10402 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10403 = build_function_type_list (V4SI_type_node,
10404 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10405 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10406 = build_function_type_list (V4SI_type_node,
10407 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10408 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10409 = build_function_type_list (V4SI_type_node,
10410 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10411 tree v4si_ftype_v8hi
10412 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10413 tree int_ftype_v4si_v4si
10414 = build_function_type_list (integer_type_node,
10415 V4SI_type_node, V4SI_type_node, NULL_TREE);
10416 tree int_ftype_v4sf_v4sf
10417 = build_function_type_list (integer_type_node,
10418 V4SF_type_node, V4SF_type_node, NULL_TREE);
10419 tree int_ftype_v16qi_v16qi
10420 = build_function_type_list (integer_type_node,
10421 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10422 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10423 = build_function_type_list (integer_type_node,
10424 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10425
6f317ef3 10426 /* Add the simple ternary operators. */
586de218 10427 d = bdesc_3arg;
ca7558fc 10428 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10429 {
2212663f
DB
10430 enum machine_mode mode0, mode1, mode2, mode3;
10431 tree type;
58646b77
PB
10432 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10433 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10434
58646b77
PB
10435 if (is_overloaded)
10436 {
10437 mode0 = VOIDmode;
10438 mode1 = VOIDmode;
10439 mode2 = VOIDmode;
10440 mode3 = VOIDmode;
10441 }
10442 else
10443 {
10444 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10445 continue;
f676971a 10446
58646b77
PB
10447 mode0 = insn_data[d->icode].operand[0].mode;
10448 mode1 = insn_data[d->icode].operand[1].mode;
10449 mode2 = insn_data[d->icode].operand[2].mode;
10450 mode3 = insn_data[d->icode].operand[3].mode;
10451 }
bb8df8a6 10452
2212663f
DB
10453 /* When all four are of the same mode. */
10454 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10455 {
10456 switch (mode0)
10457 {
58646b77
PB
10458 case VOIDmode:
10459 type = opaque_ftype_opaque_opaque_opaque;
10460 break;
617e0e1d
DB
10461 case V4SImode:
10462 type = v4si_ftype_v4si_v4si_v4si;
10463 break;
2212663f
DB
10464 case V4SFmode:
10465 type = v4sf_ftype_v4sf_v4sf_v4sf;
10466 break;
10467 case V8HImode:
10468 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10469 break;
2212663f
DB
10470 case V16QImode:
10471 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10472 break;
96038623
DE
10473 case V2SFmode:
10474 type = v2sf_ftype_v2sf_v2sf_v2sf;
10475 break;
2212663f 10476 default:
37409796 10477 gcc_unreachable ();
2212663f
DB
10478 }
10479 }
10480 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10481 {
2212663f
DB
10482 switch (mode0)
10483 {
10484 case V4SImode:
10485 type = v4si_ftype_v4si_v4si_v16qi;
10486 break;
10487 case V4SFmode:
10488 type = v4sf_ftype_v4sf_v4sf_v16qi;
10489 break;
10490 case V8HImode:
10491 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10492 break;
2212663f
DB
10493 case V16QImode:
10494 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10495 break;
2212663f 10496 default:
37409796 10497 gcc_unreachable ();
2212663f
DB
10498 }
10499 }
f676971a 10500 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10501 && mode3 == V4SImode)
24408032 10502 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10503 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10504 && mode3 == V4SImode)
24408032 10505 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10506 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10507 && mode3 == V4SImode)
24408032
AH
10508 type = v4sf_ftype_v4sf_v4sf_v4si;
10509
a7b376ee 10510 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10511 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10512 && mode3 == QImode)
b9e4e5d1 10513 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10514
a7b376ee 10515 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10516 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10517 && mode3 == QImode)
b9e4e5d1 10518 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10519
a7b376ee 10520 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10521 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10522 && mode3 == QImode)
b9e4e5d1 10523 type = v4si_ftype_v4si_v4si_int;
24408032 10524
a7b376ee 10525 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10526 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10527 && mode3 == QImode)
b9e4e5d1 10528 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10529
2212663f 10530 else
37409796 10531 gcc_unreachable ();
2212663f
DB
10532
10533 def_builtin (d->mask, d->name, type, d->code);
10534 }
10535
0ac081f6 10536 /* Add the simple binary operators. */
00b960c7 10537 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10538 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10539 {
10540 enum machine_mode mode0, mode1, mode2;
10541 tree type;
58646b77
PB
10542 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10543 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10544
58646b77
PB
10545 if (is_overloaded)
10546 {
10547 mode0 = VOIDmode;
10548 mode1 = VOIDmode;
10549 mode2 = VOIDmode;
10550 }
10551 else
bb8df8a6 10552 {
58646b77
PB
10553 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10554 continue;
f676971a 10555
58646b77
PB
10556 mode0 = insn_data[d->icode].operand[0].mode;
10557 mode1 = insn_data[d->icode].operand[1].mode;
10558 mode2 = insn_data[d->icode].operand[2].mode;
10559 }
0ac081f6
AH
10560
10561 /* When all three operands are of the same mode. */
10562 if (mode0 == mode1 && mode1 == mode2)
10563 {
10564 switch (mode0)
10565 {
58646b77
PB
10566 case VOIDmode:
10567 type = opaque_ftype_opaque_opaque;
10568 break;
0ac081f6
AH
10569 case V4SFmode:
10570 type = v4sf_ftype_v4sf_v4sf;
10571 break;
10572 case V4SImode:
10573 type = v4si_ftype_v4si_v4si;
10574 break;
10575 case V16QImode:
10576 type = v16qi_ftype_v16qi_v16qi;
10577 break;
10578 case V8HImode:
10579 type = v8hi_ftype_v8hi_v8hi;
10580 break;
a3170dc6
AH
10581 case V2SImode:
10582 type = v2si_ftype_v2si_v2si;
10583 break;
96038623
DE
10584 case V2SFmode:
10585 if (TARGET_PAIRED_FLOAT)
10586 type = v2sf_ftype_v2sf_v2sf;
10587 else
10588 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10589 break;
10590 case SImode:
10591 type = int_ftype_int_int;
10592 break;
0ac081f6 10593 default:
37409796 10594 gcc_unreachable ();
0ac081f6
AH
10595 }
10596 }
10597
10598 /* A few other combos we really don't want to do manually. */
10599
10600 /* vint, vfloat, vfloat. */
10601 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10602 type = v4si_ftype_v4sf_v4sf;
10603
10604 /* vshort, vchar, vchar. */
10605 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10606 type = v8hi_ftype_v16qi_v16qi;
10607
10608 /* vint, vshort, vshort. */
10609 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10610 type = v4si_ftype_v8hi_v8hi;
10611
10612 /* vshort, vint, vint. */
10613 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10614 type = v8hi_ftype_v4si_v4si;
10615
10616 /* vchar, vshort, vshort. */
10617 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10618 type = v16qi_ftype_v8hi_v8hi;
10619
10620 /* vint, vchar, vint. */
10621 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10622 type = v4si_ftype_v16qi_v4si;
10623
fa066a23
AH
10624 /* vint, vchar, vchar. */
10625 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10626 type = v4si_ftype_v16qi_v16qi;
10627
0ac081f6
AH
10628 /* vint, vshort, vint. */
10629 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10630 type = v4si_ftype_v8hi_v4si;
f676971a 10631
a7b376ee 10632 /* vint, vint, 5-bit literal. */
2212663f 10633 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10634 type = v4si_ftype_v4si_int;
f676971a 10635
a7b376ee 10636 /* vshort, vshort, 5-bit literal. */
2212663f 10637 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10638 type = v8hi_ftype_v8hi_int;
f676971a 10639
a7b376ee 10640 /* vchar, vchar, 5-bit literal. */
2212663f 10641 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10642 type = v16qi_ftype_v16qi_int;
0ac081f6 10643
a7b376ee 10644 /* vfloat, vint, 5-bit literal. */
617e0e1d 10645 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10646 type = v4sf_ftype_v4si_int;
f676971a 10647
a7b376ee 10648 /* vint, vfloat, 5-bit literal. */
617e0e1d 10649 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10650 type = v4si_ftype_v4sf_int;
617e0e1d 10651
a3170dc6
AH
10652 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10653 type = v2si_ftype_int_int;
10654
10655 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10656 type = v2si_ftype_v2si_char;
10657
10658 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10659 type = v2si_ftype_int_char;
10660
37409796 10661 else
0ac081f6 10662 {
37409796
NS
10663 /* int, x, x. */
10664 gcc_assert (mode0 == SImode);
0ac081f6
AH
10665 switch (mode1)
10666 {
10667 case V4SImode:
10668 type = int_ftype_v4si_v4si;
10669 break;
10670 case V4SFmode:
10671 type = int_ftype_v4sf_v4sf;
10672 break;
10673 case V16QImode:
10674 type = int_ftype_v16qi_v16qi;
10675 break;
10676 case V8HImode:
10677 type = int_ftype_v8hi_v8hi;
10678 break;
10679 default:
37409796 10680 gcc_unreachable ();
0ac081f6
AH
10681 }
10682 }
10683
2212663f
DB
10684 def_builtin (d->mask, d->name, type, d->code);
10685 }
24408032 10686
2212663f
DB
10687 /* Add the simple unary operators. */
10688 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10689 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10690 {
10691 enum machine_mode mode0, mode1;
10692 tree type;
58646b77
PB
10693 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10694 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10695
10696 if (is_overloaded)
10697 {
10698 mode0 = VOIDmode;
10699 mode1 = VOIDmode;
10700 }
10701 else
10702 {
10703 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10704 continue;
bb8df8a6 10705
58646b77
PB
10706 mode0 = insn_data[d->icode].operand[0].mode;
10707 mode1 = insn_data[d->icode].operand[1].mode;
10708 }
2212663f
DB
10709
10710 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10711 type = v4si_ftype_int;
2212663f 10712 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10713 type = v8hi_ftype_int;
2212663f 10714 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10715 type = v16qi_ftype_int;
58646b77
PB
10716 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10717 type = opaque_ftype_opaque;
617e0e1d
DB
10718 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10719 type = v4sf_ftype_v4sf;
20e26713
AH
10720 else if (mode0 == V8HImode && mode1 == V16QImode)
10721 type = v8hi_ftype_v16qi;
10722 else if (mode0 == V4SImode && mode1 == V8HImode)
10723 type = v4si_ftype_v8hi;
a3170dc6
AH
10724 else if (mode0 == V2SImode && mode1 == V2SImode)
10725 type = v2si_ftype_v2si;
10726 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10727 {
10728 if (TARGET_PAIRED_FLOAT)
10729 type = v2sf_ftype_v2sf;
10730 else
10731 type = v2sf_ftype_v2sf_spe;
10732 }
a3170dc6
AH
10733 else if (mode0 == V2SFmode && mode1 == V2SImode)
10734 type = v2sf_ftype_v2si;
10735 else if (mode0 == V2SImode && mode1 == V2SFmode)
10736 type = v2si_ftype_v2sf;
10737 else if (mode0 == V2SImode && mode1 == QImode)
10738 type = v2si_ftype_char;
2212663f 10739 else
37409796 10740 gcc_unreachable ();
2212663f 10741
0ac081f6
AH
10742 def_builtin (d->mask, d->name, type, d->code);
10743 }
10744}
10745
c15c90bb
ZW
10746static void
10747rs6000_init_libfuncs (void)
10748{
602ea4d3
JJ
10749 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10750 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10751 {
602ea4d3
JJ
10752 /* AIX library routines for float->int conversion. */
10753 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10754 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10755 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10756 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10757 }
c15c90bb 10758
602ea4d3 10759 if (!TARGET_IEEEQUAD)
98c41d98 10760 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10761 if (!TARGET_XL_COMPAT)
10762 {
10763 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10764 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10765 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10766 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10767
17caeff2 10768 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10769 {
10770 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10771 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10772 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10773 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10774 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10775 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10776 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10777
10778 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10779 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10780 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10781 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10782 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10783 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10784 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10785 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10786 }
b26941b4
JM
10787
10788 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10789 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10790 }
10791 else
10792 {
10793 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10794 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10795 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10796 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10797 }
c9034561 10798 else
c15c90bb 10799 {
c9034561 10800 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10801
10802 set_optab_libfunc (add_optab, TFmode, "_q_add");
10803 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10804 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10805 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10806 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10807 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10808 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10809
c9034561
ZW
10810 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10811 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10812 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10813 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10814 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10815 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10816
85363ca0
ZW
10817 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10818 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10819 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10820 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10821 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10822 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10823 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10824 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10825 }
10826}
fba73eb1
DE
10827
10828\f
10829/* Expand a block clear operation, and return 1 if successful. Return 0
10830 if we should let the compiler generate normal code.
10831
10832 operands[0] is the destination
10833 operands[1] is the length
57e84f18 10834 operands[3] is the alignment */
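/* operands[2] is not listed above: in the setmem expansion that reaches this
   routine it carries the value to be stored, and (as an assumption about that
   caller) expand_block_clear is only used when that value is zero. */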
fba73eb1
DE
10835
10836int
10837expand_block_clear (rtx operands[])
10838{
10839 rtx orig_dest = operands[0];
10840 rtx bytes_rtx = operands[1];
57e84f18 10841 rtx align_rtx = operands[3];
5514620a
GK
10842 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10843 HOST_WIDE_INT align;
10844 HOST_WIDE_INT bytes;
fba73eb1
DE
10845 int offset;
10846 int clear_bytes;
5514620a 10847 int clear_step;
fba73eb1
DE
10848
10849 /* If this is not a fixed size clear, just call memset */
10850 if (! constp)
10851 return 0;
10852
37409796
NS
10853 /* This must be a fixed size alignment */
10854 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10855 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10856
10857 /* Anything to clear? */
10858 bytes = INTVAL (bytes_rtx);
10859 if (bytes <= 0)
10860 return 1;
10861
5514620a
GK
10862 /* Use the builtin memset after a point, to avoid huge code bloat.
10863 When optimize_size, avoid any significant code bloat; calling
10864 memset is about 4 instructions, so allow for one instruction to
10865 load zero and three to do clearing. */
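  /* For example (illustrative numbers derived from the checks below): with
     AltiVec and 128-bit alignment clear_step is 16, so under optimize_size
     anything larger than 48 bytes (3 * clear_step) is left to memset, and
     otherwise anything larger than 128 bytes (8 * clear_step) is. */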
10866 if (TARGET_ALTIVEC && align >= 128)
10867 clear_step = 16;
10868 else if (TARGET_POWERPC64 && align >= 32)
10869 clear_step = 8;
21d818ff
NF
10870 else if (TARGET_SPE && align >= 64)
10871 clear_step = 8;
5514620a
GK
10872 else
10873 clear_step = 4;
fba73eb1 10874
5514620a
GK
10875 if (optimize_size && bytes > 3 * clear_step)
10876 return 0;
10877 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10878 return 0;
10879
10880 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10881 {
fba73eb1
DE
10882 enum machine_mode mode = BLKmode;
10883 rtx dest;
f676971a 10884
5514620a
GK
10885 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10886 {
10887 clear_bytes = 16;
10888 mode = V4SImode;
10889 }
21d818ff
NF
10890 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10891 {
10892 clear_bytes = 8;
10893 mode = V2SImode;
10894 }
5514620a 10895 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10896 /* 64-bit loads and stores require word-aligned
10897 displacements. */
10898 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10899 {
10900 clear_bytes = 8;
10901 mode = DImode;
fba73eb1 10902 }
5514620a 10903 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10904 { /* clear 4 bytes */
10905 clear_bytes = 4;
10906 mode = SImode;
fba73eb1 10907 }
ec53fc93 10908 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10909 { /* clear 2 bytes */
10910 clear_bytes = 2;
10911 mode = HImode;
fba73eb1
DE
10912 }
10913 else /* clear 1 byte at a time */
10914 {
10915 clear_bytes = 1;
10916 mode = QImode;
fba73eb1 10917 }
f676971a 10918
fba73eb1 10919 dest = adjust_address (orig_dest, mode, offset);
f676971a 10920
5514620a 10921 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10922 }
10923
10924 return 1;
10925}
10926
35aff10b 10927\f
7e69e155
MM
10928/* Expand a block move operation, and return 1 if successful. Return 0
10929 if we should let the compiler generate normal code.
10930
10931 operands[0] is the destination
10932 operands[1] is the source
10933 operands[2] is the length
10934 operands[3] is the alignment */
10935
3933e0e1
MM
10936#define MAX_MOVE_REG 4
10937
7e69e155 10938int
a2369ed3 10939expand_block_move (rtx operands[])
7e69e155 10940{
b6c9286a
MM
10941 rtx orig_dest = operands[0];
10942 rtx orig_src = operands[1];
7e69e155 10943 rtx bytes_rtx = operands[2];
7e69e155 10944 rtx align_rtx = operands[3];
3933e0e1 10945 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10946 int align;
3933e0e1
MM
10947 int bytes;
10948 int offset;
7e69e155 10949 int move_bytes;
cabfd258
GK
10950 rtx stores[MAX_MOVE_REG];
10951 int num_reg = 0;
7e69e155 10952
3933e0e1 10953 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10954 if (! constp)
3933e0e1
MM
10955 return 0;
10956
37409796
NS
10957 /* This must be a fixed size alignment */
10958 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10959 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10960
7e69e155 10961 /* Anything to move? */
3933e0e1
MM
10962 bytes = INTVAL (bytes_rtx);
10963 if (bytes <= 0)
7e69e155
MM
10964 return 1;
10965
ea9982a8 10966 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10967 reg_parm_stack_space. */
ea9982a8 10968 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10969 return 0;
10970
cabfd258 10971 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10972 {
cabfd258 10973 union {
70128ad9 10974 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10975 rtx (*mov) (rtx, rtx);
cabfd258
GK
10976 } gen_func;
10977 enum machine_mode mode = BLKmode;
10978 rtx src, dest;
f676971a 10979
5514620a
GK
10980 /* Altivec first, since it will be faster than a string move
10981 when it applies, and usually not significantly larger. */
10982 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10983 {
10984 move_bytes = 16;
10985 mode = V4SImode;
10986 gen_func.mov = gen_movv4si;
10987 }
21d818ff
NF
10988 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10989 {
10990 move_bytes = 8;
10991 mode = V2SImode;
10992 gen_func.mov = gen_movv2si;
10993 }
5514620a 10994 else if (TARGET_STRING
cabfd258
GK
10995 && bytes > 24 /* move up to 32 bytes at a time */
10996 && ! fixed_regs[5]
10997 && ! fixed_regs[6]
10998 && ! fixed_regs[7]
10999 && ! fixed_regs[8]
11000 && ! fixed_regs[9]
11001 && ! fixed_regs[10]
11002 && ! fixed_regs[11]
11003 && ! fixed_regs[12])
7e69e155 11004 {
cabfd258 11005 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 11006 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
11007 }
11008 else if (TARGET_STRING
11009 && bytes > 16 /* move up to 24 bytes at a time */
11010 && ! fixed_regs[5]
11011 && ! fixed_regs[6]
11012 && ! fixed_regs[7]
11013 && ! fixed_regs[8]
11014 && ! fixed_regs[9]
11015 && ! fixed_regs[10])
11016 {
11017 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 11018 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
11019 }
11020 else if (TARGET_STRING
11021 && bytes > 8 /* move up to 16 bytes at a time */
11022 && ! fixed_regs[5]
11023 && ! fixed_regs[6]
11024 && ! fixed_regs[7]
11025 && ! fixed_regs[8])
11026 {
11027 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 11028 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
11029 }
11030 else if (bytes >= 8 && TARGET_POWERPC64
11031 /* 64-bit loads and stores require word-aligned
11032 displacements. */
fba73eb1 11033 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
11034 {
11035 move_bytes = 8;
11036 mode = DImode;
11037 gen_func.mov = gen_movdi;
11038 }
11039 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11040 { /* move up to 8 bytes at a time */
11041 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 11042 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 11043 }
cd7d9ca4 11044 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
11045 { /* move 4 bytes */
11046 move_bytes = 4;
11047 mode = SImode;
11048 gen_func.mov = gen_movsi;
11049 }
ec53fc93 11050 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
11051 { /* move 2 bytes */
11052 move_bytes = 2;
11053 mode = HImode;
11054 gen_func.mov = gen_movhi;
11055 }
11056 else if (TARGET_STRING && bytes > 1)
11057 { /* move up to 4 bytes at a time */
11058 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11059 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
11060 }
11061 else /* move 1 byte at a time */
11062 {
11063 move_bytes = 1;
11064 mode = QImode;
11065 gen_func.mov = gen_movqi;
11066 }
f676971a 11067
cabfd258
GK
11068 src = adjust_address (orig_src, mode, offset);
11069 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11070
11071 if (mode != BLKmode)
cabfd258
GK
11072 {
11073 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11074
cabfd258
GK
11075 emit_insn ((*gen_func.mov) (tmp_reg, src));
11076 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11077 }
3933e0e1 11078
cabfd258
GK
11079 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11080 {
11081 int i;
11082 for (i = 0; i < num_reg; i++)
11083 emit_insn (stores[i]);
11084 num_reg = 0;
11085 }
35aff10b 11086
cabfd258 11087 if (mode == BLKmode)
7e69e155 11088 {
70128ad9 11089 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11090 patterns require zero offset. */
11091 if (!REG_P (XEXP (src, 0)))
b6c9286a 11092 {
cabfd258
GK
11093 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11094 src = replace_equiv_address (src, src_reg);
b6c9286a 11095 }
cabfd258 11096 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11097
cabfd258 11098 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11099 {
cabfd258
GK
11100 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11101 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11102 }
cabfd258 11103 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11104
70128ad9 11105 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11106 GEN_INT (move_bytes & 31),
11107 align_rtx));
7e69e155 11108 }
7e69e155
MM
11109 }
11110
11111 return 1;
11112}
11113
d62294f5 11114\f
9caa3eb2
DE
11115/* Return a string to perform a load_multiple operation.
11116 operands[0] is the vector.
11117 operands[1] is the source address.
11118 operands[2] is the first destination register. */
11119
11120const char *
a2369ed3 11121rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11122{
11123 /* We have to handle the case where the pseudo used to contain the address
11124 is assigned to one of the output registers. */
11125 int i, j;
11126 int words = XVECLEN (operands[0], 0);
11127 rtx xop[10];
11128
11129 if (XVECLEN (operands[0], 0) == 1)
11130 return "{l|lwz} %2,0(%1)";
11131
11132 for (i = 0; i < words; i++)
11133 if (refers_to_regno_p (REGNO (operands[2]) + i,
11134 REGNO (operands[2]) + i + 1, operands[1], 0))
11135 {
11136 if (i == words-1)
11137 {
11138 xop[0] = GEN_INT (4 * (words-1));
11139 xop[1] = operands[1];
11140 xop[2] = operands[2];
11141 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11142 return "";
11143 }
11144 else if (i == 0)
11145 {
11146 xop[0] = GEN_INT (4 * (words-1));
11147 xop[1] = operands[1];
11148 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11149 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11150 return "";
11151 }
11152 else
11153 {
11154 for (j = 0; j < words; j++)
11155 if (j != i)
11156 {
11157 xop[0] = GEN_INT (j * 4);
11158 xop[1] = operands[1];
11159 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11160 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11161 }
11162 xop[0] = GEN_INT (i * 4);
11163 xop[1] = operands[1];
11164 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11165 return "";
11166 }
11167 }
11168
11169 return "{lsi|lswi} %2,%1,%N0";
11170}
11171
9878760c 11172\f
a4f6c312
SS
11173/* A validation routine: say whether CODE, a condition code, and MODE
11174 match. The other alternatives either don't make sense or should
11175 never be generated. */
39a10a29 11176
48d72335 11177void
a2369ed3 11178validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11179{
37409796
NS
11180 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11181 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11182 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11183
11184 /* These don't make sense. */
37409796
NS
11185 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11186 || mode != CCUNSmode);
39a10a29 11187
37409796
NS
11188 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11189 || mode == CCUNSmode);
39a10a29 11190
37409796
NS
11191 gcc_assert (mode == CCFPmode
11192 || (code != ORDERED && code != UNORDERED
11193 && code != UNEQ && code != LTGT
11194 && code != UNGT && code != UNLT
11195 && code != UNGE && code != UNLE));
f676971a
EC
11196
11197 /* These should never be generated except for
bc9ec0e0 11198 flag_finite_math_only. */
37409796
NS
11199 gcc_assert (mode != CCFPmode
11200 || flag_finite_math_only
11201 || (code != LE && code != GE
11202 && code != UNEQ && code != LTGT
11203 && code != UNGT && code != UNLT));
39a10a29
GK
11204
11205 /* These are invalid; the information is not there. */
37409796 11206 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11207}
11208
9878760c
RK
11209\f
11210/* Return 1 if ANDOP is a mask with no bits set outside the
11211 mask required to convert the result of a rotate insn into a shift
b1765bde 11212 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
11213
11214int
a2369ed3 11215includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11216{
e2c953b6
DE
11217 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11218
11219 shift_mask <<= INTVAL (shiftop);
9878760c 11220
b1765bde 11221 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11222}
11223
11224/* Similar, but for right shift. */
11225
11226int
a2369ed3 11227includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11228{
a7653a2c 11229 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11230
11231 shift_mask >>= INTVAL (shiftop);
11232
b1765bde 11233 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11234}
11235
c5059423
AM
11236/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11237 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11238 significant 0's, then one or more 1's, then zero or more 0's. */
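/* Illustrative example: with SHIFTOP == 4, ANDOP == 0xff0 is accepted
   (exactly four low 0's, a run of 1's, then 0's), while ANDOP == 0xff8 is
   rejected because it has only three low 0's. */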
e2c953b6
DE
11239
11240int
a2369ed3 11241includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11242{
c5059423
AM
11243 if (GET_CODE (andop) == CONST_INT)
11244 {
02071907 11245 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11246
c5059423 11247 c = INTVAL (andop);
02071907 11248 if (c == 0 || c == ~0)
c5059423 11249 return 0;
e2c953b6 11250
02071907 11251 shift_mask = ~0;
c5059423
AM
11252 shift_mask <<= INTVAL (shiftop);
11253
b6d08ca1 11254 /* Find the least significant one bit. */
c5059423
AM
11255 lsb = c & -c;
11256
11257 /* It must coincide with the LSB of the shift mask. */
11258 if (-lsb != shift_mask)
11259 return 0;
e2c953b6 11260
c5059423
AM
11261 /* Invert to look for the next transition (if any). */
11262 c = ~c;
11263
11264 /* Remove the low group of ones (originally low group of zeros). */
11265 c &= -lsb;
11266
11267 /* Again find the lsb, and check we have all 1's above. */
11268 lsb = c & -c;
11269 return c == -lsb;
11270 }
11271 else if (GET_CODE (andop) == CONST_DOUBLE
11272 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11273 {
02071907
AM
11274 HOST_WIDE_INT low, high, lsb;
11275 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11276
11277 low = CONST_DOUBLE_LOW (andop);
11278 if (HOST_BITS_PER_WIDE_INT < 64)
11279 high = CONST_DOUBLE_HIGH (andop);
11280
11281 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11282 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11283 return 0;
11284
11285 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11286 {
02071907 11287 shift_mask_high = ~0;
c5059423
AM
11288 if (INTVAL (shiftop) > 32)
11289 shift_mask_high <<= INTVAL (shiftop) - 32;
11290
11291 lsb = high & -high;
11292
11293 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11294 return 0;
11295
11296 high = ~high;
11297 high &= -lsb;
11298
11299 lsb = high & -high;
11300 return high == -lsb;
11301 }
11302
02071907 11303 shift_mask_low = ~0;
c5059423
AM
11304 shift_mask_low <<= INTVAL (shiftop);
11305
11306 lsb = low & -low;
11307
11308 if (-lsb != shift_mask_low)
11309 return 0;
11310
11311 if (HOST_BITS_PER_WIDE_INT < 64)
11312 high = ~high;
11313 low = ~low;
11314 low &= -lsb;
11315
11316 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11317 {
11318 lsb = high & -high;
11319 return high == -lsb;
11320 }
11321
11322 lsb = low & -low;
11323 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11324 }
11325 else
11326 return 0;
11327}
e2c953b6 11328
c5059423
AM
11329/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11330 to perform a left shift. It must have SHIFTOP or more least
c1207243 11331 significant 0's, with the remainder of the word 1's. */
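/* Illustrative example: with SHIFTOP == 4 a mask with eight low 0's and all
   1's above them (~0 << 8) is accepted, while 0xff00 is rejected because the
   upper bits of the word are not all 1's. */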
e2c953b6 11332
c5059423 11333int
a2369ed3 11334includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11335{
e2c953b6 11336 if (GET_CODE (andop) == CONST_INT)
c5059423 11337 {
02071907 11338 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11339
02071907 11340 shift_mask = ~0;
c5059423
AM
11341 shift_mask <<= INTVAL (shiftop);
11342 c = INTVAL (andop);
11343
c1207243 11344 /* Find the least significant one bit. */
c5059423
AM
11345 lsb = c & -c;
11346
11347 /* It must be covered by the shift mask.
a4f6c312 11348 This test also rejects c == 0. */
c5059423
AM
11349 if ((lsb & shift_mask) == 0)
11350 return 0;
11351
11352 /* Check we have all 1's above the transition, and reject all 1's. */
11353 return c == -lsb && lsb != 1;
11354 }
11355 else if (GET_CODE (andop) == CONST_DOUBLE
11356 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11357 {
02071907 11358 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11359
11360 low = CONST_DOUBLE_LOW (andop);
11361
11362 if (HOST_BITS_PER_WIDE_INT < 64)
11363 {
02071907 11364 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11365
11366 high = CONST_DOUBLE_HIGH (andop);
11367
11368 if (low == 0)
11369 {
02071907 11370 shift_mask_high = ~0;
c5059423
AM
11371 if (INTVAL (shiftop) > 32)
11372 shift_mask_high <<= INTVAL (shiftop) - 32;
11373
11374 lsb = high & -high;
11375
11376 if ((lsb & shift_mask_high) == 0)
11377 return 0;
11378
11379 return high == -lsb;
11380 }
11381 if (high != ~0)
11382 return 0;
11383 }
11384
02071907 11385 shift_mask_low = ~0;
c5059423
AM
11386 shift_mask_low <<= INTVAL (shiftop);
11387
11388 lsb = low & -low;
11389
11390 if ((lsb & shift_mask_low) == 0)
11391 return 0;
11392
11393 return low == -lsb && lsb != 1;
11394 }
e2c953b6 11395 else
c5059423 11396 return 0;
9878760c 11397}
35068b43 11398
11ac38b2
DE
11399/* Return 1 if the operands will generate valid arguments to an rlwimi
11400instruction for an insert with right shift in 64-bit mode. The mask may
11401not start on the first bit or stop on the last bit because the wrap-around
11402effects of the instruction do not correspond to the semantics of the RTL insn. */
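/* Illustrative example: SIZE == 8, START == 40, SHIFT == 4 passes every range
   check below, whereas START == 32 or SIZE + START == 64 does not. */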
11403
11404int
11405insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11406{
429ec7dc
DE
11407 if (INTVAL (startop) > 32
11408 && INTVAL (startop) < 64
11409 && INTVAL (sizeop) > 1
11410 && INTVAL (sizeop) + INTVAL (startop) < 64
11411 && INTVAL (shiftop) > 0
11412 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11413 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11414 return 1;
11415
11416 return 0;
11417}
11418
35068b43 11419/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11420 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11421
11422int
a2369ed3 11423registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11424{
11425 /* We might have been passed a SUBREG. */
f676971a 11426 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11427 return 0;
f676971a 11428
90f81f99
AP
11429 /* We might have been passed non floating point registers. */
11430 if (!FP_REGNO_P (REGNO (reg1))
11431 || !FP_REGNO_P (REGNO (reg2)))
11432 return 0;
35068b43
RK
11433
11434 return (REGNO (reg1) == REGNO (reg2) - 1);
11435}
11436
a4f6c312
SS
11437/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11438 addr1 and addr2 must be in consecutive memory locations
11439 (addr2 == addr1 + 8). */
35068b43
RK
11440
11441int
90f81f99 11442mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11443{
90f81f99 11444 rtx addr1, addr2;
bb8df8a6
EC
11445 unsigned int reg1, reg2;
11446 int offset1, offset2;
35068b43 11447
90f81f99
AP
11448 /* The mems cannot be volatile. */
11449 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11450 return 0;
f676971a 11451
90f81f99
AP
11452 addr1 = XEXP (mem1, 0);
11453 addr2 = XEXP (mem2, 0);
11454
35068b43
RK
11455 /* Extract an offset (if used) from the first addr. */
11456 if (GET_CODE (addr1) == PLUS)
11457 {
11458 /* If not a REG, return zero. */
11459 if (GET_CODE (XEXP (addr1, 0)) != REG)
11460 return 0;
11461 else
11462 {
c4ad648e 11463 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11464 /* The offset must be constant! */
11465 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11466 return 0;
11467 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11468 }
11469 }
11470 else if (GET_CODE (addr1) != REG)
11471 return 0;
11472 else
11473 {
11474 reg1 = REGNO (addr1);
11475 /* This was a simple (mem (reg)) expression. Offset is 0. */
11476 offset1 = 0;
11477 }
11478
bb8df8a6
EC
11479 /* And now for the second addr. */
11480 if (GET_CODE (addr2) == PLUS)
11481 {
11482 /* If not a REG, return zero. */
11483 if (GET_CODE (XEXP (addr2, 0)) != REG)
11484 return 0;
11485 else
11486 {
11487 reg2 = REGNO (XEXP (addr2, 0));
11488 /* The offset must be constant. */
11489 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11490 return 0;
11491 offset2 = INTVAL (XEXP (addr2, 1));
11492 }
11493 }
11494 else if (GET_CODE (addr2) != REG)
35068b43 11495 return 0;
bb8df8a6
EC
11496 else
11497 {
11498 reg2 = REGNO (addr2);
11499 /* This was a simple (mem (reg)) expression. Offset is 0. */
11500 offset2 = 0;
11501 }
35068b43 11502
bb8df8a6
EC
11503 /* Both of these must have the same base register. */
11504 if (reg1 != reg2)
35068b43
RK
11505 return 0;
11506
11507 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11508 if (offset2 != offset1 + 8)
35068b43
RK
11509 return 0;
11510
11511 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11512 instructions. */
11513 return 1;
11514}
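
/* Illustrative sketch, not part of the original file: both addresses must
   use the same base register and differ by exactly 8, and neither MEM may
   be volatile.  The function name is made up for the example.  */
#if 0
static void
rs6000_example_quad_peep_mems (void)
{
  rtx base = gen_rtx_REG (Pmode, 9);
  rtx lo = gen_rtx_MEM (DFmode, base);
  rtx hi = gen_rtx_MEM (DFmode, plus_constant (base, 8));

  mems_ok_for_quad_peep (lo, hi);	/* 1: base and base+8 */
  mems_ok_for_quad_peep (hi, lo);	/* 0: offsets in the wrong order */
}
#endif
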
9878760c 11515\f
e41b2a33
PB
11516
11517rtx
11518rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11519{
11520 static bool eliminated = false;
11521 if (mode != SDmode)
11522 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11523 else
11524 {
11525 rtx mem = cfun->machine->sdmode_stack_slot;
11526 gcc_assert (mem != NULL_RTX);
11527
11528 if (!eliminated)
11529 {
11530 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11531 cfun->machine->sdmode_stack_slot = mem;
11532 eliminated = true;
11533 }
11534 return mem;
11535 }
11536}
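
/* Illustrative sketch, not part of the original file: any mode other than
   SDmode just gets a fresh stack temporary, while SDmode reuses the single
   per-function slot (a DDmode-sized slot viewed as SDmode) created by
   rs6000_alloc_sdmode_stack_slot below.  The function name is made up.  */
#if 0
static void
rs6000_example_secondary_memory (void)
{
  rtx df_slot = rs6000_secondary_memory_needed_rtx (DFmode); /* new slot */
  rtx sd_slot = rs6000_secondary_memory_needed_rtx (SDmode); /* cached slot */
}
#endif
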
11537
11538static tree
11539rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11540{
11541 /* Don't walk into types. */
11542 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11543 {
11544 *walk_subtrees = 0;
11545 return NULL_TREE;
11546 }
11547
11548 switch (TREE_CODE (*tp))
11549 {
11550 case VAR_DECL:
11551 case PARM_DECL:
11552 case FIELD_DECL:
11553 case RESULT_DECL:
48f5b722 11554 case SSA_NAME:
e41b2a33 11555 case REAL_CST:
fdf4f148 11556 case INDIRECT_REF:
a0f39282
JJ
11557 case ALIGN_INDIRECT_REF:
11558 case MISALIGNED_INDIRECT_REF:
fdf4f148 11559 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11560 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11561 return *tp;
11562 break;
11563 default:
11564 break;
11565 }
11566
11567 return NULL_TREE;
11568}
11569
11570
11571/* Allocate a 64-bit stack slot to be used for copying SDmode
11572 values through if this function has any SDmode references. */
11573
11574static void
11575rs6000_alloc_sdmode_stack_slot (void)
11576{
11577 tree t;
11578 basic_block bb;
726a989a 11579 gimple_stmt_iterator gsi;
e41b2a33
PB
11580
11581 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11582
11583 FOR_EACH_BB (bb)
726a989a 11584 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11585 {
726a989a 11586 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11587 if (ret)
11588 {
11589 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11590 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11591 SDmode, 0);
11592 return;
11593 }
11594 }
11595
11596 /* Check for any SDmode parameters of the function. */
11597 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11598 {
11599 if (TREE_TYPE (t) == error_mark_node)
11600 continue;
11601
11602 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11603 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11604 {
11605 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11606 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11607 SDmode, 0);
11608 return;
11609 }
11610 }
11611}
11612
11613static void
11614rs6000_instantiate_decls (void)
11615{
11616 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11617 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11618}
11619
9878760c 11620/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11621 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11622 NO_REGS is returned. */
11623
11624enum reg_class
0a2aaacc 11625rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11626 enum machine_mode mode ATTRIBUTE_UNUSED,
11627 rtx in)
9878760c 11628{
5accd822 11629 int regno;
9878760c 11630
ab82a49f
AP
11631 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11632#if TARGET_MACHO
c4ad648e 11633 && MACHOPIC_INDIRECT
ab82a49f 11634#endif
c4ad648e 11635 ))
46fad5b7
DJ
11636 {
11637 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11638 other than BASE_REGS for TARGET_ELF. So indicate that a
11639 register from BASE_REGS is needed as an intermediate
11640 register.
f676971a 11641
46fad5b7
DJ
11642 On Darwin, pic addresses require a load from memory, which
11643 needs a base register. */
0a2aaacc 11644 if (rclass != BASE_REGS
c4ad648e
AM
11645 && (GET_CODE (in) == SYMBOL_REF
11646 || GET_CODE (in) == HIGH
11647 || GET_CODE (in) == LABEL_REF
11648 || GET_CODE (in) == CONST))
11649 return BASE_REGS;
46fad5b7 11650 }
e7b7998a 11651
5accd822
DE
11652 if (GET_CODE (in) == REG)
11653 {
11654 regno = REGNO (in);
11655 if (regno >= FIRST_PSEUDO_REGISTER)
11656 {
11657 regno = true_regnum (in);
11658 if (regno >= FIRST_PSEUDO_REGISTER)
11659 regno = -1;
11660 }
11661 }
11662 else if (GET_CODE (in) == SUBREG)
11663 {
11664 regno = true_regnum (in);
11665 if (regno >= FIRST_PSEUDO_REGISTER)
11666 regno = -1;
11667 }
11668 else
11669 regno = -1;
11670
9878760c
RK
11671 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11672 into anything. */
0a2aaacc 11673 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11674 || (regno >= 0 && INT_REGNO_P (regno)))
11675 return NO_REGS;
11676
11677 /* Constants, memory, and FP registers can go into FP registers. */
11678 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11679 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11680 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11681
0ac081f6
AH
 11682 /* Memory and AltiVec registers can go into AltiVec registers. */
11683 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11684 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11685 return NO_REGS;
11686
9878760c 11687 /* We can copy among the CR registers. */
0a2aaacc 11688 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11689 && regno >= 0 && CR_REGNO_P (regno))
11690 return NO_REGS;
11691
11692 /* Otherwise, we need GENERAL_REGS. */
11693 return GENERAL_REGS;
11694}
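
/* Illustrative sketch, not part of the original file: a few of the cases
   handled above, assuming an ELF target.  The function and symbol names
   are made up for the example.  */
#if 0
static void
rs6000_example_secondary_reload (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_symbol");

  /* A symbolic operand headed anywhere but BASE_REGS needs a base
     register as an intermediate.  */
  rs6000_secondary_reload_class (FLOAT_REGS, SImode, sym);	/* BASE_REGS */

  /* The same operand going into BASE_REGS itself needs no scratch.  */
  rs6000_secondary_reload_class (BASE_REGS, SImode, sym);	/* NO_REGS */

  /* A copy among CR registers needs nothing extra either.  */
  rs6000_secondary_reload_class (CR_REGS, SImode,
				 gen_rtx_REG (CCmode, CR0_REGNO + 1)); /* NO_REGS */
}
#endif
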
11695\f
11696/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11697 know this is a valid comparison.
9878760c
RK
11698
11699 SCC_P is 1 if this is for an scc. That means that %D will have been
11700 used instead of %C, so the bits will be in different places.
11701
b4ac57ab 11702 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11703
11704int
a2369ed3 11705ccr_bit (rtx op, int scc_p)
9878760c
RK
11706{
11707 enum rtx_code code = GET_CODE (op);
11708 enum machine_mode cc_mode;
11709 int cc_regnum;
11710 int base_bit;
9ebbca7d 11711 rtx reg;
9878760c 11712
ec8e098d 11713 if (!COMPARISON_P (op))
9878760c
RK
11714 return -1;
11715
9ebbca7d
GK
11716 reg = XEXP (op, 0);
11717
37409796 11718 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11719
11720 cc_mode = GET_MODE (reg);
11721 cc_regnum = REGNO (reg);
11722 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11723
39a10a29 11724 validate_condition_mode (code, cc_mode);
c5defebb 11725
b7053a3f
GK
11726 /* When generating a sCOND operation, only positive conditions are
11727 allowed. */
37409796
NS
11728 gcc_assert (!scc_p
11729 || code == EQ || code == GT || code == LT || code == UNORDERED
11730 || code == GTU || code == LTU);
f676971a 11731
9878760c
RK
11732 switch (code)
11733 {
11734 case NE:
11735 return scc_p ? base_bit + 3 : base_bit + 2;
11736 case EQ:
11737 return base_bit + 2;
1c882ea4 11738 case GT: case GTU: case UNLE:
9878760c 11739 return base_bit + 1;
1c882ea4 11740 case LT: case LTU: case UNGE:
9878760c 11741 return base_bit;
1c882ea4
GK
11742 case ORDERED: case UNORDERED:
11743 return base_bit + 3;
9878760c
RK
11744
11745 case GE: case GEU:
39a10a29 11746 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11747 unordered position. So test that bit. For integer, this is ! LT
11748 unless this is an scc insn. */
39a10a29 11749 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11750
11751 case LE: case LEU:
39a10a29 11752 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11753
9878760c 11754 default:
37409796 11755 gcc_unreachable ();
9878760c
RK
11756 }
11757}
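
/* Illustrative sketch, not part of the original file: for a comparison
   against cr2 the base bit is 4 * 2 = 8, so LT tests bit 8, GT bit 9 and
   EQ bit 10; the scc forms move some codes into the 'unordered' slot as
   described above.  The function name is made up for the example.  */
#if 0
static void
rs6000_example_ccr_bit (void)
{
  rtx cr2 = gen_rtx_REG (CCmode, CR0_REGNO + 2);

  ccr_bit (gen_rtx_LT (VOIDmode, cr2, const0_rtx), 0);	/* 8 */
  ccr_bit (gen_rtx_GT (VOIDmode, cr2, const0_rtx), 0);	/* 9 */
  ccr_bit (gen_rtx_EQ (VOIDmode, cr2, const0_rtx), 0);	/* 10 */
}
#endif
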
1ff7789b 11758\f
8d30c4ee 11759/* Return the GOT register. */
1ff7789b 11760
9390387d 11761rtx
a2369ed3 11762rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11763{
a4f6c312
SS
11764 /* The second flow pass currently (June 1999) can't update
11765 regs_ever_live without disturbing other parts of the compiler, so
11766 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11767 if (!can_create_pseudo_p ()
11768 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11769 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11770
e3b5732b 11771 crtl->uses_pic_offset_table = 1;
3cb999d8 11772
1ff7789b
MM
11773 return pic_offset_table_rtx;
11774}
a7df97e6 11775\f
e2500fed
GK
11776/* Function to init struct machine_function.
11777 This will be called, via a pointer variable,
11778 from push_function_context. */
a7df97e6 11779
e2500fed 11780static struct machine_function *
863d938c 11781rs6000_init_machine_status (void)
a7df97e6 11782{
5ead67f6 11783 return GGC_CNEW (machine_function);
a7df97e6 11784}
9878760c 11785\f
0ba1b2ff
AM
11786/* These macros test for integers and extract the low-order bits. */
11787#define INT_P(X) \
11788((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11789 && GET_MODE (X) == VOIDmode)
11790
11791#define INT_LOWPART(X) \
11792 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11793
11794int
a2369ed3 11795extract_MB (rtx op)
0ba1b2ff
AM
11796{
11797 int i;
11798 unsigned long val = INT_LOWPART (op);
11799
11800 /* If the high bit is zero, the value is the first 1 bit we find
11801 from the left. */
11802 if ((val & 0x80000000) == 0)
11803 {
37409796 11804 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11805
11806 i = 1;
11807 while (((val <<= 1) & 0x80000000) == 0)
11808 ++i;
11809 return i;
11810 }
11811
11812 /* If the high bit is set and the low bit is not, or the mask is all
11813 1's, the value is zero. */
11814 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11815 return 0;
11816
11817 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11818 from the right. */
11819 i = 31;
11820 while (((val >>= 1) & 1) != 0)
11821 --i;
11822
11823 return i;
11824}
11825
11826int
a2369ed3 11827extract_ME (rtx op)
0ba1b2ff
AM
11828{
11829 int i;
11830 unsigned long val = INT_LOWPART (op);
11831
11832 /* If the low bit is zero, the value is the first 1 bit we find from
11833 the right. */
11834 if ((val & 1) == 0)
11835 {
37409796 11836 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11837
11838 i = 30;
11839 while (((val >>= 1) & 1) == 0)
11840 --i;
11841
11842 return i;
11843 }
11844
11845 /* If the low bit is set and the high bit is not, or the mask is all
11846 1's, the value is 31. */
11847 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11848 return 31;
11849
11850 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11851 from the left. */
11852 i = 0;
11853 while (((val <<= 1) & 0x80000000) != 0)
11854 ++i;
11855
11856 return i;
11857}
11858
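
/* Illustrative sketch, not part of the original file: MB and ME use the
   IBM bit numbering in which bit 0 is the most significant bit of the
   32-bit word.  The mask 0x0000ff00 covers bits 16..23, and 0xff0000ff
   is a wrap-around mask running from bit 24 round to bit 7.  The function
   name is made up for the example.  */
#if 0
static void
rs6000_example_mask_bits (void)
{
  extract_MB (GEN_INT (0x0000ff00));	/* 16 */
  extract_ME (GEN_INT (0x0000ff00));	/* 23 */
  extract_MB (GEN_INT (0xff0000ff));	/* 24 */
  extract_ME (GEN_INT (0xff0000ff));	/* 7 */
}
#endif
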
c4501e62
JJ
11859/* Locate some local-dynamic symbol still in use by this function
11860 so that we can print its name in some tls_ld pattern. */
11861
11862static const char *
863d938c 11863rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11864{
11865 rtx insn;
11866
11867 if (cfun->machine->some_ld_name)
11868 return cfun->machine->some_ld_name;
11869
11870 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11871 if (INSN_P (insn)
11872 && for_each_rtx (&PATTERN (insn),
11873 rs6000_get_some_local_dynamic_name_1, 0))
11874 return cfun->machine->some_ld_name;
11875
37409796 11876 gcc_unreachable ();
c4501e62
JJ
11877}
11878
11879/* Helper function for rs6000_get_some_local_dynamic_name. */
11880
11881static int
a2369ed3 11882rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11883{
11884 rtx x = *px;
11885
11886 if (GET_CODE (x) == SYMBOL_REF)
11887 {
11888 const char *str = XSTR (x, 0);
11889 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11890 {
11891 cfun->machine->some_ld_name = str;
11892 return 1;
11893 }
11894 }
11895
11896 return 0;
11897}
11898
85b776df
AM
11899/* Write out a function code label. */
11900
11901void
11902rs6000_output_function_entry (FILE *file, const char *fname)
11903{
11904 if (fname[0] != '.')
11905 {
11906 switch (DEFAULT_ABI)
11907 {
11908 default:
37409796 11909 gcc_unreachable ();
85b776df
AM
11910
11911 case ABI_AIX:
11912 if (DOT_SYMBOLS)
11913 putc ('.', file);
11914 else
11915 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11916 break;
11917
11918 case ABI_V4:
11919 case ABI_DARWIN:
11920 break;
11921 }
11922 }
11923 if (TARGET_AIX)
11924 RS6000_OUTPUT_BASENAME (file, fname);
11925 else
11926 assemble_name (file, fname);
11927}
11928
9878760c
RK
11929/* Print an operand. Recognize special options, documented below. */
11930
38c1f2d7 11931#if TARGET_ELF
d9407988 11932#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11933#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11934#else
11935#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11936#define SMALL_DATA_REG 0
ba5e43aa
MM
11937#endif
11938
9878760c 11939void
a2369ed3 11940print_operand (FILE *file, rtx x, int code)
9878760c
RK
11941{
11942 int i;
a260abc9 11943 HOST_WIDE_INT val;
0ba1b2ff 11944 unsigned HOST_WIDE_INT uval;
9878760c
RK
11945
11946 switch (code)
11947 {
a8b3aeda 11948 case '.':
a85d226b
RK
11949 /* Write out an instruction after the call which may be replaced
11950 with glue code by the loader. This depends on the AIX version. */
11951 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11952 return;
11953
81eace42
GK
11954 /* %a is output_address. */
11955
9854d9ed
RK
11956 case 'A':
11957 /* If X is a constant integer whose low-order 5 bits are zero,
11958 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11959 in the AIX assembler where "sri" with a zero shift count
20e26713 11960 writes a trash instruction. */
9854d9ed 11961 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11962 putc ('l', file);
9854d9ed 11963 else
76229ac8 11964 putc ('r', file);
9854d9ed
RK
11965 return;
11966
11967 case 'b':
e2c953b6
DE
11968 /* If constant, low-order 16 bits of constant, unsigned.
11969 Otherwise, write normally. */
11970 if (INT_P (x))
11971 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11972 else
11973 print_operand (file, x, 0);
cad12a8d
RK
11974 return;
11975
a260abc9
DE
11976 case 'B':
11977 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11978 for 64-bit mask direction. */
9390387d 11979 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11980 return;
a260abc9 11981
81eace42
GK
11982 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11983 output_operand. */
11984
423c1189
AH
11985 case 'c':
11986 /* X is a CR register. Print the number of the GT bit of the CR. */
11987 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11988 output_operand_lossage ("invalid %%c value");
11989 else
11990 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11991 return;
11992
11993 case 'D':
cef6b86c 11994 /* Like 'J' but get to the GT bit only. */
37409796 11995 gcc_assert (GET_CODE (x) == REG);
423c1189 11996
cef6b86c
EB
11997 /* Bit 1 is GT bit. */
11998 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11999
cef6b86c
EB
12000 /* Add one for shift count in rlinm for scc. */
12001 fprintf (file, "%d", i + 1);
423c1189
AH
12002 return;
12003
9854d9ed 12004 case 'E':
39a10a29 12005 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
12006 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12007 output_operand_lossage ("invalid %%E value");
78fbdbf7 12008 else
39a10a29 12009 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 12010 return;
9854d9ed
RK
12011
12012 case 'f':
12013 /* X is a CR register. Print the shift count needed to move it
12014 to the high-order four bits. */
12015 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12016 output_operand_lossage ("invalid %%f value");
12017 else
9ebbca7d 12018 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12019 return;
12020
12021 case 'F':
12022 /* Similar, but print the count for the rotate in the opposite
12023 direction. */
12024 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12025 output_operand_lossage ("invalid %%F value");
12026 else
9ebbca7d 12027 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12028 return;
12029
12030 case 'G':
12031 /* X is a constant integer. If it is negative, print "m",
43aa4e05 12032 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
12033 if (GET_CODE (x) != CONST_INT)
12034 output_operand_lossage ("invalid %%G value");
12035 else if (INTVAL (x) >= 0)
76229ac8 12036 putc ('z', file);
9854d9ed 12037 else
76229ac8 12038 putc ('m', file);
9854d9ed 12039 return;
e2c953b6 12040
9878760c 12041 case 'h':
a4f6c312
SS
12042 /* If constant, output low-order five bits. Otherwise, write
12043 normally. */
9878760c 12044 if (INT_P (x))
5f59ecb7 12045 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12046 else
12047 print_operand (file, x, 0);
12048 return;
12049
64305719 12050 case 'H':
a4f6c312
SS
12051 /* If constant, output low-order six bits. Otherwise, write
12052 normally. */
64305719 12053 if (INT_P (x))
5f59ecb7 12054 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12055 else
12056 print_operand (file, x, 0);
12057 return;
12058
9854d9ed
RK
12059 case 'I':
12060 /* Print `i' if this is a constant, else nothing. */
9878760c 12061 if (INT_P (x))
76229ac8 12062 putc ('i', file);
9878760c
RK
12063 return;
12064
9854d9ed
RK
12065 case 'j':
12066 /* Write the bit number in CCR for jump. */
12067 i = ccr_bit (x, 0);
12068 if (i == -1)
12069 output_operand_lossage ("invalid %%j code");
9878760c 12070 else
9854d9ed 12071 fprintf (file, "%d", i);
9878760c
RK
12072 return;
12073
9854d9ed
RK
12074 case 'J':
12075 /* Similar, but add one for shift count in rlinm for scc and pass
12076 scc flag to `ccr_bit'. */
12077 i = ccr_bit (x, 1);
12078 if (i == -1)
12079 output_operand_lossage ("invalid %%J code");
12080 else
a0466a68
RK
12081 /* If we want bit 31, write a shift count of zero, not 32. */
12082 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12083 return;
12084
9854d9ed
RK
12085 case 'k':
12086 /* X must be a constant. Write the 1's complement of the
12087 constant. */
9878760c 12088 if (! INT_P (x))
9854d9ed 12089 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12090 else
12091 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12092 return;
12093
81eace42 12094 case 'K':
9ebbca7d
GK
12095 /* X must be a symbolic constant on ELF. Write an
12096 expression suitable for an 'addi' that adds in the low 16
12097 bits of the MEM. */
12098 if (GET_CODE (x) != CONST)
12099 {
12100 print_operand_address (file, x);
12101 fputs ("@l", file);
12102 }
12103 else
12104 {
12105 if (GET_CODE (XEXP (x, 0)) != PLUS
12106 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12107 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12108 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12109 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12110 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12111 fputs ("@l", file);
ed8d2920
MM
12112 /* For GNU as, there must be a non-alphanumeric character
12113 between 'l' and the number. The '-' is added by
12114 print_operand() already. */
12115 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12116 fputs ("+", file);
9ebbca7d
GK
12117 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12118 }
81eace42
GK
12119 return;
12120
12121 /* %l is output_asm_label. */
9ebbca7d 12122
9854d9ed
RK
12123 case 'L':
12124 /* Write second word of DImode or DFmode reference. Works on register
12125 or non-indexed memory only. */
12126 if (GET_CODE (x) == REG)
fb5c67a7 12127 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12128 else if (GET_CODE (x) == MEM)
12129 {
12130 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12131 we have already done it, we can just use an offset of word. */
9854d9ed
RK
12132 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12133 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12134 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12135 UNITS_PER_WORD));
6fb5fa3c
DB
12136 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12137 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12138 UNITS_PER_WORD));
9854d9ed 12139 else
d7624dc0
RK
12140 output_address (XEXP (adjust_address_nv (x, SImode,
12141 UNITS_PER_WORD),
12142 0));
ed8908e7 12143
ba5e43aa 12144 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12145 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12146 reg_names[SMALL_DATA_REG]);
9854d9ed 12147 }
9878760c 12148 return;
f676971a 12149
9878760c
RK
12150 case 'm':
12151 /* MB value for a mask operand. */
b1765bde 12152 if (! mask_operand (x, SImode))
9878760c
RK
12153 output_operand_lossage ("invalid %%m value");
12154
0ba1b2ff 12155 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12156 return;
12157
12158 case 'M':
12159 /* ME value for a mask operand. */
b1765bde 12160 if (! mask_operand (x, SImode))
a260abc9 12161 output_operand_lossage ("invalid %%M value");
9878760c 12162
0ba1b2ff 12163 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12164 return;
12165
81eace42
GK
12166 /* %n outputs the negative of its operand. */
12167
9878760c
RK
12168 case 'N':
12169 /* Write the number of elements in the vector times 4. */
12170 if (GET_CODE (x) != PARALLEL)
12171 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12172 else
12173 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12174 return;
12175
12176 case 'O':
12177 /* Similar, but subtract 1 first. */
12178 if (GET_CODE (x) != PARALLEL)
1427100a 12179 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12180 else
12181 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12182 return;
12183
9854d9ed
RK
12184 case 'p':
12185 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12186 if (! INT_P (x)
2bfcf297 12187 || INT_LOWPART (x) < 0
9854d9ed
RK
12188 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12189 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12190 else
12191 fprintf (file, "%d", i);
9854d9ed
RK
12192 return;
12193
9878760c
RK
12194 case 'P':
12195 /* The operand must be an indirect memory reference. The result
8bb418a3 12196 is the register name. */
9878760c
RK
12197 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12198 || REGNO (XEXP (x, 0)) >= 32)
12199 output_operand_lossage ("invalid %%P value");
e2c953b6 12200 else
fb5c67a7 12201 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12202 return;
12203
dfbdccdb
GK
12204 case 'q':
12205 /* This outputs the logical code corresponding to a boolean
12206 expression. The expression may have one or both operands
39a10a29 12207 negated (if one, only the first one). For condition register
c4ad648e
AM
12208 logical operations, it will also treat the negated
12209 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12210 {
63bc1d05 12211 const char *const *t = 0;
dfbdccdb
GK
12212 const char *s;
12213 enum rtx_code code = GET_CODE (x);
12214 static const char * const tbl[3][3] = {
12215 { "and", "andc", "nor" },
12216 { "or", "orc", "nand" },
12217 { "xor", "eqv", "xor" } };
12218
12219 if (code == AND)
12220 t = tbl[0];
12221 else if (code == IOR)
12222 t = tbl[1];
12223 else if (code == XOR)
12224 t = tbl[2];
12225 else
12226 output_operand_lossage ("invalid %%q value");
12227
12228 if (GET_CODE (XEXP (x, 0)) != NOT)
12229 s = t[0];
12230 else
12231 {
12232 if (GET_CODE (XEXP (x, 1)) == NOT)
12233 s = t[2];
12234 else
12235 s = t[1];
12236 }
f676971a 12237
dfbdccdb
GK
12238 fputs (s, file);
12239 }
12240 return;
12241
2c4a9cff
DE
12242 case 'Q':
12243 if (TARGET_MFCRF)
3b6ce0af 12244 fputc (',', file);
5efb1046 12245 /* FALLTHRU */
2c4a9cff
DE
12246 else
12247 return;
12248
9854d9ed
RK
12249 case 'R':
12250 /* X is a CR register. Print the mask for `mtcrf'. */
12251 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12252 output_operand_lossage ("invalid %%R value");
12253 else
9ebbca7d 12254 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12255 return;
9854d9ed
RK
12256
12257 case 's':
12258 /* Low 5 bits of 32 - value */
12259 if (! INT_P (x))
12260 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12261 else
12262 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12263 return;
9854d9ed 12264
a260abc9 12265 case 'S':
0ba1b2ff 12266 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12267 CONST_INT 32-bit mask is considered sign-extended so any
12268 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12269 if (! mask64_operand (x, DImode))
a260abc9
DE
12270 output_operand_lossage ("invalid %%S value");
12271
0ba1b2ff 12272 uval = INT_LOWPART (x);
a260abc9 12273
0ba1b2ff 12274 if (uval & 1) /* Clear Left */
a260abc9 12275 {
f099d360
GK
12276#if HOST_BITS_PER_WIDE_INT > 64
12277 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12278#endif
0ba1b2ff 12279 i = 64;
a260abc9 12280 }
0ba1b2ff 12281 else /* Clear Right */
a260abc9 12282 {
0ba1b2ff 12283 uval = ~uval;
f099d360
GK
12284#if HOST_BITS_PER_WIDE_INT > 64
12285 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12286#endif
0ba1b2ff 12287 i = 63;
a260abc9 12288 }
0ba1b2ff
AM
12289 while (uval != 0)
12290 --i, uval >>= 1;
37409796 12291 gcc_assert (i >= 0);
0ba1b2ff
AM
12292 fprintf (file, "%d", i);
12293 return;
a260abc9 12294
a3170dc6
AH
12295 case 't':
12296 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12297 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12298
12299 /* Bit 3 is OV bit. */
12300 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12301
12302 /* If we want bit 31, write a shift count of zero, not 32. */
12303 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12304 return;
12305
cccf3bdc
DE
12306 case 'T':
12307 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12308 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12309 && REGNO (x) != CTR_REGNO))
cccf3bdc 12310 output_operand_lossage ("invalid %%T value");
1de43f85 12311 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12312 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12313 else
12314 fputs ("ctr", file);
12315 return;
12316
9854d9ed 12317 case 'u':
802a0058 12318 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12319 if (! INT_P (x))
12320 output_operand_lossage ("invalid %%u value");
e2c953b6 12321 else
f676971a 12322 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12323 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12324 return;
12325
802a0058
MM
12326 case 'v':
12327 /* High-order 16 bits of constant for use in signed operand. */
12328 if (! INT_P (x))
12329 output_operand_lossage ("invalid %%v value");
e2c953b6 12330 else
134c32f6
DE
12331 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12332 (INT_LOWPART (x) >> 16) & 0xffff);
12333 return;
802a0058 12334
9854d9ed
RK
12335 case 'U':
12336 /* Print `u' if this has an auto-increment or auto-decrement. */
12337 if (GET_CODE (x) == MEM
12338 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12339 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12340 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12341 putc ('u', file);
9854d9ed 12342 return;
9878760c 12343
e0cd0770
JC
12344 case 'V':
12345 /* Print the trap code for this operand. */
12346 switch (GET_CODE (x))
12347 {
12348 case EQ:
12349 fputs ("eq", file); /* 4 */
12350 break;
12351 case NE:
12352 fputs ("ne", file); /* 24 */
12353 break;
12354 case LT:
12355 fputs ("lt", file); /* 16 */
12356 break;
12357 case LE:
12358 fputs ("le", file); /* 20 */
12359 break;
12360 case GT:
12361 fputs ("gt", file); /* 8 */
12362 break;
12363 case GE:
12364 fputs ("ge", file); /* 12 */
12365 break;
12366 case LTU:
12367 fputs ("llt", file); /* 2 */
12368 break;
12369 case LEU:
12370 fputs ("lle", file); /* 6 */
12371 break;
12372 case GTU:
12373 fputs ("lgt", file); /* 1 */
12374 break;
12375 case GEU:
12376 fputs ("lge", file); /* 5 */
12377 break;
12378 default:
37409796 12379 gcc_unreachable ();
e0cd0770
JC
12380 }
12381 break;
12382
9854d9ed
RK
12383 case 'w':
12384 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12385 normally. */
12386 if (INT_P (x))
f676971a 12387 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12388 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12389 else
12390 print_operand (file, x, 0);
9878760c
RK
12391 return;
12392
9854d9ed 12393 case 'W':
e2c953b6 12394 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12395 val = (GET_CODE (x) == CONST_INT
12396 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12397
12398 if (val < 0)
12399 i = -1;
9854d9ed 12400 else
e2c953b6
DE
12401 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12402 if ((val <<= 1) < 0)
12403 break;
12404
12405#if HOST_BITS_PER_WIDE_INT == 32
12406 if (GET_CODE (x) == CONST_INT && i >= 0)
12407 i += 32; /* zero-extend high-part was all 0's */
12408 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12409 {
12410 val = CONST_DOUBLE_LOW (x);
12411
37409796
NS
12412 gcc_assert (val);
12413 if (val < 0)
e2c953b6
DE
12414 --i;
12415 else
12416 for ( ; i < 64; i++)
12417 if ((val <<= 1) < 0)
12418 break;
12419 }
12420#endif
12421
12422 fprintf (file, "%d", i + 1);
9854d9ed 12423 return;
9878760c 12424
9854d9ed
RK
12425 case 'X':
12426 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12427 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12428 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12429 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12430 putc ('x', file);
9854d9ed 12431 return;
9878760c 12432
9854d9ed
RK
12433 case 'Y':
12434 /* Like 'L', for third word of TImode */
12435 if (GET_CODE (x) == REG)
fb5c67a7 12436 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12437 else if (GET_CODE (x) == MEM)
9878760c 12438 {
9854d9ed
RK
12439 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12440 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12441 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12442 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12443 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12444 else
d7624dc0 12445 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12446 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12447 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12448 reg_names[SMALL_DATA_REG]);
9878760c
RK
12449 }
12450 return;
f676971a 12451
9878760c 12452 case 'z':
b4ac57ab
RS
12453 /* X is a SYMBOL_REF. Write out the name preceded by a
12454 period and without any trailing data in brackets. Used for function
4d30c363
MM
12455 names. If we are configured for System V (or the embedded ABI) on
12456 the PowerPC, do not emit the period, since those systems do not use
12457 TOCs and the like. */
37409796 12458 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12459
c4ad648e
AM
12460 /* Mark the decl as referenced so that cgraph will output the
12461 function. */
9bf6462a 12462 if (SYMBOL_REF_DECL (x))
c4ad648e 12463 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12464
85b776df 12465 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12466 if (TARGET_MACHO)
12467 {
12468 const char *name = XSTR (x, 0);
a031e781 12469#if TARGET_MACHO
3b48085e 12470 if (MACHOPIC_INDIRECT
11abc112
MM
12471 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12472 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12473#endif
12474 assemble_name (file, name);
12475 }
85b776df 12476 else if (!DOT_SYMBOLS)
9739c90c 12477 assemble_name (file, XSTR (x, 0));
85b776df
AM
12478 else
12479 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12480 return;
12481
9854d9ed
RK
12482 case 'Z':
12483 /* Like 'L', for last word of TImode. */
12484 if (GET_CODE (x) == REG)
fb5c67a7 12485 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12486 else if (GET_CODE (x) == MEM)
12487 {
12488 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12489 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12490 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12491 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12492 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12493 else
d7624dc0 12494 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12495 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12496 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12497 reg_names[SMALL_DATA_REG]);
9854d9ed 12498 }
5c23c401 12499 return;
0ac081f6 12500
a3170dc6 12501 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12502 case 'y':
12503 {
12504 rtx tmp;
12505
37409796 12506 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12507
12508 tmp = XEXP (x, 0);
12509
90d3ff1c 12510 /* Ugly hack because %y is overloaded. */
8ef65e3d 12511 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12512 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12513 || GET_MODE (x) == TFmode
12514 || GET_MODE (x) == TImode))
a3170dc6
AH
12515 {
12516 /* Handle [reg]. */
12517 if (GET_CODE (tmp) == REG)
12518 {
12519 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12520 break;
12521 }
12522 /* Handle [reg+UIMM]. */
12523 else if (GET_CODE (tmp) == PLUS &&
12524 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12525 {
12526 int x;
12527
37409796 12528 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12529
12530 x = INTVAL (XEXP (tmp, 1));
12531 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12532 break;
12533 }
12534
12535 /* Fall through. Must be [reg+reg]. */
12536 }
850e8d3d
DN
12537 if (TARGET_ALTIVEC
12538 && GET_CODE (tmp) == AND
12539 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12540 && INTVAL (XEXP (tmp, 1)) == -16)
12541 tmp = XEXP (tmp, 0);
0ac081f6 12542 if (GET_CODE (tmp) == REG)
c62f2db5 12543 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12544 else
0ac081f6 12545 {
cb8cc791
AP
 12546 if (GET_CODE (tmp) != PLUS
12547 || !REG_P (XEXP (tmp, 0))
12548 || !REG_P (XEXP (tmp, 1)))
12549 {
12550 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12551 break;
12552 }
bb8df8a6 12553
0ac081f6
AH
12554 if (REGNO (XEXP (tmp, 0)) == 0)
12555 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12556 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12557 else
12558 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12559 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12560 }
0ac081f6
AH
12561 break;
12562 }
f676971a 12563
9878760c
RK
12564 case 0:
12565 if (GET_CODE (x) == REG)
12566 fprintf (file, "%s", reg_names[REGNO (x)]);
12567 else if (GET_CODE (x) == MEM)
12568 {
12569 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12570 know the width from the mode. */
12571 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12572 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12573 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12574 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12575 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12576 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12577 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12578 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12579 else
a54d04b7 12580 output_address (XEXP (x, 0));
9878760c
RK
12581 }
12582 else
a54d04b7 12583 output_addr_const (file, x);
a85d226b 12584 return;
9878760c 12585
c4501e62
JJ
12586 case '&':
12587 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12588 return;
12589
9878760c
RK
12590 default:
12591 output_operand_lossage ("invalid %%xn code");
12592 }
12593}
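
/* Illustrative sketch, not part of the original file: a few of the simpler
   operand codes, writing to asm_out_file.  The function name is made up
   for the example.  */
#if 0
static void
rs6000_example_print_operand (void)
{
  print_operand (asm_out_file, GEN_INT (0x12345), 'b'); /* "9029": low 16 bits */
  print_operand (asm_out_file, GEN_INT (37), 'h');	 /* "5": low 5 bits */
  print_operand (asm_out_file, GEN_INT (-3), 'G');	 /* "m": negative constant */
  print_operand (asm_out_file, GEN_INT (16), 'p');	 /* "4": log2 of the value */
}
#endif
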
12594\f
12595/* Print the address of an operand. */
12596
12597void
a2369ed3 12598print_operand_address (FILE *file, rtx x)
9878760c
RK
12599{
12600 if (GET_CODE (x) == REG)
4697a36c 12601 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12602 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12603 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12604 {
12605 output_addr_const (file, x);
ba5e43aa 12606 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12607 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12608 reg_names[SMALL_DATA_REG]);
37409796
NS
12609 else
12610 gcc_assert (!TARGET_TOC);
9878760c
RK
12611 }
12612 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12613 {
9024f4b8 12614 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12615 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12616 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12617 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12618 else
4697a36c
MM
12619 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12620 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12621 }
12622 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12623 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12624 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12625#if TARGET_ELF
12626 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12627 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12628 {
12629 output_addr_const (file, XEXP (x, 1));
12630 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12631 }
c859cda6
DJ
12632#endif
12633#if TARGET_MACHO
12634 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12635 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12636 {
12637 fprintf (file, "lo16(");
12638 output_addr_const (file, XEXP (x, 1));
12639 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12640 }
3cb999d8 12641#endif
4d588c14 12642 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12643 {
2e4316da 12644 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
12645 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12646 }
9878760c 12647 else
37409796 12648 gcc_unreachable ();
9878760c
RK
12649}
12650\f
2e4316da
RS
12651/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12652
12653bool
12654rs6000_output_addr_const_extra (FILE *file, rtx x)
12655{
12656 if (GET_CODE (x) == UNSPEC)
12657 switch (XINT (x, 1))
12658 {
12659 case UNSPEC_TOCREL:
12660 x = XVECEXP (x, 0, 0);
12661 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12662 output_addr_const (file, x);
12663 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12664 {
12665 putc ('-', file);
12666 assemble_name (file, toc_label_name);
12667 }
12668 else if (TARGET_ELF)
12669 fputs ("@toc", file);
12670 return true;
08a6a74b
RS
12671
12672#if TARGET_MACHO
12673 case UNSPEC_MACHOPIC_OFFSET:
12674 output_addr_const (file, XVECEXP (x, 0, 0));
12675 putc ('-', file);
12676 machopic_output_function_base_name (file);
12677 return true;
12678#endif
2e4316da
RS
12679 }
12680 return false;
12681}
12682\f
88cad84b 12683/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12684 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12685 is defined. It also needs to handle DI-mode objects on 64-bit
12686 targets. */
12687
12688static bool
a2369ed3 12689rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12690{
f4f4921e 12691#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12692 /* Special handling for SI values. */
84dcde01 12693 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12694 {
301d03af 12695 static int recurse = 0;
f676971a 12696
301d03af
RS
12697 /* For -mrelocatable, we mark all addresses that need to be fixed up
12698 in the .fixup section. */
12699 if (TARGET_RELOCATABLE
d6b5193b
RS
12700 && in_section != toc_section
12701 && in_section != text_section
4325ca90 12702 && !unlikely_text_section_p (in_section)
301d03af
RS
12703 && !recurse
12704 && GET_CODE (x) != CONST_INT
12705 && GET_CODE (x) != CONST_DOUBLE
12706 && CONSTANT_P (x))
12707 {
12708 char buf[256];
12709
12710 recurse = 1;
12711 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12712 fixuplabelno++;
12713 ASM_OUTPUT_LABEL (asm_out_file, buf);
12714 fprintf (asm_out_file, "\t.long\t(");
12715 output_addr_const (asm_out_file, x);
12716 fprintf (asm_out_file, ")@fixup\n");
12717 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12718 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12719 fprintf (asm_out_file, "\t.long\t");
12720 assemble_name (asm_out_file, buf);
12721 fprintf (asm_out_file, "\n\t.previous\n");
12722 recurse = 0;
12723 return true;
12724 }
12725 /* Remove initial .'s to turn a -mcall-aixdesc function
12726 address into the address of the descriptor, not the function
12727 itself. */
12728 else if (GET_CODE (x) == SYMBOL_REF
12729 && XSTR (x, 0)[0] == '.'
12730 && DEFAULT_ABI == ABI_AIX)
12731 {
12732 const char *name = XSTR (x, 0);
12733 while (*name == '.')
12734 name++;
12735
12736 fprintf (asm_out_file, "\t.long\t%s\n", name);
12737 return true;
12738 }
12739 }
f4f4921e 12740#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12741 return default_assemble_integer (x, size, aligned_p);
12742}
93638d7a
AM
12743
12744#ifdef HAVE_GAS_HIDDEN
12745/* Emit an assembler directive to set symbol visibility for DECL to
12746 VISIBILITY_TYPE. */
12747
5add3202 12748static void
a2369ed3 12749rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12750{
93638d7a
AM
12751 /* Functions need to have their entry point symbol visibility set as
12752 well as their descriptor symbol visibility. */
85b776df
AM
12753 if (DEFAULT_ABI == ABI_AIX
12754 && DOT_SYMBOLS
12755 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12756 {
25fdb4dc 12757 static const char * const visibility_types[] = {
c4ad648e 12758 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12759 };
12760
12761 const char *name, *type;
93638d7a
AM
12762
12763 name = ((* targetm.strip_name_encoding)
12764 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12765 type = visibility_types[vis];
93638d7a 12766
25fdb4dc
RH
12767 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12768 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12769 }
25fdb4dc
RH
12770 else
12771 default_assemble_visibility (decl, vis);
93638d7a
AM
12772}
12773#endif
301d03af 12774\f
39a10a29 12775enum rtx_code
a2369ed3 12776rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12777{
12778 /* Reversal of FP compares takes care -- an ordered compare
12779 becomes an unordered compare and vice versa. */
f676971a 12780 if (mode == CCFPmode
bc9ec0e0
GK
12781 && (!flag_finite_math_only
12782 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12783 || code == UNEQ || code == LTGT))
bab6226b 12784 return reverse_condition_maybe_unordered (code);
39a10a29 12785 else
bab6226b 12786 return reverse_condition (code);
39a10a29
GK
12787}
12788
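
/* Illustrative sketch, not part of the original file: reversing an FP
   comparison must keep the unordered case on the right side, so without
   -ffinite-math-only LT under CCFPmode reverses to UNGE, while an integer
   CCmode LT simply becomes GE.  The function name is made up.  */
#if 0
static void
rs6000_example_reverse_condition (void)
{
  enum rtx_code fp_rev = rs6000_reverse_condition (CCFPmode, LT); /* UNGE */
  enum rtx_code int_rev = rs6000_reverse_condition (CCmode, LT);  /* GE */
}
#endif
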
39a10a29
GK
12789/* Generate a compare for CODE. Return a brand-new rtx that
12790 represents the result of the compare. */
a4f6c312 12791
39a10a29 12792static rtx
f90b7a5a 12793rs6000_generate_compare (rtx cmp, enum machine_mode mode)
39a10a29
GK
12794{
12795 enum machine_mode comp_mode;
12796 rtx compare_result;
f90b7a5a
PB
12797 enum rtx_code code = GET_CODE (cmp);
12798 rtx op0 = XEXP (cmp, 0);
12799 rtx op1 = XEXP (cmp, 1);
39a10a29 12800
f90b7a5a 12801 if (FLOAT_MODE_P (mode))
39a10a29
GK
12802 comp_mode = CCFPmode;
12803 else if (code == GTU || code == LTU
c4ad648e 12804 || code == GEU || code == LEU)
39a10a29 12805 comp_mode = CCUNSmode;
60934f9c 12806 else if ((code == EQ || code == NE)
f90b7a5a
PB
12807 && GET_CODE (op0) == SUBREG
12808 && GET_CODE (op1) == SUBREG
12809 && SUBREG_PROMOTED_UNSIGNED_P (op0)
12810 && SUBREG_PROMOTED_UNSIGNED_P (op1))
60934f9c
NS
12811 /* These are unsigned values, perhaps there will be a later
12812 ordering compare that can be shared with this one.
12813 Unfortunately we cannot detect the signedness of the operands
12814 for non-subregs. */
12815 comp_mode = CCUNSmode;
39a10a29
GK
12816 else
12817 comp_mode = CCmode;
12818
12819 /* First, the compare. */
12820 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12821
cef6b86c 12822 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12823 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
f90b7a5a 12824 && FLOAT_MODE_P (mode))
a3170dc6 12825 {
64022b5d 12826 rtx cmp, or_result, compare_result2;
f90b7a5a 12827 enum machine_mode op_mode = GET_MODE (op0);
4d4cbc0e
AH
12828
12829 if (op_mode == VOIDmode)
f90b7a5a 12830 op_mode = GET_MODE (op1);
a3170dc6 12831
cef6b86c
EB
12832 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12833 This explains the following mess. */
423c1189 12834
a3170dc6
AH
12835 switch (code)
12836 {
423c1189 12837 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12838 switch (op_mode)
12839 {
12840 case SFmode:
1cdc0d8f 12841 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12842 ? gen_tstsfeq_gpr (compare_result, op0, op1)
12843 : gen_cmpsfeq_gpr (compare_result, op0, op1);
37409796
NS
12844 break;
12845
12846 case DFmode:
1cdc0d8f 12847 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12848 ? gen_tstdfeq_gpr (compare_result, op0, op1)
12849 : gen_cmpdfeq_gpr (compare_result, op0, op1);
37409796
NS
12850 break;
12851
17caeff2 12852 case TFmode:
1cdc0d8f 12853 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12854 ? gen_tsttfeq_gpr (compare_result, op0, op1)
12855 : gen_cmptfeq_gpr (compare_result, op0, op1);
17caeff2
JM
12856 break;
12857
37409796
NS
12858 default:
12859 gcc_unreachable ();
12860 }
a3170dc6 12861 break;
bb8df8a6 12862
423c1189 12863 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12864 switch (op_mode)
12865 {
12866 case SFmode:
1cdc0d8f 12867 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12868 ? gen_tstsfgt_gpr (compare_result, op0, op1)
12869 : gen_cmpsfgt_gpr (compare_result, op0, op1);
37409796 12870 break;
bb8df8a6 12871
37409796 12872 case DFmode:
1cdc0d8f 12873 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12874 ? gen_tstdfgt_gpr (compare_result, op0, op1)
12875 : gen_cmpdfgt_gpr (compare_result, op0, op1);
37409796
NS
12876 break;
12877
17caeff2 12878 case TFmode:
1cdc0d8f 12879 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12880 ? gen_tsttfgt_gpr (compare_result, op0, op1)
12881 : gen_cmptfgt_gpr (compare_result, op0, op1);
17caeff2
JM
12882 break;
12883
37409796
NS
12884 default:
12885 gcc_unreachable ();
12886 }
a3170dc6 12887 break;
bb8df8a6 12888
423c1189 12889 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12890 switch (op_mode)
12891 {
12892 case SFmode:
1cdc0d8f 12893 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12894 ? gen_tstsflt_gpr (compare_result, op0, op1)
12895 : gen_cmpsflt_gpr (compare_result, op0, op1);
37409796 12896 break;
bb8df8a6 12897
37409796 12898 case DFmode:
1cdc0d8f 12899 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12900 ? gen_tstdflt_gpr (compare_result, op0, op1)
12901 : gen_cmpdflt_gpr (compare_result, op0, op1);
37409796
NS
12902 break;
12903
17caeff2 12904 case TFmode:
1cdc0d8f 12905 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12906 ? gen_tsttflt_gpr (compare_result, op0, op1)
12907 : gen_cmptflt_gpr (compare_result, op0, op1);
17caeff2
JM
12908 break;
12909
37409796
NS
12910 default:
12911 gcc_unreachable ();
12912 }
a3170dc6 12913 break;
4d4cbc0e 12914 default:
37409796 12915 gcc_unreachable ();
a3170dc6
AH
12916 }
12917
12918 /* Synthesize LE and GE from LT/GT || EQ. */
12919 if (code == LE || code == GE || code == LEU || code == GEU)
12920 {
a3170dc6
AH
12921 emit_insn (cmp);
12922
12923 switch (code)
12924 {
12925 case LE: code = LT; break;
12926 case GE: code = GT; break;
12927 case LEU: code = LT; break;
12928 case GEU: code = GT; break;
37409796 12929 default: gcc_unreachable ();
a3170dc6
AH
12930 }
12931
a3170dc6
AH
12932 compare_result2 = gen_reg_rtx (CCFPmode);
12933
12934 /* Do the EQ. */
37409796
NS
12935 switch (op_mode)
12936 {
12937 case SFmode:
1cdc0d8f 12938 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12939 ? gen_tstsfeq_gpr (compare_result2, op0, op1)
12940 : gen_cmpsfeq_gpr (compare_result2, op0, op1);
37409796
NS
12941 break;
12942
12943 case DFmode:
1cdc0d8f 12944 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12945 ? gen_tstdfeq_gpr (compare_result2, op0, op1)
12946 : gen_cmpdfeq_gpr (compare_result2, op0, op1);
37409796
NS
12947 break;
12948
17caeff2 12949 case TFmode:
1cdc0d8f 12950 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
12951 ? gen_tsttfeq_gpr (compare_result2, op0, op1)
12952 : gen_cmptfeq_gpr (compare_result2, op0, op1);
17caeff2
JM
12953 break;
12954
37409796
NS
12955 default:
12956 gcc_unreachable ();
12957 }
a3170dc6
AH
12958 emit_insn (cmp);
12959
a3170dc6 12960 /* OR them together. */
64022b5d
AH
12961 or_result = gen_reg_rtx (CCFPmode);
12962 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12963 compare_result2);
a3170dc6
AH
12964 compare_result = or_result;
12965 code = EQ;
12966 }
12967 else
12968 {
a3170dc6 12969 if (code == NE || code == LTGT)
a3170dc6 12970 code = NE;
423c1189
AH
12971 else
12972 code = EQ;
a3170dc6
AH
12973 }
12974
12975 emit_insn (cmp);
12976 }
12977 else
de17c25f
DE
12978 {
12979 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12980 CLOBBERs to match cmptf_internal2 pattern. */
12981 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
f90b7a5a 12982 && GET_MODE (op0) == TFmode
602ea4d3 12983 && !TARGET_IEEEQUAD
de17c25f
DE
12984 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12985 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12986 gen_rtvec (9,
12987 gen_rtx_SET (VOIDmode,
12988 compare_result,
f90b7a5a 12989 gen_rtx_COMPARE (comp_mode, op0, op1)),
de17c25f
DE
12990 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12991 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12992 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12993 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12994 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12995 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12996 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12997 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
f90b7a5a
PB
12998 else if (GET_CODE (op1) == UNSPEC
12999 && XINT (op1, 1) == UNSPEC_SP_TEST)
3aebbe5f 13000 {
c24151ce 13001 rtx op1b = XVECEXP (op1, 0, 0);
3aebbe5f
JJ
13002 comp_mode = CCEQmode;
13003 compare_result = gen_reg_rtx (CCEQmode);
13004 if (TARGET_64BIT)
c24151ce 13005 emit_insn (gen_stack_protect_testdi (compare_result, op0, op1b));
3aebbe5f 13006 else
c24151ce 13007 emit_insn (gen_stack_protect_testsi (compare_result, op0, op1b));
3aebbe5f 13008 }
de17c25f
DE
13009 else
13010 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
f90b7a5a 13011 gen_rtx_COMPARE (comp_mode, op0, op1)));
de17c25f 13012 }
f676971a 13013
ca5adc63 13014 /* Some kinds of FP comparisons need an OR operation;
e7108df9 13015 under flag_finite_math_only we don't bother. */
f90b7a5a 13016 if (FLOAT_MODE_P (mode)
e7108df9 13017 && !flag_finite_math_only
8ef65e3d 13018 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13019 && (code == LE || code == GE
13020 || code == UNEQ || code == LTGT
13021 || code == UNGT || code == UNLT))
13022 {
13023 enum rtx_code or1, or2;
13024 rtx or1_rtx, or2_rtx, compare2_rtx;
13025 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13026
39a10a29
GK
13027 switch (code)
13028 {
13029 case LE: or1 = LT; or2 = EQ; break;
13030 case GE: or1 = GT; or2 = EQ; break;
13031 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13032 case LTGT: or1 = LT; or2 = GT; break;
13033 case UNGT: or1 = UNORDERED; or2 = GT; break;
13034 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13035 default: gcc_unreachable ();
39a10a29
GK
13036 }
13037 validate_condition_mode (or1, comp_mode);
13038 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13039 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13040 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13041 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13042 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13043 const_true_rtx);
13044 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13045
13046 compare_result = or_result;
13047 code = EQ;
13048 }
13049
13050 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13051
1c563bed 13052 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13053}
13054
13055
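
/* Illustrative sketch, not part of the original file: callers hand this
   routine a comparison rtx and get back a condition of the form
   (code (reg:CC n) (const_int 0)) ready to be wrapped into a branch or
   scc.  The function name is made up for the example.  */
#if 0
static void
rs6000_example_generate_compare (void)
{
  rtx cmp = gen_rtx_GT (SImode, gen_rtx_REG (SImode, 3), GEN_INT (7));
  rtx cond = rs6000_generate_compare (cmp, SImode);
  /* The compare setting the CC register has already been emitted; COND
     now looks something like (gt (reg:CC ...) (const_int 0)).  */
}
#endif
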
13056/* Emit the RTL for an sCOND pattern. */
13057
13058void
f90b7a5a 13059rs6000_emit_sCOND (enum machine_mode mode, rtx operands[])
39a10a29
GK
13060{
13061 rtx condition_rtx;
13062 enum machine_mode op_mode;
b7053a3f 13063 enum rtx_code cond_code;
f90b7a5a 13064 rtx result = operands[0];
39a10a29 13065
f90b7a5a 13066 condition_rtx = rs6000_generate_compare (operands[1], mode);
b7053a3f
GK
13067 cond_code = GET_CODE (condition_rtx);
13068
f90b7a5a 13069 if (FLOAT_MODE_P (mode)
423c1189
AH
13070 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13071 {
13072 rtx t;
13073
13074 PUT_MODE (condition_rtx, SImode);
13075 t = XEXP (condition_rtx, 0);
13076
37409796 13077 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13078
13079 if (cond_code == NE)
64022b5d 13080 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13081
64022b5d 13082 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13083 return;
13084 }
13085
b7053a3f
GK
13086 if (cond_code == NE
13087 || cond_code == GE || cond_code == LE
13088 || cond_code == GEU || cond_code == LEU
13089 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13090 {
13091 rtx not_result = gen_reg_rtx (CCEQmode);
13092 rtx not_op, rev_cond_rtx;
13093 enum machine_mode cc_mode;
f676971a 13094
b7053a3f
GK
13095 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13096
1c563bed 13097 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13098 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13099 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13100 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13101 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13102 }
39a10a29 13103
f90b7a5a 13104 op_mode = GET_MODE (XEXP (operands[1], 0));
39a10a29 13105 if (op_mode == VOIDmode)
f90b7a5a 13106 op_mode = GET_MODE (XEXP (operands[1], 1));
39a10a29 13107
f90b7a5a 13108 if (TARGET_POWERPC64 && (op_mode == DImode || FLOAT_MODE_P (mode)))
39a10a29
GK
13109 {
13110 PUT_MODE (condition_rtx, DImode);
13111 convert_move (result, condition_rtx, 0);
13112 }
13113 else
13114 {
13115 PUT_MODE (condition_rtx, SImode);
13116 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13117 }
13118}
13119
39a10a29
GK
13120/* Emit a branch of kind CODE to location LOC. */
13121
13122void
f90b7a5a 13123rs6000_emit_cbranch (enum machine_mode mode, rtx operands[])
39a10a29
GK
13124{
13125 rtx condition_rtx, loc_ref;
13126
f90b7a5a
PB
13127 condition_rtx = rs6000_generate_compare (operands[0], mode);
13128 loc_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
39a10a29
GK
13129 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13130 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13131 loc_ref, pc_rtx)));
13132}
13133
12a4e8c5
GK
13134/* Return the string to output a conditional branch to LABEL, which is
13135 the operand number of the label, or -1 if the branch is really a
f676971a 13136 conditional return.
12a4e8c5
GK
13137
13138 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13139 condition code register and its mode specifies what kind of
13140 comparison we made.
13141
a0ab749a 13142 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13143
13144 INSN is the insn. */
13145
13146char *
a2369ed3 13147output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13148{
13149 static char string[64];
13150 enum rtx_code code = GET_CODE (op);
13151 rtx cc_reg = XEXP (op, 0);
13152 enum machine_mode mode = GET_MODE (cc_reg);
13153 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13154 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13155 int really_reversed = reversed ^ need_longbranch;
13156 char *s = string;
13157 const char *ccode;
13158 const char *pred;
13159 rtx note;
13160
39a10a29
GK
13161 validate_condition_mode (code, mode);
13162
13163 /* Work out which way this really branches. We could use
13164 reverse_condition_maybe_unordered here always but this
13165 makes the resulting assembler clearer. */
12a4e8c5 13166 if (really_reversed)
de40e1df
DJ
13167 {
13168 /* Reversing an FP compare needs care -- an ordered compare
13169 becomes an unordered compare and vice versa. */
13170 if (mode == CCFPmode)
13171 code = reverse_condition_maybe_unordered (code);
13172 else
13173 code = reverse_condition (code);
13174 }
12a4e8c5 13175
8ef65e3d 13176 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13177 {
13178 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13179 to the GT bit. */
37409796
NS
13180 switch (code)
13181 {
13182 case EQ:
13183 /* Opposite of GT. */
13184 code = GT;
13185 break;
13186
13187 case NE:
13188 code = UNLE;
13189 break;
13190
13191 default:
13192 gcc_unreachable ();
13193 }
a3170dc6
AH
13194 }
13195
39a10a29 13196 switch (code)
12a4e8c5
GK
13197 {
13198 /* Not all of these are actually distinct opcodes, but
13199 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13200 case NE: case LTGT:
13201 ccode = "ne"; break;
13202 case EQ: case UNEQ:
13203 ccode = "eq"; break;
f676971a 13204 case GE: case GEU:
50a0b056 13205 ccode = "ge"; break;
f676971a 13206 case GT: case GTU: case UNGT:
50a0b056 13207 ccode = "gt"; break;
f676971a 13208 case LE: case LEU:
50a0b056 13209 ccode = "le"; break;
f676971a 13210 case LT: case LTU: case UNLT:
50a0b056 13211 ccode = "lt"; break;
12a4e8c5
GK
13212 case UNORDERED: ccode = "un"; break;
13213 case ORDERED: ccode = "nu"; break;
13214 case UNGE: ccode = "nl"; break;
13215 case UNLE: ccode = "ng"; break;
13216 default:
37409796 13217 gcc_unreachable ();
12a4e8c5 13218 }
f676971a
EC
13219
13220 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13221 The old mnemonics don't have a way to specify this information. */
f4857b9b 13222 pred = "";
12a4e8c5
GK
13223 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13224 if (note != NULL_RTX)
13225 {
13226 /* PROB is the difference from 50%. */
13227 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13228
13229 /* Only hint for highly probable/improbable branches on newer
13230 cpus as static prediction overrides processor dynamic
13231 prediction. For older cpus we may as well always hint, but
13232 assume not taken for branches that are very close to 50% as a
13233 mispredicted taken branch is more expensive than a
f676971a 13234 mispredicted not-taken branch. */
ec507f2d 13235 if (rs6000_always_hint
2c9e13f3
JH
13236 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13237 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13238 {
13239 if (abs (prob) > REG_BR_PROB_BASE / 20
13240 && ((prob > 0) ^ need_longbranch))
c4ad648e 13241 pred = "+";
f4857b9b
AM
13242 else
13243 pred = "-";
13244 }
12a4e8c5 13245 }
12a4e8c5
GK
13246
13247 if (label == NULL)
94a54f47 13248 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13249 else
94a54f47 13250 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13251
37c67319 13252 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13253 Assume they'd only be the first character.... */
37c67319
GK
13254 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13255 *s++ = '%';
94a54f47 13256 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13257
13258 if (label != NULL)
13259 {
13260 /* If the branch distance was too far, we may have to use an
13261 unconditional branch to go the distance. */
13262 if (need_longbranch)
44518ddd 13263 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13264 else
13265 s += sprintf (s, ",%s", label);
13266 }
13267
13268 return string;
13269}
50a0b056 13270
64022b5d 13271/* Return the string to flip the GT bit on a CR. */
423c1189 13272char *
64022b5d 13273output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13274{
13275 static char string[64];
13276 int a, b;
13277
37409796
NS
13278 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13279 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13280
64022b5d
AH
13281 /* GT bit. */
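  /* (CR bits are numbered 4 * field + bit, with bit order LT, GT, EQ,
     SO within each field, so the "+ 1" below selects the GT bit.)  */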
13282 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13283 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13284
13285 sprintf (string, "crnot %d,%d", a, b);
13286 return string;
13287}
13288
21213b4c
DP
13289/* Return insn index for the vector compare instruction for given CODE,
13290 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
13291 not available. */
13292
13293static int
94ff898d 13294get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13295 enum machine_mode dest_mode,
13296 enum machine_mode op_mode)
13297{
13298 if (!TARGET_ALTIVEC)
13299 return INSN_NOT_AVAILABLE;
13300
13301 switch (code)
13302 {
13303 case EQ:
13304 if (dest_mode == V16QImode && op_mode == V16QImode)
13305 return UNSPEC_VCMPEQUB;
13306 if (dest_mode == V8HImode && op_mode == V8HImode)
13307 return UNSPEC_VCMPEQUH;
13308 if (dest_mode == V4SImode && op_mode == V4SImode)
13309 return UNSPEC_VCMPEQUW;
13310 if (dest_mode == V4SImode && op_mode == V4SFmode)
13311 return UNSPEC_VCMPEQFP;
13312 break;
13313 case GE:
13314 if (dest_mode == V4SImode && op_mode == V4SFmode)
13315 return UNSPEC_VCMPGEFP;
13316 case GT:
13317 if (dest_mode == V16QImode && op_mode == V16QImode)
13318 return UNSPEC_VCMPGTSB;
13319 if (dest_mode == V8HImode && op_mode == V8HImode)
13320 return UNSPEC_VCMPGTSH;
13321 if (dest_mode == V4SImode && op_mode == V4SImode)
13322 return UNSPEC_VCMPGTSW;
13323 if (dest_mode == V4SImode && op_mode == V4SFmode)
13324 return UNSPEC_VCMPGTFP;
13325 break;
13326 case GTU:
13327 if (dest_mode == V16QImode && op_mode == V16QImode)
13328 return UNSPEC_VCMPGTUB;
13329 if (dest_mode == V8HImode && op_mode == V8HImode)
13330 return UNSPEC_VCMPGTUH;
13331 if (dest_mode == V4SImode && op_mode == V4SImode)
13332 return UNSPEC_VCMPGTUW;
13333 break;
13334 default:
13335 break;
13336 }
13337 return INSN_NOT_AVAILABLE;
13338}
13339
13340/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13341 DMODE is the expected destination mode. This is a recursive function. */
13342
13343static rtx
13344rs6000_emit_vector_compare (enum rtx_code rcode,
13345 rtx op0, rtx op1,
13346 enum machine_mode dmode)
13347{
13348 int vec_cmp_insn;
13349 rtx mask;
13350 enum machine_mode dest_mode;
13351 enum machine_mode op_mode = GET_MODE (op1);
13352
37409796
NS
13353 gcc_assert (TARGET_ALTIVEC);
13354 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13355
13356 /* Floating point vector compare instructions use a V4SImode
13357 destination; the result is moved to the requested mode later. */
13358 if (dmode == V4SFmode)
13359 dest_mode = V4SImode;
13360 else
13361 dest_mode = dmode;
13362
13363 mask = gen_reg_rtx (dest_mode);
13364 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13365
13366 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13367 {
13368 bool swap_operands = false;
13369 bool try_again = false;
13370 switch (rcode)
13371 {
13372 case LT:
13373 rcode = GT;
13374 swap_operands = true;
13375 try_again = true;
13376 break;
13377 case LTU:
13378 rcode = GTU;
13379 swap_operands = true;
13380 try_again = true;
13381 break;
13382 case NE:
370df7db
JC
13383 case UNLE:
13384 case UNLT:
13385 case UNGE:
13386 case UNGT:
13387 /* Invert condition and try again.
13388 e.g., A != B becomes ~(A==B). */
21213b4c 13389 {
370df7db 13390 enum rtx_code rev_code;
21213b4c 13391 enum insn_code nor_code;
d1123cde 13392 rtx eq_rtx;
370df7db
JC
13393
13394 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13395 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13396 dest_mode);
94ff898d 13397
166cdb08 13398 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13399 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13400 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13401
13402 if (dmode != dest_mode)
13403 {
13404 rtx temp = gen_reg_rtx (dest_mode);
13405 convert_move (temp, mask, 0);
13406 return temp;
13407 }
13408 return mask;
13409 }
13410 break;
13411 case GE:
13412 case GEU:
13413 case LE:
13414 case LEU:
13415 /* Try GT/GTU/LT/LTU OR EQ */
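	  /* (For example, "a GE b" is built as (a GT b) | (a EQ b) by the
	     two recursive compares below.)  */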
13416 {
13417 rtx c_rtx, eq_rtx;
13418 enum insn_code ior_code;
13419 enum rtx_code new_code;
13420
37409796
NS
13421 switch (rcode)
13422 {
13423 case GE:
13424 new_code = GT;
13425 break;
13426
13427 case GEU:
13428 new_code = GTU;
13429 break;
13430
13431 case LE:
13432 new_code = LT;
13433 break;
13434
13435 case LEU:
13436 new_code = LTU;
13437 break;
13438
13439 default:
13440 gcc_unreachable ();
13441 }
21213b4c
DP
13442
13443 c_rtx = rs6000_emit_vector_compare (new_code,
13444 op0, op1, dest_mode);
13445 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13446 dest_mode);
13447
166cdb08 13448 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13449 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13450 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13451 if (dmode != dest_mode)
13452 {
13453 rtx temp = gen_reg_rtx (dest_mode);
13454 convert_move (temp, mask, 0);
13455 return temp;
13456 }
13457 return mask;
13458 }
13459 break;
13460 default:
37409796 13461 gcc_unreachable ();
21213b4c
DP
13462 }
13463
13464 if (try_again)
13465 {
13466 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13467 /* You only get two chances. */
13468 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13469 }
13470
13471 if (swap_operands)
13472 {
13473 rtx tmp;
13474 tmp = op0;
13475 op0 = op1;
13476 op1 = tmp;
13477 }
13478 }
13479
915167f5
GK
13480 emit_insn (gen_rtx_SET (VOIDmode, mask,
13481 gen_rtx_UNSPEC (dest_mode,
13482 gen_rtvec (2, op0, op1),
13483 vec_cmp_insn)));
21213b4c
DP
13484 if (dmode != dest_mode)
13485 {
13486 rtx temp = gen_reg_rtx (dest_mode);
13487 convert_move (temp, mask, 0);
13488 return temp;
13489 }
13490 return mask;
13491}
13492
13493/* Return the vector select instruction for MODE, or INSN_NOT_AVAILABLE
13494 if no valid insn exists for the given mode. */
13495
13496static int
13497get_vsel_insn (enum machine_mode mode)
13498{
13499 switch (mode)
13500 {
13501 case V4SImode:
13502 return UNSPEC_VSEL4SI;
13503 break;
13504 case V4SFmode:
13505 return UNSPEC_VSEL4SF;
13506 break;
13507 case V8HImode:
13508 return UNSPEC_VSEL8HI;
13509 break;
13510 case V16QImode:
13511 return UNSPEC_VSEL16QI;
13512 break;
13513 default:
13514 return INSN_NOT_AVAILABLE;
13515 break;
13516 }
13517 return INSN_NOT_AVAILABLE;
13518}
13519
13520/* Emit a vector select insn computing DEST from
13521 operands OP1, OP2 and MASK. */
13522
13523static void
13524rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13525{
13526 rtx t, temp;
13527 enum machine_mode dest_mode = GET_MODE (dest);
13528 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13529
13530 temp = gen_reg_rtx (dest_mode);
94ff898d 13531
bb8df8a6 13532 /* For each vector element, select op1 where the mask is set,
19f1ebc7 13533 otherwise select op2. */
915167f5
GK
13534 t = gen_rtx_SET (VOIDmode, temp,
13535 gen_rtx_UNSPEC (dest_mode,
13536 gen_rtvec (3, op2, op1, mask),
13537 vsel_insn_index));
21213b4c
DP
13538 emit_insn (t);
13539 emit_move_insn (dest, temp);
13540 return;
13541}
13542
94ff898d 13543/* Emit vector conditional expression.
21213b4c
DP
13544 DEST is the destination. OP1 and OP2 are the two VEC_COND_EXPR operands.
13545 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13546
13547int
13548rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13549 rtx cond, rtx cc_op0, rtx cc_op1)
13550{
13551 enum machine_mode dest_mode = GET_MODE (dest);
13552 enum rtx_code rcode = GET_CODE (cond);
13553 rtx mask;
13554
13555 if (!TARGET_ALTIVEC)
13556 return 0;
13557
13558 /* Get the vector mask for the given relational operations. */
13559 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13560
13561 rs6000_emit_vector_select (dest, op1, op2, mask);
13562
13563 return 1;
13564}
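/* (Taken together, the helpers above expand e.g. a V4SImode
   "a > b ? x : y" into a vcmpgtsw producing a per-element all-ones /
   all-zeros mask, followed by a vsel that picks x where the mask is set
   and y elsewhere.)  */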
13565
50a0b056
GK
13566/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13567 operands of the last comparison is nonzero/true, FALSE_COND if it
13568 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13569
50a0b056 13570int
a2369ed3 13571rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13572{
13573 enum rtx_code code = GET_CODE (op);
f90b7a5a
PB
13574 rtx op0 = XEXP (op, 0);
13575 rtx op1 = XEXP (op, 1);
50a0b056 13576 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13577 enum machine_mode compare_mode = GET_MODE (op0);
13578 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13579 rtx temp;
add2402e 13580 bool is_against_zero;
50a0b056 13581
a3c9585f 13582 /* These modes should always match. */
a3170dc6
AH
13583 if (GET_MODE (op1) != compare_mode
13584 /* In the isel case however, we can use a compare immediate, so
13585 op1 may be a small constant. */
13586 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13587 return 0;
178c3eff 13588 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13589 return 0;
178c3eff 13590 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13591 return 0;
13592
50a0b056 13593 /* First, work out if the hardware can do this at all, or
a3c9585f 13594 if it's too slow.... */
f90b7a5a 13595 if (!FLOAT_MODE_P (compare_mode))
a3170dc6
AH
13596 {
13597 if (TARGET_ISEL)
13598 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13599 return 0;
13600 }
8ef65e3d 13601 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13602 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13603 return 0;
50a0b056 13604
add2402e 13605 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13606
add2402e
GK
13607 /* A floating-point subtract might overflow, underflow, or produce
13608 an inexact result, thus changing the floating-point flags, so it
13609 can't be generated if we care about that. It's safe if one side
13610 of the construct is zero, since then no subtract will be
13611 generated. */
ebb109ad 13612 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13613 && flag_trapping_math && ! is_against_zero)
13614 return 0;
13615
50a0b056
GK
13616 /* Eliminate half of the comparisons by switching operands, this
13617 makes the remaining code simpler. */
13618 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13619 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13620 {
13621 code = reverse_condition_maybe_unordered (code);
13622 temp = true_cond;
13623 true_cond = false_cond;
13624 false_cond = temp;
13625 }
13626
13627 /* UNEQ and LTGT take four instructions for a comparison with zero,
13628 it'll probably be faster to use a branch here too. */
bc9ec0e0 13629 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13630 return 0;
f676971a 13631
50a0b056
GK
13632 if (GET_CODE (op1) == CONST_DOUBLE)
13633 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13634
b6d08ca1 13635 /* We're going to try to implement comparisons by performing
50a0b056
GK
13636 a subtract, then comparing against zero. Unfortunately,
13637 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13638 know that the operand is finite and the comparison
50a0b056 13639 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13640 if (HONOR_INFINITIES (compare_mode)
50a0b056 13641 && code != GT && code != UNGE
045572c7 13642 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13643 /* Constructs of the form (a OP b ? a : b) are safe. */
13644 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13645 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13646 && ! rtx_equal_p (op1, true_cond))))
13647 return 0;
add2402e 13648
50a0b056
GK
13649 /* At this point we know we can use fsel. */
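  /* (fsel selects between its two data operands according to whether its
     compare operand is >= 0.0, which is why every remaining case below
     is reduced to a GE comparison against zero.)  */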
13650
13651 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13652 if (! is_against_zero)
13653 {
13654 temp = gen_reg_rtx (compare_mode);
13655 emit_insn (gen_rtx_SET (VOIDmode, temp,
13656 gen_rtx_MINUS (compare_mode, op0, op1)));
13657 op0 = temp;
13658 op1 = CONST0_RTX (compare_mode);
13659 }
50a0b056
GK
13660
13661 /* If we don't care about NaNs we can reduce some of the comparisons
13662 down to faster ones. */
bc9ec0e0 13663 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13664 switch (code)
13665 {
13666 case GT:
13667 code = LE;
13668 temp = true_cond;
13669 true_cond = false_cond;
13670 false_cond = temp;
13671 break;
13672 case UNGE:
13673 code = GE;
13674 break;
13675 case UNEQ:
13676 code = EQ;
13677 break;
13678 default:
13679 break;
13680 }
13681
13682 /* Now, reduce everything down to a GE. */
13683 switch (code)
13684 {
13685 case GE:
13686 break;
13687
13688 case LE:
3148ad6d
DJ
13689 temp = gen_reg_rtx (compare_mode);
13690 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13691 op0 = temp;
13692 break;
13693
13694 case ORDERED:
3148ad6d
DJ
13695 temp = gen_reg_rtx (compare_mode);
13696 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13697 op0 = temp;
13698 break;
13699
13700 case EQ:
3148ad6d 13701 temp = gen_reg_rtx (compare_mode);
f676971a 13702 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13703 gen_rtx_NEG (compare_mode,
13704 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13705 op0 = temp;
13706 break;
13707
13708 case UNGE:
bc9ec0e0 13709 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13710 temp = gen_reg_rtx (result_mode);
50a0b056 13711 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13712 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13713 gen_rtx_GE (VOIDmode,
13714 op0, op1),
13715 true_cond, false_cond)));
bc9ec0e0
GK
13716 false_cond = true_cond;
13717 true_cond = temp;
50a0b056 13718
3148ad6d
DJ
13719 temp = gen_reg_rtx (compare_mode);
13720 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13721 op0 = temp;
13722 break;
13723
13724 case GT:
bc9ec0e0 13725 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13726 temp = gen_reg_rtx (result_mode);
50a0b056 13727 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13728 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13729 gen_rtx_GE (VOIDmode,
13730 op0, op1),
13731 true_cond, false_cond)));
bc9ec0e0
GK
13732 true_cond = false_cond;
13733 false_cond = temp;
50a0b056 13734
3148ad6d
DJ
13735 temp = gen_reg_rtx (compare_mode);
13736 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13737 op0 = temp;
13738 break;
13739
13740 default:
37409796 13741 gcc_unreachable ();
50a0b056
GK
13742 }
13743
13744 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13745 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13746 gen_rtx_GE (VOIDmode,
13747 op0, op1),
13748 true_cond, false_cond)));
13749 return 1;
13750}
13751
a3170dc6
AH
13752/* Same as above, but for ints (isel). */
13753
13754static int
a2369ed3 13755rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13756{
13757 rtx condition_rtx, cr;
13758
13759 /* All isel implementations thus far are 32-bits. */
f90b7a5a 13760 if (GET_MODE (XEXP (op, 0)) != SImode)
a3170dc6
AH
13761 return 0;
13762
13763 /* We still have to do the compare, because isel doesn't do a
13764 compare, it just looks at the CRx bits set by a previous compare
13765 instruction. */
f90b7a5a 13766 condition_rtx = rs6000_generate_compare (op, SImode);
a3170dc6
AH
13767 cr = XEXP (condition_rtx, 0);
13768
13769 if (GET_MODE (cr) == CCmode)
13770 emit_insn (gen_isel_signed (dest, condition_rtx,
13771 true_cond, false_cond, cr));
13772 else
13773 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13774 true_cond, false_cond, cr));
13775
13776 return 1;
13777}
13778
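/* Output the isel instruction for the conditional move described by
   OPERANDS: OPERANDS[1] is the comparison, and OPERANDS[2]/OPERANDS[3]
   are selected into OPERANDS[0].  isel tests a single CR bit, so the
   codes that correspond to a cleared bit (GE, GEU, LE, LEU, NE) are
   handled by reversing the condition and swapping the two data
   operands.  */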
13779const char *
a2369ed3 13780output_isel (rtx *operands)
a3170dc6
AH
13781{
13782 enum rtx_code code;
13783
13784 code = GET_CODE (operands[1]);
13785 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13786 {
13787 PUT_CODE (operands[1], reverse_condition (code));
13788 return "isel %0,%3,%2,%j1";
13789 }
13790 else
13791 return "isel %0,%2,%3,%j1";
13792}
13793
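/* Emit a signed or unsigned min/max operation CODE (SMIN, SMAX, UMIN or
   UMAX) on OP0 and OP1, putting the result in DEST.  The operation is
   open-coded as a conditional move; e.g. smax becomes
   "dest = (op0 >= op1) ? op0 : op1".  */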
50a0b056 13794void
a2369ed3 13795rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13796{
13797 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13798 enum rtx_code c;
50a0b056 13799 rtx target;
5dc8d536
AH
13800
13801 if (code == SMAX || code == SMIN)
13802 c = GE;
13803 else
13804 c = GEU;
13805
50a0b056 13806 if (code == SMAX || code == UMAX)
f676971a 13807 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13808 op0, op1, mode, 0);
13809 else
f676971a 13810 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13811 op1, op0, mode, 0);
37409796 13812 gcc_assert (target);
50a0b056
GK
13813 if (target != dest)
13814 emit_move_insn (dest, target);
13815}
46c07df8 13816
915167f5
GK
13817/* Emit instructions to perform a load-reserved/store-conditional operation.
13818 The operation performed is an atomic
13819 (set M (CODE:MODE M OP))
13820 If not NULL, BEFORE is atomically set to M before the operation, and
13821 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13822 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13823 Either OP or M may be wrapped in a NOT operation. */
13824
13825void
13826rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13827 rtx m, rtx op, rtx before_param, rtx after_param,
13828 bool sync_p)
13829{
13830 enum machine_mode used_mode;
13831 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13832 rtx used_m;
13833 rtvec vec;
13834 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13835 rtx shift = NULL_RTX;
bb8df8a6 13836
915167f5 13837 if (sync_p)
a3cb4b82 13838 emit_insn (gen_lwsync ());
bb8df8a6 13839
915167f5
GK
13840 used_m = m;
13841
13842 /* If this is smaller than SImode, we'll have to use SImode with
13843 adjustments. */
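  /* (Sketch of the approach: the narrow value is operated on inside the
     naturally aligned SImode word containing it.  SHIFT ends up holding
     the bit offset of the subword within that word, the operand and an
     IMASK-derived mask are shifted into position, and the bytes outside
     the subword are preserved by the AND/IOR combinations constructed
     below.)  */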
13844 if (mode == QImode || mode == HImode)
13845 {
13846 rtx newop, oldop;
13847
13848 if (MEM_ALIGN (used_m) >= 32)
13849 {
13850 int ishift = 0;
13851 if (BYTES_BIG_ENDIAN)
13852 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13853
915167f5 13854 shift = GEN_INT (ishift);
c75c6d11 13855 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13856 }
13857 else
13858 {
13859 rtx addrSI, aligned_addr;
a9c9d3fa 13860 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13861
c75c6d11
JJ
13862 addrSI = gen_lowpart_common (SImode,
13863 force_reg (Pmode, XEXP (used_m, 0)));
13864 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13865 shift = gen_reg_rtx (SImode);
13866
13867 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13868 GEN_INT (shift_mask)));
13869 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13870
13871 aligned_addr = expand_binop (Pmode, and_optab,
13872 XEXP (used_m, 0),
13873 GEN_INT (-4), NULL_RTX,
13874 1, OPTAB_LIB_WIDEN);
13875 used_m = change_address (used_m, SImode, aligned_addr);
13876 set_mem_align (used_m, 32);
915167f5 13877 }
c75c6d11
JJ
13878 /* It's safe to keep the old alias set of USED_M, because
13879 the operation is atomic and only affects the original
13880 USED_M. */
f66f9865 13881 m = used_m;
915167f5
GK
13882
13883 if (GET_CODE (op) == NOT)
13884 {
13885 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13886 oldop = gen_rtx_NOT (SImode, oldop);
13887 }
13888 else
13889 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13890
915167f5
GK
13891 switch (code)
13892 {
13893 case IOR:
13894 case XOR:
13895 newop = expand_binop (SImode, and_optab,
13896 oldop, GEN_INT (imask), NULL_RTX,
13897 1, OPTAB_LIB_WIDEN);
13898 emit_insn (gen_ashlsi3 (newop, newop, shift));
13899 break;
13900
f66f9865
DE
13901 case NOT: /* NAND */
13902 newop = expand_binop (SImode, ior_optab,
13903 oldop, GEN_INT (~imask), NULL_RTX,
13904 1, OPTAB_LIB_WIDEN);
13905 emit_insn (gen_rotlsi3 (newop, newop, shift));
13906 break;
13907
915167f5
GK
13908 case AND:
13909 newop = expand_binop (SImode, ior_optab,
13910 oldop, GEN_INT (~imask), NULL_RTX,
13911 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13912 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13913 break;
13914
13915 case PLUS:
9f0076e5 13916 case MINUS:
915167f5
GK
13917 {
13918 rtx mask;
bb8df8a6 13919
915167f5
GK
13920 newop = expand_binop (SImode, and_optab,
13921 oldop, GEN_INT (imask), NULL_RTX,
13922 1, OPTAB_LIB_WIDEN);
13923 emit_insn (gen_ashlsi3 (newop, newop, shift));
13924
13925 mask = gen_reg_rtx (SImode);
13926 emit_move_insn (mask, GEN_INT (imask));
13927 emit_insn (gen_ashlsi3 (mask, mask, shift));
13928
9f0076e5
DE
13929 if (code == PLUS)
13930 newop = gen_rtx_PLUS (SImode, m, newop);
13931 else
13932 newop = gen_rtx_MINUS (SImode, m, newop);
13933 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13934 newop = gen_rtx_IOR (SImode, newop,
13935 gen_rtx_AND (SImode,
13936 gen_rtx_NOT (SImode, mask),
13937 m));
13938 break;
13939 }
13940
13941 default:
13942 gcc_unreachable ();
13943 }
13944
13945 op = newop;
13946 used_mode = SImode;
13947 before = gen_reg_rtx (used_mode);
13948 after = gen_reg_rtx (used_mode);
13949 }
13950 else
13951 {
13952 used_mode = mode;
13953 before = before_param;
13954 after = after_param;
13955
13956 if (before == NULL_RTX)
13957 before = gen_reg_rtx (used_mode);
13958 if (after == NULL_RTX)
13959 after = gen_reg_rtx (used_mode);
13960 }
bb8df8a6 13961
f66f9865 13962 if ((code == PLUS || code == MINUS)
9f0076e5 13963 && used_mode != mode)
915167f5
GK
13964 the_op = op; /* Computed above. */
13965 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13966 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
f66f9865
DE
13967 else if (code == NOT)
13968 the_op = gen_rtx_fmt_ee (IOR, used_mode,
13969 gen_rtx_NOT (used_mode, m),
13970 gen_rtx_NOT (used_mode, op));
915167f5
GK
13971 else
13972 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13973
13974 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13975 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13976 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13977 gen_rtx_UNSPEC (used_mode,
13978 gen_rtvec (1, the_op),
13979 UNSPEC_SYNC_OP));
915167f5
GK
13980 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13981
9f0076e5 13982 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13983 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13984 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13985 else
13986 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13987 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13988
13989 /* Shift and mask the return values properly. */
13990 if (used_mode != mode && before_param)
13991 {
13992 emit_insn (gen_lshrsi3 (before, before, shift));
13993 convert_move (before_param, before, 1);
13994 }
13995
13996 if (used_mode != mode && after_param)
13997 {
13998 emit_insn (gen_lshrsi3 (after, after, shift));
13999 convert_move (after_param, after, 1);
14000 }
14001
14002 /* The previous sequence will end with a branch that's dependent on
14003 the conditional store, so placing an isync will ensure that no
14004 other instructions (especially, no load or store instructions)
14005 can start before the atomic operation completes. */
14006 if (sync_p)
14007 emit_insn (gen_isync ());
14008}
14009
b52110d4
DE
14010/* A subroutine of the atomic operation splitters. Jump to LABEL if
14011 COND is true. Mark the jump as unlikely to be taken. */
14012
14013static void
14014emit_unlikely_jump (rtx cond, rtx label)
14015{
14016 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14017 rtx x;
14018
14019 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14020 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
bbbbb16a 14021 add_reg_note (x, REG_BR_PROB, very_unlikely);
b52110d4
DE
14022}
14023
14024/* A subroutine of the atomic operation splitters. Emit a load-locked
14025 instruction in MODE. */
14026
14027static void
14028emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14029{
14030 rtx (*fn) (rtx, rtx) = NULL;
14031 if (mode == SImode)
14032 fn = gen_load_locked_si;
14033 else if (mode == DImode)
14034 fn = gen_load_locked_di;
14035 emit_insn (fn (reg, mem));
14036}
14037
14038/* A subroutine of the atomic operation splitters. Emit a store-conditional
14039 instruction in MODE. */
14040
14041static void
14042emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14043{
14044 rtx (*fn) (rtx, rtx, rtx) = NULL;
14045 if (mode == SImode)
14046 fn = gen_store_conditional_si;
14047 else if (mode == DImode)
14048 fn = gen_store_conditional_di;
14049
9f0076e5 14050 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14051 if (PPC405_ERRATUM77)
14052 emit_insn (gen_memory_barrier ());
14053
14054 emit_insn (fn (res, mem, val));
14055}
14056
ea2c620c 14057/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14058 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14059 operand of the binary operator. BEFORE and AFTER are optional locations to
14060 return the value of MEM either before or after the operation. SCRATCH is
14061 a scratch register. */
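/* (The emitted sequence is the usual load-reserved/store-conditional
   loop; for SImode it is roughly

	lwsync
     1:	lwarx  scratch,0,mem
	<op>   scratch,scratch,val
	stwcx. scratch,0,mem
	bne-   1b
	isync

   with ldarx/stdcx. used instead for DImode.)  */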
14062
14063void
14064rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14065 rtx before, rtx after, rtx scratch)
14066{
14067 enum machine_mode mode = GET_MODE (mem);
14068 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14069
a3cb4b82 14070 emit_insn (gen_lwsync ());
9f0076e5
DE
14071
14072 label = gen_label_rtx ();
14073 emit_label (label);
14074 label = gen_rtx_LABEL_REF (VOIDmode, label);
14075
14076 if (before == NULL_RTX)
14077 before = scratch;
14078 emit_load_locked (mode, before, mem);
14079
14080 if (code == NOT)
f66f9865
DE
14081 x = gen_rtx_IOR (mode,
14082 gen_rtx_NOT (mode, before),
14083 gen_rtx_NOT (mode, val));
9f0076e5
DE
14084 else if (code == AND)
14085 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14086 else
14087 x = gen_rtx_fmt_ee (code, mode, before, val);
14088
14089 if (after != NULL_RTX)
14090 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14091 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14092
14093 emit_store_conditional (mode, cond, mem, scratch);
14094
14095 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14096 emit_unlikely_jump (x, label);
14097
14098 emit_insn (gen_isync ());
14099}
14100
b52110d4
DE
14101/* Expand an atomic compare and swap operation. MEM is the memory on which
14102 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14103 value to be stored. SCRATCH is a scratch GPR. */
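/* (Same reservation loop as above, except that the loop exits early,
   without attempting the store, as soon as the value loaded from MEM
   differs from OLDVAL.)  */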
14104
14105void
14106rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14107 rtx scratch)
14108{
14109 enum machine_mode mode = GET_MODE (mem);
14110 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14111
a3cb4b82 14112 emit_insn (gen_lwsync ());
b52110d4
DE
14113
14114 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14115 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14116 emit_label (XEXP (label1, 0));
14117
14118 emit_load_locked (mode, retval, mem);
14119
14120 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14121 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14122
14123 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14124 emit_unlikely_jump (x, label2);
14125
14126 emit_move_insn (scratch, newval);
14127 emit_store_conditional (mode, cond, mem, scratch);
14128
14129 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14130 emit_unlikely_jump (x, label1);
14131
14132 emit_insn (gen_isync ());
14133 emit_label (XEXP (label2, 0));
14134}
14135
14136/* Expand an atomic test and set operation. MEM is the memory on which
14137 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14138
14139void
14140rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14141{
14142 enum machine_mode mode = GET_MODE (mem);
14143 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14144
b52110d4
DE
14145 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14146 emit_label (XEXP (label, 0));
14147
14148 emit_load_locked (mode, retval, mem);
14149 emit_move_insn (scratch, val);
14150 emit_store_conditional (mode, cond, mem, scratch);
14151
14152 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14153 emit_unlikely_jump (x, label);
14154
14155 emit_insn (gen_isync ());
14156}
14157
9fc75b97
DE
14158void
14159rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14160{
14161 enum machine_mode mode = GET_MODE (mem);
14162 rtx addrSI, align, wdst, shift, mask;
14163 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14164 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14165
14166 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14167 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14168 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14169 shift = gen_reg_rtx (SImode);
14170 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14171 GEN_INT (shift_mask)));
14172 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14173
14174 /* Shift and mask old value into position within word. */
14175 oldval = convert_modes (SImode, mode, oldval, 1);
14176 oldval = expand_binop (SImode, and_optab,
14177 oldval, GEN_INT (imask), NULL_RTX,
14178 1, OPTAB_LIB_WIDEN);
14179 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14180
14181 /* Shift and mask new value into position within word. */
14182 newval = convert_modes (SImode, mode, newval, 1);
14183 newval = expand_binop (SImode, and_optab,
14184 newval, GEN_INT (imask), NULL_RTX,
14185 1, OPTAB_LIB_WIDEN);
14186 emit_insn (gen_ashlsi3 (newval, newval, shift));
14187
14188 /* Mask for insertion. */
14189 mask = gen_reg_rtx (SImode);
14190 emit_move_insn (mask, GEN_INT (imask));
14191 emit_insn (gen_ashlsi3 (mask, mask, shift));
14192
14193 /* Address of aligned word containing subword. */
14194 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14195 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14196 mem = change_address (mem, SImode, align);
14197 set_mem_align (mem, 32);
14198 MEM_VOLATILE_P (mem) = 1;
14199
14200 wdst = gen_reg_rtx (SImode);
14201 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14202 oldval, newval, mem));
14203
2725b75c
JJ
14204 /* Shift the result back. */
14205 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14206
9fc75b97
DE
14207 emit_move_insn (dst, gen_lowpart (mode, wdst));
14208}
14209
14210void
14211rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14212 rtx oldval, rtx newval, rtx mem,
14213 rtx scratch)
14214{
14215 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14216
a3cb4b82 14217 emit_insn (gen_lwsync ());
9fc75b97
DE
14218 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14219 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14220 emit_label (XEXP (label1, 0));
14221
14222 emit_load_locked (SImode, scratch, mem);
14223
14224 /* Mask subword within loaded value for comparison with oldval.
14225 Use UNSPEC_AND to avoid clobber.*/
14226 emit_insn (gen_rtx_SET (SImode, dest,
14227 gen_rtx_UNSPEC (SImode,
14228 gen_rtvec (2, scratch, mask),
14229 UNSPEC_AND)));
14230
14231 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14232 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14233
14234 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14235 emit_unlikely_jump (x, label2);
14236
14237 /* Clear subword within loaded value for insertion of new value. */
14238 emit_insn (gen_rtx_SET (SImode, scratch,
14239 gen_rtx_AND (SImode,
14240 gen_rtx_NOT (SImode, mask), scratch)));
14241 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14242 emit_store_conditional (SImode, cond, mem, scratch);
14243
14244 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14245 emit_unlikely_jump (x, label1);
14246
14247 emit_insn (gen_isync ());
14248 emit_label (XEXP (label2, 0));
14249}
14250
14251
b52110d4 14252 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14253 multi-register moves. It will emit at most one instruction for
14254 each register that is accessed; that is, it won't emit li/lis pairs
14255 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14256 register. */
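/* (For example, a TImode register-to-register move on a 64-bit target is
   split into two DImode subreg moves; they are emitted last register
   first when the source and destination ranges might overlap
   destructively, so no input is clobbered before it has been read.)  */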
46c07df8 14257
46c07df8 14258void
a9baceb1 14259rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14260{
a9baceb1
GK
14261 /* The register number of the first register being moved. */
14262 int reg;
14263 /* The mode that is to be moved. */
14264 enum machine_mode mode;
14265 /* The mode that the move is being done in, and its size. */
14266 enum machine_mode reg_mode;
14267 int reg_mode_size;
14268 /* The number of registers that will be moved. */
14269 int nregs;
14270
14271 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14272 mode = GET_MODE (dst);
c8b622ff 14273 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14274 if (FP_REGNO_P (reg))
696e45ba
ME
14275 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14276 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14277 else if (ALTIVEC_REGNO_P (reg))
14278 reg_mode = V16QImode;
4f011e1e
JM
14279 else if (TARGET_E500_DOUBLE && mode == TFmode)
14280 reg_mode = DFmode;
a9baceb1
GK
14281 else
14282 reg_mode = word_mode;
14283 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14284
37409796 14285 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14286
a9baceb1
GK
14287 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14288 {
14289 /* Move register range backwards, if we might have destructive
14290 overlap. */
14291 int i;
14292 for (i = nregs - 1; i >= 0; i--)
f676971a 14293 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14294 simplify_gen_subreg (reg_mode, dst, mode,
14295 i * reg_mode_size),
14296 simplify_gen_subreg (reg_mode, src, mode,
14297 i * reg_mode_size)));
14298 }
46c07df8
HP
14299 else
14300 {
a9baceb1
GK
14301 int i;
14302 int j = -1;
14303 bool used_update = false;
46c07df8 14304
c1e55850 14305 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14306 {
14307 rtx breg;
3a1f863f 14308
a9baceb1
GK
14309 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14310 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14311 {
14312 rtx delta_rtx;
a9baceb1 14313 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14314 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14315 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14316 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14317 emit_insn (TARGET_32BIT
14318 ? gen_addsi3 (breg, breg, delta_rtx)
14319 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14320 src = replace_equiv_address (src, breg);
3a1f863f 14321 }
d04b6e6e 14322 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14323 {
13e2e16e 14324 rtx basereg;
c1e55850
GK
14325 basereg = gen_rtx_REG (Pmode, reg);
14326 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14327 src = replace_equiv_address (src, basereg);
c1e55850 14328 }
3a1f863f 14329
0423421f
AM
14330 breg = XEXP (src, 0);
14331 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14332 breg = XEXP (breg, 0);
14333
14334 /* If the base register we are using to address memory is
14335 also a destination reg, then change that register last. */
14336 if (REG_P (breg)
14337 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14338 && REGNO (breg) < REGNO (dst) + nregs)
14339 j = REGNO (breg) - REGNO (dst);
c4ad648e 14340 }
46c07df8 14341
a9baceb1 14342 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14343 {
14344 rtx breg;
14345
a9baceb1
GK
14346 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14347 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14348 {
14349 rtx delta_rtx;
a9baceb1 14350 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14351 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14352 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14353 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14354
14355 /* We have to update the breg before doing the store.
14356 Use store with update, if available. */
14357
14358 if (TARGET_UPDATE)
14359 {
a9baceb1 14360 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14361 emit_insn (TARGET_32BIT
14362 ? (TARGET_POWERPC64
14363 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14364 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14365 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14366 used_update = true;
3a1f863f
DE
14367 }
14368 else
a9baceb1
GK
14369 emit_insn (TARGET_32BIT
14370 ? gen_addsi3 (breg, breg, delta_rtx)
14371 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14372 dst = replace_equiv_address (dst, breg);
3a1f863f 14373 }
37409796 14374 else
d04b6e6e 14375 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14376 }
14377
46c07df8 14378 for (i = 0; i < nregs; i++)
f676971a 14379 {
3a1f863f
DE
14380 /* Calculate index to next subword. */
14381 ++j;
f676971a 14382 if (j == nregs)
3a1f863f 14383 j = 0;
46c07df8 14384
112cdef5 14385 /* If compiler already emitted move of first word by
a9baceb1 14386 store with update, no need to do anything. */
3a1f863f 14387 if (j == 0 && used_update)
a9baceb1 14388 continue;
f676971a 14389
a9baceb1
GK
14390 emit_insn (gen_rtx_SET (VOIDmode,
14391 simplify_gen_subreg (reg_mode, dst, mode,
14392 j * reg_mode_size),
14393 simplify_gen_subreg (reg_mode, src, mode,
14394 j * reg_mode_size)));
3a1f863f 14395 }
46c07df8
HP
14396 }
14397}
14398
12a4e8c5 14399\f
a4f6c312
SS
14400/* This page contains routines that are used to determine what the
14401 function prologue and epilogue code will do and write them out. */
9878760c 14402
a4f6c312
SS
14403/* Return the first fixed-point register that is required to be
14404 saved. 32 if none. */
9878760c
RK
14405
14406int
863d938c 14407first_reg_to_save (void)
9878760c
RK
14408{
14409 int first_reg;
14410
14411 /* Find lowest numbered live register. */
14412 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14413 if (df_regs_ever_live_p (first_reg)
a38d360d 14414 && (! call_used_regs[first_reg]
1db02437 14415 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14416 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14417 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14418 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14419 break;
14420
ee890fe2 14421#if TARGET_MACHO
93638d7a 14422 if (flag_pic
e3b5732b 14423 && crtl->uses_pic_offset_table
93638d7a 14424 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14425 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14426#endif
14427
9878760c
RK
14428 return first_reg;
14429}
14430
14431/* Similar, for FP regs. */
14432
14433int
863d938c 14434first_fp_reg_to_save (void)
9878760c
RK
14435{
14436 int first_reg;
14437
14438 /* Find lowest numbered live register. */
14439 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14440 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14441 break;
14442
14443 return first_reg;
14444}
00b960c7
AH
14445
14446/* Similar, for AltiVec regs. */
14447
14448static int
863d938c 14449first_altivec_reg_to_save (void)
00b960c7
AH
14450{
14451 int i;
14452
14453 /* Stack frame remains as is unless we are in AltiVec ABI. */
14454 if (! TARGET_ALTIVEC_ABI)
14455 return LAST_ALTIVEC_REGNO + 1;
14456
22fa69da 14457 /* On Darwin, the unwind routines are compiled without
982afe02 14458 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14459 altivec registers when necessary. */
e3b5732b 14460 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14461 && ! TARGET_ALTIVEC)
14462 return FIRST_ALTIVEC_REGNO + 20;
14463
00b960c7
AH
14464 /* Find lowest numbered live register. */
14465 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14466 if (df_regs_ever_live_p (i))
00b960c7
AH
14467 break;
14468
14469 return i;
14470}
14471
14472/* Return a 32-bit mask of the AltiVec registers we need to set in
14473 VRSAVE. Bit n of the return value is 1 if Vn is live; bits are
14474 numbered from the MSB of the 32-bit word, so V0 is the MSB. */
14475
14476static unsigned int
863d938c 14477compute_vrsave_mask (void)
00b960c7
AH
14478{
14479 unsigned int i, mask = 0;
14480
22fa69da 14481 /* On Darwin, the unwind routines are compiled without
982afe02 14482 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14483 call-saved altivec registers when necessary. */
e3b5732b 14484 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14485 && ! TARGET_ALTIVEC)
14486 mask |= 0xFFF;
14487
00b960c7
AH
14488 /* First, find out if we use _any_ altivec registers. */
14489 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14490 if (df_regs_ever_live_p (i))
00b960c7
AH
14491 mask |= ALTIVEC_REG_BIT (i);
14492
14493 if (mask == 0)
14494 return mask;
14495
00b960c7
AH
14496 /* Next, remove the argument registers from the set. These must
14497 be in the VRSAVE mask set by the caller, so we don't need to add
14498 them in again. More importantly, the mask we compute here is
14499 used to generate CLOBBERs in the set_vrsave insn, and we do not
14500 wish the argument registers to die. */
38173d38 14501 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14502 mask &= ~ALTIVEC_REG_BIT (i);
14503
14504 /* Similarly, remove the return value from the set. */
14505 {
14506 bool yes = false;
14507 diddle_return_value (is_altivec_return_reg, &yes);
14508 if (yes)
14509 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14510 }
14511
14512 return mask;
14513}
14514
d62294f5 14515/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14516 size of prologues/epilogues by calling our own save/restore-the-world
14517 routines. */
d62294f5
FJ
14518
14519static void
f57fe068
AM
14520compute_save_world_info (rs6000_stack_t *info_ptr)
14521{
14522 info_ptr->world_save_p = 1;
14523 info_ptr->world_save_p
14524 = (WORLD_SAVE_P (info_ptr)
14525 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14526 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14527 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14528 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14529 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14530 && info_ptr->cr_save_p);
f676971a 14531
d62294f5
FJ
14532 /* This will not work in conjunction with sibcalls. Make sure there
14533 are none. (This check is expensive, but seldom executed.) */
f57fe068 14534 if (WORLD_SAVE_P (info_ptr))
f676971a 14535 {
d62294f5
FJ
14536 rtx insn;
14537 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14538 if ( GET_CODE (insn) == CALL_INSN
14539 && SIBLING_CALL_P (insn))
14540 {
14541 info_ptr->world_save_p = 0;
14542 break;
14543 }
d62294f5 14544 }
f676971a 14545
f57fe068 14546 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14547 {
14548 /* Even if we're not touching VRsave, make sure there's room on the
14549 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14550 will attempt to save it. */
d62294f5
FJ
14551 info_ptr->vrsave_size = 4;
14552
298ac1dd
AP
14553 /* If we are going to save the world, we need to save the link register too. */
14554 info_ptr->lr_save_p = 1;
14555
d62294f5
FJ
14556 /* "Save" the VRsave register too if we're saving the world. */
14557 if (info_ptr->vrsave_mask == 0)
c4ad648e 14558 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14559
14560 /* Because the Darwin register save/restore routines only handle
c4ad648e 14561 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14562 check. */
37409796
NS
14563 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14564 && (info_ptr->first_altivec_reg_save
14565 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14566 }
f676971a 14567 return;
d62294f5
FJ
14568}
14569
14570
00b960c7 14571static void
a2369ed3 14572is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14573{
14574 bool *yes = (bool *) xyes;
14575 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14576 *yes = true;
14577}
14578
4697a36c
MM
14579\f
14580/* Calculate the stack information for the current function. This is
14581 complicated by having two separate calling sequences, the AIX calling
14582 sequence and the V.4 calling sequence.
14583
592696dd 14584 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14585 32-bit 64-bit
4697a36c 14586 SP----> +---------------------------------------+
a260abc9 14587 | back chain to caller | 0 0
4697a36c 14588 +---------------------------------------+
a260abc9 14589 | saved CR | 4 8 (8-11)
4697a36c 14590 +---------------------------------------+
a260abc9 14591 | saved LR | 8 16
4697a36c 14592 +---------------------------------------+
a260abc9 14593 | reserved for compilers | 12 24
4697a36c 14594 +---------------------------------------+
a260abc9 14595 | reserved for binders | 16 32
4697a36c 14596 +---------------------------------------+
a260abc9 14597 | saved TOC pointer | 20 40
4697a36c 14598 +---------------------------------------+
a260abc9 14599 | Parameter save area (P) | 24 48
4697a36c 14600 +---------------------------------------+
a260abc9 14601 | Alloca space (A) | 24+P etc.
802a0058 14602 +---------------------------------------+
a7df97e6 14603 | Local variable space (L) | 24+P+A
4697a36c 14604 +---------------------------------------+
a7df97e6 14605 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14606 +---------------------------------------+
00b960c7
AH
14607 | Save area for AltiVec registers (W) | 24+P+A+L+X
14608 +---------------------------------------+
14609 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14610 +---------------------------------------+
14611 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14612 +---------------------------------------+
00b960c7
AH
14613 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14614 +---------------------------------------+
14615 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14616 +---------------------------------------+
14617 old SP->| back chain to caller's caller |
14618 +---------------------------------------+
14619
5376a30c
KR
14620 The required alignment for AIX configurations is two words (i.e., 8
14621 or 16 bytes).
14622
14623
4697a36c
MM
14624 V.4 stack frames look like:
14625
14626 SP----> +---------------------------------------+
14627 | back chain to caller | 0
14628 +---------------------------------------+
5eb387b8 14629 | caller's saved LR | 4
4697a36c
MM
14630 +---------------------------------------+
14631 | Parameter save area (P) | 8
14632 +---------------------------------------+
a7df97e6 14633 | Alloca space (A) | 8+P
f676971a 14634 +---------------------------------------+
a7df97e6 14635 | Varargs save area (V) | 8+P+A
f676971a 14636 +---------------------------------------+
a7df97e6 14637 | Local variable space (L) | 8+P+A+V
f676971a 14638 +---------------------------------------+
a7df97e6 14639 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14640 +---------------------------------------+
00b960c7
AH
14641 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14642 +---------------------------------------+
14643 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14644 +---------------------------------------+
14645 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14646 +---------------------------------------+
c4ad648e
AM
14647 | SPE: area for 64-bit GP registers |
14648 +---------------------------------------+
14649 | SPE alignment padding |
14650 +---------------------------------------+
00b960c7 14651 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14652 +---------------------------------------+
00b960c7 14653 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14654 +---------------------------------------+
00b960c7 14655 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14656 +---------------------------------------+
14657 old SP->| back chain to caller's caller |
14658 +---------------------------------------+
b6c9286a 14659
5376a30c
KR
14660 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14661 given. (But note below and in sysv4.h that we require only 8 and
14662 may round up the size of our stack frame anyways. The historical
14663 reason is early versions of powerpc-linux which didn't properly
14664 align the stack at program startup. A happy side-effect is that
14665 -mno-eabi libraries can be used with -meabi programs.)
14666
50d440bc 14667 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14668 the stack alignment requirements may differ. If -mno-eabi is not
14669 given, the required stack alignment is 8 bytes; if -mno-eabi is
14670 given, the required alignment is 16 bytes. (But see V.4 comment
14671 above.) */
4697a36c 14672
61b2fbe7
MM
14673#ifndef ABI_STACK_BOUNDARY
14674#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14675#endif
14676
d1d0c603 14677static rs6000_stack_t *
863d938c 14678rs6000_stack_info (void)
4697a36c 14679{
022123e6 14680 static rs6000_stack_t info;
4697a36c 14681 rs6000_stack_t *info_ptr = &info;
327e5343 14682 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14683 int ehrd_size;
64045029 14684 int save_align;
8070c91a 14685 int first_gp;
44688022 14686 HOST_WIDE_INT non_fixed_size;
4697a36c 14687
022123e6 14688 memset (&info, 0, sizeof (info));
4697a36c 14689
c19de7aa
AH
14690 if (TARGET_SPE)
14691 {
14692 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14693 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14694 cfun->machine->insn_chain_scanned_p
14695 = spe_func_has_64bit_regs_p () + 1;
14696 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14697 }
14698
a4f6c312 14699 /* Select which calling sequence. */
178274da 14700 info_ptr->abi = DEFAULT_ABI;
9878760c 14701
a4f6c312 14702 /* Calculate which registers need to be saved & save area size. */
4697a36c 14703 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14704 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14705 even if it currently looks like we won't. Reload may need it to
14706 get at a constant; if so, it will have already created a constant
14707 pool entry for it. */
2bfcf297 14708 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14709 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14710 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14711 && crtl->uses_const_pool
1db02437 14712 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14713 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14714 else
8070c91a
DJ
14715 first_gp = info_ptr->first_gp_reg_save;
14716
14717 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14718
a3170dc6
AH
14719 /* For the SPE, we have an additional upper 32-bits on each GPR.
14720 Ideally we should save the entire 64-bits only when the upper
14721 half is used in SIMD instructions. Since we only record
14722 registers live (not the size they are used in), this proves
14723 difficult because we'd have to traverse the instruction chain at
14724 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14725 so we opt to always save the GPRs in 64 bits if even one register
14726 gets used in 64 bits. Otherwise, all the registers in the frame
14727 get saved in 32 bits.
a3170dc6 14728
c19de7aa 14729 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14730 traditional GP save area will be empty. */
c19de7aa 14731 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14732 info_ptr->gp_size = 0;
14733
4697a36c
MM
14734 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14735 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14736
00b960c7
AH
14737 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14738 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14739 - info_ptr->first_altivec_reg_save);
14740
592696dd 14741 /* Does this function call anything? */
71f123ca
FS
14742 info_ptr->calls_p = (! current_function_is_leaf
14743 || cfun->machine->ra_needs_full_frame);
b6c9286a 14744
a4f6c312 14745 /* Determine if we need to save the link register. */
022123e6 14746 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14747 && crtl->profile
022123e6 14748 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14749#ifdef TARGET_RELOCATABLE
14750 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14751#endif
14752 || (info_ptr->first_fp_reg_save != 64
14753 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14754 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14755 || info_ptr->calls_p
14756 || rs6000_ra_ever_killed ())
4697a36c
MM
14757 {
14758 info_ptr->lr_save_p = 1;
1de43f85 14759 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14760 }
14761
9ebbca7d 14762 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14763 if (df_regs_ever_live_p (CR2_REGNO)
14764 || df_regs_ever_live_p (CR3_REGNO)
14765 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14766 {
14767 info_ptr->cr_save_p = 1;
178274da 14768 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14769 info_ptr->cr_size = reg_size;
14770 }
14771
83720594
RH
14772 /* If the current function calls __builtin_eh_return, then we need
14773 to allocate stack space for registers that will hold data for
14774 the exception handler. */
e3b5732b 14775 if (crtl->calls_eh_return)
83720594
RH
14776 {
14777 unsigned int i;
14778 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14779 continue;
a3170dc6
AH
14780
14781 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14782 ehrd_size = i * (TARGET_SPE_ABI
14783 && info_ptr->spe_64bit_regs_used != 0
14784 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14785 }
14786 else
14787 ehrd_size = 0;
14788
592696dd 14789 /* Determine various sizes. */
4697a36c
MM
14790 info_ptr->reg_size = reg_size;
14791 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14792 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14793 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14794 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14795 if (FRAME_GROWS_DOWNWARD)
14796 info_ptr->vars_size
5b667039
JJ
14797 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14798 + info_ptr->parm_size,
7d5175e1 14799 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14800 - (info_ptr->fixed_size + info_ptr->vars_size
14801 + info_ptr->parm_size);
00b960c7 14802
c19de7aa 14803 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14804 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14805 else
14806 info_ptr->spe_gp_size = 0;
14807
4d774ff8
HP
14808 if (TARGET_ALTIVEC_ABI)
14809 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14810 else
4d774ff8
HP
14811 info_ptr->vrsave_mask = 0;
14812
14813 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14814 info_ptr->vrsave_size = 4;
14815 else
14816 info_ptr->vrsave_size = 0;
b6c9286a 14817
d62294f5
FJ
14818 compute_save_world_info (info_ptr);
14819
592696dd 14820 /* Calculate the offsets. */
178274da 14821 switch (DEFAULT_ABI)
4697a36c 14822 {
b6c9286a 14823 case ABI_NONE:
24d304eb 14824 default:
37409796 14825 gcc_unreachable ();
b6c9286a
MM
14826
14827 case ABI_AIX:
ee890fe2 14828 case ABI_DARWIN:
b6c9286a
MM
14829 info_ptr->fp_save_offset = - info_ptr->fp_size;
14830 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14831
14832 if (TARGET_ALTIVEC_ABI)
14833 {
14834 info_ptr->vrsave_save_offset
14835 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14836
982afe02 14837 /* Align stack so vector save area is on a quadword boundary.
9278121c 14838 The padding goes above the vectors. */
00b960c7
AH
14839 if (info_ptr->altivec_size != 0)
14840 info_ptr->altivec_padding_size
9278121c 14841 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14842 else
14843 info_ptr->altivec_padding_size = 0;
14844
14845 info_ptr->altivec_save_offset
14846 = info_ptr->vrsave_save_offset
14847 - info_ptr->altivec_padding_size
14848 - info_ptr->altivec_size;
9278121c
GK
14849 gcc_assert (info_ptr->altivec_size == 0
14850 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14851
14852 /* Adjust for AltiVec case. */
14853 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14854 }
14855 else
14856 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14857 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14858 info_ptr->lr_save_offset = 2*reg_size;
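 /* For orientation (my reading of the AIX/Darwin linkage area rather than
 something stated in this file): relative to the frame base these are the
 fixed CR and LR save slots, i.e. CR at 4(r1)/8(r1) and LR at 8(r1)/16(r1)
 for the 32-/64-bit ABIs respectively. */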
24d304eb
RK
14859 break;
14860
14861 case ABI_V4:
b6c9286a
MM
14862 info_ptr->fp_save_offset = - info_ptr->fp_size;
14863 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14864 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14865
c19de7aa 14866 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14867 {
14868 /* Align stack so SPE GPR save area is aligned on a
14869 double-word boundary. */
f78c3290 14870 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14871 info_ptr->spe_padding_size
14872 = 8 - (-info_ptr->cr_save_offset % 8);
14873 else
14874 info_ptr->spe_padding_size = 0;
14875
14876 info_ptr->spe_gp_save_offset
14877 = info_ptr->cr_save_offset
14878 - info_ptr->spe_padding_size
14879 - info_ptr->spe_gp_size;
14880
14881 /* Adjust for SPE case. */
022123e6 14882 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14883 }
a3170dc6 14884 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14885 {
14886 info_ptr->vrsave_save_offset
14887 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14888
14889 /* Align stack so vector save area is on a quadword boundary. */
14890 if (info_ptr->altivec_size != 0)
14891 info_ptr->altivec_padding_size
14892 = 16 - (-info_ptr->vrsave_save_offset % 16);
14893 else
14894 info_ptr->altivec_padding_size = 0;
14895
14896 info_ptr->altivec_save_offset
14897 = info_ptr->vrsave_save_offset
14898 - info_ptr->altivec_padding_size
14899 - info_ptr->altivec_size;
14900
14901 /* Adjust for AltiVec case. */
022123e6 14902 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14903 }
14904 else
022123e6
AM
14905 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14906 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14907 info_ptr->lr_save_offset = reg_size;
14908 break;
4697a36c
MM
14909 }
14910
64045029 14911 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14912 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14913 + info_ptr->gp_size
14914 + info_ptr->altivec_size
14915 + info_ptr->altivec_padding_size
a3170dc6
AH
14916 + info_ptr->spe_gp_size
14917 + info_ptr->spe_padding_size
00b960c7
AH
14918 + ehrd_size
14919 + info_ptr->cr_size
022123e6 14920 + info_ptr->vrsave_size,
64045029 14921 save_align);
00b960c7 14922
44688022 14923 non_fixed_size = (info_ptr->vars_size
ff381587 14924 + info_ptr->parm_size
5b667039 14925 + info_ptr->save_size);
ff381587 14926
44688022
AM
14927 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14928 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14929
14930 /* Determine if we need to allocate any stack frame:
14931
a4f6c312
SS
14932 For AIX we need to push the stack if a frame pointer is needed
14933 (because the stack might be dynamically adjusted), if we are
14934 debugging, if we make calls, or if the sum of fp_save, gp_save,
14935 and local variables is more than the space needed to save all
14936 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14937 + 18*8 = 288 (GPR13 reserved).
ff381587 14938
a4f6c312
SS
14939 For V.4 we don't have the stack cushion that AIX uses, but assume
14940 that the debugger can handle stackless frames. */
ff381587
MM
14941
14942 if (info_ptr->calls_p)
14943 info_ptr->push_p = 1;
14944
178274da 14945 else if (DEFAULT_ABI == ABI_V4)
44688022 14946 info_ptr->push_p = non_fixed_size != 0;
ff381587 14947
178274da
AM
14948 else if (frame_pointer_needed)
14949 info_ptr->push_p = 1;
14950
14951 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14952 info_ptr->push_p = 1;
14953
ff381587 14954 else
44688022 14955 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14956
a4f6c312 14957 /* Zero offsets if we're not saving those registers. */
8dda1a21 14958 if (info_ptr->fp_size == 0)
4697a36c
MM
14959 info_ptr->fp_save_offset = 0;
14960
8dda1a21 14961 if (info_ptr->gp_size == 0)
4697a36c
MM
14962 info_ptr->gp_save_offset = 0;
14963
00b960c7
AH
14964 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14965 info_ptr->altivec_save_offset = 0;
14966
14967 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14968 info_ptr->vrsave_save_offset = 0;
14969
c19de7aa
AH
14970 if (! TARGET_SPE_ABI
14971 || info_ptr->spe_64bit_regs_used == 0
14972 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14973 info_ptr->spe_gp_save_offset = 0;
14974
c81fc13e 14975 if (! info_ptr->lr_save_p)
4697a36c
MM
14976 info_ptr->lr_save_offset = 0;
14977
c81fc13e 14978 if (! info_ptr->cr_save_p)
4697a36c
MM
14979 info_ptr->cr_save_offset = 0;
14980
14981 return info_ptr;
14982}
14983
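A minimal usage sketch (illustrative only; it mirrors how later functions in
this file, e.g. debug_stack_info and rs6000_emit_eh_reg_restore, consume the
computed layout, and the fprintf reporting is an assumption added for
demonstration rather than existing behaviour):

   /* Query the frame layout computed for the current function.  */
   rs6000_stack_t *info = rs6000_stack_info ();
   if (info->push_p)
     fprintf (stderr, "frame needs " HOST_WIDE_INT_PRINT_DEC " bytes\n",
              info->total_size);
   if (info->lr_save_p)
     fprintf (stderr, "LR save slot at offset %d\n", info->lr_save_offset);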
c19de7aa
AH
14984/* Return true if the current function uses any GPRs in 64-bit SIMD
14985 mode. */
14986
14987static bool
863d938c 14988spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14989{
14990 rtx insns, insn;
14991
14992 /* Functions that save and restore all the call-saved registers will
14993 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14994 if (crtl->calls_eh_return
14995 || cfun->calls_setjmp
14996 || crtl->has_nonlocal_goto)
c19de7aa
AH
14997 return true;
14998
14999 insns = get_insns ();
15000
15001 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
15002 {
15003 if (INSN_P (insn))
15004 {
15005 rtx i;
15006
b5a5beb9
AH
15007 /* FIXME: This should be implemented with attributes...
15008
15009 (set_attr "spe64" "true")....then,
15010 if (get_spe64(insn)) return true;
15011
15012 It's the only reliable way to do the stuff below. */
15013
c19de7aa 15014 i = PATTERN (insn);
f82f556d
AH
15015 if (GET_CODE (i) == SET)
15016 {
15017 enum machine_mode mode = GET_MODE (SET_SRC (i));
15018
15019 if (SPE_VECTOR_MODE (mode))
15020 return true;
4f011e1e 15021 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15022 return true;
15023 }
c19de7aa
AH
15024 }
15025 }
15026
15027 return false;
15028}
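/* For orientation (a gloss, not part of the original comment): typical SETs
   that make the walk above return true are SPE vector operations producing
   V2SI/V2SF values (e.g. from the SPE intrinsics), or, when TARGET_E500_DOUBLE
   is in effect (-mfloat-gprs=double), ordinary DFmode/TFmode arithmetic, since
   e500v2 keeps such values in the full 64-bit GPRs.  */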
15029
d1d0c603 15030static void
a2369ed3 15031debug_stack_info (rs6000_stack_t *info)
9878760c 15032{
d330fd93 15033 const char *abi_string;
24d304eb 15034
c81fc13e 15035 if (! info)
4697a36c
MM
15036 info = rs6000_stack_info ();
15037
15038 fprintf (stderr, "\nStack information for function %s:\n",
15039 ((current_function_decl && DECL_NAME (current_function_decl))
15040 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15041 : "<unknown>"));
15042
24d304eb
RK
15043 switch (info->abi)
15044 {
b6c9286a
MM
15045 default: abi_string = "Unknown"; break;
15046 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15047 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15048 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15049 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15050 }
15051
15052 fprintf (stderr, "\tABI = %5s\n", abi_string);
15053
00b960c7
AH
15054 if (TARGET_ALTIVEC_ABI)
15055 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15056
a3170dc6
AH
15057 if (TARGET_SPE_ABI)
15058 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15059
4697a36c
MM
15060 if (info->first_gp_reg_save != 32)
15061 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15062
15063 if (info->first_fp_reg_save != 64)
15064 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15065
00b960c7
AH
15066 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15067 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15068 info->first_altivec_reg_save);
15069
4697a36c
MM
15070 if (info->lr_save_p)
15071 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15072
4697a36c
MM
15073 if (info->cr_save_p)
15074 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15075
00b960c7
AH
15076 if (info->vrsave_mask)
15077 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15078
4697a36c
MM
15079 if (info->push_p)
15080 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15081
15082 if (info->calls_p)
15083 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15084
4697a36c
MM
15085 if (info->gp_save_offset)
15086 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15087
15088 if (info->fp_save_offset)
15089 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15090
00b960c7
AH
15091 if (info->altivec_save_offset)
15092 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15093 info->altivec_save_offset);
15094
a3170dc6
AH
15095 if (info->spe_gp_save_offset)
15096 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15097 info->spe_gp_save_offset);
15098
00b960c7
AH
15099 if (info->vrsave_save_offset)
15100 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15101 info->vrsave_save_offset);
15102
4697a36c
MM
15103 if (info->lr_save_offset)
15104 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15105
15106 if (info->cr_save_offset)
15107 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15108
15109 if (info->varargs_save_offset)
15110 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15111
15112 if (info->total_size)
d1d0c603
JJ
15113 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15114 info->total_size);
4697a36c 15115
4697a36c 15116 if (info->vars_size)
d1d0c603
JJ
15117 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15118 info->vars_size);
4697a36c
MM
15119
15120 if (info->parm_size)
15121 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15122
15123 if (info->fixed_size)
15124 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15125
15126 if (info->gp_size)
15127 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15128
a3170dc6
AH
15129 if (info->spe_gp_size)
15130 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15131
4697a36c
MM
15132 if (info->fp_size)
15133 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15134
00b960c7
AH
15135 if (info->altivec_size)
15136 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15137
15138 if (info->vrsave_size)
15139 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15140
15141 if (info->altivec_padding_size)
15142 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15143 info->altivec_padding_size);
15144
a3170dc6
AH
15145 if (info->spe_padding_size)
15146 fprintf (stderr, "\tspe_padding_size = %5d\n",
15147 info->spe_padding_size);
15148
4697a36c
MM
15149 if (info->cr_size)
15150 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15151
15152 if (info->save_size)
15153 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15154
15155 if (info->reg_size != 4)
15156 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15157
15158 fprintf (stderr, "\n");
9878760c 15159}
71f123ca
FS
15160
15161rtx
a2369ed3 15162rs6000_return_addr (int count, rtx frame)
71f123ca 15163{
a4f6c312
SS
15164 /* Currently we don't optimize very well between prolog and body
15165 code and for PIC code the code can be actually quite bad, so
15166 don't try to be too clever here. */
f1384257 15167 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15168 {
15169 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15170
15171 return
15172 gen_rtx_MEM
15173 (Pmode,
15174 memory_address
15175 (Pmode,
15176 plus_constant (copy_to_reg
15177 (gen_rtx_MEM (Pmode,
15178 memory_address (Pmode, frame))),
15179 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15180 }
15181
8c29550d 15182 cfun->machine->ra_need_lr = 1;
1de43f85 15183 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15184}
15185
5e1bf043
DJ
15186/* Say whether a function is a candidate for sibcall handling or not.
15187 We do not allow indirect calls to be optimized into sibling calls.
15188 Also, we can't do it if there are any vector parameters; there's
15189 nowhere to put the VRsave code so it works; note that functions with
15190 vector parameters are required to have a prototype, so the argument
15191 type info must be available here. (The tail recursion case can work
15192 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15193static bool
a2369ed3 15194rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15195{
15196 tree type;
4977bab6 15197 if (decl)
5e1bf043
DJ
15198 {
15199 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15200 {
4977bab6 15201 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15202 type; type = TREE_CHAIN (type))
15203 {
c15b529f 15204 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15205 return false;
5e1bf043 15206 }
c4ad648e 15207 }
5e1bf043 15208 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15209 || ((*targetm.binds_local_p) (decl)
15210 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15211 {
4977bab6 15212 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15213
15214 if (!lookup_attribute ("longcall", attr_list)
15215 || lookup_attribute ("shortcall", attr_list))
4977bab6 15216 return true;
2bcc50d0 15217 }
5e1bf043 15218 }
4977bab6 15219 return false;
5e1bf043
DJ
15220}
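Two source-level cases the predicate above rejects, sketched as a hedged
example (the declarations are invented for illustration; the attribute and
vector spellings follow the usual GCC/AltiVec extensions):

   /* With TARGET_ALTIVEC_VRSAVE, a callee taking vector arguments cannot be
      reached through a sibcall: there is nowhere to put the VRsave code.  */
   extern __vector int vec_callee (__vector int);

   /* A callee marked longcall (and not shortcall) is likewise not a valid
      sibcall target.  */
   extern void far_callee (void) __attribute__ ((longcall));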
15221
e7e64a25
AS
15222/* Return NULL if INSN is valid within a low-overhead loop.
15223 Otherwise return a string describing why doloop cannot be applied.
9419649c
DE
15224 PowerPC uses the COUNT register for branch on table instructions. */
15225
e7e64a25 15226static const char *
3101faab 15227rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15228{
15229 if (CALL_P (insn))
e7e64a25 15230 return "Function call in the loop.";
9419649c
DE
15231
15232 if (JUMP_P (insn)
15233 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15234 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15235 return "Computed branch in the loop.";
9419649c 15236
e7e64a25 15237 return NULL;
9419649c
DE
15238}
15239
71f123ca 15240static int
863d938c 15241rs6000_ra_ever_killed (void)
71f123ca
FS
15242{
15243 rtx top;
5e1bf043
DJ
15244 rtx reg;
15245 rtx insn;
71f123ca 15246
3c072c6b 15247 if (cfun->is_thunk)
71f123ca 15248 return 0;
eb0424da 15249
36f7e964
AH
15250 /* regs_ever_live has LR marked as used if any sibcalls are present,
15251 but this should not force saving and restoring in the
15252 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15253 clobbers LR, so that is inappropriate. */
36f7e964 15254
5e1bf043
DJ
15255 /* Also, the prologue can generate a store into LR that
15256 doesn't really count, like this:
36f7e964 15257
5e1bf043
DJ
15258 move LR->R0
15259 bcl to set PIC register
15260 move LR->R31
15261 move R0->LR
36f7e964
AH
15262
15263 When we're called from the epilogue, we need to avoid counting
15264 this as a store. */
f676971a 15265
71f123ca
FS
15266 push_topmost_sequence ();
15267 top = get_insns ();
15268 pop_topmost_sequence ();
1de43f85 15269 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15270
5e1bf043
DJ
15271 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15272 {
15273 if (INSN_P (insn))
15274 {
022123e6
AM
15275 if (CALL_P (insn))
15276 {
15277 if (!SIBLING_CALL_P (insn))
15278 return 1;
15279 }
1de43f85 15280 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15281 return 1;
36f7e964
AH
15282 else if (set_of (reg, insn) != NULL_RTX
15283 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15284 return 1;
15285 }
15286 }
15287 return 0;
71f123ca 15288}
4697a36c 15289\f
9ebbca7d 15290/* Emit instructions needed to load the TOC register.
c7ca610e 15291 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15292 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15293
15294void
a2369ed3 15295rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15296{
6fb5fa3c 15297 rtx dest;
1db02437 15298 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15299
7f970b70 15300 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15301 {
7f970b70 15302 char buf[30];
e65a3857 15303 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15304
15305 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15306 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15307 if (flag_pic == 2)
15308 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15309 else
15310 got = rs6000_got_sym ();
15311 tmp1 = tmp2 = dest;
15312 if (!fromprolog)
15313 {
15314 tmp1 = gen_reg_rtx (Pmode);
15315 tmp2 = gen_reg_rtx (Pmode);
15316 }
6fb5fa3c
DB
15317 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15318 emit_move_insn (tmp1,
1de43f85 15319 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15320 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15321 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15322 }
15323 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15324 {
6fb5fa3c 15325 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15326 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15327 }
15328 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15329 {
15330 char buf[30];
20b71b17
AM
15331 rtx temp0 = (fromprolog
15332 ? gen_rtx_REG (Pmode, 0)
15333 : gen_reg_rtx (Pmode));
20b71b17 15334
20b71b17
AM
15335 if (fromprolog)
15336 {
ccbca5e4 15337 rtx symF, symL;
38c1f2d7 15338
20b71b17
AM
15339 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15340 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15341
20b71b17
AM
15342 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15343 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15344
6fb5fa3c
DB
15345 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15346 emit_move_insn (dest,
1de43f85 15347 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15348 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15349 }
15350 else
20b71b17
AM
15351 {
15352 rtx tocsym;
20b71b17
AM
15353
15354 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15355 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15356 emit_move_insn (dest,
1de43f85 15357 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15358 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15359 }
6fb5fa3c 15360 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15361 }
20b71b17
AM
15362 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15363 {
15364 /* This is for AIX code running in non-PIC ELF32. */
15365 char buf[30];
15366 rtx realsym;
15367 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15368 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15369
6fb5fa3c
DB
15370 emit_insn (gen_elf_high (dest, realsym));
15371 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15372 }
37409796 15373 else
9ebbca7d 15374 {
37409796 15375 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15376
9ebbca7d 15377 if (TARGET_32BIT)
6fb5fa3c 15378 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15379 else
6fb5fa3c 15380 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15381 }
15382}
15383
d1d0c603
JJ
15384/* Emit instructions to restore the link register after determining where
15385 its value has been stored. */
15386
15387void
15388rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15389{
15390 rs6000_stack_t *info = rs6000_stack_info ();
15391 rtx operands[2];
15392
15393 operands[0] = source;
15394 operands[1] = scratch;
15395
15396 if (info->lr_save_p)
15397 {
15398 rtx frame_rtx = stack_pointer_rtx;
15399 HOST_WIDE_INT sp_offset = 0;
15400 rtx tmp;
15401
15402 if (frame_pointer_needed
e3b5732b 15403 || cfun->calls_alloca
d1d0c603
JJ
15404 || info->total_size > 32767)
15405 {
0be76840 15406 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15407 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15408 frame_rtx = operands[1];
15409 }
15410 else if (info->push_p)
15411 sp_offset = info->total_size;
15412
15413 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15414 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15415 emit_move_insn (tmp, operands[0]);
15416 }
15417 else
1de43f85 15418 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15419}
15420
4862826d 15421static GTY(()) alias_set_type set = -1;
f103e34d 15422
4862826d 15423alias_set_type
863d938c 15424get_TOC_alias_set (void)
9ebbca7d 15425{
f103e34d
GK
15426 if (set == -1)
15427 set = new_alias_set ();
15428 return set;
f676971a 15429}
9ebbca7d 15430
c1207243 15431/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15432 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15433 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15434#if TARGET_ELF
3c9eb5f4 15435static int
f676971a 15436uses_TOC (void)
9ebbca7d 15437{
c4501e62 15438 rtx insn;
38c1f2d7 15439
c4501e62
JJ
15440 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15441 if (INSN_P (insn))
15442 {
15443 rtx pat = PATTERN (insn);
15444 int i;
9ebbca7d 15445
f676971a 15446 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15447 for (i = 0; i < XVECLEN (pat, 0); i++)
15448 {
15449 rtx sub = XVECEXP (pat, 0, i);
15450 if (GET_CODE (sub) == USE)
15451 {
15452 sub = XEXP (sub, 0);
15453 if (GET_CODE (sub) == UNSPEC
15454 && XINT (sub, 1) == UNSPEC_TOC)
15455 return 1;
15456 }
15457 }
15458 }
15459 return 0;
9ebbca7d 15460}
c954844a 15461#endif
38c1f2d7 15462
9ebbca7d 15463rtx
f676971a 15464create_TOC_reference (rtx symbol)
9ebbca7d 15465{
b3a13419 15466 if (!can_create_pseudo_p ())
6fb5fa3c 15467 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15468 return gen_rtx_PLUS (Pmode,
a8a05998 15469 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15470 gen_rtx_CONST (Pmode,
2e4316da 15471 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15472}
38c1f2d7 15473
fc4767bb
JJ
15474/* If _Unwind_* has been called from within the same module,
15475 toc register is not guaranteed to be saved to 40(1) on function
15476 entry. Save it there in that case. */
c7ca610e 15477
9ebbca7d 15478void
863d938c 15479rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15480{
15481 rtx mem;
15482 rtx stack_top = gen_reg_rtx (Pmode);
15483 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15484 rtx opcode = gen_reg_rtx (SImode);
15485 rtx tocompare = gen_reg_rtx (SImode);
15486 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15487
8308679f 15488 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15489 emit_move_insn (stack_top, mem);
15490
8308679f
DE
15491 mem = gen_frame_mem (Pmode,
15492 gen_rtx_PLUS (Pmode, stack_top,
15493 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15494 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15495 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15496 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15497 : 0xE8410028, SImode));
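  /* Decoding the two constants (an annotation; the mnemonics are my reading
     of the encodings): 0x80410014 is "lwz r2,20(r1)" and 0xE8410028 is
     "ld r2,40(r1)", i.e. the TOC restore expected after a call under the
     32- and 64-bit AIX ABIs respectively.  */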
9ebbca7d 15498
fc4767bb 15499 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15500 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15501 no_toc_save_needed);
9ebbca7d 15502
8308679f
DE
15503 mem = gen_frame_mem (Pmode,
15504 gen_rtx_PLUS (Pmode, stack_top,
15505 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15506 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15507 emit_label (no_toc_save_needed);
9ebbca7d 15508}
38c1f2d7 15509\f
0be76840
DE
15510/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15511 and the change to the stack pointer. */
ba4828e0 15512
9ebbca7d 15513static void
863d938c 15514rs6000_emit_stack_tie (void)
9ebbca7d 15515{
0be76840
DE
15516 rtx mem = gen_frame_mem (BLKmode,
15517 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15518
9ebbca7d
GK
15519 emit_insn (gen_stack_tie (mem));
15520}
38c1f2d7 15521
9ebbca7d
GK
15522/* Emit the correct code for allocating stack space, as insns.
15523 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15524 If COPY_R11, make sure a copy of the old frame is left in r11,
15525 in preference to r12 if COPY_R12.
9ebbca7d
GK
15526 The generated code may use hard register 0 as a temporary. */
15527
15528static void
f78c3290 15529rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15530{
9ebbca7d
GK
15531 rtx insn;
15532 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15533 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1 15534 rtx todec = gen_int_mode (-size, Pmode);
d4bacef0 15535 rtx par, set, mem;
61168ff1
RS
15536
15537 if (INTVAL (todec) != -size)
15538 {
d4ee4d25 15539 warning (0, "stack frame too large");
61168ff1
RS
15540 emit_insn (gen_trap ());
15541 return;
15542 }
a157febd 15543
e3b5732b 15544 if (crtl->limit_stack)
a157febd
GK
15545 {
15546 if (REG_P (stack_limit_rtx)
f676971a 15547 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15548 && REGNO (stack_limit_rtx) <= 31)
15549 {
5b71a4e7 15550 emit_insn (TARGET_32BIT
9ebbca7d
GK
15551 ? gen_addsi3 (tmp_reg,
15552 stack_limit_rtx,
15553 GEN_INT (size))
15554 : gen_adddi3 (tmp_reg,
15555 stack_limit_rtx,
15556 GEN_INT (size)));
5b71a4e7 15557
9ebbca7d
GK
15558 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15559 const0_rtx));
a157febd
GK
15560 }
15561 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15562 && TARGET_32BIT
f607bc57 15563 && DEFAULT_ABI == ABI_V4)
a157febd 15564 {
9ebbca7d 15565 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15566 gen_rtx_PLUS (Pmode,
15567 stack_limit_rtx,
9ebbca7d 15568 GEN_INT (size)));
5b71a4e7 15569
9ebbca7d
GK
15570 emit_insn (gen_elf_high (tmp_reg, toload));
15571 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15572 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15573 const0_rtx));
a157febd
GK
15574 }
15575 else
d4ee4d25 15576 warning (0, "stack limit expression is not supported");
a157febd
GK
15577 }
15578
d4bacef0 15579 if (copy_r12 || copy_r11)
f78c3290
NF
15580 emit_move_insn (copy_r11
15581 ? gen_rtx_REG (Pmode, 11)
15582 : gen_rtx_REG (Pmode, 12),
15583 stack_reg);
9ebbca7d 15584
d4bacef0 15585 if (size > 32767)
38c1f2d7 15586 {
d4bacef0
BE
15587 /* Need a note here so that try_split doesn't get confused. */
15588 if (get_last_insn () == NULL_RTX)
15589 emit_note (NOTE_INSN_DELETED);
15590 insn = emit_move_insn (tmp_reg, todec);
15591 try_split (PATTERN (insn), insn, 0);
15592 todec = tmp_reg;
9ebbca7d 15593 }
d4bacef0
BE
15594
15595 insn = emit_insn (TARGET_32BIT
15596 ? gen_movsi_update_stack (stack_reg, stack_reg,
15597 todec, stack_reg)
15598 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15599 todec, stack_reg));
15600 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15601 it now and set the alias set/attributes. The above gen_*_update
15602 calls will generate a PARALLEL with the MEM set being the first
15603 operation. */
15604 par = PATTERN (insn);
15605 gcc_assert (GET_CODE (par) == PARALLEL);
15606 set = XVECEXP (par, 0, 0);
15607 gcc_assert (GET_CODE (set) == SET);
15608 mem = SET_DEST (set);
15609 gcc_assert (MEM_P (mem));
15610 MEM_NOTRAP_P (mem) = 1;
15611 set_mem_alias_set (mem, get_frame_alias_set ());
f676971a 15612
9ebbca7d 15613 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a
ILT
15614 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
15615 gen_rtx_SET (VOIDmode, stack_reg,
15616 gen_rtx_PLUS (Pmode, stack_reg,
15617 GEN_INT (-size))));
9ebbca7d
GK
15618}
15619
a4f6c312
SS
15620/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15621 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15622 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15623 deduce these equivalences by itself so it wasn't necessary to hold
15624 its hand so much. */
9ebbca7d
GK
15625
15626static void
f676971a 15627rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15628 rtx reg2, rtx rreg)
9ebbca7d
GK
15629{
15630 rtx real, temp;
15631
e56c4463
JL
15632 /* copy_rtx will not make unique copies of registers, so we need to
15633 ensure we don't have unwanted sharing here. */
15634 if (reg == reg2)
15635 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15636
15637 if (reg == rreg)
15638 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15639
9ebbca7d
GK
15640 real = copy_rtx (PATTERN (insn));
15641
89e7058f
AH
15642 if (reg2 != NULL_RTX)
15643 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15644
15645 real = replace_rtx (real, reg,
9ebbca7d
GK
15646 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15647 STACK_POINTER_REGNUM),
15648 GEN_INT (val)));
f676971a 15649
9ebbca7d
GK
15650 /* We expect that 'real' is either a SET or a PARALLEL containing
15651 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15652 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15653
15654 if (GET_CODE (real) == SET)
15655 {
15656 rtx set = real;
f676971a 15657
9ebbca7d
GK
15658 temp = simplify_rtx (SET_SRC (set));
15659 if (temp)
15660 SET_SRC (set) = temp;
15661 temp = simplify_rtx (SET_DEST (set));
15662 if (temp)
15663 SET_DEST (set) = temp;
15664 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15665 {
9ebbca7d
GK
15666 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15667 if (temp)
15668 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15669 }
38c1f2d7 15670 }
37409796 15671 else
9ebbca7d
GK
15672 {
15673 int i;
37409796
NS
15674
15675 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15676 for (i = 0; i < XVECLEN (real, 0); i++)
15677 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15678 {
15679 rtx set = XVECEXP (real, 0, i);
f676971a 15680
9ebbca7d
GK
15681 temp = simplify_rtx (SET_SRC (set));
15682 if (temp)
15683 SET_SRC (set) = temp;
15684 temp = simplify_rtx (SET_DEST (set));
15685 if (temp)
15686 SET_DEST (set) = temp;
15687 if (GET_CODE (SET_DEST (set)) == MEM)
15688 {
15689 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15690 if (temp)
15691 XEXP (SET_DEST (set), 0) = temp;
15692 }
15693 RTX_FRAME_RELATED_P (set) = 1;
15694 }
15695 }
c19de7aa 15696
9ebbca7d 15697 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a 15698 add_reg_note (insn, REG_FRAME_RELATED_EXPR, real);
38c1f2d7
MM
15699}
15700
00b960c7
AH
15701/* Returns an insn that has a vrsave set operation with the
15702 appropriate CLOBBERs. */
15703
15704static rtx
a2369ed3 15705generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15706{
15707 int nclobs, i;
15708 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15709 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15710
a004eb82
AH
15711 clobs[0]
15712 = gen_rtx_SET (VOIDmode,
15713 vrsave,
15714 gen_rtx_UNSPEC_VOLATILE (SImode,
15715 gen_rtvec (2, reg, vrsave),
3aca4bff 15716 UNSPECV_SET_VRSAVE));
00b960c7
AH
15717
15718 nclobs = 1;
15719
9aa86737
AH
15720 /* We need to clobber the registers in the mask so the scheduler
15721 does not move sets to VRSAVE before sets of AltiVec registers.
15722
15723 However, if the function receives nonlocal gotos, reload will set
15724 all call saved registers live. We will end up with:
15725
15726 (set (reg 999) (mem))
15727 (parallel [ (set (reg vrsave) (unspec blah))
15728 (clobber (reg 999))])
15729
15730 The clobber will cause the store into reg 999 to be dead, and
15731 flow will attempt to delete an epilogue insn. In this case, we
15732 need an unspec use/set of the register. */
00b960c7
AH
15733
15734 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15735 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15736 {
15737 if (!epiloguep || call_used_regs [i])
15738 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15739 gen_rtx_REG (V4SImode, i));
15740 else
15741 {
15742 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15743
15744 clobs[nclobs++]
a004eb82
AH
15745 = gen_rtx_SET (VOIDmode,
15746 reg,
15747 gen_rtx_UNSPEC (V4SImode,
15748 gen_rtvec (1, reg), 27));
9aa86737
AH
15749 }
15750 }
00b960c7
AH
15751
15752 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15753
15754 for (i = 0; i < nclobs; ++i)
15755 XVECEXP (insn, 0, i) = clobs[i];
15756
15757 return insn;
15758}
15759
89e7058f
AH
15760/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15761 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15762
15763static void
f676971a 15764emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15765 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15766{
15767 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15768 rtx replacea, replaceb;
15769
15770 int_rtx = GEN_INT (offset);
15771
15772 /* Some cases that need register indexed addressing. */
15773 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15774 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15775 || (TARGET_SPE_ABI
15776 && SPE_VECTOR_MODE (mode)
15777 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15778 {
15779 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15780 flow path of instructions in the prologue. */
89e7058f
AH
15781 offset_rtx = gen_rtx_REG (Pmode, 11);
15782 emit_move_insn (offset_rtx, int_rtx);
15783
15784 replacea = offset_rtx;
15785 replaceb = int_rtx;
15786 }
15787 else
15788 {
15789 offset_rtx = int_rtx;
15790 replacea = NULL_RTX;
15791 replaceb = NULL_RTX;
15792 }
15793
15794 reg = gen_rtx_REG (mode, regno);
15795 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15796 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15797
15798 insn = emit_move_insn (mem, reg);
15799
15800 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15801}
15802
a3170dc6
AH
15803/* Emit an offset memory reference suitable for a frame store, while
15804 converting to a valid addressing mode. */
15805
15806static rtx
a2369ed3 15807gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15808{
15809 rtx int_rtx, offset_rtx;
15810
15811 int_rtx = GEN_INT (offset);
15812
4d4cbc0e 15813 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15814 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15815 {
15816 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15817 emit_move_insn (offset_rtx, int_rtx);
15818 }
15819 else
15820 offset_rtx = int_rtx;
15821
0be76840 15822 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15823}
15824
6d0a8091
DJ
15825/* Look for user-defined global regs. We should not save and restore these,
15826 and cannot use stmw/lmw if any of them fall within the range being saved. */
15827
15828static bool
f78c3290 15829no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15830{
15831 int i;
e1ece9f1 15832 for (i = first; i < (gpr ? 32 : 64); i++)
f78c3290 15833 if (global_regs[i])
6d0a8091
DJ
15834 return false;
15835 return true;
15836}
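For reference, a user-defined global register comes from GCC's explicit
register variable extension; a minimal example of what sets global_regs[]
here (the register choice and variable name are arbitrary):

   /* Reserve r30 for the program; global_regs[30] becomes nonzero, so the
      save/restore code must neither touch r30 nor emit an stmw/lmw whose
      range covers it.  */
   register void *reserved_ptr asm ("r30");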
15837
699c914a
MS
15838#ifndef TARGET_FIX_AND_CONTINUE
15839#define TARGET_FIX_AND_CONTINUE 0
15840#endif
15841
f78c3290
NF
15842/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15843#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15844#define LAST_SAVRES_REGISTER 31
15845#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15846
15847static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15848
15849/* Return the symbol for an out-of-line register save/restore routine.
15850 We are saving/restoring GPRs if GPR is true. */
15851
15852static rtx
15853rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15854{
15855 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15856 rtx sym;
15857 int select = ((savep ? 1 : 0) << 2
15858 | (gpr
15859 /* On the SPE, we never have any FPRs, but we do have
15860 32/64-bit versions of the routines. */
15861 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15862 : 0) << 1
15863 | (exitp ? 1: 0));
15864
15865 /* Don't generate bogus routine names. */
15866 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15867
15868 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15869
15870 if (sym == NULL)
15871 {
15872 char name[30];
15873 const char *action;
15874 const char *regkind;
15875 const char *exit_suffix;
15876
15877 action = savep ? "save" : "rest";
15878
15879 /* SPE has slightly different names for its routines depending on
15880 whether we are saving 32-bit or 64-bit registers. */
15881 if (TARGET_SPE_ABI)
15882 {
15883 /* No floating point saves on the SPE. */
15884 gcc_assert (gpr);
15885
15886 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15887 }
15888 else
15889 regkind = gpr ? "gpr" : "fpr";
15890
15891 exit_suffix = exitp ? "_x" : "";
15892
15893 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15894
15895 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15896 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15897 }
15898
15899 return sym;
15900}
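For concreteness, the sprintf format above yields routine names such as the
following (derived from the code itself; whether a given target library
actually provides every such routine is not asserted here):

   _savegpr_29       prologue: save GPRs r29..r31
   _restgpr_29_x     epilogue: restore GPRs r29..r31 and return (exit variant)
   _savefpr_14       prologue: save FPRs f14..f31
   _rest64gpr_20_x   SPE: restore 64-bit GPRs r20..r31 and return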
15901
15902/* Emit a sequence of insns, including a stack tie if needed, for
15903 resetting the stack pointer. If SAVRES is true, then don't reset the
15904 stack pointer, but move the base of the frame into r11 for use by
15905 out-of-line register restore routines. */
15906
ff35822b 15907static rtx
f78c3290
NF
15908rs6000_emit_stack_reset (rs6000_stack_t *info,
15909 rtx sp_reg_rtx, rtx frame_reg_rtx,
15910 int sp_offset, bool savres)
15911{
15912 /* This blockage is needed so that sched doesn't decide to move
15913 the sp change before the register restores. */
15914 if (frame_reg_rtx != sp_reg_rtx
15915 || (TARGET_SPE_ABI
15916 && info->spe_64bit_regs_used != 0
15917 && info->first_gp_reg_save != 32))
15918 rs6000_emit_stack_tie ();
15919
15920 if (frame_reg_rtx != sp_reg_rtx)
15921 {
f78c3290 15922 if (sp_offset != 0)
ff35822b
JJ
15923 return emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15924 GEN_INT (sp_offset)));
f78c3290 15925 else if (!savres)
ff35822b 15926 return emit_move_insn (sp_reg_rtx, frame_reg_rtx);
f78c3290
NF
15927 }
15928 else if (sp_offset != 0)
15929 {
15930 /* If we are restoring registers out-of-line, we will be using the
15931 "exit" variants of the restore routines, which will reset the
15932 stack for us. But we do need to point r11 into the right place
15933 for those routines. */
15934 rtx dest_reg = (savres
15935 ? gen_rtx_REG (Pmode, 11)
15936 : sp_reg_rtx);
15937
ff35822b
JJ
15938 rtx insn = emit_insn (gen_add3_insn (dest_reg, sp_reg_rtx,
15939 GEN_INT (sp_offset)));
15940 if (!savres)
15941 return insn;
f78c3290 15942 }
ff35822b 15943 return NULL_RTX;
f78c3290
NF
15944}
15945
15946/* Construct a parallel rtx describing the effect of a call to an
15947 out-of-line register save/restore routine. */
15948
15949static rtx
15950rs6000_make_savres_rtx (rs6000_stack_t *info,
15951 rtx frame_reg_rtx, int save_area_offset,
15952 enum machine_mode reg_mode,
15953 bool savep, bool gpr, bool exitp)
15954{
15955 int i;
15956 int offset, start_reg, end_reg, n_regs;
15957 int reg_size = GET_MODE_SIZE (reg_mode);
15958 rtx sym;
15959 rtvec p;
15960
15961 offset = 0;
15962 start_reg = (gpr
15963 ? info->first_gp_reg_save
15964 : info->first_fp_reg_save);
15965 end_reg = gpr ? 32 : 64;
15966 n_regs = end_reg - start_reg;
15967 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15968
15969 /* If we're saving registers, then we should never say we're exiting. */
15970 gcc_assert ((savep && !exitp) || !savep);
15971
15972 if (exitp)
15973 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15974
15975 RTVEC_ELT (p, offset++)
15976 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15977
15978 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15979 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15980 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15981
15982 for (i = 0; i < end_reg - start_reg; i++)
15983 {
15984 rtx addr, reg, mem;
15985 reg = gen_rtx_REG (reg_mode, start_reg + i);
15986 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15987 GEN_INT (save_area_offset + reg_size*i));
15988 mem = gen_frame_mem (reg_mode, addr);
15989
15990 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15991 savep ? mem : reg,
15992 savep ? reg : mem);
15993 }
15994
15995 return gen_rtx_PARALLEL (VOIDmode, p);
15996}
15997
52ff33d0
NF
15998/* Determine whether the gp REG is really used. */
15999
16000static bool
16001rs6000_reg_live_or_pic_offset_p (int reg)
16002{
6fb5fa3c 16003 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
16004 && (!call_used_regs[reg]
16005 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16006 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16007 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16008 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16009 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16010}
16011
f78c3290
NF
16012enum {
16013 SAVRES_MULTIPLE = 0x1,
16014 SAVRES_INLINE_FPRS = 0x2,
16015 SAVRES_INLINE_GPRS = 0x4
16016};
16017
16018/* Determine the strategy for saving/restoring registers. */
16019
16020static int
16021rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16022 int using_static_chain_p, int sibcall)
16023{
16024 bool using_multiple_p;
16025 bool common;
16026 bool savres_fprs_inline;
16027 bool savres_gprs_inline;
16028 bool noclobber_global_gprs
16029 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16030
16031 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16032 && (!TARGET_SPE_ABI
16033 || info->spe_64bit_regs_used == 0)
16034 && info->first_gp_reg_save < 31
16035 && noclobber_global_gprs);
16036 /* Don't bother to try to save things out-of-line if r11 is occupied
16037 by the static chain. It would require too much fiddling and the
16038 static chain is rarely used anyway. */
16039 common = (using_static_chain_p
16040 || sibcall
16041 || crtl->calls_eh_return
16042 || !info->lr_save_p
16043 || cfun->machine->ra_need_lr
16044 || info->total_size > 32767);
16045 savres_fprs_inline = (common
16046 || info->first_fp_reg_save == 64
16047 || !no_global_regs_above (info->first_fp_reg_save,
16048 /*gpr=*/false)
16049 || FP_SAVE_INLINE (info->first_fp_reg_save));
16050 savres_gprs_inline = (common
16051 /* Saving CR interferes with the exit routines
16052 used on the SPE, so just punt here. */
16053 || (!savep
16054 && TARGET_SPE_ABI
16055 && info->spe_64bit_regs_used != 0
16056 && info->cr_save_p != 0)
16057 || info->first_gp_reg_save == 32
16058 || !noclobber_global_gprs
16059 || GP_SAVE_INLINE (info->first_gp_reg_save));
16060
16061 if (savep)
16062 /* If we are going to use store multiple, then don't even bother
16063 with the out-of-line routines, since the store-multiple instruction
16064 will always be smaller. */
16065 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16066 else
16067 {
16068 /* The situation is more complicated with load multiple. We'd
16069 prefer to use the out-of-line routines for restores, since the
16070 "exit" out-of-line routines can handle the restore of LR and
16071 the frame teardown. But we can only use the out-of-line
16072 routines if we know that we've used store multiple or
16073 out-of-line routines in the prologue, i.e. if we've saved all
16074 the registers from first_gp_reg_save. Otherwise, we risk
16075 loading garbage from the stack. Furthermore, we can only use
16076 the "exit" out-of-line gpr restore if we haven't saved any
16077 fprs. */
16078 bool saved_all = !savres_gprs_inline || using_multiple_p;
16079
16080 if (saved_all && info->first_fp_reg_save != 64)
16081 /* We can't use the exit routine; use load multiple if it's
16082 available. */
16083 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16084 }
16085
16086 return (using_multiple_p
16087 | (savres_fprs_inline << 1)
16088 | (savres_gprs_inline << 2));
16089}
16090
9ebbca7d
GK
16091/* Emit function prologue as insns. */
16092
9878760c 16093void
863d938c 16094rs6000_emit_prologue (void)
9878760c 16095{
4697a36c 16096 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16097 enum machine_mode reg_mode = Pmode;
327e5343 16098 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16099 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16100 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16101 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16102 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16103 rtx insn;
f78c3290 16104 int strategy;
9ebbca7d 16105 int saving_FPRs_inline;
f78c3290 16106 int saving_GPRs_inline;
9ebbca7d 16107 int using_store_multiple;
f78c3290
NF
16108 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16109 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
e1ece9f1 16110 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16111 HOST_WIDE_INT sp_offset = 0;
f676971a 16112
699c914a
MS
16113 if (TARGET_FIX_AND_CONTINUE)
16114 {
16115 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16116 address by modifying the first 5 instructions of the function
699c914a
MS
16117 to branch to the overriding function. This is necessary to
16118 permit function pointers that point to the old function to
16119 actually forward to the new function. */
16120 emit_insn (gen_nop ());
16121 emit_insn (gen_nop ());
de2ab0ca 16122 emit_insn (gen_nop ());
699c914a
MS
16123 emit_insn (gen_nop ());
16124 emit_insn (gen_nop ());
16125 }
16126
16127 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16128 {
16129 reg_mode = V2SImode;
16130 reg_size = 8;
16131 }
a3170dc6 16132
f78c3290
NF
16133 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16134 /*static_chain_p=*/using_static_chain_p,
16135 /*sibcall=*/0);
16136 using_store_multiple = strategy & SAVRES_MULTIPLE;
16137 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16138 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16139
16140 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16141 if (! WORLD_SAVE_P (info)
16142 && info->push_p
acd0b319 16143 && (DEFAULT_ABI == ABI_V4
e3b5732b 16144 || crtl->calls_eh_return))
9ebbca7d 16145 {
f78c3290
NF
16146 bool need_r11 = (TARGET_SPE
16147 ? (!saving_GPRs_inline
16148 && info->spe_64bit_regs_used == 0)
16149 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16150 if (info->total_size < 32767)
16151 sp_offset = info->total_size;
16152 else
f78c3290
NF
16153 frame_reg_rtx = (need_r11
16154 ? gen_rtx_REG (Pmode, 11)
16155 : frame_ptr_rtx);
f676971a 16156 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16157 (frame_reg_rtx != sp_reg_rtx
16158 && (info->cr_save_p
16159 || info->lr_save_p
16160 || info->first_fp_reg_save < 64
16161 || info->first_gp_reg_save < 32
f78c3290
NF
16162 )),
16163 need_r11);
9ebbca7d
GK
16164 if (frame_reg_rtx != sp_reg_rtx)
16165 rs6000_emit_stack_tie ();
16166 }
16167
d62294f5 16168 /* Handle world saves specially here. */
f57fe068 16169 if (WORLD_SAVE_P (info))
d62294f5
FJ
16170 {
16171 int i, j, sz;
16172 rtx treg;
16173 rtvec p;
22fa69da 16174 rtx reg0;
d62294f5
FJ
16175
16176 /* save_world expects lr in r0. */
22fa69da 16177 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16178 if (info->lr_save_p)
c4ad648e 16179 {
22fa69da 16180 insn = emit_move_insn (reg0,
1de43f85 16181 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16182 RTX_FRAME_RELATED_P (insn) = 1;
16183 }
d62294f5
FJ
16184
16185 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16186 assumptions about the offsets of various bits of the stack
992d08b1 16187 frame. */
37409796
NS
16188 gcc_assert (info->gp_save_offset == -220
16189 && info->fp_save_offset == -144
16190 && info->lr_save_offset == 8
16191 && info->cr_save_offset == 4
16192 && info->push_p
16193 && info->lr_save_p
e3b5732b 16194 && (!crtl->calls_eh_return
37409796
NS
16195 || info->ehrd_offset == -432)
16196 && info->vrsave_save_offset == -224
22fa69da 16197 && info->altivec_save_offset == -416);
d62294f5
FJ
16198
16199 treg = gen_rtx_REG (SImode, 11);
16200 emit_move_insn (treg, GEN_INT (-info->total_size));
16201
16202 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16203 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16204
16205 /* Preserve CR2 for save_world prologues */
22fa69da 16206 sz = 5;
d62294f5
FJ
16207 sz += 32 - info->first_gp_reg_save;
16208 sz += 64 - info->first_fp_reg_save;
16209 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16210 p = rtvec_alloc (sz);
16211 j = 0;
16212 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16213 gen_rtx_REG (SImode,
1de43f85 16214 LR_REGNO));
d62294f5 16215 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16216 gen_rtx_SYMBOL_REF (Pmode,
16217 "*save_world"));
d62294f5 16218 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16219 properly. */
16220 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16221 {
696e45ba
ME
16222 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16223 ? DFmode : SFmode),
16224 info->first_fp_reg_save + i);
c4ad648e
AM
16225 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16226 GEN_INT (info->fp_save_offset
16227 + sp_offset + 8 * i));
696e45ba
ME
16228 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16229 ? DFmode : SFmode), addr);
c4ad648e
AM
16230
16231 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16232 }
d62294f5 16233 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16234 {
16235 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16236 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16237 GEN_INT (info->altivec_save_offset
16238 + sp_offset + 16 * i));
0be76840 16239 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16240
16241 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16242 }
d62294f5 16243 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16244 {
16245 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16246 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16247 GEN_INT (info->gp_save_offset
16248 + sp_offset + reg_size * i));
0be76840 16249 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16250
16251 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16252 }
16253
16254 {
16255 /* CR register traditionally saved as CR2. */
16256 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16257 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16258 GEN_INT (info->cr_save_offset
16259 + sp_offset));
0be76840 16260 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16261
16262 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16263 }
22fa69da
GK
16264 /* Explain about use of R0. */
16265 if (info->lr_save_p)
16266 {
16267 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16268 GEN_INT (info->lr_save_offset
16269 + sp_offset));
16270 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16271
22fa69da
GK
16272 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16273 }
16274 /* Explain what happens to the stack pointer. */
16275 {
16276 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16277 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16278 }
d62294f5
FJ
16279
16280 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16281 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16282 treg, GEN_INT (-info->total_size));
16283 sp_offset = info->total_size;
d62294f5
FJ
16284 }
16285
9ebbca7d 16286 /* If we use the link register, get it into r0. */
f57fe068 16287 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16288 {
52ff33d0
NF
16289 rtx addr, reg, mem;
16290
f8a57be8 16291 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16292 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16293 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16294
16295 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16296 GEN_INT (info->lr_save_offset + sp_offset));
16297 reg = gen_rtx_REG (Pmode, 0);
16298 mem = gen_rtx_MEM (Pmode, addr);
16299 /* This should not be of rs6000_sr_alias_set, because of
16300 __builtin_return_address. */
16301
16302 insn = emit_move_insn (mem, reg);
16303 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16304 NULL_RTX, NULL_RTX);
f8a57be8 16305 }
9ebbca7d
GK
16306
16307 /* If we need to save CR, put it into r12. */
f57fe068 16308 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16309 {
f8a57be8 16310 rtx set;
f676971a 16311
9ebbca7d 16312 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16313 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16314 RTX_FRAME_RELATED_P (insn) = 1;
16315 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16316 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16317 But that's OK. All we have to do is specify that _one_ condition
16318 code register is saved in this stack slot. The thrower's epilogue
16319 will then restore all the call-saved registers.
16320 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16321 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16322 gen_rtx_REG (SImode, CR2_REGNO));
bbbbb16a 16323 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16324 }
16325
a4f6c312
SS
16326 /* Do any required saving of fpr's. If only one or two to save, do
 16327	     it ourselves. Otherwise, call an out-of-line routine.  */
f57fe068 16328 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16329 {
16330 int i;
16331 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16332 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16333 && ! call_used_regs[info->first_fp_reg_save+i]))
696e45ba
ME
16334 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16335 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16336 ? DFmode : SFmode,
89e7058f
AH
16337 info->first_fp_reg_save + i,
16338 info->fp_save_offset + sp_offset + 8 * i,
16339 info->total_size);
9ebbca7d 16340 }
f57fe068 16341 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16342 {
16343 rtx par;
16344
16345 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16346 info->fp_save_offset + sp_offset,
16347 DFmode,
16348 /*savep=*/true, /*gpr=*/false,
16349 /*exitp=*/false);
16350 insn = emit_insn (par);
16351 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16352 NULL_RTX, NULL_RTX);
16353 }
16354
16355 /* Save GPRs. This is done as a PARALLEL if we are using
16356 the store-multiple instructions. */
16357 if (!WORLD_SAVE_P (info)
16358 && TARGET_SPE_ABI
16359 && info->spe_64bit_regs_used != 0
16360 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16361 {
16362 int i;
f78c3290
NF
16363 rtx spe_save_area_ptr;
16364
16365 /* Determine whether we can address all of the registers that need
16366 to be saved with an offset from the stack pointer that fits in
16367 the small const field for SPE memory instructions. */
16368 int spe_regs_addressable_via_sp
16369 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16370 + (32 - info->first_gp_reg_save - 1) * reg_size)
16371 && saving_GPRs_inline);
16372 int spe_offset;
16373
16374 if (spe_regs_addressable_via_sp)
16375 {
16376 spe_save_area_ptr = frame_reg_rtx;
16377 spe_offset = info->spe_gp_save_offset + sp_offset;
16378 }
16379 else
16380 {
16381 /* Make r11 point to the start of the SPE save area. We need
16382 to be careful here if r11 is holding the static chain. If
16383 it is, then temporarily save it in r0. We would use r0 as
16384 our base register here, but using r0 as a base register in
16385 loads and stores means something different from what we
16386 would like. */
16387 int ool_adjust = (saving_GPRs_inline
16388 ? 0
16389 : (info->first_gp_reg_save
16390 - (FIRST_SAVRES_REGISTER+1))*8);
16391 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16392 + sp_offset - ool_adjust);
16393
16394 if (using_static_chain_p)
16395 {
16396 rtx r0 = gen_rtx_REG (Pmode, 0);
16397 gcc_assert (info->first_gp_reg_save > 11);
16398
16399 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16400 }
16401
16402 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16403 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16404 frame_reg_rtx,
16405 GEN_INT (offset)));
16406 /* We need to make sure the move to r11 gets noted for
16407 properly outputting unwind information. */
16408 if (!saving_GPRs_inline)
16409 rs6000_frame_related (insn, frame_reg_rtx, offset,
16410 NULL_RTX, NULL_RTX);
16411 spe_offset = 0;
16412 }
16413
16414 if (saving_GPRs_inline)
16415 {
16416 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16417 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16418 {
16419 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16420 rtx offset, addr, mem;
f676971a 16421
f78c3290
NF
16422 /* We're doing all this to ensure that the offset fits into
 16423		     the immediate offset field of 'evstdd'.  */
16424 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16425
16426 offset = GEN_INT (reg_size * i + spe_offset);
16427 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16428 mem = gen_rtx_MEM (V2SImode, addr);
16429
16430 insn = emit_move_insn (mem, reg);
16431
16432 rs6000_frame_related (insn, spe_save_area_ptr,
16433 info->spe_gp_save_offset
16434 + sp_offset + reg_size * i,
16435 offset, const0_rtx);
16436 }
16437 }
16438 else
9ebbca7d 16439 {
f78c3290 16440 rtx par;
9ebbca7d 16441
f78c3290
NF
16442 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16443 0, reg_mode,
16444 /*savep=*/true, /*gpr=*/true,
16445 /*exitp=*/false);
16446 insn = emit_insn (par);
16447 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16448 NULL_RTX, NULL_RTX);
9ebbca7d 16449 }
f78c3290
NF
16450
16451
16452 /* Move the static chain pointer back. */
16453 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16454 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16455 }
16456 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16457 {
16458 rtx par;
16459
16460 /* Need to adjust r11 if we saved any FPRs. */
16461 if (info->first_fp_reg_save != 64)
16462 {
16463 rtx r11 = gen_rtx_REG (reg_mode, 11);
16464 rtx offset = GEN_INT (info->total_size
16465 + (-8 * (64-info->first_fp_reg_save)));
16466 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16467 ? sp_reg_rtx : r11);
16468
16469 emit_insn (TARGET_32BIT
16470 ? gen_addsi3 (r11, ptr_reg, offset)
16471 : gen_adddi3 (r11, ptr_reg, offset));
16472 }
16473
16474 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16475 info->gp_save_offset + sp_offset,
16476 reg_mode,
16477 /*savep=*/true, /*gpr=*/true,
16478 /*exitp=*/false);
16479 insn = emit_insn (par);
f676971a 16480 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16481 NULL_RTX, NULL_RTX);
16482 }
f78c3290 16483 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16484 {
308c142a 16485 rtvec p;
9ebbca7d
GK
16486 int i;
16487 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16488 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16489 {
16490 rtx addr, reg, mem;
16491 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16492 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16493 GEN_INT (info->gp_save_offset
16494 + sp_offset
9ebbca7d 16495 + reg_size * i));
0be76840 16496 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16497
16498 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16499 }
16500 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16501 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16502 NULL_RTX, NULL_RTX);
b6c9286a 16503 }
f57fe068 16504 else if (!WORLD_SAVE_P (info))
b6c9286a 16505 {
9ebbca7d
GK
16506 int i;
16507 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16508 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16509 {
16510 rtx addr, reg, mem;
16511 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16512
52ff33d0
NF
16513 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16514 GEN_INT (info->gp_save_offset
16515 + sp_offset
16516 + reg_size * i));
16517 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16518
52ff33d0
NF
16519 insn = emit_move_insn (mem, reg);
16520 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16521 NULL_RTX, NULL_RTX);
16522 }
9ebbca7d
GK
16523 }
16524
83720594
RH
16525 /* ??? There's no need to emit actual instructions here, but it's the
16526 easiest way to get the frame unwind information emitted. */
e3b5732b 16527 if (crtl->calls_eh_return)
83720594 16528 {
78e1b90d
DE
16529 unsigned int i, regno;
16530
fc4767bb
JJ
 16531	      /* In the AIX ABI we need to pretend we save r2 here.  */
16532 if (TARGET_AIX)
16533 {
16534 rtx addr, reg, mem;
16535
16536 reg = gen_rtx_REG (reg_mode, 2);
16537 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16538 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16539 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16540
16541 insn = emit_move_insn (mem, reg);
f676971a 16542 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16543 NULL_RTX, NULL_RTX);
16544 PATTERN (insn) = gen_blockage ();
16545 }
16546
83720594
RH
16547 for (i = 0; ; ++i)
16548 {
83720594
RH
16549 regno = EH_RETURN_DATA_REGNO (i);
16550 if (regno == INVALID_REGNUM)
16551 break;
16552
89e7058f
AH
16553 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16554 info->ehrd_offset + sp_offset
16555 + reg_size * (int) i,
16556 info->total_size);
83720594
RH
16557 }
16558 }
16559
9ebbca7d 16560 /* Save CR if we use any that must be preserved. */
f57fe068 16561 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16562 {
16563 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16564 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16565 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16566 /* See the large comment above about why CR2_REGNO is used. */
16567 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16568
9ebbca7d
GK
16569 /* If r12 was used to hold the original sp, copy cr into r0 now
16570 that it's free. */
16571 if (REGNO (frame_reg_rtx) == 12)
16572 {
f8a57be8
GK
16573 rtx set;
16574
9ebbca7d 16575 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16576 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16577 RTX_FRAME_RELATED_P (insn) = 1;
16578 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
bbbbb16a 16579 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16580 }
16581 insn = emit_move_insn (mem, cr_save_rtx);
16582
f676971a 16583 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16584 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16585 }
16586
f676971a 16587 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16588 for which it was done previously. */
f57fe068 16589 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16590 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16591 {
bcb2d701 16592 if (info->total_size < 32767)
2b2c2fe5 16593 sp_offset = info->total_size;
bcb2d701
EC
16594 else
16595 frame_reg_rtx = frame_ptr_rtx;
16596 rs6000_emit_allocate_stack (info->total_size,
16597 (frame_reg_rtx != sp_reg_rtx
16598 && ((info->altivec_size != 0)
16599 || (info->vrsave_mask != 0)
f78c3290
NF
16600 )),
16601 FALSE);
bcb2d701
EC
16602 if (frame_reg_rtx != sp_reg_rtx)
16603 rs6000_emit_stack_tie ();
2b2c2fe5 16604 }
9ebbca7d
GK
16605
16606 /* Set frame pointer, if needed. */
16607 if (frame_pointer_needed)
16608 {
7d5175e1 16609 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16610 sp_reg_rtx);
16611 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16612 }
9878760c 16613
2b2c2fe5
EC
16614 /* Save AltiVec registers if needed. Save here because the red zone does
16615 not include AltiVec registers. */
16616 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16617 {
16618 int i;
16619
 16620	      /* There should be a non-inline version of this, for when we
16621 are saving lots of vector registers. */
16622 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16623 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16624 {
16625 rtx areg, savereg, mem;
16626 int offset;
16627
16628 offset = info->altivec_save_offset + sp_offset
16629 + 16 * (i - info->first_altivec_reg_save);
16630
16631 savereg = gen_rtx_REG (V4SImode, i);
16632
16633 areg = gen_rtx_REG (Pmode, 0);
16634 emit_move_insn (areg, GEN_INT (offset));
16635
16636 /* AltiVec addressing mode is [reg+reg]. */
16637 mem = gen_frame_mem (V4SImode,
16638 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16639
16640 insn = emit_move_insn (mem, savereg);
16641
16642 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16643 areg, GEN_INT (offset));
16644 }
16645 }
16646
16647 /* VRSAVE is a bit vector representing which AltiVec registers
16648 are used. The OS uses this to determine which vector
16649 registers to save on a context switch. We need to save
16650 VRSAVE on the stack frame, add whatever AltiVec registers we
16651 used in this function, and do the corresponding magic in the
16652 epilogue. */
16653
16654 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16655 && info->vrsave_mask != 0)
16656 {
16657 rtx reg, mem, vrsave;
16658 int offset;
16659
16660 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16661 as frame_reg_rtx and r11 as the static chain pointer for
16662 nested functions. */
16663 reg = gen_rtx_REG (SImode, 0);
16664 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16665 if (TARGET_MACHO)
16666 emit_insn (gen_get_vrsave_internal (reg));
16667 else
16668 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16669
16670 if (!WORLD_SAVE_P (info))
16671 {
16672 /* Save VRSAVE. */
16673 offset = info->vrsave_save_offset + sp_offset;
16674 mem = gen_frame_mem (SImode,
16675 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16676 GEN_INT (offset)));
16677 insn = emit_move_insn (mem, reg);
16678 }
16679
16680 /* Include the registers in the mask. */
16681 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16682
16683 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16684 }
16685
1db02437 16686 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16687 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16688 || (DEFAULT_ABI == ABI_V4
16689 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16690 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16691 {
16692 /* If emit_load_toc_table will use the link register, we need to save
16693 it. We use R12 for this purpose because emit_load_toc_table
16694 can use register 0. This allows us to use a plain 'blr' to return
16695 from the procedure more often. */
16696 int save_LR_around_toc_setup = (TARGET_ELF
16697 && DEFAULT_ABI != ABI_AIX
16698 && flag_pic
16699 && ! info->lr_save_p
16700 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16701 if (save_LR_around_toc_setup)
16702 {
1de43f85 16703 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16704
c4ad648e 16705 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16706 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16707
c4ad648e 16708 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16709
c4ad648e 16710 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16711 RTX_FRAME_RELATED_P (insn) = 1;
16712 }
16713 else
16714 rs6000_emit_load_toc_table (TRUE);
16715 }
ee890fe2 16716
fcce224d 16717#if TARGET_MACHO
ee890fe2 16718 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16719 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16720 {
1de43f85 16721 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 16722 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 16723
6d0a8091
DJ
16724 /* Save and restore LR locally around this call (in R0). */
16725 if (!info->lr_save_p)
6fb5fa3c 16726 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16727
6fb5fa3c 16728 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16729
6fb5fa3c
DB
16730 emit_move_insn (gen_rtx_REG (Pmode,
16731 RS6000_PIC_OFFSET_TABLE_REGNUM),
16732 lr);
6d0a8091
DJ
16733
16734 if (!info->lr_save_p)
6fb5fa3c 16735 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16736 }
fcce224d 16737#endif
9ebbca7d
GK
16738}
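
/* Editorial sketch, not part of rs6000.c: a hypothetical, standalone
   illustration of the VRSAVE bookkeeping described in the prologue above.
   The old VRSAVE value is kept in the frame, the AltiVec registers this
   function touches are OR'd into the live mask, and the epilogue puts the
   saved value back.  The mask values below are made up for the example.  */
#include <stdio.h>

int
main (void)
{
  unsigned int vrsave = 0x00030000;     /* caller's live AltiVec registers */
  unsigned int our_mask = 0x0000c000;   /* registers this function clobbers */

  unsigned int saved_vrsave = vrsave;   /* prologue: save VRSAVE to the frame */
  vrsave |= our_mask;                   /* advertise the registers we use */

  /* ... function body would use the AltiVec registers here ... */

  vrsave = saved_vrsave;                /* epilogue: restore the saved value */
  printf ("VRSAVE restored to 0x%08x\n", vrsave);
  return 0;
}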
16739
9ebbca7d 16740/* Write function prologue. */
a4f6c312 16741
08c148a8 16742static void
f676971a 16743rs6000_output_function_prologue (FILE *file,
a2369ed3 16744 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16745{
16746 rs6000_stack_t *info = rs6000_stack_info ();
16747
4697a36c
MM
16748 if (TARGET_DEBUG_STACK)
16749 debug_stack_info (info);
9878760c 16750
a4f6c312
SS
16751 /* Write .extern for any function we will call to save and restore
16752 fp values. */
16753 if (info->first_fp_reg_save < 64
16754 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16755 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16756 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16757 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16758
c764f757
RK
16759 /* Write .extern for AIX common mode routines, if needed. */
16760 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16761 {
f6709c70
JW
16762 fputs ("\t.extern __mulh\n", file);
16763 fputs ("\t.extern __mull\n", file);
16764 fputs ("\t.extern __divss\n", file);
16765 fputs ("\t.extern __divus\n", file);
16766 fputs ("\t.extern __quoss\n", file);
16767 fputs ("\t.extern __quous\n", file);
c764f757
RK
16768 common_mode_defined = 1;
16769 }
9878760c 16770
9ebbca7d 16771 if (! HAVE_prologue)
979721f8 16772 {
9ebbca7d 16773 start_sequence ();
9dda4cc8 16774
a4f6c312
SS
16775 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16776 the "toplevel" insn chain. */
2e040219 16777 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16778 rs6000_emit_prologue ();
2e040219 16779 emit_note (NOTE_INSN_DELETED);
178c3eff 16780
a3c9585f 16781 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16782 {
16783 rtx insn;
16784 unsigned addr = 0;
16785 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16786 {
16787 INSN_ADDRESSES_NEW (insn, addr);
16788 addr += 4;
16789 }
16790 }
9dda4cc8 16791
9ebbca7d 16792 if (TARGET_DEBUG_STACK)
a4f6c312 16793 debug_rtx_list (get_insns (), 100);
c9d691e9 16794 final (get_insns (), file, FALSE);
9ebbca7d 16795 end_sequence ();
979721f8
MM
16796 }
16797
9ebbca7d
GK
16798 rs6000_pic_labelno++;
16799}
f676971a 16800
1c9c5e43
AM
16801/* Non-zero if vmx regs are restored before the frame pop, zero if
16802 we restore after the pop when possible. */
16803#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16804
f78c3290
NF
16805/* Reload CR from REG. */
16806
16807static void
16808rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16809{
16810 int count = 0;
16811 int i;
16812
16813 if (using_mfcr_multiple)
16814 {
16815 for (i = 0; i < 8; i++)
16816 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16817 count++;
16818 gcc_assert (count);
16819 }
16820
16821 if (using_mfcr_multiple && count > 1)
16822 {
16823 rtvec p;
16824 int ndx;
16825
16826 p = rtvec_alloc (count);
16827
16828 ndx = 0;
16829 for (i = 0; i < 8; i++)
16830 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16831 {
16832 rtvec r = rtvec_alloc (2);
16833 RTVEC_ELT (r, 0) = reg;
16834 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16835 RTVEC_ELT (p, ndx) =
16836 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16837 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16838 ndx++;
16839 }
16840 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16841 gcc_assert (ndx == count);
16842 }
16843 else
16844 for (i = 0; i < 8; i++)
16845 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16846 {
16847 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16848 CR0_REGNO+i),
16849 reg));
16850 }
16851}
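
/* Editorial sketch, not part of rs6000.c: how the (1 << (7 - i)) mask used
   above selects a single CR field for mtcrf.  CR0 is the most significant
   of the eight 4-bit fields, so field i corresponds to bit 7 - i of the
   8-bit field mask.  Standalone illustration only.  */
#include <stdio.h>

int
main (void)
{
  int i;
  for (i = 0; i < 8; i++)
    printf ("CR%d -> mtcrf field mask 0x%02x\n", i, 1 << (7 - i));
  return 0;
}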
16852
ff35822b
JJ
16853/* Return true if OFFSET from stack pointer can be clobbered by signals.
 16854   V.4 doesn't have any stack cushion; the AIX ABIs have 220 or 288 bytes
 16855   below the stack pointer that are not clobbered by signals.  */
9878760c 16856
ff35822b
JJ
16857static inline bool
16858offset_below_red_zone_p (HOST_WIDE_INT offset)
16859{
16860 return offset < (DEFAULT_ABI == ABI_V4
16861 ? 0
16862 : TARGET_32BIT ? -220 : -288);
16863}
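
/* Editorial sketch, not part of rs6000.c: the same red-zone cutoffs applied
   to sample offsets in a standalone program.  The -220/-288 values mirror
   the 32-bit and 64-bit AIX-style cushions used above; V.4 has no cushion,
   so any offset below the stack pointer may be clobbered by a signal.  */
#include <stdbool.h>
#include <stdio.h>

static bool
example_offset_below_red_zone_p (long offset, bool abi_v4, bool is_32bit)
{
  long cushion = abi_v4 ? 0 : (is_32bit ? -220 : -288);
  return offset < cushion;
}

int
main (void)
{
  /* -100 is inside both AIX red zones; -300 is outside even the 64-bit one.  */
  printf ("%d %d\n",
	  example_offset_below_red_zone_p (-100, false, true),
	  example_offset_below_red_zone_p (-300, false, false));
  return 0;
}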
16864
16865/* Emit function epilogue as insns. */
9878760c 16866
9ebbca7d 16867void
a2369ed3 16868rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16869{
16870 rs6000_stack_t *info;
f78c3290 16871 int restoring_GPRs_inline;
9ebbca7d
GK
16872 int restoring_FPRs_inline;
16873 int using_load_multiple;
d296e02e 16874 int using_mtcr_multiple;
9ebbca7d 16875 int use_backchain_to_restore_sp;
f78c3290
NF
16876 int restore_lr;
16877 int strategy;
9ebbca7d
GK
16878 int sp_offset = 0;
16879 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16880 rtx frame_reg_rtx = sp_reg_rtx;
ff35822b
JJ
16881 rtx cfa_restores = NULL_RTX;
16882 rtx insn;
0e67400a 16883 enum machine_mode reg_mode = Pmode;
327e5343 16884 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16885 int i;
16886
c19de7aa
AH
16887 info = rs6000_stack_info ();
16888
16889 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16890 {
16891 reg_mode = V2SImode;
16892 reg_size = 8;
16893 }
16894
f78c3290
NF
16895 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16896 /*static_chain_p=*/0, sibcall);
16897 using_load_multiple = strategy & SAVRES_MULTIPLE;
16898 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16899 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16900 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16901 || rs6000_cpu == PROCESSOR_PPC603
16902 || rs6000_cpu == PROCESSOR_PPC750
16903 || optimize_size);
1c9c5e43
AM
16904 /* Restore via the backchain when we have a large frame, since this
16905 is more efficient than an addis, addi pair. The second condition
 16906     here will not trigger at the moment; we don't actually need a
16907 frame pointer for alloca, but the generic parts of the compiler
16908 give us one anyway. */
16909 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16910 || info->total_size
16911 + (info->lr_save_p ? info->lr_save_offset : 0)
16912 > 32767
1c9c5e43
AM
16913 || (cfun->calls_alloca
16914 && !frame_pointer_needed));
f78c3290
NF
16915 restore_lr = (info->lr_save_p
16916 && restoring_GPRs_inline
16917 && restoring_FPRs_inline);
9ebbca7d 16918
f57fe068 16919 if (WORLD_SAVE_P (info))
d62294f5
FJ
16920 {
16921 int i, j;
16922 char rname[30];
16923 const char *alloc_rname;
16924 rtvec p;
16925
16926 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
 16927	 stack slot (which is not likely to be our caller).
16928 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16929 rest_world is similar, except any R10 parameter is ignored.
16930 The exception-handling stuff that was here in 2.95 is no
16931 longer necessary. */
d62294f5
FJ
16932
16933 p = rtvec_alloc (9
16934 + 1
f676971a 16935 + 32 - info->first_gp_reg_save
c4ad648e
AM
16936 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16937 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16938
e3b5732b 16939 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16940 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16941 alloc_rname = ggc_strdup (rname);
16942
16943 j = 0;
16944 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16945 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16946 gen_rtx_REG (Pmode,
1de43f85 16947 LR_REGNO));
d62294f5 16948 RTVEC_ELT (p, j++)
c4ad648e 16949 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16950 /* The instruction pattern requires a clobber here;
c4ad648e 16951 it is shared with the restVEC helper. */
d62294f5 16952 RTVEC_ELT (p, j++)
c4ad648e 16953 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16954
16955 {
c4ad648e
AM
16956 /* CR register traditionally saved as CR2. */
16957 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16958 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16959 GEN_INT (info->cr_save_offset));
0be76840 16960 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16961
16962 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16963 }
16964
16965 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16966 {
16967 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16968 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16969 GEN_INT (info->gp_save_offset
16970 + reg_size * i));
0be76840 16971 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16972
16973 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16974 }
d62294f5 16975 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16976 {
16977 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16978 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16979 GEN_INT (info->altivec_save_offset
16980 + 16 * i));
0be76840 16981 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16982
16983 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16984 }
d62294f5 16985 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 16986 {
696e45ba
ME
16987 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16988 ? DFmode : SFmode),
16989 info->first_fp_reg_save + i);
c4ad648e
AM
16990 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16991 GEN_INT (info->fp_save_offset
16992 + 8 * i));
696e45ba
ME
16993 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16994 ? DFmode : SFmode), addr);
c4ad648e
AM
16995
16996 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16997 }
d62294f5 16998 RTVEC_ELT (p, j++)
c4ad648e 16999 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 17000 RTVEC_ELT (p, j++)
c4ad648e 17001 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 17002 RTVEC_ELT (p, j++)
c4ad648e 17003 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 17004 RTVEC_ELT (p, j++)
c4ad648e 17005 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 17006 RTVEC_ELT (p, j++)
c4ad648e 17007 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
17008 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17009
17010 return;
17011 }
17012
45b194f8
AM
17013 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17014 if (info->push_p)
2b2c2fe5 17015 sp_offset = info->total_size;
f676971a 17016
e6477eaa
AM
17017 /* Restore AltiVec registers if we must do so before adjusting the
17018 stack. */
17019 if (TARGET_ALTIVEC_ABI
17020 && info->altivec_size != 0
1c9c5e43
AM
17021 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17022 || (DEFAULT_ABI != ABI_V4
ff35822b 17023 && offset_below_red_zone_p (info->altivec_save_offset))))
9aa86737
AH
17024 {
17025 int i;
17026
e6477eaa
AM
17027 if (use_backchain_to_restore_sp)
17028 {
17029 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17030 emit_move_insn (frame_reg_rtx,
17031 gen_rtx_MEM (Pmode, sp_reg_rtx));
17032 sp_offset = 0;
17033 }
1c9c5e43
AM
17034 else if (frame_pointer_needed)
17035 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17036
9aa86737
AH
17037 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17038 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17039 {
ff35822b 17040 rtx addr, areg, mem, reg;
9aa86737
AH
17041
17042 areg = gen_rtx_REG (Pmode, 0);
17043 emit_move_insn
17044 (areg, GEN_INT (info->altivec_save_offset
17045 + sp_offset
17046 + 16 * (i - info->first_altivec_reg_save)));
17047
17048 /* AltiVec addressing mode is [reg+reg]. */
17049 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17050 mem = gen_frame_mem (V4SImode, addr);
9aa86737 17051
ff35822b
JJ
17052 reg = gen_rtx_REG (V4SImode, i);
17053 emit_move_insn (reg, mem);
17054 if (offset_below_red_zone_p (info->altivec_save_offset
17055 + (i - info->first_altivec_reg_save)
17056 * 16))
17057 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17058 cfa_restores);
9aa86737
AH
17059 }
17060 }
17061
e6477eaa
AM
17062 /* Restore VRSAVE if we must do so before adjusting the stack. */
17063 if (TARGET_ALTIVEC
17064 && TARGET_ALTIVEC_VRSAVE
17065 && info->vrsave_mask != 0
1c9c5e43
AM
17066 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17067 || (DEFAULT_ABI != ABI_V4
ff35822b 17068 && offset_below_red_zone_p (info->vrsave_save_offset))))
e6477eaa
AM
17069 {
17070 rtx addr, mem, reg;
17071
1c9c5e43 17072 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17073 {
1c9c5e43
AM
17074 if (use_backchain_to_restore_sp)
17075 {
17076 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17077 emit_move_insn (frame_reg_rtx,
17078 gen_rtx_MEM (Pmode, sp_reg_rtx));
17079 sp_offset = 0;
17080 }
17081 else if (frame_pointer_needed)
17082 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
17083 }
17084
17085 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17086 GEN_INT (info->vrsave_save_offset + sp_offset));
17087 mem = gen_frame_mem (SImode, addr);
17088 reg = gen_rtx_REG (SImode, 12);
17089 emit_move_insn (reg, mem);
17090
17091 emit_insn (generate_set_vrsave (reg, info, 1));
17092 }
17093
ff35822b 17094 insn = NULL_RTX;
1c9c5e43
AM
17095 /* If we have a large stack frame, restore the old stack pointer
17096 using the backchain. */
2b2c2fe5
EC
17097 if (use_backchain_to_restore_sp)
17098 {
1c9c5e43 17099 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
17100 {
17101 /* Under V.4, don't reset the stack pointer until after we're done
17102 loading the saved registers. */
17103 if (DEFAULT_ABI == ABI_V4)
17104 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17105
ff35822b
JJ
17106 insn = emit_move_insn (frame_reg_rtx,
17107 gen_rtx_MEM (Pmode, sp_reg_rtx));
e6477eaa
AM
17108 sp_offset = 0;
17109 }
1c9c5e43
AM
17110 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17111 && DEFAULT_ABI == ABI_V4)
17112 /* frame_reg_rtx has been set up by the altivec restore. */
17113 ;
17114 else
17115 {
ff35822b 17116 insn = emit_move_insn (sp_reg_rtx, frame_reg_rtx);
1c9c5e43
AM
17117 frame_reg_rtx = sp_reg_rtx;
17118 }
17119 }
17120 /* If we have a frame pointer, we can restore the old stack pointer
17121 from it. */
17122 else if (frame_pointer_needed)
17123 {
17124 frame_reg_rtx = sp_reg_rtx;
17125 if (DEFAULT_ABI == ABI_V4)
17126 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17127
ff35822b
JJ
17128 insn = emit_insn (gen_add3_insn (frame_reg_rtx, hard_frame_pointer_rtx,
17129 GEN_INT (info->total_size)));
1c9c5e43 17130 sp_offset = 0;
2b2c2fe5 17131 }
45b194f8
AM
17132 else if (info->push_p
17133 && DEFAULT_ABI != ABI_V4
e3b5732b 17134 && !crtl->calls_eh_return)
2b2c2fe5 17135 {
ff35822b
JJ
17136 insn = emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx,
17137 GEN_INT (info->total_size)));
45b194f8 17138 sp_offset = 0;
2b2c2fe5 17139 }
ff35822b
JJ
17140 if (insn && frame_reg_rtx == sp_reg_rtx)
17141 {
17142 if (cfa_restores)
17143 {
17144 REG_NOTES (insn) = cfa_restores;
17145 cfa_restores = NULL_RTX;
17146 }
17147 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17148 RTX_FRAME_RELATED_P (insn) = 1;
17149 }
2b2c2fe5 17150
e6477eaa 17151 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
17152 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17153 && TARGET_ALTIVEC_ABI
e6477eaa
AM
17154 && info->altivec_size != 0
17155 && (DEFAULT_ABI == ABI_V4
ff35822b 17156 || !offset_below_red_zone_p (info->altivec_save_offset)))
e6477eaa
AM
17157 {
17158 int i;
17159
17160 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17161 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17162 {
ff35822b 17163 rtx addr, areg, mem, reg;
e6477eaa
AM
17164
17165 areg = gen_rtx_REG (Pmode, 0);
17166 emit_move_insn
17167 (areg, GEN_INT (info->altivec_save_offset
17168 + sp_offset
17169 + 16 * (i - info->first_altivec_reg_save)));
17170
17171 /* AltiVec addressing mode is [reg+reg]. */
17172 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17173 mem = gen_frame_mem (V4SImode, addr);
17174
ff35822b
JJ
17175 reg = gen_rtx_REG (V4SImode, i);
17176 emit_move_insn (reg, mem);
17177 if (DEFAULT_ABI == ABI_V4)
17178 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17179 cfa_restores);
e6477eaa
AM
17180 }
17181 }
17182
17183 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
17184 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17185 && TARGET_ALTIVEC
e6477eaa
AM
17186 && TARGET_ALTIVEC_VRSAVE
17187 && info->vrsave_mask != 0
17188 && (DEFAULT_ABI == ABI_V4
ff35822b 17189 || !offset_below_red_zone_p (info->vrsave_save_offset)))
554c2941
AM
17190 {
17191 rtx addr, mem, reg;
17192
17193 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17194 GEN_INT (info->vrsave_save_offset + sp_offset));
17195 mem = gen_frame_mem (SImode, addr);
17196 reg = gen_rtx_REG (SImode, 12);
17197 emit_move_insn (reg, mem);
17198
17199 emit_insn (generate_set_vrsave (reg, info, 1));
17200 }
17201
f78c3290
NF
17202 /* Get the old lr if we saved it. If we are restoring registers
17203 out-of-line, then the out-of-line routines can do this for us. */
17204 if (restore_lr)
b6c9286a 17205 {
a3170dc6
AH
17206 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17207 info->lr_save_offset + sp_offset);
ba4828e0 17208
9ebbca7d 17209 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17210 }
f676971a 17211
9ebbca7d
GK
17212 /* Get the old cr if we saved it. */
17213 if (info->cr_save_p)
17214 {
17215 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17216 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17217 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17218
9ebbca7d
GK
17219 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17220 }
f676971a 17221
ff35822b
JJ
17222 /* Set LR here to try to overlap restores below. LR is always saved
 17223     above the incoming stack pointer, so it never needs REG_CFA_RESTORE.  */
f78c3290 17224 if (restore_lr)
1de43f85 17225 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17226 gen_rtx_REG (Pmode, 0));
f676971a 17227
83720594 17228 /* Load exception handler data registers, if needed. */
e3b5732b 17229 if (crtl->calls_eh_return)
83720594 17230 {
78e1b90d
DE
17231 unsigned int i, regno;
17232
fc4767bb
JJ
17233 if (TARGET_AIX)
17234 {
17235 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17236 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17237 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17238
17239 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17240 }
17241
83720594
RH
17242 for (i = 0; ; ++i)
17243 {
a3170dc6 17244 rtx mem;
83720594
RH
17245
17246 regno = EH_RETURN_DATA_REGNO (i);
17247 if (regno == INVALID_REGNUM)
17248 break;
17249
a3170dc6
AH
17250 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17251 info->ehrd_offset + sp_offset
17252 + reg_size * (int) i);
83720594
RH
17253
17254 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17255 }
17256 }
f676971a 17257
9ebbca7d
GK
17258 /* Restore GPRs. This is done as a PARALLEL if we are using
17259 the load-multiple instructions. */
f78c3290
NF
17260 if (TARGET_SPE_ABI
17261 && info->spe_64bit_regs_used != 0
17262 && info->first_gp_reg_save != 32)
52ff33d0 17263 {
52ff33d0
NF
17264 /* Determine whether we can address all of the registers that need
17265 to be saved with an offset from the stack pointer that fits in
17266 the small const field for SPE memory instructions. */
17267 int spe_regs_addressable_via_sp
f78c3290
NF
17268 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17269 + (32 - info->first_gp_reg_save - 1) * reg_size)
17270 && restoring_GPRs_inline);
52ff33d0
NF
17271 int spe_offset;
17272
17273 if (spe_regs_addressable_via_sp)
45b194f8 17274 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17275 else
17276 {
45b194f8 17277 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17278 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17279 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17280 There's no need to worry here because the static chain is passed
17281 anew to every function. */
f78c3290
NF
17282 int ool_adjust = (restoring_GPRs_inline
17283 ? 0
17284 : (info->first_gp_reg_save
17285 - (FIRST_SAVRES_REGISTER+1))*8);
17286
45b194f8
AM
17287 if (frame_reg_rtx == sp_reg_rtx)
17288 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17289 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17290 GEN_INT (info->spe_gp_save_offset
17291 + sp_offset
17292 - ool_adjust)));
45b194f8
AM
17293 /* Keep the invariant that frame_reg_rtx + sp_offset points
17294 at the top of the stack frame. */
17295 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17296
17297 spe_offset = 0;
17298 }
17299
f78c3290
NF
17300 if (restoring_GPRs_inline)
17301 {
17302 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17303 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17304 {
ff35822b 17305 rtx offset, addr, mem, reg;
52ff33d0 17306
f78c3290
NF
17307 /* We're doing all this to ensure that the immediate offset
17308 fits into the immediate field of 'evldd'. */
17309 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17310
f78c3290
NF
17311 offset = GEN_INT (spe_offset + reg_size * i);
17312 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17313 mem = gen_rtx_MEM (V2SImode, addr);
ff35822b 17314 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
52ff33d0 17315
ff35822b
JJ
17316 insn = emit_move_insn (reg, mem);
17317 if (DEFAULT_ABI == ABI_V4)
17318 {
17319 if (frame_pointer_needed
17320 && info->first_gp_reg_save + i
17321 == HARD_FRAME_POINTER_REGNUM)
17322 {
17323 add_reg_note (insn, REG_CFA_DEF_CFA,
17324 plus_constant (frame_reg_rtx,
17325 sp_offset));
17326 RTX_FRAME_RELATED_P (insn) = 1;
17327 }
17328
17329 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17330 cfa_restores);
17331 }
f78c3290
NF
17332 }
17333 }
17334 else
17335 {
17336 rtx par;
17337
17338 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17339 0, reg_mode,
17340 /*savep=*/false, /*gpr=*/true,
17341 /*exitp=*/true);
17342 emit_jump_insn (par);
f78c3290
NF
17343 /* We don't want anybody else emitting things after we jumped
17344 back. */
17345 return;
17346 }
52ff33d0 17347 }
f78c3290
NF
17348 else if (!restoring_GPRs_inline)
17349 {
17350 /* We are jumping to an out-of-line function. */
17351 bool can_use_exit = info->first_fp_reg_save == 64;
17352 rtx par;
17353
17354 /* Emit stack reset code if we need it. */
17355 if (can_use_exit)
17356 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17357 sp_offset, can_use_exit);
17358 else
17359 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17360 sp_reg_rtx,
17361 GEN_INT (sp_offset - info->fp_size)));
17362
17363 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17364 info->gp_save_offset, reg_mode,
17365 /*savep=*/false, /*gpr=*/true,
17366 /*exitp=*/can_use_exit);
17367
17368 if (can_use_exit)
17369 {
17370 if (info->cr_save_p)
ff35822b
JJ
17371 {
17372 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17373 using_mtcr_multiple);
17374 if (DEFAULT_ABI == ABI_V4)
17375 cfa_restores
17376 = alloc_reg_note (REG_CFA_RESTORE,
17377 gen_rtx_REG (SImode, CR2_REGNO),
17378 cfa_restores);
17379 }
f78c3290
NF
17380
17381 emit_jump_insn (par);
17382
17383 /* We don't want anybody else emitting things after we jumped
17384 back. */
17385 return;
17386 }
ff35822b
JJ
17387
17388 insn = emit_insn (par);
17389 if (DEFAULT_ABI == ABI_V4)
17390 {
17391 if (frame_pointer_needed)
17392 {
17393 add_reg_note (insn, REG_CFA_DEF_CFA,
17394 plus_constant (frame_reg_rtx, sp_offset));
17395 RTX_FRAME_RELATED_P (insn) = 1;
17396 }
17397
17398 for (i = info->first_gp_reg_save; i < 32; i++)
17399 cfa_restores
17400 = alloc_reg_note (REG_CFA_RESTORE,
17401 gen_rtx_REG (reg_mode, i), cfa_restores);
17402 }
f78c3290
NF
17403 }
17404 else if (using_load_multiple)
17405 {
17406 rtvec p;
17407 p = rtvec_alloc (32 - info->first_gp_reg_save);
17408 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17409 {
f676971a
EC
17410 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17411 GEN_INT (info->gp_save_offset
17412 + sp_offset
9ebbca7d 17413 + reg_size * i));
0be76840 17414 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17415 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
ba4828e0 17416
ff35822b
JJ
17417 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, reg, mem);
17418 if (DEFAULT_ABI == ABI_V4)
17419 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17420 cfa_restores);
17421 }
17422 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17423 if (DEFAULT_ABI == ABI_V4 && frame_pointer_needed)
17424 {
17425 add_reg_note (insn, REG_CFA_DEF_CFA,
17426 plus_constant (frame_reg_rtx, sp_offset));
17427 RTX_FRAME_RELATED_P (insn) = 1;
9ebbca7d 17428 }
f78c3290
NF
17429 }
17430 else
17431 {
17432 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17433 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17434 {
17435 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17436 GEN_INT (info->gp_save_offset
17437 + sp_offset
17438 + reg_size * i));
17439 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17440 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f78c3290 17441
ff35822b
JJ
17442 insn = emit_move_insn (reg, mem);
17443 if (DEFAULT_ABI == ABI_V4)
17444 {
17445 if (frame_pointer_needed
17446 && info->first_gp_reg_save + i
17447 == HARD_FRAME_POINTER_REGNUM)
17448 {
17449 add_reg_note (insn, REG_CFA_DEF_CFA,
17450 plus_constant (frame_reg_rtx, sp_offset));
17451 RTX_FRAME_RELATED_P (insn) = 1;
17452 }
17453
17454 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17455 cfa_restores);
17456 }
f78c3290
NF
17457 }
17458 }
9878760c 17459
9ebbca7d
GK
17460 /* Restore fpr's if we need to do it without calling a function. */
17461 if (restoring_FPRs_inline)
17462 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17463 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17464 && ! call_used_regs[info->first_fp_reg_save+i]))
17465 {
ff35822b 17466 rtx addr, mem, reg;
9ebbca7d 17467 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17468 GEN_INT (info->fp_save_offset
17469 + sp_offset
a4f6c312 17470 + 8 * i));
696e45ba
ME
17471 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17472 ? DFmode : SFmode), addr);
ff35822b
JJ
17473 reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17474 ? DFmode : SFmode),
17475 info->first_fp_reg_save + i);
9ebbca7d 17476
ff35822b
JJ
17477 emit_move_insn (reg, mem);
17478 if (DEFAULT_ABI == ABI_V4)
17479 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17480 cfa_restores);
9ebbca7d 17481 }
8d30c4ee 17482
9ebbca7d
GK
17483 /* If we saved cr, restore it here. Just those that were used. */
17484 if (info->cr_save_p)
ff35822b
JJ
17485 {
17486 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
17487 if (DEFAULT_ABI == ABI_V4)
17488 cfa_restores
17489 = alloc_reg_note (REG_CFA_RESTORE, gen_rtx_REG (SImode, CR2_REGNO),
17490 cfa_restores);
17491 }
979721f8 17492
9ebbca7d 17493 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17494 have been done. */
ff35822b
JJ
17495 insn = rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17496 sp_offset, !restoring_FPRs_inline);
17497 if (insn)
17498 {
17499 if (cfa_restores)
17500 {
17501 REG_NOTES (insn) = cfa_restores;
17502 cfa_restores = NULL_RTX;
17503 }
17504 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17505 RTX_FRAME_RELATED_P (insn) = 1;
17506 }
b6c9286a 17507
e3b5732b 17508 if (crtl->calls_eh_return)
83720594
RH
17509 {
17510 rtx sa = EH_RETURN_STACKADJ_RTX;
ff35822b 17511 emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx, sa));
83720594
RH
17512 }
17513
9ebbca7d
GK
17514 if (!sibcall)
17515 {
17516 rtvec p;
17517 if (! restoring_FPRs_inline)
f78c3290 17518 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17519 else
17520 p = rtvec_alloc (2);
b6c9286a 17521
e35b9579 17522 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17523 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17524 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17525 : gen_rtx_CLOBBER (VOIDmode,
17526 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17527
17528 /* If we have to restore more than two FP registers, branch to the
17529 restore function. It will return to our caller. */
17530 if (! restoring_FPRs_inline)
17531 {
17532 int i;
f78c3290
NF
17533 rtx sym;
17534
17535 sym = rs6000_savres_routine_sym (info,
17536 /*savep=*/false,
17537 /*gpr=*/false,
17538 /*exitp=*/true);
17539 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17540 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17541 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17542 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17543 {
17544 rtx addr, mem;
17545 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17546 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17547 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17548
f78c3290 17549 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17550 gen_rtx_SET (VOIDmode,
17551 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17552 mem);
b6c9286a
MM
17553 }
17554 }
f676971a 17555
9ebbca7d 17556 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17557 }
9878760c
RK
17558}
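
/* Editorial sketch, not part of rs6000.c: the backchain convention the
   epilogue exploits for large frames.  Word 0 of each frame holds the
   caller's stack pointer, so the old SP can be recovered with one load
   instead of materializing a big frame size with an addis/addi pair.
   The arrays below stand in for stack frames; illustration only.  */
#include <stdio.h>

int
main (void)
{
  long caller_frame[8] = { 0 };              /* outermost frame */
  long callee_frame[8];

  callee_frame[0] = (long) &caller_frame[0]; /* backchain word */

  long *sp = &callee_frame[0];
  sp = (long *) *sp;                         /* pop the frame via the backchain */
  printf ("%d\n", sp == &caller_frame[0]);
  return 0;
}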
17559
17560/* Write function epilogue. */
17561
08c148a8 17562static void
f676971a 17563rs6000_output_function_epilogue (FILE *file,
a2369ed3 17564 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17565{
9ebbca7d 17566 if (! HAVE_epilogue)
9878760c 17567 {
9ebbca7d
GK
17568 rtx insn = get_last_insn ();
17569 /* If the last insn was a BARRIER, we don't have to write anything except
17570 the trace table. */
17571 if (GET_CODE (insn) == NOTE)
17572 insn = prev_nonnote_insn (insn);
17573 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17574 {
9ebbca7d
GK
17575 /* This is slightly ugly, but at least we don't have two
17576 copies of the epilogue-emitting code. */
17577 start_sequence ();
17578
17579 /* A NOTE_INSN_DELETED is supposed to be at the start
17580 and end of the "toplevel" insn chain. */
2e040219 17581 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17582 rs6000_emit_epilogue (FALSE);
2e040219 17583 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17584
a3c9585f 17585 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17586 {
17587 rtx insn;
17588 unsigned addr = 0;
17589 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17590 {
17591 INSN_ADDRESSES_NEW (insn, addr);
17592 addr += 4;
17593 }
17594 }
17595
9ebbca7d 17596 if (TARGET_DEBUG_STACK)
a4f6c312 17597 debug_rtx_list (get_insns (), 100);
c9d691e9 17598 final (get_insns (), file, FALSE);
9ebbca7d 17599 end_sequence ();
4697a36c 17600 }
9878760c 17601 }
b4ac57ab 17602
efdba735
SH
17603#if TARGET_MACHO
17604 macho_branch_islands ();
0e5da0be
GK
17605 /* Mach-O doesn't support labels at the end of objects, so if
17606 it looks like we might want one, insert a NOP. */
17607 {
17608 rtx insn = get_last_insn ();
17609 while (insn
17610 && NOTE_P (insn)
a38e7aa5 17611 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17612 insn = PREV_INSN (insn);
f676971a
EC
17613 if (insn
17614 && (LABEL_P (insn)
0e5da0be 17615 || (NOTE_P (insn)
a38e7aa5 17616 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17617 fputs ("\tnop\n", file);
17618 }
17619#endif
17620
9b30bae2 17621 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17622 on its format.
17623
17624 We don't output a traceback table if -finhibit-size-directive was
17625 used. The documentation for -finhibit-size-directive reads
17626 ``don't output a @code{.size} assembler directive, or anything
17627 else that would cause trouble if the function is split in the
17628 middle, and the two halves are placed at locations far apart in
17629 memory.'' The traceback table has this property, since it
17630 includes the offset from the start of the function to the
4d30c363
MM
17631 traceback table itself.
17632
 17633     System V.4 PowerPCs (and the embedded ABI derived from it) use a
b6c9286a 17634 different traceback table. */
57ac7be9 17635 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
3c072c6b 17636 && rs6000_traceback != traceback_none && !cfun->is_thunk)
9b30bae2 17637 {
69c75916 17638 const char *fname = NULL;
3ac88239 17639 const char *language_string = lang_hooks.name;
6041bf2f 17640 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17641 int i;
57ac7be9 17642 int optional_tbtab;
8097c268 17643 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17644
17645 if (rs6000_traceback == traceback_full)
17646 optional_tbtab = 1;
17647 else if (rs6000_traceback == traceback_part)
17648 optional_tbtab = 0;
17649 else
17650 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17651
69c75916
AM
17652 if (optional_tbtab)
17653 {
17654 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17655 while (*fname == '.') /* V.4 encodes . in the name */
17656 fname++;
17657
17658 /* Need label immediately before tbtab, so we can compute
17659 its offset from the function start. */
17660 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17661 ASM_OUTPUT_LABEL (file, fname);
17662 }
314fc5a9
ILT
17663
17664 /* The .tbtab pseudo-op can only be used for the first eight
17665 expressions, since it can't handle the possibly variable
17666 length fields that follow. However, if you omit the optional
17667 fields, the assembler outputs zeros for all optional fields
 17668	 anyway, giving each variable length field its minimum length
 17669	 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17670 pseudo-op at all. */
17671
17672 /* An all-zero word flags the start of the tbtab, for debuggers
17673 that have to find it by searching forward from the entry
17674 point or from the current pc. */
19d2d16f 17675 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17676
17677 /* Tbtab format type. Use format type 0. */
19d2d16f 17678 fputs ("\t.byte 0,", file);
314fc5a9 17679
5fc921c1
DE
17680 /* Language type. Unfortunately, there does not seem to be any
17681 official way to discover the language being compiled, so we
17682 use language_string.
17683 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17684 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17685 a number, so for now use 9. */
5fc921c1 17686 if (! strcmp (language_string, "GNU C"))
314fc5a9 17687 i = 0;
6de9cd9a 17688 else if (! strcmp (language_string, "GNU F77")
7f62878c 17689 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17690 i = 1;
8b83775b 17691 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17692 i = 2;
5fc921c1
DE
17693 else if (! strcmp (language_string, "GNU Ada"))
17694 i = 3;
56438901
AM
17695 else if (! strcmp (language_string, "GNU C++")
17696 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17697 i = 9;
9517ead8
AG
17698 else if (! strcmp (language_string, "GNU Java"))
17699 i = 13;
5fc921c1
DE
17700 else if (! strcmp (language_string, "GNU Objective-C"))
17701 i = 14;
314fc5a9 17702 else
37409796 17703 gcc_unreachable ();
314fc5a9
ILT
17704 fprintf (file, "%d,", i);
17705
17706 /* 8 single bit fields: global linkage (not set for C extern linkage,
17707 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17708 from start of procedure stored in tbtab, internal function, function
17709 has controlled storage, function has no toc, function uses fp,
17710 function logs/aborts fp operations. */
17711 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17712 fprintf (file, "%d,",
17713 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
17714
17715 /* 6 bitfields: function is interrupt handler, name present in
17716 proc table, function calls alloca, on condition directives
17717 (controls stack walks, 3 bits), saves condition reg, saves
17718 link reg. */
17719 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17720 set up as a frame pointer, even when there is no alloca call. */
17721 fprintf (file, "%d,",
6041bf2f
DE
17722 ((optional_tbtab << 6)
17723 | ((optional_tbtab & frame_pointer_needed) << 5)
17724 | (info->cr_save_p << 1)
17725 | (info->lr_save_p)));
314fc5a9 17726
6041bf2f 17727 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17728 (6 bits). */
17729 fprintf (file, "%d,",
4697a36c 17730 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17731
17732 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17733 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17734
6041bf2f
DE
17735 if (optional_tbtab)
17736 {
17737 /* Compute the parameter info from the function decl argument
17738 list. */
17739 tree decl;
17740 int next_parm_info_bit = 31;
314fc5a9 17741
6041bf2f
DE
17742 for (decl = DECL_ARGUMENTS (current_function_decl);
17743 decl; decl = TREE_CHAIN (decl))
17744 {
17745 rtx parameter = DECL_INCOMING_RTL (decl);
17746 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17747
6041bf2f
DE
17748 if (GET_CODE (parameter) == REG)
17749 {
ebb109ad 17750 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17751 {
17752 int bits;
17753
17754 float_parms++;
17755
37409796
NS
17756 switch (mode)
17757 {
17758 case SFmode:
e41b2a33 17759 case SDmode:
37409796
NS
17760 bits = 0x2;
17761 break;
17762
17763 case DFmode:
7393f7f8 17764 case DDmode:
37409796 17765 case TFmode:
7393f7f8 17766 case TDmode:
37409796
NS
17767 bits = 0x3;
17768 break;
17769
17770 default:
17771 gcc_unreachable ();
17772 }
6041bf2f
DE
17773
17774 /* If only one bit will fit, don't or in this entry. */
17775 if (next_parm_info_bit > 0)
17776 parm_info |= (bits << (next_parm_info_bit - 1));
17777 next_parm_info_bit -= 2;
17778 }
17779 else
17780 {
17781 fixed_parms += ((GET_MODE_SIZE (mode)
17782 + (UNITS_PER_WORD - 1))
17783 / UNITS_PER_WORD);
17784 next_parm_info_bit -= 1;
17785 }
17786 }
17787 }
17788 }
314fc5a9
ILT
17789
17790 /* Number of fixed point parameters. */
17791 /* This is actually the number of words of fixed point parameters; thus
 17792	 an 8-byte struct counts as 2, and thus the maximum value is 8.  */
17793 fprintf (file, "%d,", fixed_parms);
17794
17795 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17796 all on stack. */
17797 /* This is actually the number of fp registers that hold parameters;
17798 and thus the maximum value is 13. */
17799 /* Set parameters on stack bit if parameters are not in their original
17800 registers, regardless of whether they are on the stack? Xlc
17801 seems to set the bit when not optimizing. */
17802 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17803
6041bf2f
DE
17804 if (! optional_tbtab)
17805 return;
17806
314fc5a9
ILT
17807 /* Optional fields follow. Some are variable length. */
17808
17809 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17810 11 double float. */
17811 /* There is an entry for each parameter in a register, in the order that
17812 they occur in the parameter list. Any intervening arguments on the
17813 stack are ignored. If the list overflows a long (max possible length
17814 34 bits) then completely leave off all elements that don't fit. */
17815 /* Only emit this long if there was at least one parameter. */
17816 if (fixed_parms || float_parms)
17817 fprintf (file, "\t.long %d\n", parm_info);
17818
17819 /* Offset from start of code to tb table. */
19d2d16f 17820 fputs ("\t.long ", file);
314fc5a9 17821 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17822 if (TARGET_AIX)
17823 RS6000_OUTPUT_BASENAME (file, fname);
17824 else
17825 assemble_name (file, fname);
17826 putc ('-', file);
17827 rs6000_output_function_entry (file, fname);
19d2d16f 17828 putc ('\n', file);
314fc5a9
ILT
17829
17830 /* Interrupt handler mask. */
17831 /* Omit this long, since we never set the interrupt handler bit
17832 above. */
17833
17834 /* Number of CTL (controlled storage) anchors. */
17835 /* Omit this long, since the has_ctl bit is never set above. */
17836
17837 /* Displacement into stack of each CTL anchor. */
17838 /* Omit this list of longs, because there are no CTL anchors. */
17839
17840 /* Length of function name. */
69c75916
AM
17841 if (*fname == '*')
17842 ++fname;
296b8152 17843 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17844
17845 /* Function name. */
17846 assemble_string (fname, strlen (fname));
17847
17848 /* Register for alloca automatic storage; this is always reg 31.
17849 Only emit this if the alloca bit was set above. */
17850 if (frame_pointer_needed)
19d2d16f 17851 fputs ("\t.byte 31\n", file);
b1765bde
DE
17852
17853 fputs ("\t.align 2\n", file);
9b30bae2 17854 }
9878760c 17855}
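
/* Editorial sketch, not part of rs6000.c: the parm_info packing used for
   the traceback table above, as a standalone program.  Fixed-point
   parameters contribute one 0 bit; floating-point parameters contribute
   two bits (10 = single, 11 = double/extended), filled from bit 31 down.
   The example parameter list (int, double, float) is made up.  */
#include <stdio.h>

int
main (void)
{
  int parm_info = 0, next_parm_info_bit = 31;

  /* int parameter: a single 0 bit.  */
  next_parm_info_bit -= 1;

  /* double parameter: bits 0x3.  */
  if (next_parm_info_bit > 0)
    parm_info |= 0x3 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  /* float parameter: bits 0x2.  */
  if (next_parm_info_bit > 0)
    parm_info |= 0x2 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  printf ("parm_info = 0x%08x\n", parm_info);
  return 0;
}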
17167fd8 17856\f
a4f6c312
SS
17857/* A C compound statement that outputs the assembler code for a thunk
17858 function, used to implement C++ virtual function calls with
17859 multiple inheritance. The thunk acts as a wrapper around a virtual
17860 function, adjusting the implicit object parameter before handing
17861 control off to the real function.
17862
17863 First, emit code to add the integer DELTA to the location that
17864 contains the incoming first argument. Assume that this argument
17865 contains a pointer, and is the one used to pass the `this' pointer
17866 in C++. This is the incoming argument *before* the function
17867 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17868 values of all other incoming arguments.
17167fd8
MM
17869
17870 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17871 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17872 not touch the return address. Hence returning from FUNCTION will
17873 return to whoever called the current `thunk'.
17167fd8 17874
a4f6c312
SS
17875 The effect must be as if FUNCTION had been called directly with the
17876 adjusted first argument. This macro is responsible for emitting
17877 all of the code for a thunk function; output_function_prologue()
17878 and output_function_epilogue() are not invoked.
17167fd8 17879
a4f6c312
SS
17880 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17881 been extracted from it.) It might possibly be useful on some
17882 targets, but probably not.
17167fd8 17883
a4f6c312
SS
17884 If you do not define this macro, the target-independent code in the
17885 C++ frontend will generate a less efficient heavyweight thunk that
17886 calls FUNCTION instead of jumping to it. The generic approach does
17887 not support varargs. */
17167fd8 17888
3961e8fe 17889static void
f676971a
EC
17890rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17891 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17892 tree function)
17167fd8 17893{
0a2aaacc 17894 rtx this_rtx, insn, funexp;
17167fd8 17895
5b71a4e7 17896 reload_completed = 1;
fe3ad572 17897 epilogue_completed = 1;
56a7189a 17898
5b71a4e7 17899 /* Mark the end of the (empty) prologue. */
2e040219 17900 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17901
5b71a4e7
DE
17902 /* Find the "this" pointer. If the function returns a structure,
17903 the structure return pointer is in r3. */
61f71b34 17904 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17905 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17906 else
0a2aaacc 17907 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17908
5b71a4e7
DE
17909 /* Apply the constant offset, if required. */
17910 if (delta)
17911 {
17912 rtx delta_rtx = GEN_INT (delta);
17913 emit_insn (TARGET_32BIT
0a2aaacc
KG
17914 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17915 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
17916 }
17917
5b71a4e7
DE
17918 /* Apply the offset from the vtable, if required. */
17919 if (vcall_offset)
17167fd8 17920 {
5b71a4e7
DE
17921 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17922 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17923
0a2aaacc 17924 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
eeff9307
JJ
17925 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17926 {
17927 emit_insn (TARGET_32BIT
17928 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17929 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17930 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17931 }
17932 else
17933 {
17934 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17935
17936 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17937 }
5b71a4e7 17938 emit_insn (TARGET_32BIT
0a2aaacc
KG
17939 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17940 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
17941 }
17942
5b71a4e7
DE
17943 /* Generate a tail call to the target function. */
17944 if (!TREE_USED (function))
17945 {
17946 assemble_external (function);
17947 TREE_USED (function) = 1;
17948 }
17949 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17950 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17951
17952#if TARGET_MACHO
ab82a49f 17953 if (MACHOPIC_INDIRECT)
5b71a4e7 17954 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17955#endif
5b71a4e7
DE
17956
17957 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17958 generate sibcall RTL explicitly. */
5b71a4e7
DE
17959 insn = emit_call_insn (
17960 gen_rtx_PARALLEL (VOIDmode,
17961 gen_rtvec (4,
17962 gen_rtx_CALL (VOIDmode,
17963 funexp, const0_rtx),
17964 gen_rtx_USE (VOIDmode, const0_rtx),
17965 gen_rtx_USE (VOIDmode,
17966 gen_rtx_REG (SImode,
1de43f85 17967 LR_REGNO)),
5b71a4e7
DE
17968 gen_rtx_RETURN (VOIDmode))));
17969 SIBLING_CALL_P (insn) = 1;
17970 emit_barrier ();
17971
17972 /* Run just enough of rest_of_compilation to get the insns emitted.
17973 There's not really enough bulk here to make other passes such as
17974 instruction scheduling worthwhile. Note that use_thunk calls
17975 assemble_start_function and assemble_end_function. */
17976 insn = get_insns ();
55e092c4 17977 insn_locators_alloc ();
5b71a4e7
DE
17978 shorten_branches (insn);
17979 final_start_function (insn, file, 1);
c9d691e9 17980 final (insn, file, 1);
5b71a4e7 17981 final_end_function ();
d7087dd2 17982 free_after_compilation (cfun);
5b71a4e7
DE
17983
17984 reload_completed = 0;
fe3ad572 17985 epilogue_completed = 0;
9ebbca7d 17986}
9ebbca7d
GK
17987\f
17988/* A quick summary of the various types of 'constant-pool tables'
17989 under PowerPC:
17990
f676971a 17991 Target      Flags           Name             One table per
9ebbca7d
GK
17992 AIX         (none)          AIX TOC          object file
17993 AIX         -mfull-toc      AIX TOC          object file
17994 AIX         -mminimal-toc   AIX minimal TOC  translation unit
17995 SVR4/EABI   (none)          SVR4 SDATA       object file
17996 SVR4/EABI   -fpic           SVR4 pic         object file
17997 SVR4/EABI   -fPIC           SVR4 PIC         translation unit
17998 SVR4/EABI   -mrelocatable   EABI TOC         function
17999 SVR4/EABI   -maix           AIX TOC          object file
f676971a 18000 SVR4/EABI   -maix -mminimal-toc
9ebbca7d
GK
18001                             AIX minimal TOC  translation unit
18002
18003 Name             Reg.  Set by  Entries  Contains:
18004                                made by  addrs?   fp?      sum?
18005
18006 AIX TOC          2     crt0    as       Y        option   option
18007 AIX minimal TOC  30    prolog  gcc      Y        Y        option
18008 SVR4 SDATA       13    crt0    gcc      N        Y        N
18009 SVR4 pic         30    prolog  ld       Y        not yet  N
18010 SVR4 PIC         30    prolog  gcc      Y        option   option
18011 EABI TOC         30    prolog  gcc      Y        option   option
18012
18013*/
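/* Illustrative example, not part of the original comment: with the AIX
   TOC, a reference to a global symbol `foo' is typically emitted by
   output_toc below as an entry of the form

	LC..1:
		.tc foo[TC],foo

   and code then reaches the address indirectly through the TOC register
   (r2).  The exact internal-label spelling depends on the assembler in
   use.  */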
18014
9ebbca7d
GK
18015/* Hash functions for the hash table. */
18016
18017static unsigned
a2369ed3 18018rs6000_hash_constant (rtx k)
9ebbca7d 18019{
46b33600
RH
18020 enum rtx_code code = GET_CODE (k);
18021 enum machine_mode mode = GET_MODE (k);
18022 unsigned result = (code << 3) ^ mode;
18023 const char *format;
18024 int flen, fidx;
f676971a 18025
46b33600
RH
18026 format = GET_RTX_FORMAT (code);
18027 flen = strlen (format);
18028 fidx = 0;
9ebbca7d 18029
46b33600
RH
18030 switch (code)
18031 {
18032 case LABEL_REF:
18033 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
18034
18035 case CONST_DOUBLE:
18036 if (mode != VOIDmode)
18037 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
18038 flen = 2;
18039 break;
18040
18041 case CODE_LABEL:
18042 fidx = 3;
18043 break;
18044
18045 default:
18046 break;
18047 }
9ebbca7d
GK
18048
18049 for (; fidx < flen; fidx++)
18050 switch (format[fidx])
18051 {
18052 case 's':
18053 {
18054 unsigned i, len;
18055 const char *str = XSTR (k, fidx);
18056 len = strlen (str);
18057 result = result * 613 + len;
18058 for (i = 0; i < len; i++)
18059 result = result * 613 + (unsigned) str[i];
17167fd8
MM
18060 break;
18061 }
9ebbca7d
GK
18062 case 'u':
18063 case 'e':
18064 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
18065 break;
18066 case 'i':
18067 case 'n':
18068 result = result * 613 + (unsigned) XINT (k, fidx);
18069 break;
18070 case 'w':
18071 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
18072 result = result * 613 + (unsigned) XWINT (k, fidx);
18073 else
18074 {
18075 size_t i;
9390387d 18076 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
18077 result = result * 613 + (unsigned) (XWINT (k, fidx)
18078 >> CHAR_BIT * i);
18079 }
18080 break;
09501938
DE
18081 case '0':
18082 break;
9ebbca7d 18083 default:
37409796 18084 gcc_unreachable ();
9ebbca7d 18085 }
46b33600 18086
9ebbca7d
GK
18087 return result;
18088}
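/* Illustrative note, not part of the original source: for a plain
   (const_int 5) the hash above starts from (CONST_INT << 3) ^ VOIDmode
   and then folds the value 5 in through the 'w' case with the 613
   multiplier; nested rtxes recurse through the 'e'/'u' cases with the
   1231 multiplier instead.  */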
18089
18090static unsigned
a2369ed3 18091toc_hash_function (const void *hash_entry)
9ebbca7d 18092{
f676971a 18093 const struct toc_hash_struct *thc =
a9098fd0
GK
18094 (const struct toc_hash_struct *) hash_entry;
18095 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18096}
18097
18098/* Compare H1 and H2 for equivalence. */
18099
18100static int
a2369ed3 18101toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18102{
18103 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18104 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18105
a9098fd0
GK
18106 if (((const struct toc_hash_struct *) h1)->key_mode
18107 != ((const struct toc_hash_struct *) h2)->key_mode)
18108 return 0;
18109
5692c7bc 18110 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18111}
18112
28e510bd
MM
18113/* These are the names given by the C++ front-end to vtables, and
18114 vtable-like objects. Ideally, this logic should not be here;
18115 instead, there should be some programmatic way of inquiring as
18116 to whether or not an object is a vtable. */
18117
18118#define VTABLE_NAME_P(NAME) \
9390387d 18119 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18120 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18121 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18122 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18123 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
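/* Illustrative note, not part of the original source: under the Itanium
   C++ ABI the vtable for class Foo is mangled as "_ZTV3Foo", its VTT as
   "_ZTT3Foo", and its typeinfo as "_ZTI3Foo"; all of these are accepted
   by VTABLE_NAME_P above.  */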
28e510bd 18124
ee06c6a5
DE
18125#ifdef NO_DOLLAR_IN_LABEL
18126/* Return a GGC-allocated character string translating dollar signs in
18127 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
18128
18129const char *
18130rs6000_xcoff_strip_dollar (const char *name)
18131{
18132 char *strip, *p;
18133 int len;
18134
18135 p = strchr (name, '$');
18136
18137 if (p == 0 || p == name)
18138 return name;
18139
18140 len = strlen (name);
18141 strip = (char *) alloca (len + 1);
18142 strcpy (strip, name);
18143 p = strchr (strip, '$');
18144 while (p)
18145 {
18146 *p = '_';
18147 p = strchr (p + 1, '$');
18148 }
18149
18150 return ggc_alloc_string (strip, len);
18151}
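/* Usage sketch, not part of the original source:
   rs6000_xcoff_strip_dollar ("foo$bar$baz") returns "foo_bar_baz",
   while a name containing no '$', or one whose first character is '$',
   is returned unchanged.  */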
18152#endif
18153
28e510bd 18154void
a2369ed3 18155rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18156{
18157 /* Currently C++ toc references to vtables can be emitted before it
18158 is decided whether the vtable is public or private. If this is
18159 the case, then the linker will eventually complain that there is
f676971a 18160 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18161 we emit the TOC reference to reference the symbol and not the
18162 section. */
18163 const char *name = XSTR (x, 0);
54ee9799 18164
f676971a 18165 if (VTABLE_NAME_P (name))
54ee9799
DE
18166 {
18167 RS6000_OUTPUT_BASENAME (file, name);
18168 }
18169 else
18170 assemble_name (file, name);
28e510bd
MM
18171}
18172
a4f6c312
SS
18173/* Output a TOC entry. We derive the entry name from what is being
18174 written. */
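/* Illustrative example, not part of the original comment: for a DFmode
   constant such as 1.0 on a 32-bit target the code below names the entry
   after the value's two 32-bit halves, emitting roughly

	.tc FD_3ff00000_0[TC],0x3ff00000,0x0

   while integer constants use ID_/IS_ prefixes and plain symbols use the
   symbol name itself.  */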
9878760c
RK
18175
18176void
a2369ed3 18177output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18178{
18179 char buf[256];
3cce094d 18180 const char *name = buf;
9878760c 18181 rtx base = x;
16fdeb48 18182 HOST_WIDE_INT offset = 0;
9878760c 18183
37409796 18184 gcc_assert (!TARGET_NO_TOC);
4697a36c 18185
9ebbca7d
GK
18186 /* When the linker won't eliminate them, don't output duplicate
18187 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18188 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18189 CODE_LABELs. */
18190 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18191 {
18192 struct toc_hash_struct *h;
18193 void * * found;
f676971a 18194
17211ab5 18195 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18196 time because GGC is not initialized at that point. */
17211ab5 18197 if (toc_hash_table == NULL)
f676971a 18198 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18199 toc_hash_eq, NULL);
18200
5ead67f6 18201 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18202 h->key = x;
a9098fd0 18203 h->key_mode = mode;
9ebbca7d 18204 h->labelno = labelno;
f676971a 18205
bbbbb16a 18206 found = htab_find_slot (toc_hash_table, h, INSERT);
9ebbca7d
GK
18207 if (*found == NULL)
18208 *found = h;
f676971a 18209 else /* This is indeed a duplicate.
9ebbca7d
GK
18210 Set this label equal to that label. */
18211 {
18212 fputs ("\t.set ", file);
18213 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18214 fprintf (file, "%d,", labelno);
18215 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18216 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18217 found)->labelno));
18218 return;
18219 }
18220 }
18221
18222 /* If we're going to put a double constant in the TOC, make sure it's
18223 aligned properly when strict alignment is on. */
ff1720ed
RK
18224 if (GET_CODE (x) == CONST_DOUBLE
18225 && STRICT_ALIGNMENT
a9098fd0 18226 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18227 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18228 ASM_OUTPUT_ALIGN (file, 3);
18229 }
18230
4977bab6 18231 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18232
37c37a57
RK
18233 /* Handle FP constants specially. Note that if we have a minimal
18234 TOC, things we put here aren't actually in the TOC, so we can allow
18235 FP constants. */
00b79d54
BE
18236 if (GET_CODE (x) == CONST_DOUBLE &&
18237 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18238 {
18239 REAL_VALUE_TYPE rv;
18240 long k[4];
18241
18242 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18243 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18244 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18245 else
18246 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18247
18248 if (TARGET_64BIT)
18249 {
18250 if (TARGET_MINIMAL_TOC)
18251 fputs (DOUBLE_INT_ASM_OP, file);
18252 else
18253 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18254 k[0] & 0xffffffff, k[1] & 0xffffffff,
18255 k[2] & 0xffffffff, k[3] & 0xffffffff);
18256 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18257 k[0] & 0xffffffff, k[1] & 0xffffffff,
18258 k[2] & 0xffffffff, k[3] & 0xffffffff);
18259 return;
18260 }
18261 else
18262 {
18263 if (TARGET_MINIMAL_TOC)
18264 fputs ("\t.long ", file);
18265 else
18266 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18267 k[0] & 0xffffffff, k[1] & 0xffffffff,
18268 k[2] & 0xffffffff, k[3] & 0xffffffff);
18269 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18270 k[0] & 0xffffffff, k[1] & 0xffffffff,
18271 k[2] & 0xffffffff, k[3] & 0xffffffff);
18272 return;
18273 }
18274 }
00b79d54
BE
18275 else if (GET_CODE (x) == CONST_DOUBLE &&
18276 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18277 {
042259f2
DE
18278 REAL_VALUE_TYPE rv;
18279 long k[2];
0adc764e 18280
042259f2 18281 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18282
18283 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18284 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18285 else
18286 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18287
13ded975
DE
18288 if (TARGET_64BIT)
18289 {
18290 if (TARGET_MINIMAL_TOC)
2bfcf297 18291 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18292 else
2f0552b6
AM
18293 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18294 k[0] & 0xffffffff, k[1] & 0xffffffff);
18295 fprintf (file, "0x%lx%08lx\n",
18296 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18297 return;
18298 }
1875cc88 18299 else
13ded975
DE
18300 {
18301 if (TARGET_MINIMAL_TOC)
2bfcf297 18302 fputs ("\t.long ", file);
13ded975 18303 else
2f0552b6
AM
18304 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18305 k[0] & 0xffffffff, k[1] & 0xffffffff);
18306 fprintf (file, "0x%lx,0x%lx\n",
18307 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18308 return;
18309 }
9878760c 18310 }
00b79d54
BE
18311 else if (GET_CODE (x) == CONST_DOUBLE &&
18312 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18313 {
042259f2
DE
18314 REAL_VALUE_TYPE rv;
18315 long l;
9878760c 18316
042259f2 18317 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18318 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18319 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18320 else
18321 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18322
31bfaa0b
DE
18323 if (TARGET_64BIT)
18324 {
18325 if (TARGET_MINIMAL_TOC)
2bfcf297 18326 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18327 else
2f0552b6
AM
18328 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18329 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18330 return;
18331 }
042259f2 18332 else
31bfaa0b
DE
18333 {
18334 if (TARGET_MINIMAL_TOC)
2bfcf297 18335 fputs ("\t.long ", file);
31bfaa0b 18336 else
2f0552b6
AM
18337 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18338 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18339 return;
18340 }
042259f2 18341 }
f176e826 18342 else if (GET_MODE (x) == VOIDmode
a9098fd0 18343 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18344 {
e2c953b6 18345 unsigned HOST_WIDE_INT low;
042259f2
DE
18346 HOST_WIDE_INT high;
18347
18348 if (GET_CODE (x) == CONST_DOUBLE)
18349 {
18350 low = CONST_DOUBLE_LOW (x);
18351 high = CONST_DOUBLE_HIGH (x);
18352 }
18353 else
18354#if HOST_BITS_PER_WIDE_INT == 32
18355 {
18356 low = INTVAL (x);
0858c623 18357 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18358 }
18359#else
18360 {
c4ad648e
AM
18361 low = INTVAL (x) & 0xffffffff;
18362 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18363 }
18364#endif
9878760c 18365
a9098fd0
GK
18366 /* TOC entries are always Pmode-sized, but since this
18367 is a big-endian machine, if we're putting smaller
18368 integer constants in the TOC we have to pad them.
18369 (This is still a win over putting the constants in
18370 a separate constant pool, because then we'd have
02a4ec28
FS
18371 to have both a TOC entry _and_ the actual constant.)
18372
18373 For a 32-bit target, CONST_INT values are loaded and shifted
18374 entirely within `low' and can be stored in one TOC entry. */
18375
37409796
NS
18376 /* It would be easy to make this work, but it doesn't now. */
18377 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18378
18379 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18380 {
18381#if HOST_BITS_PER_WIDE_INT == 32
18382 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18383 POINTER_SIZE, &low, &high, 0);
18384#else
18385 low |= high << 32;
18386 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18387 high = (HOST_WIDE_INT) low >> 32;
18388 low &= 0xffffffff;
18389#endif
18390 }
a9098fd0 18391
13ded975
DE
18392 if (TARGET_64BIT)
18393 {
18394 if (TARGET_MINIMAL_TOC)
2bfcf297 18395 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18396 else
2f0552b6
AM
18397 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18398 (long) high & 0xffffffff, (long) low & 0xffffffff);
18399 fprintf (file, "0x%lx%08lx\n",
18400 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18401 return;
18402 }
1875cc88 18403 else
13ded975 18404 {
02a4ec28
FS
18405 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18406 {
18407 if (TARGET_MINIMAL_TOC)
2bfcf297 18408 fputs ("\t.long ", file);
02a4ec28 18409 else
2bfcf297 18410 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18411 (long) high & 0xffffffff, (long) low & 0xffffffff);
18412 fprintf (file, "0x%lx,0x%lx\n",
18413 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18414 }
13ded975 18415 else
02a4ec28
FS
18416 {
18417 if (TARGET_MINIMAL_TOC)
2bfcf297 18418 fputs ("\t.long ", file);
02a4ec28 18419 else
2f0552b6
AM
18420 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18421 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18422 }
13ded975
DE
18423 return;
18424 }
9878760c
RK
18425 }
18426
18427 if (GET_CODE (x) == CONST)
18428 {
37409796 18429 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18430
9878760c
RK
18431 base = XEXP (XEXP (x, 0), 0);
18432 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18433 }
f676971a 18434
37409796
NS
18435 switch (GET_CODE (base))
18436 {
18437 case SYMBOL_REF:
18438 name = XSTR (base, 0);
18439 break;
18440
18441 case LABEL_REF:
18442 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18443 CODE_LABEL_NUMBER (XEXP (base, 0)));
18444 break;
18445
18446 case CODE_LABEL:
18447 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18448 break;
18449
18450 default:
18451 gcc_unreachable ();
18452 }
9878760c 18453
1875cc88 18454 if (TARGET_MINIMAL_TOC)
2bfcf297 18455 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18456 else
18457 {
5773a50f
DE
18458 fputs ("\t.tc ", file);
18459 RS6000_OUTPUT_BASENAME (file, name);
9878760c 18460
1875cc88 18461 if (offset < 0)
16fdeb48 18462 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18463 else if (offset)
16fdeb48 18464 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18465
19d2d16f 18466 fputs ("[TC],", file);
1875cc88 18467 }
581bc4de
MM
18468
18469 /* Currently C++ toc references to vtables can be emitted before it
18470 is decided whether the vtable is public or private. If this is
18471 the case, then the linker will eventually complain that there is
18472 a TOC reference to an unknown section. Thus, for vtables only,
18473 we emit the TOC reference to reference the symbol and not the
18474 section. */
28e510bd 18475 if (VTABLE_NAME_P (name))
581bc4de 18476 {
54ee9799 18477 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18478 if (offset < 0)
16fdeb48 18479 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18480 else if (offset > 0)
16fdeb48 18481 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18482 }
18483 else
18484 output_addr_const (file, x);
19d2d16f 18485 putc ('\n', file);
9878760c
RK
18486}
18487\f
18488/* Output an assembler pseudo-op to write an ASCII string of N characters
18489 starting at P to FILE.
18490
18491 On the RS/6000, we have to do this using the .byte operation and
18492 write out special characters outside the quoted string.
18493 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18494 so we must artificially break them up early. */
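/* Illustrative example, not part of the original comment: for the three
   bytes "hi\n" the loop below emits

	.byte "hi"
	.byte 10

   printable characters go inside a quoted .byte string and anything else
   is written out as a decimal byte value.  */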
9878760c
RK
18495
18496void
a2369ed3 18497output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18498{
18499 char c;
18500 int i, count_string;
d330fd93
KG
18501 const char *for_string = "\t.byte \"";
18502 const char *for_decimal = "\t.byte ";
18503 const char *to_close = NULL;
9878760c
RK
18504
18505 count_string = 0;
18506 for (i = 0; i < n; i++)
18507 {
18508 c = *p++;
18509 if (c >= ' ' && c < 0177)
18510 {
18511 if (for_string)
18512 fputs (for_string, file);
18513 putc (c, file);
18514
18515 /* Write two quotes to get one. */
18516 if (c == '"')
18517 {
18518 putc (c, file);
18519 ++count_string;
18520 }
18521
18522 for_string = NULL;
18523 for_decimal = "\"\n\t.byte ";
18524 to_close = "\"\n";
18525 ++count_string;
18526
18527 if (count_string >= 512)
18528 {
18529 fputs (to_close, file);
18530
18531 for_string = "\t.byte \"";
18532 for_decimal = "\t.byte ";
18533 to_close = NULL;
18534 count_string = 0;
18535 }
18536 }
18537 else
18538 {
18539 if (for_decimal)
18540 fputs (for_decimal, file);
18541 fprintf (file, "%d", c);
18542
18543 for_string = "\n\t.byte \"";
18544 for_decimal = ", ";
18545 to_close = "\n";
18546 count_string = 0;
18547 }
18548 }
18549
18550 /* Now close the string if we have written one. Then end the line. */
18551 if (to_close)
9ebbca7d 18552 fputs (to_close, file);
9878760c
RK
18553}
18554\f
18555/* Generate a unique section name for FILENAME for a section type
18556 represented by SECTION_DESC. Output goes into BUF.
18557
18558 SECTION_DESC can be any string, as long as it is different for each
18559 possible section type.
18560
18561 We name the section in the same manner as xlc. The name begins with an
18562 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18563 names) with the last period replaced by the string SECTION_DESC. If
18564 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18565 the name. */
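/* Usage sketch, not part of the original comment (the SECTION_DESC value
   here is hypothetical): rs6000_gen_section_name (&buf, "subdir/foo.c",
   "ro_") produces "_fooro_"; the directory part and the ".c" suffix are
   dropped, and non-alphanumeric characters in the base name are
   skipped.  */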
9878760c
RK
18566
18567void
f676971a 18568rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18569 const char *section_desc)
9878760c 18570{
9ebbca7d 18571 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18572 char *p;
18573 int len;
9878760c
RK
18574
18575 after_last_slash = filename;
18576 for (q = filename; *q; q++)
11e5fe42
RK
18577 {
18578 if (*q == '/')
18579 after_last_slash = q + 1;
18580 else if (*q == '.')
18581 last_period = q;
18582 }
9878760c 18583
11e5fe42 18584 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18585 *buf = (char *) xmalloc (len);
9878760c
RK
18586
18587 p = *buf;
18588 *p++ = '_';
18589
18590 for (q = after_last_slash; *q; q++)
18591 {
11e5fe42 18592 if (q == last_period)
c4ad648e 18593 {
9878760c
RK
18594 strcpy (p, section_desc);
18595 p += strlen (section_desc);
e3981aab 18596 break;
c4ad648e 18597 }
9878760c 18598
e9a780ec 18599 else if (ISALNUM (*q))
c4ad648e 18600 *p++ = *q;
9878760c
RK
18601 }
18602
11e5fe42 18603 if (last_period == 0)
9878760c
RK
18604 strcpy (p, section_desc);
18605 else
18606 *p = '\0';
18607}
e165f3f0 18608\f
a4f6c312 18609/* Emit profile function. */
411707f4 18610
411707f4 18611void
a2369ed3 18612output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18613{
858081ad
AH
18614 /* Non-standard profiling for kernels, which just saves LR then calls
18615 _mcount without worrying about arg saves. The idea is to change
18616 the function prologue as little as possible as it isn't easy to
18617 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18618 if (TARGET_PROFILE_KERNEL)
18619 return;
18620
8480e480
CC
18621 if (DEFAULT_ABI == ABI_AIX)
18622 {
9739c90c
JJ
18623#ifndef NO_PROFILE_COUNTERS
18624# define NO_PROFILE_COUNTERS 0
18625#endif
f676971a 18626 if (NO_PROFILE_COUNTERS)
bbbbb16a
ILT
18627 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18628 LCT_NORMAL, VOIDmode, 0);
9739c90c
JJ
18629 else
18630 {
18631 char buf[30];
18632 const char *label_name;
18633 rtx fun;
411707f4 18634
9739c90c
JJ
18635 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18636 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18637 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18638
bbbbb16a
ILT
18639 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18640 LCT_NORMAL, VOIDmode, 1, fun, Pmode);
9739c90c 18641 }
8480e480 18642 }
ee890fe2
SS
18643 else if (DEFAULT_ABI == ABI_DARWIN)
18644 {
d5fa86ba 18645 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18646 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18647
18648 /* Be conservative and always set this, at least for now. */
e3b5732b 18649 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18650
18651#if TARGET_MACHO
18652 /* For PIC code, set up a stub and collect the caller's address
18653 from r0, which is where the prologue puts it. */
11abc112 18654 if (MACHOPIC_INDIRECT
e3b5732b 18655 && crtl->uses_pic_offset_table)
11abc112 18656 caller_addr_regno = 0;
ee890fe2
SS
18657#endif
18658 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
bbbbb16a 18659 LCT_NORMAL, VOIDmode, 1,
ee890fe2
SS
18660 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18661 }
411707f4
CC
18662}
18663
a4f6c312 18664/* Write function profiler code. */
e165f3f0
RK
18665
18666void
a2369ed3 18667output_function_profiler (FILE *file, int labelno)
e165f3f0 18668{
3daf36a4 18669 char buf[100];
e165f3f0 18670
38c1f2d7 18671 switch (DEFAULT_ABI)
3daf36a4 18672 {
38c1f2d7 18673 default:
37409796 18674 gcc_unreachable ();
38c1f2d7
MM
18675
18676 case ABI_V4:
09eeeacb
AM
18677 if (!TARGET_32BIT)
18678 {
d4ee4d25 18679 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18680 return;
18681 }
ffcfcb5f 18682 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18683 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18684 if (NO_PROFILE_COUNTERS)
18685 {
18686 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18687 reg_names[0], reg_names[1]);
18688 }
18689 else if (TARGET_SECURE_PLT && flag_pic)
18690 {
18691 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18692 reg_names[0], reg_names[1]);
18693 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18694 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18695 reg_names[12], reg_names[12]);
18696 assemble_name (file, buf);
18697 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18698 assemble_name (file, buf);
18699 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18700 }
18701 else if (flag_pic == 1)
38c1f2d7 18702 {
dfdfa60f 18703 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18704 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18705 reg_names[0], reg_names[1]);
17167fd8 18706 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18707 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18708 assemble_name (file, buf);
17167fd8 18709 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18710 }
9ebbca7d 18711 else if (flag_pic > 1)
38c1f2d7 18712 {
71625f3d
AM
18713 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18714 reg_names[0], reg_names[1]);
9ebbca7d 18715 /* Now, we need to get the address of the label. */
71625f3d 18716 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18717 assemble_name (file, buf);
9ebbca7d
GK
18718 fputs ("-.\n1:", file);
18719 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18720 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18721 reg_names[0], reg_names[11]);
18722 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18723 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18724 }
38c1f2d7
MM
18725 else
18726 {
17167fd8 18727 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18728 assemble_name (file, buf);
dfdfa60f 18729 fputs ("@ha\n", file);
71625f3d
AM
18730 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18731 reg_names[0], reg_names[1]);
a260abc9 18732 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18733 assemble_name (file, buf);
17167fd8 18734 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18735 }
18736
50d440bc 18737 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18738 fprintf (file, "\tbl %s%s\n",
18739 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18740 break;
18741
18742 case ABI_AIX:
ee890fe2 18743 case ABI_DARWIN:
ffcfcb5f
AM
18744 if (!TARGET_PROFILE_KERNEL)
18745 {
a3c9585f 18746 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18747 }
18748 else
18749 {
37409796 18750 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18751
18752 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18753 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18754
6de9cd9a 18755 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18756 {
18757 asm_fprintf (file, "\tstd %s,24(%s)\n",
18758 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18759 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18760 asm_fprintf (file, "\tld %s,24(%s)\n",
18761 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18762 }
18763 else
18764 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18765 }
38c1f2d7
MM
18766 break;
18767 }
e165f3f0 18768}
a251ffd0 18769
b54cf83a 18770\f
44cd321e
PS
18771
18772/* The following variable value is the last issued insn. */
18773
18774static rtx last_scheduled_insn;
18775
18776/* The following variable helps to balance issuing of load and
18777 store instructions */
18778
18779static int load_store_pendulum;
18780
b54cf83a
DE
18781/* Power4 load update and store update instructions are cracked into a
18782 load or store and an integer insn which are executed in the same cycle.
18783 Branches have their own dispatch slot which does not count against the
18784 GCC issue rate, but it changes the program flow so there are no other
18785 instructions to issue in this cycle. */
18786
18787static int
f676971a
EC
18788rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18789 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18790 rtx insn, int more)
b54cf83a 18791{
44cd321e 18792 last_scheduled_insn = insn;
b54cf83a
DE
18793 if (GET_CODE (PATTERN (insn)) == USE
18794 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18795 {
18796 cached_can_issue_more = more;
18797 return cached_can_issue_more;
18798 }
18799
18800 if (insn_terminates_group_p (insn, current_group))
18801 {
18802 cached_can_issue_more = 0;
18803 return cached_can_issue_more;
18804 }
b54cf83a 18805
d296e02e
AP
18806 /* If the insn has no reservation but we still reach here, leave MORE unchanged. */
18807 if (recog_memoized (insn) < 0)
18808 return more;
18809
ec507f2d 18810 if (rs6000_sched_groups)
b54cf83a 18811 {
cbe26ab8 18812 if (is_microcoded_insn (insn))
44cd321e 18813 cached_can_issue_more = 0;
cbe26ab8 18814 else if (is_cracked_insn (insn))
44cd321e
PS
18815 cached_can_issue_more = more > 2 ? more - 2 : 0;
18816 else
18817 cached_can_issue_more = more - 1;
18818
18819 return cached_can_issue_more;
b54cf83a 18820 }
165b263e 18821
d296e02e
AP
18822 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18823 return 0;
18824
44cd321e
PS
18825 cached_can_issue_more = more - 1;
18826 return cached_can_issue_more;
b54cf83a
DE
18827}
18828
a251ffd0
TG
18829/* Adjust the cost of a scheduling dependency. Return the new cost of
18830 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18831
c237e94a 18832static int
0a4f0294 18833rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18834{
44cd321e 18835 enum attr_type attr_type;
a251ffd0 18836
44cd321e 18837 if (! recog_memoized (insn))
a251ffd0
TG
18838 return 0;
18839
44cd321e 18840 switch (REG_NOTE_KIND (link))
a251ffd0 18841 {
44cd321e
PS
18842 case REG_DEP_TRUE:
18843 {
18844 /* Data dependency; DEP_INSN writes a register that INSN reads
18845 some cycles later. */
18846
18847 /* Separate a load from a narrower, dependent store. */
18848 if (rs6000_sched_groups
18849 && GET_CODE (PATTERN (insn)) == SET
18850 && GET_CODE (PATTERN (dep_insn)) == SET
18851 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18852 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18853 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18854 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18855 return cost + 14;
18856
18857 attr_type = get_attr_type (insn);
18858
18859 switch (attr_type)
18860 {
18861 case TYPE_JMPREG:
18862 /* Tell the first scheduling pass about the latency between
18863 a mtctr and bctr (and mtlr and br/blr). The first
18864 scheduling pass will not know about this latency since
18865 the mtctr instruction, which has the latency associated
18866 to it, will be generated by reload. */
18867 return TARGET_POWER ? 5 : 4;
18868 case TYPE_BRANCH:
18869 /* Leave some extra cycles between a compare and its
18870 dependent branch, to inhibit expensive mispredicts. */
18871 if ((rs6000_cpu_attr == CPU_PPC603
18872 || rs6000_cpu_attr == CPU_PPC604
18873 || rs6000_cpu_attr == CPU_PPC604E
18874 || rs6000_cpu_attr == CPU_PPC620
18875 || rs6000_cpu_attr == CPU_PPC630
18876 || rs6000_cpu_attr == CPU_PPC750
18877 || rs6000_cpu_attr == CPU_PPC7400
18878 || rs6000_cpu_attr == CPU_PPC7450
18879 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18880 || rs6000_cpu_attr == CPU_POWER5
18881 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18882 && recog_memoized (dep_insn)
18883 && (INSN_CODE (dep_insn) >= 0))
982afe02 18884
44cd321e
PS
18885 switch (get_attr_type (dep_insn))
18886 {
18887 case TYPE_CMP:
18888 case TYPE_COMPARE:
18889 case TYPE_DELAYED_COMPARE:
18890 case TYPE_IMUL_COMPARE:
18891 case TYPE_LMUL_COMPARE:
18892 case TYPE_FPCOMPARE:
18893 case TYPE_CR_LOGICAL:
18894 case TYPE_DELAYED_CR:
18895 return cost + 2;
18896 default:
18897 break;
18898 }
18899 break;
18900
18901 case TYPE_STORE:
18902 case TYPE_STORE_U:
18903 case TYPE_STORE_UX:
18904 case TYPE_FPSTORE:
18905 case TYPE_FPSTORE_U:
18906 case TYPE_FPSTORE_UX:
18907 if ((rs6000_cpu == PROCESSOR_POWER6)
18908 && recog_memoized (dep_insn)
18909 && (INSN_CODE (dep_insn) >= 0))
18910 {
18911
18912 if (GET_CODE (PATTERN (insn)) != SET)
18913 /* If this happens, we have to extend this to schedule
18914 optimally. Return default for now. */
18915 return cost;
18916
18917 /* Adjust the cost for the case where the value written
18918 by a fixed point operation is used as the address
18919 gen value on a store. */
18920 switch (get_attr_type (dep_insn))
18921 {
18922 case TYPE_LOAD:
18923 case TYPE_LOAD_U:
18924 case TYPE_LOAD_UX:
18925 case TYPE_CNTLZ:
18926 {
18927 if (! store_data_bypass_p (dep_insn, insn))
18928 return 4;
18929 break;
18930 }
18931 case TYPE_LOAD_EXT:
18932 case TYPE_LOAD_EXT_U:
18933 case TYPE_LOAD_EXT_UX:
18934 case TYPE_VAR_SHIFT_ROTATE:
18935 case TYPE_VAR_DELAYED_COMPARE:
18936 {
18937 if (! store_data_bypass_p (dep_insn, insn))
18938 return 6;
18939 break;
18940 }
18941 case TYPE_INTEGER:
18942 case TYPE_COMPARE:
18943 case TYPE_FAST_COMPARE:
18944 case TYPE_EXTS:
18945 case TYPE_SHIFT:
18946 case TYPE_INSERT_WORD:
18947 case TYPE_INSERT_DWORD:
18948 case TYPE_FPLOAD_U:
18949 case TYPE_FPLOAD_UX:
18950 case TYPE_STORE_U:
18951 case TYPE_STORE_UX:
18952 case TYPE_FPSTORE_U:
18953 case TYPE_FPSTORE_UX:
18954 {
18955 if (! store_data_bypass_p (dep_insn, insn))
18956 return 3;
18957 break;
18958 }
18959 case TYPE_IMUL:
18960 case TYPE_IMUL2:
18961 case TYPE_IMUL3:
18962 case TYPE_LMUL:
18963 case TYPE_IMUL_COMPARE:
18964 case TYPE_LMUL_COMPARE:
18965 {
18966 if (! store_data_bypass_p (dep_insn, insn))
18967 return 17;
18968 break;
18969 }
18970 case TYPE_IDIV:
18971 {
18972 if (! store_data_bypass_p (dep_insn, insn))
18973 return 45;
18974 break;
18975 }
18976 case TYPE_LDIV:
18977 {
18978 if (! store_data_bypass_p (dep_insn, insn))
18979 return 57;
18980 break;
18981 }
18982 default:
18983 break;
18984 }
18985 }
18986 break;
18987
18988 case TYPE_LOAD:
18989 case TYPE_LOAD_U:
18990 case TYPE_LOAD_UX:
18991 case TYPE_LOAD_EXT:
18992 case TYPE_LOAD_EXT_U:
18993 case TYPE_LOAD_EXT_UX:
18994 if ((rs6000_cpu == PROCESSOR_POWER6)
18995 && recog_memoized (dep_insn)
18996 && (INSN_CODE (dep_insn) >= 0))
18997 {
18998
18999 /* Adjust the cost for the case where the value written
19000 by a fixed point instruction is used within the address
19001 gen portion of a subsequent load(u)(x) */
19002 switch (get_attr_type (dep_insn))
19003 {
19004 case TYPE_LOAD:
19005 case TYPE_LOAD_U:
19006 case TYPE_LOAD_UX:
19007 case TYPE_CNTLZ:
19008 {
19009 if (set_to_load_agen (dep_insn, insn))
19010 return 4;
19011 break;
19012 }
19013 case TYPE_LOAD_EXT:
19014 case TYPE_LOAD_EXT_U:
19015 case TYPE_LOAD_EXT_UX:
19016 case TYPE_VAR_SHIFT_ROTATE:
19017 case TYPE_VAR_DELAYED_COMPARE:
19018 {
19019 if (set_to_load_agen (dep_insn, insn))
19020 return 6;
19021 break;
19022 }
19023 case TYPE_INTEGER:
19024 case TYPE_COMPARE:
19025 case TYPE_FAST_COMPARE:
19026 case TYPE_EXTS:
19027 case TYPE_SHIFT:
19028 case TYPE_INSERT_WORD:
19029 case TYPE_INSERT_DWORD:
19030 case TYPE_FPLOAD_U:
19031 case TYPE_FPLOAD_UX:
19032 case TYPE_STORE_U:
19033 case TYPE_STORE_UX:
19034 case TYPE_FPSTORE_U:
19035 case TYPE_FPSTORE_UX:
19036 {
19037 if (set_to_load_agen (dep_insn, insn))
19038 return 3;
19039 break;
19040 }
19041 case TYPE_IMUL:
19042 case TYPE_IMUL2:
19043 case TYPE_IMUL3:
19044 case TYPE_LMUL:
19045 case TYPE_IMUL_COMPARE:
19046 case TYPE_LMUL_COMPARE:
19047 {
19048 if (set_to_load_agen (dep_insn, insn))
19049 return 17;
19050 break;
19051 }
19052 case TYPE_IDIV:
19053 {
19054 if (set_to_load_agen (dep_insn, insn))
19055 return 45;
19056 break;
19057 }
19058 case TYPE_LDIV:
19059 {
19060 if (set_to_load_agen (dep_insn, insn))
19061 return 57;
19062 break;
19063 }
19064 default:
19065 break;
19066 }
19067 }
19068 break;
19069
19070 case TYPE_FPLOAD:
19071 if ((rs6000_cpu == PROCESSOR_POWER6)
19072 && recog_memoized (dep_insn)
19073 && (INSN_CODE (dep_insn) >= 0)
19074 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
19075 return 2;
19076
19077 default:
19078 break;
19079 }
c9dbf840 19080
a251ffd0 19081 /* Fall out to return default cost. */
44cd321e
PS
19082 }
19083 break;
19084
19085 case REG_DEP_OUTPUT:
19086 /* Output dependency; DEP_INSN writes a register that INSN writes some
19087 cycles later. */
19088 if ((rs6000_cpu == PROCESSOR_POWER6)
19089 && recog_memoized (dep_insn)
19090 && (INSN_CODE (dep_insn) >= 0))
19091 {
19092 attr_type = get_attr_type (insn);
19093
19094 switch (attr_type)
19095 {
19096 case TYPE_FP:
19097 if (get_attr_type (dep_insn) == TYPE_FP)
19098 return 1;
19099 break;
19100 case TYPE_FPLOAD:
19101 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19102 return 2;
19103 break;
19104 default:
19105 break;
19106 }
19107 }
19108 case REG_DEP_ANTI:
19109 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19110 cycles later. */
19111 return 0;
19112
19113 default:
19114 gcc_unreachable ();
a251ffd0
TG
19115 }
19116
19117 return cost;
19118}
b6c9286a 19119
cbe26ab8 19120/* The function returns true if INSN is microcoded.
839a4992 19121 Return false otherwise. */
cbe26ab8
DN
19122
19123static bool
19124is_microcoded_insn (rtx insn)
19125{
19126 if (!insn || !INSN_P (insn)
19127 || GET_CODE (PATTERN (insn)) == USE
19128 || GET_CODE (PATTERN (insn)) == CLOBBER)
19129 return false;
19130
d296e02e
AP
19131 if (rs6000_cpu_attr == CPU_CELL)
19132 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19133
ec507f2d 19134 if (rs6000_sched_groups)
cbe26ab8
DN
19135 {
19136 enum attr_type type = get_attr_type (insn);
19137 if (type == TYPE_LOAD_EXT_U
19138 || type == TYPE_LOAD_EXT_UX
19139 || type == TYPE_LOAD_UX
19140 || type == TYPE_STORE_UX
19141 || type == TYPE_MFCR)
c4ad648e 19142 return true;
cbe26ab8
DN
19143 }
19144
19145 return false;
19146}
19147
cbe26ab8
DN
19148/* The function returns true if INSN is cracked into 2 instructions
19149 by the processor (and therefore occupies 2 issue slots). */
19150
19151static bool
19152is_cracked_insn (rtx insn)
19153{
19154 if (!insn || !INSN_P (insn)
19155 || GET_CODE (PATTERN (insn)) == USE
19156 || GET_CODE (PATTERN (insn)) == CLOBBER)
19157 return false;
19158
ec507f2d 19159 if (rs6000_sched_groups)
cbe26ab8
DN
19160 {
19161 enum attr_type type = get_attr_type (insn);
19162 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19163 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19164 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19165 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19166 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19167 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19168 || type == TYPE_IDIV || type == TYPE_LDIV
19169 || type == TYPE_INSERT_WORD)
19170 return true;
cbe26ab8
DN
19171 }
19172
19173 return false;
19174}
19175
19176/* The function returns true if INSN can be issued only from
a3c9585f 19177 the branch slot. */
cbe26ab8
DN
19178
19179static bool
19180is_branch_slot_insn (rtx insn)
19181{
19182 if (!insn || !INSN_P (insn)
19183 || GET_CODE (PATTERN (insn)) == USE
19184 || GET_CODE (PATTERN (insn)) == CLOBBER)
19185 return false;
19186
ec507f2d 19187 if (rs6000_sched_groups)
cbe26ab8
DN
19188 {
19189 enum attr_type type = get_attr_type (insn);
19190 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19191 return true;
cbe26ab8
DN
19192 return false;
19193 }
19194
19195 return false;
19196}
79ae11c4 19197
44cd321e
PS
19198/* The function returns true if out_insn sets a value that is
19199 used in the address generation computation of in_insn */
19200static bool
19201set_to_load_agen (rtx out_insn, rtx in_insn)
19202{
19203 rtx out_set, in_set;
19204
19205 /* For performance reasons, only handle the simple case where
19206 both loads are a single_set. */
19207 out_set = single_set (out_insn);
19208 if (out_set)
19209 {
19210 in_set = single_set (in_insn);
19211 if (in_set)
19212 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19213 }
19214
19215 return false;
19216}
19217
19218/* The function returns true if the target storage location of
19219 out_insn is adjacent to the target storage location of in_insn */
19220/* Return 1 if memory locations are adjacent. */
19221
19222static bool
19223adjacent_mem_locations (rtx insn1, rtx insn2)
19224{
19225
e3a0e200
PB
19226 rtx a = get_store_dest (PATTERN (insn1));
19227 rtx b = get_store_dest (PATTERN (insn2));
19228
44cd321e
PS
19229 if ((GET_CODE (XEXP (a, 0)) == REG
19230 || (GET_CODE (XEXP (a, 0)) == PLUS
19231 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19232 && (GET_CODE (XEXP (b, 0)) == REG
19233 || (GET_CODE (XEXP (b, 0)) == PLUS
19234 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19235 {
f98e8938 19236 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19237 rtx reg0, reg1;
44cd321e
PS
19238
19239 if (GET_CODE (XEXP (a, 0)) == PLUS)
19240 {
19241 reg0 = XEXP (XEXP (a, 0), 0);
19242 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19243 }
19244 else
19245 reg0 = XEXP (a, 0);
19246
19247 if (GET_CODE (XEXP (b, 0)) == PLUS)
19248 {
19249 reg1 = XEXP (XEXP (b, 0), 0);
19250 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19251 }
19252 else
19253 reg1 = XEXP (b, 0);
19254
19255 val_diff = val1 - val0;
19256
19257 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19258 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19259 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19260 }
19261
19262 return false;
19263}
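/* Illustrative note, not part of the original source: two stores to
   (mem:SI (plus r9 4)) and (mem:SI (plus r9 8)) give val_diff == 4,
   which equals MEM_SIZE of the first store, so they are reported as
   adjacent; the same pair in the opposite order matches the second,
   negated test instead.  */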
19264
a4f6c312 19265/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
19266 priority INSN_PRIORITY (INSN). Increase the priority to execute the
19267 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
19268 define this macro if you do not need to adjust the scheduling
19269 priorities of insns. */
bef84347 19270
c237e94a 19271static int
a2369ed3 19272rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19273{
a4f6c312
SS
19274 /* On machines (like the 750) which have asymmetric integer units,
19275 where one integer unit can do multiply and divides and the other
19276 can't, reduce the priority of multiply/divide so it is scheduled
19277 before other integer operations. */
bef84347
VM
19278
19279#if 0
2c3c49de 19280 if (! INSN_P (insn))
bef84347
VM
19281 return priority;
19282
19283 if (GET_CODE (PATTERN (insn)) == USE)
19284 return priority;
19285
19286 switch (rs6000_cpu_attr) {
19287 case CPU_PPC750:
19288 switch (get_attr_type (insn))
19289 {
19290 default:
19291 break;
19292
19293 case TYPE_IMUL:
19294 case TYPE_IDIV:
3cb999d8
DE
19295 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19296 priority, priority);
bef84347
VM
19297 if (priority >= 0 && priority < 0x01000000)
19298 priority >>= 3;
19299 break;
19300 }
19301 }
19302#endif
19303
44cd321e 19304 if (insn_must_be_first_in_group (insn)
79ae11c4 19305 && reload_completed
f676971a 19306 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19307 && rs6000_sched_restricted_insns_priority)
19308 {
19309
c4ad648e
AM
19310 /* Prioritize insns that can be dispatched only in the first
19311 dispatch slot. */
79ae11c4 19312 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19313 /* Attach highest priority to insn. This means that in
19314 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19315 precede 'priority' (critical path) considerations. */
f676971a 19316 return current_sched_info->sched_max_insns_priority;
79ae11c4 19317 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19318 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19319 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19320 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19321 return (priority + 1);
19322 }
79ae11c4 19323
44cd321e
PS
19324 if (rs6000_cpu == PROCESSOR_POWER6
19325 && ((load_store_pendulum == -2 && is_load_insn (insn))
19326 || (load_store_pendulum == 2 && is_store_insn (insn))))
19327 /* Attach highest priority to insn if the scheduler has just issued two
19328 stores and this instruction is a load, or two loads and this instruction
19329 is a store. Power6 wants loads and stores scheduled alternately
19330 when possible */
19331 return current_sched_info->sched_max_insns_priority;
19332
bef84347
VM
19333 return priority;
19334}
19335
d296e02e
AP
19336/* Return true if the instruction is nonpipelined on the Cell. */
19337static bool
19338is_nonpipeline_insn (rtx insn)
19339{
19340 enum attr_type type;
19341 if (!insn || !INSN_P (insn)
19342 || GET_CODE (PATTERN (insn)) == USE
19343 || GET_CODE (PATTERN (insn)) == CLOBBER)
19344 return false;
19345
19346 type = get_attr_type (insn);
19347 if (type == TYPE_IMUL
19348 || type == TYPE_IMUL2
19349 || type == TYPE_IMUL3
19350 || type == TYPE_LMUL
19351 || type == TYPE_IDIV
19352 || type == TYPE_LDIV
19353 || type == TYPE_SDIV
19354 || type == TYPE_DDIV
19355 || type == TYPE_SSQRT
19356 || type == TYPE_DSQRT
19357 || type == TYPE_MFCR
19358 || type == TYPE_MFCRF
19359 || type == TYPE_MFJMPR)
19360 {
19361 return true;
19362 }
19363 return false;
19364}
19365
19366
a4f6c312
SS
19367/* Return how many instructions the machine can issue per cycle. */
19368
c237e94a 19369static int
863d938c 19370rs6000_issue_rate (void)
b6c9286a 19371{
3317bab1
DE
19372 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19373 if (!reload_completed)
19374 return 1;
19375
b6c9286a 19376 switch (rs6000_cpu_attr) {
3cb999d8
DE
19377 case CPU_RIOS1: /* ? */
19378 case CPU_RS64A:
19379 case CPU_PPC601: /* ? */
ed947a96 19380 case CPU_PPC7450:
3cb999d8 19381 return 3;
b54cf83a 19382 case CPU_PPC440:
b6c9286a 19383 case CPU_PPC603:
bef84347 19384 case CPU_PPC750:
ed947a96 19385 case CPU_PPC7400:
be12c2b0 19386 case CPU_PPC8540:
d296e02e 19387 case CPU_CELL:
fa41c305
EW
19388 case CPU_PPCE300C2:
19389 case CPU_PPCE300C3:
edae5fe3 19390 case CPU_PPCE500MC:
f676971a 19391 return 2;
3cb999d8 19392 case CPU_RIOS2:
b6c9286a 19393 case CPU_PPC604:
19684119 19394 case CPU_PPC604E:
b6c9286a 19395 case CPU_PPC620:
3cb999d8 19396 case CPU_PPC630:
b6c9286a 19397 return 4;
cbe26ab8 19398 case CPU_POWER4:
ec507f2d 19399 case CPU_POWER5:
44cd321e 19400 case CPU_POWER6:
cbe26ab8 19401 return 5;
b6c9286a
MM
19402 default:
19403 return 1;
19404 }
19405}
19406
be12c2b0
VM
19407/* Return how many instructions to look ahead for better insn
19408 scheduling. */
19409
19410static int
863d938c 19411rs6000_use_sched_lookahead (void)
be12c2b0
VM
19412{
19413 if (rs6000_cpu_attr == CPU_PPC8540)
19414 return 4;
d296e02e
AP
19415 if (rs6000_cpu_attr == CPU_CELL)
19416 return (reload_completed ? 8 : 0);
be12c2b0
VM
19417 return 0;
19418}
19419
d296e02e
AP
19420/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19421static int
19422rs6000_use_sched_lookahead_guard (rtx insn)
19423{
19424 if (rs6000_cpu_attr != CPU_CELL)
19425 return 1;
19426
19427 if (insn == NULL_RTX || !INSN_P (insn))
19428 abort ();
982afe02 19429
d296e02e
AP
19430 if (!reload_completed
19431 || is_nonpipeline_insn (insn)
19432 || is_microcoded_insn (insn))
19433 return 0;
19434
19435 return 1;
19436}
19437
569fa502
DN
19438/* Determine if PAT refers to memory. */
19439
19440static bool
19441is_mem_ref (rtx pat)
19442{
19443 const char * fmt;
19444 int i, j;
19445 bool ret = false;
19446
1de59bbd
DE
19447 /* stack_tie does not produce any real memory traffic. */
19448 if (GET_CODE (pat) == UNSPEC
19449 && XINT (pat, 1) == UNSPEC_TIE)
19450 return false;
19451
569fa502
DN
19452 if (GET_CODE (pat) == MEM)
19453 return true;
19454
19455 /* Recursively process the pattern. */
19456 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19457
19458 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19459 {
19460 if (fmt[i] == 'e')
19461 ret |= is_mem_ref (XEXP (pat, i));
19462 else if (fmt[i] == 'E')
19463 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19464 ret |= is_mem_ref (XVECEXP (pat, i, j));
19465 }
19466
19467 return ret;
19468}
19469
19470/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19471
569fa502
DN
19472static bool
19473is_load_insn1 (rtx pat)
19474{
19475 if (!pat || pat == NULL_RTX)
19476 return false;
19477
19478 if (GET_CODE (pat) == SET)
19479 return is_mem_ref (SET_SRC (pat));
19480
19481 if (GET_CODE (pat) == PARALLEL)
19482 {
19483 int i;
19484
19485 for (i = 0; i < XVECLEN (pat, 0); i++)
19486 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19487 return true;
19488 }
19489
19490 return false;
19491}
19492
19493/* Determine if INSN loads from memory. */
19494
19495static bool
19496is_load_insn (rtx insn)
19497{
19498 if (!insn || !INSN_P (insn))
19499 return false;
19500
19501 if (GET_CODE (insn) == CALL_INSN)
19502 return false;
19503
19504 return is_load_insn1 (PATTERN (insn));
19505}
19506
19507/* Determine if PAT is a PATTERN of a store insn. */
19508
19509static bool
19510is_store_insn1 (rtx pat)
19511{
19512 if (!pat || pat == NULL_RTX)
19513 return false;
19514
19515 if (GET_CODE (pat) == SET)
19516 return is_mem_ref (SET_DEST (pat));
19517
19518 if (GET_CODE (pat) == PARALLEL)
19519 {
19520 int i;
19521
19522 for (i = 0; i < XVECLEN (pat, 0); i++)
19523 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19524 return true;
19525 }
19526
19527 return false;
19528}
19529
19530/* Determine if INSN stores to memory. */
19531
19532static bool
19533is_store_insn (rtx insn)
19534{
19535 if (!insn || !INSN_P (insn))
19536 return false;
19537
19538 return is_store_insn1 (PATTERN (insn));
19539}
19540
e3a0e200
PB
19541/* Return the dest of a store insn. */
19542
19543static rtx
19544get_store_dest (rtx pat)
19545{
19546 gcc_assert (is_store_insn1 (pat));
19547
19548 if (GET_CODE (pat) == SET)
19549 return SET_DEST (pat);
19550 else if (GET_CODE (pat) == PARALLEL)
19551 {
19552 int i;
19553
19554 for (i = 0; i < XVECLEN (pat, 0); i++)
19555 {
19556 rtx inner_pat = XVECEXP (pat, 0, i);
19557 if (GET_CODE (inner_pat) == SET
19558 && is_mem_ref (SET_DEST (inner_pat)))
19559 return inner_pat;
19560 }
19561 }
19562 /* We shouldn't get here, because we should have either a simple
19563 store insn or a store with update which are covered above. */
19564 gcc_unreachable();
19565}
19566
569fa502
DN
19567/* Returns whether the dependence between INSN and NEXT is considered
19568 costly by the given target. */
19569
19570static bool
b198261f 19571rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19572{
b198261f
MK
19573 rtx insn;
19574 rtx next;
19575
aabcd309 19576 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19577 allow all dependent insns in the same group.
569fa502
DN
19578 This is the most aggressive option. */
19579 if (rs6000_sched_costly_dep == no_dep_costly)
19580 return false;
19581
f676971a 19582 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19583 do not allow dependent instructions in the same group.
19584 This is the most conservative option. */
19585 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19586 return true;
569fa502 19587
b198261f
MK
19588 insn = DEP_PRO (dep);
19589 next = DEP_CON (dep);
19590
f676971a
EC
19591 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19592 && is_load_insn (next)
569fa502
DN
19593 && is_store_insn (insn))
19594 /* Prevent load after store in the same group. */
19595 return true;
19596
19597 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19598 && is_load_insn (next)
569fa502 19599 && is_store_insn (insn)
e2f6ff94 19600 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19601 /* Prevent load after store in the same group if it is a true
19602 dependence. */
569fa502 19603 return true;
f676971a
EC
19604
19605 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19606 and will not be scheduled in the same group. */
19607 if (rs6000_sched_costly_dep <= max_dep_latency
19608 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19609 return true;
19610
19611 return false;
19612}
19613
f676971a 19614/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19615 skipping any "non-active" insns - insns that will not actually occupy
19616 an issue slot. Return NULL_RTX if such an insn is not found. */
19617
19618static rtx
19619get_next_active_insn (rtx insn, rtx tail)
19620{
f489aff8 19621 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19622 return NULL_RTX;
19623
f489aff8 19624 while (1)
cbe26ab8 19625 {
f489aff8
AM
19626 insn = NEXT_INSN (insn);
19627 if (insn == NULL_RTX || insn == tail)
19628 return NULL_RTX;
cbe26ab8 19629
f489aff8
AM
19630 if (CALL_P (insn)
19631 || JUMP_P (insn)
19632 || (NONJUMP_INSN_P (insn)
19633 && GET_CODE (PATTERN (insn)) != USE
19634 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19635 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19636 break;
19637 }
19638 return insn;
cbe26ab8
DN
19639}
19640
44cd321e
PS
19641/* We are about to begin issuing insns for this clock cycle. */
19642
19643static int
19644rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19645 rtx *ready ATTRIBUTE_UNUSED,
19646 int *pn_ready ATTRIBUTE_UNUSED,
19647 int clock_var ATTRIBUTE_UNUSED)
19648{
d296e02e
AP
19649 int n_ready = *pn_ready;
19650
44cd321e
PS
19651 if (sched_verbose)
19652 fprintf (dump, "// rs6000_sched_reorder :\n");
19653
d296e02e
AP
19654 /* Reorder the ready list, if the second to last ready insn
19655 is a non-pipelined insn. */
19656 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19657 {
19658 if (is_nonpipeline_insn (ready[n_ready - 1])
19659 && (recog_memoized (ready[n_ready - 2]) > 0))
19660 /* Simply swap first two insns. */
19661 {
19662 rtx tmp = ready[n_ready - 1];
19663 ready[n_ready - 1] = ready[n_ready - 2];
19664 ready[n_ready - 2] = tmp;
19665 }
19666 }
19667
44cd321e
PS
19668 if (rs6000_cpu == PROCESSOR_POWER6)
19669 load_store_pendulum = 0;
19670
19671 return rs6000_issue_rate ();
19672}
19673
19674/* Like rs6000_sched_reorder, but called after issuing each insn. */
19675
19676static int
19677rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19678 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19679{
19680 if (sched_verbose)
19681 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19682
19683 /* For Power6, we need to handle some special cases to try to keep the
19684 store queue from overflowing and triggering expensive flushes.
19685
19686 This code monitors how load and store instructions are being issued
19687 and skews the ready list one way or the other to increase the likelihood
19688 that a desired instruction is issued at the proper time.
19689
19690 A couple of things are done. First, we maintain a "load_store_pendulum"
19691 to track the current state of load/store issue.
19692
19693 - If the pendulum is at zero, then no loads or stores have been
19694 issued in the current cycle so we do nothing.
19695
19696 - If the pendulum is 1, then a single load has been issued in this
19697 cycle and we attempt to locate another load in the ready list to
19698 issue with it.
19699
2f8e468b 19700 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19701 issued in this cycle, so we increase the priority of the first load
19702 in the ready list to increase its likelihood of being chosen first
19703 in the next cycle.
19704
19705 - If the pendulum is -1, then a single store has been issued in this
19706 cycle and we attempt to locate another store in the ready list to
19707 issue with it, preferring a store to an adjacent memory location to
19708 facilitate store pairing in the store queue.
19709
19710 - If the pendulum is 2, then two loads have already been
19711 issued in this cycle, so we increase the priority of the first store
19712 in the ready list to increase its likelihood of being chosen first
19713 in the next cycle.
19714
19715 - If the pendulum < -2 or > 2, then do nothing.
19716
19717 Note: This code covers the most common scenarios. There exist non-
19718 load/store instructions which make use of the LSU and which
19719 would need to be accounted for to strictly model the behavior
19720 of the machine. Those instructions are currently unaccounted
19721 for to help minimize compile time overhead of this code.
19722 */
19723 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19724 {
19725 int pos;
19726 int i;
19727 rtx tmp;
19728
19729 if (is_store_insn (last_scheduled_insn))
19730 /* Issuing a store, swing the load_store_pendulum to the left */
19731 load_store_pendulum--;
19732 else if (is_load_insn (last_scheduled_insn))
19733 /* Issuing a load, swing the load_store_pendulum to the right */
19734 load_store_pendulum++;
19735 else
19736 return cached_can_issue_more;
19737
19738 /* If the pendulum is balanced, or there is only one instruction on
19739 the ready list, then all is well, so return. */
19740 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19741 return cached_can_issue_more;
19742
19743 if (load_store_pendulum == 1)
19744 {
19745 /* A load has been issued in this cycle. Scan the ready list
19746 for another load to issue with it */
19747 pos = *pn_ready-1;
19748
19749 while (pos >= 0)
19750 {
19751 if (is_load_insn (ready[pos]))
19752 {
19753 /* Found a load. Move it to the head of the ready list,
19754 and adjust its priority so that it is more likely to
19755 stay there */
19756 tmp = ready[pos];
19757 for (i=pos; i<*pn_ready-1; i++)
19758 ready[i] = ready[i + 1];
19759 ready[*pn_ready-1] = tmp;
e855c69d
AB
19760
19761 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19762 INSN_PRIORITY (tmp)++;
19763 break;
19764 }
19765 pos--;
19766 }
19767 }
19768 else if (load_store_pendulum == -2)
19769 {
19770 /* Two stores have been issued in this cycle. Increase the
19771 priority of the first load in the ready list to favor it for
19772 issuing in the next cycle. */
19773 pos = *pn_ready-1;
19774
19775 while (pos >= 0)
19776 {
19777 if (is_load_insn (ready[pos])
e855c69d
AB
19778 && !sel_sched_p ()
19779 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19780 {
19781 INSN_PRIORITY (ready[pos])++;
19782
19783 /* Adjust the pendulum to account for the fact that a load
19784 was found and increased in priority. This is to prevent
19785 increasing the priority of multiple loads */
19786 load_store_pendulum--;
19787
19788 break;
19789 }
19790 pos--;
19791 }
19792 }
19793 else if (load_store_pendulum == -1)
19794 {
19795 /* A store has been issued in this cycle. Scan the ready list for
19796 another store to issue with it, preferring a store to an adjacent
19797 memory location */
19798 int first_store_pos = -1;
19799
19800 pos = *pn_ready-1;
19801
19802 while (pos >= 0)
19803 {
19804 if (is_store_insn (ready[pos]))
19805 {
19806 /* Maintain the index of the first store found on the
19807 list */
19808 if (first_store_pos == -1)
19809 first_store_pos = pos;
19810
19811 if (is_store_insn (last_scheduled_insn)
19812 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19813 {
19814 /* Found an adjacent store. Move it to the head of the
19815 ready list, and adjust its priority so that it is
19816 more likely to stay there */
19817 tmp = ready[pos];
19818 for (i=pos; i<*pn_ready-1; i++)
19819 ready[i] = ready[i + 1];
19820 ready[*pn_ready-1] = tmp;
e855c69d
AB
19821
19822 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19823 INSN_PRIORITY (tmp)++;
e855c69d 19824
44cd321e
PS
19825 first_store_pos = -1;
19826
19827 break;
19828 }
19829 }
19830 pos--;
19831 }
19832
19833 if (first_store_pos >= 0)
19834 {
19835 /* An adjacent store wasn't found, but a non-adjacent store was,
19836 so move the non-adjacent store to the front of the ready
19837 list, and adjust its priority so that it is more likely to
19838 stay there. */
19839 tmp = ready[first_store_pos];
19840 for (i=first_store_pos; i<*pn_ready-1; i++)
19841 ready[i] = ready[i + 1];
19842 ready[*pn_ready-1] = tmp;
e855c69d 19843 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19844 INSN_PRIORITY (tmp)++;
19845 }
19846 }
19847 else if (load_store_pendulum == 2)
19848 {
19849 /* Two loads have been issued in this cycle. Increase the priority
19850 of the first store in the ready list to favor it for issuing in
19851 the next cycle. */
19852 pos = *pn_ready-1;
19853
19854 while (pos >= 0)
19855 {
19856 if (is_store_insn (ready[pos])
e855c69d
AB
19857 && !sel_sched_p ()
19858 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19859 {
19860 INSN_PRIORITY (ready[pos])++;
19861
19862 /* Adjust the pendulum to account for the fact that a store
19863 was found and increased in priority. This is to prevent
19864 increasing the priority of multiple stores */
19865 load_store_pendulum++;
19866
19867 break;
19868 }
19869 pos--;
19870 }
19871 }
19872 }
19873
19874 return cached_can_issue_more;
19875}
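/* Illustrative sketch, not taken from rs6000.c: the pendulum bookkeeping
   described in the comment above, reduced to a plain counter.  The enum, the
   helper and the driver are invented; the ready-list reshuffling that the real
   hook performs is deliberately left out.  */

#include <stdio.h>

enum lsu_op_model { MODEL_OTHER, MODEL_LOAD, MODEL_STORE };

/* Positive values count loads issued this cycle, negative values stores.  */
static int
pendulum_step_model (int pendulum, enum lsu_op_model op)
{
  if (op == MODEL_LOAD)
    return pendulum + 1;
  if (op == MODEL_STORE)
    return pendulum - 1;
  return pendulum;
}

int
main (void)
{
  /* Issuing two stores in one cycle swings the pendulum to -2, the state in
     which the hook above boosts the priority of the first load it finds on
     the ready list.  */
  int p = 0;
  p = pendulum_step_model (p, MODEL_STORE);
  p = pendulum_step_model (p, MODEL_STORE);
  printf ("pendulum = %d\n", p);        /* prints -2 */
  return 0;
}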
19876
839a4992 19877/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19878 of group WHICH_GROUP.
19879
19880 If WHICH_GROUP == current_group, this function will return true if INSN
19881 causes the termination of the current group (i.e., the dispatch group to
19882 which INSN belongs). This means that INSN will be the last insn in the
19883 group it belongs to.
19884
19885 If WHICH_GROUP == previous_group, this function will return true if INSN
19886 causes the termination of the previous group (i.e., the dispatch group that
19887 precedes the group to which INSN belongs). This means that INSN will be
19888 the first insn in the group it belongs to. */
19889
19890static bool
19891insn_terminates_group_p (rtx insn, enum group_termination which_group)
19892{
44cd321e 19893 bool first, last;
cbe26ab8
DN
19894
19895 if (! insn)
19896 return false;
569fa502 19897
44cd321e
PS
19898 first = insn_must_be_first_in_group (insn);
19899 last = insn_must_be_last_in_group (insn);
cbe26ab8 19900
44cd321e 19901 if (first && last)
cbe26ab8
DN
19902 return true;
19903
19904 if (which_group == current_group)
44cd321e 19905 return last;
cbe26ab8 19906 else if (which_group == previous_group)
44cd321e
PS
19907 return first;
19908
19909 return false;
19910}
19911
19912
19913static bool
19914insn_must_be_first_in_group (rtx insn)
19915{
19916 enum attr_type type;
19917
19918 if (!insn
19919 || insn == NULL_RTX
19920 || GET_CODE (insn) == NOTE
19921 || GET_CODE (PATTERN (insn)) == USE
19922 || GET_CODE (PATTERN (insn)) == CLOBBER)
19923 return false;
19924
19925 switch (rs6000_cpu)
cbe26ab8 19926 {
44cd321e
PS
19927 case PROCESSOR_POWER5:
19928 if (is_cracked_insn (insn))
19929 return true;
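      /* Fall through: the POWER4 checks below apply to POWER5 as well.  */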
19930 case PROCESSOR_POWER4:
19931 if (is_microcoded_insn (insn))
19932 return true;
19933
19934 if (!rs6000_sched_groups)
19935 return false;
19936
19937 type = get_attr_type (insn);
19938
19939 switch (type)
19940 {
19941 case TYPE_MFCR:
19942 case TYPE_MFCRF:
19943 case TYPE_MTCR:
19944 case TYPE_DELAYED_CR:
19945 case TYPE_CR_LOGICAL:
19946 case TYPE_MTJMPR:
19947 case TYPE_MFJMPR:
19948 case TYPE_IDIV:
19949 case TYPE_LDIV:
19950 case TYPE_LOAD_L:
19951 case TYPE_STORE_C:
19952 case TYPE_ISYNC:
19953 case TYPE_SYNC:
19954 return true;
19955 default:
19956 break;
19957 }
19958 break;
19959 case PROCESSOR_POWER6:
19960 type = get_attr_type (insn);
19961
19962 switch (type)
19963 {
19964 case TYPE_INSERT_DWORD:
19965 case TYPE_EXTS:
19966 case TYPE_CNTLZ:
19967 case TYPE_SHIFT:
19968 case TYPE_VAR_SHIFT_ROTATE:
19969 case TYPE_TRAP:
19970 case TYPE_IMUL:
19971 case TYPE_IMUL2:
19972 case TYPE_IMUL3:
19973 case TYPE_LMUL:
19974 case TYPE_IDIV:
19975 case TYPE_INSERT_WORD:
19976 case TYPE_DELAYED_COMPARE:
19977 case TYPE_IMUL_COMPARE:
19978 case TYPE_LMUL_COMPARE:
19979 case TYPE_FPCOMPARE:
19980 case TYPE_MFCR:
19981 case TYPE_MTCR:
19982 case TYPE_MFJMPR:
19983 case TYPE_MTJMPR:
19984 case TYPE_ISYNC:
19985 case TYPE_SYNC:
19986 case TYPE_LOAD_L:
19987 case TYPE_STORE_C:
19988 case TYPE_LOAD_U:
19989 case TYPE_LOAD_UX:
19990 case TYPE_LOAD_EXT_UX:
19991 case TYPE_STORE_U:
19992 case TYPE_STORE_UX:
19993 case TYPE_FPLOAD_U:
19994 case TYPE_FPLOAD_UX:
19995 case TYPE_FPSTORE_U:
19996 case TYPE_FPSTORE_UX:
19997 return true;
19998 default:
19999 break;
20000 }
20001 break;
20002 default:
20003 break;
20004 }
20005
20006 return false;
20007}
20008
20009static bool
20010insn_must_be_last_in_group (rtx insn)
20011{
20012 enum attr_type type;
20013
20014 if (!insn
20015 || insn == NULL_RTX
20016 || GET_CODE (insn) == NOTE
20017 || GET_CODE (PATTERN (insn)) == USE
20018 || GET_CODE (PATTERN (insn)) == CLOBBER)
20019 return false;
20020
20021 switch (rs6000_cpu) {
20022 case PROCESSOR_POWER4:
20023 case PROCESSOR_POWER5:
20024 if (is_microcoded_insn (insn))
20025 return true;
20026
20027 if (is_branch_slot_insn (insn))
20028 return true;
20029
20030 break;
20031 case PROCESSOR_POWER6:
20032 type = get_attr_type (insn);
20033
20034 switch (type)
20035 {
20036 case TYPE_EXTS:
20037 case TYPE_CNTLZ:
20038 case TYPE_SHIFT:
20039 case TYPE_VAR_SHIFT_ROTATE:
20040 case TYPE_TRAP:
20041 case TYPE_IMUL:
20042 case TYPE_IMUL2:
20043 case TYPE_IMUL3:
20044 case TYPE_LMUL:
20045 case TYPE_IDIV:
20046 case TYPE_DELAYED_COMPARE:
20047 case TYPE_IMUL_COMPARE:
20048 case TYPE_LMUL_COMPARE:
20049 case TYPE_FPCOMPARE:
20050 case TYPE_MFCR:
20051 case TYPE_MTCR:
20052 case TYPE_MFJMPR:
20053 case TYPE_MTJMPR:
20054 case TYPE_ISYNC:
20055 case TYPE_SYNC:
20056 case TYPE_LOAD_L:
20057 case TYPE_STORE_C:
20058 return true;
20059 default:
20060 break;
cbe26ab8 20061 }
44cd321e
PS
20062 break;
20063 default:
20064 break;
20065 }
cbe26ab8
DN
20066
20067 return false;
20068}
20069
839a4992 20070/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
20071 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
20072
20073static bool
20074is_costly_group (rtx *group_insns, rtx next_insn)
20075{
20076 int i;
cbe26ab8
DN
20077 int issue_rate = rs6000_issue_rate ();
20078
20079 for (i = 0; i < issue_rate; i++)
20080 {
e2f6ff94
MK
20081 sd_iterator_def sd_it;
20082 dep_t dep;
cbe26ab8 20083 rtx insn = group_insns[i];
b198261f 20084
cbe26ab8 20085 if (!insn)
c4ad648e 20086 continue;
b198261f 20087
e2f6ff94 20088 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 20089 {
b198261f
MK
20090 rtx next = DEP_CON (dep);
20091
20092 if (next == next_insn
20093 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20094 return true;
c4ad648e 20095 }
cbe26ab8
DN
20096 }
20097
20098 return false;
20099}
20100
f676971a 20101/* Utility of the function redefine_groups.
cbe26ab8
DN
20102 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20103 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20104 to keep it "far" (in a separate group) from GROUP_INSNS, following
20105 one of the following schemes, depending on the value of the flag
20106 -minsert-sched-nops = X:
20107 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 20108 in order to force NEXT_INSN into a separate group.
f676971a
EC
20109 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20110 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
20111 insertion (has a group just ended, how many vacant issue slots remain in the
20112 last group, and how many dispatch groups were encountered so far). */
20113
f676971a 20114static int
c4ad648e
AM
20115force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20116 rtx next_insn, bool *group_end, int can_issue_more,
20117 int *group_count)
cbe26ab8
DN
20118{
20119 rtx nop;
20120 bool force;
20121 int issue_rate = rs6000_issue_rate ();
20122 bool end = *group_end;
20123 int i;
20124
20125 if (next_insn == NULL_RTX)
20126 return can_issue_more;
20127
20128 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20129 return can_issue_more;
20130
20131 force = is_costly_group (group_insns, next_insn);
20132 if (!force)
20133 return can_issue_more;
20134
20135 if (sched_verbose > 6)
20136 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 20137 *group_count ,can_issue_more);
cbe26ab8
DN
20138
20139 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20140 {
20141 if (*group_end)
c4ad648e 20142 can_issue_more = 0;
cbe26ab8
DN
20143
20144 /* Since only a branch can be issued in the last issue_slot, it is
20145 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20146 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20147 in this case the last nop will start a new group and the branch
20148 will be forced to the new group. */
cbe26ab8 20149 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20150 can_issue_more--;
cbe26ab8
DN
20151
20152 while (can_issue_more > 0)
c4ad648e 20153 {
9390387d 20154 nop = gen_nop ();
c4ad648e
AM
20155 emit_insn_before (nop, next_insn);
20156 can_issue_more--;
20157 }
cbe26ab8
DN
20158
20159 *group_end = true;
20160 return 0;
f676971a 20161 }
cbe26ab8
DN
20162
20163 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20164 {
20165 int n_nops = rs6000_sched_insert_nops;
20166
f676971a 20167 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20168 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20169 if (can_issue_more == 0)
c4ad648e 20170 can_issue_more = issue_rate;
cbe26ab8
DN
20171 can_issue_more--;
20172 if (can_issue_more == 0)
c4ad648e
AM
20173 {
20174 can_issue_more = issue_rate - 1;
20175 (*group_count)++;
20176 end = true;
20177 for (i = 0; i < issue_rate; i++)
20178 {
20179 group_insns[i] = 0;
20180 }
20181 }
cbe26ab8
DN
20182
20183 while (n_nops > 0)
c4ad648e
AM
20184 {
20185 nop = gen_nop ();
20186 emit_insn_before (nop, next_insn);
20187 if (can_issue_more == issue_rate - 1) /* new group begins */
20188 end = false;
20189 can_issue_more--;
20190 if (can_issue_more == 0)
20191 {
20192 can_issue_more = issue_rate - 1;
20193 (*group_count)++;
20194 end = true;
20195 for (i = 0; i < issue_rate; i++)
20196 {
20197 group_insns[i] = 0;
20198 }
20199 }
20200 n_nops--;
20201 }
cbe26ab8
DN
20202
20203 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20204 can_issue_more++;
cbe26ab8 20205
c4ad648e
AM
20206 /* Is next_insn going to start a new group? */
20207 *group_end
20208 = (end
cbe26ab8
DN
20209 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20210 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20211 || (can_issue_more < issue_rate &&
c4ad648e 20212 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20213 if (*group_end && end)
c4ad648e 20214 (*group_count)--;
cbe26ab8
DN
20215
20216 if (sched_verbose > 6)
c4ad648e
AM
20217 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20218 *group_count, can_issue_more);
f676971a
EC
20219 return can_issue_more;
20220 }
cbe26ab8
DN
20221
20222 return can_issue_more;
20223}
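/* Illustrative sketch, not taken from rs6000.c: how many nops the
   sched_finish_regroup_exact branch above ends up emitting in front of
   NEXT_INSN.  `vacant` stands for can_issue_more after the *group_end
   adjustment; the function name is invented.  */

static int
exact_scheme_nop_count_model (int vacant, int next_is_branch)
{
  if (vacant == 0)
    return 0;                      /* the current group is already closed */
  /* Only a branch may occupy the final (branch) slot, so a non-branch
     NEXT_INSN needs one nop fewer to be pushed into the next group.  */
  return next_is_branch ? vacant : vacant - 1;
}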
20224
20225/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20226 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
20227 form in practice. It tries to achieve this synchronization by forcing the
20228 estimated processor grouping on the compiler (as opposed to the function
20229 'pad_groups' which tries to force the scheduler's grouping on the processor).
20230
20231 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20232 examines the (estimated) dispatch groups that will be formed by the processor
20233 dispatcher. It marks these group boundaries to reflect the estimated
20234 processor grouping, overriding the grouping that the scheduler had marked.
20235 Depending on the value of the flag '-minsert-sched-nops' this function can
20236 force certain insns into separate groups or force a certain distance between
20237 them by inserting nops, for example, if there exists a "costly dependence"
20238 between the insns.
20239
20240 The function estimates the group boundaries that the processor will form as
0fa2e4df 20241 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
20242 each insn. A subsequent insn will start a new group if one of the following
20243 4 cases applies:
20244 - no more vacant issue slots remain in the current dispatch group.
20245 - only the last issue slot, which is the branch slot, is vacant, but the next
20246 insn is not a branch.
20247 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
20248 which means that a cracked insn (which occupies two issue slots) can't be
20249 issued in this group.
f676971a 20250 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
20251 start a new group. */
20252
20253static int
20254redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20255{
20256 rtx insn, next_insn;
20257 int issue_rate;
20258 int can_issue_more;
20259 int slot, i;
20260 bool group_end;
20261 int group_count = 0;
20262 rtx *group_insns;
20263
20264 /* Initialize. */
20265 issue_rate = rs6000_issue_rate ();
5ead67f6 20266 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20267 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20268 {
20269 group_insns[i] = 0;
20270 }
20271 can_issue_more = issue_rate;
20272 slot = 0;
20273 insn = get_next_active_insn (prev_head_insn, tail);
20274 group_end = false;
20275
20276 while (insn != NULL_RTX)
20277 {
20278 slot = (issue_rate - can_issue_more);
20279 group_insns[slot] = insn;
20280 can_issue_more =
c4ad648e 20281 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20282 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20283 can_issue_more = 0;
cbe26ab8
DN
20284
20285 next_insn = get_next_active_insn (insn, tail);
20286 if (next_insn == NULL_RTX)
c4ad648e 20287 return group_count + 1;
cbe26ab8 20288
c4ad648e
AM
20289 /* Is next_insn going to start a new group? */
20290 group_end
20291 = (can_issue_more == 0
20292 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20293 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20294 || (can_issue_more < issue_rate &&
20295 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20296
f676971a 20297 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20298 next_insn, &group_end, can_issue_more,
20299 &group_count);
cbe26ab8
DN
20300
20301 if (group_end)
c4ad648e
AM
20302 {
20303 group_count++;
20304 can_issue_more = 0;
20305 for (i = 0; i < issue_rate; i++)
20306 {
20307 group_insns[i] = 0;
20308 }
20309 }
cbe26ab8
DN
20310
20311 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20312 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20313 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20314 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20315
20316 insn = next_insn;
20317 if (can_issue_more == 0)
c4ad648e
AM
20318 can_issue_more = issue_rate;
20319 } /* while */
cbe26ab8
DN
20320
20321 return group_count;
20322}
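/* Illustrative sketch, not taken from rs6000.c: the four group-termination
   tests that redefine_groups applies to NEXT_INSN, restated over plain flags.
   The function name and parameters are invented.  */

#include <stdbool.h>

static bool
starts_new_group_model (int vacant, int issue_rate,
                        bool next_is_branch, bool next_is_cracked,
                        bool next_must_be_first)
{
  return vacant == 0                                      /* no slots left        */
         || (vacant == 1 && !next_is_branch)              /* only the branch slot */
         || (vacant <= 2 && next_is_cracked)              /* cracked insn needs two
                                                             non-branch slots     */
         || (vacant < issue_rate && next_must_be_first);  /* must start a group   */
}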
20323
20324/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20325 dispatch group boundaries that the scheduler had marked. Pad with nops
20326 any dispatch groups which have vacant issue slots, in order to force the
20327 scheduler's grouping on the processor dispatcher. The function
20328 returns the number of dispatch groups found. */
20329
20330static int
20331pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20332{
20333 rtx insn, next_insn;
20334 rtx nop;
20335 int issue_rate;
20336 int can_issue_more;
20337 int group_end;
20338 int group_count = 0;
20339
20340 /* Initialize issue_rate. */
20341 issue_rate = rs6000_issue_rate ();
20342 can_issue_more = issue_rate;
20343
20344 insn = get_next_active_insn (prev_head_insn, tail);
20345 next_insn = get_next_active_insn (insn, tail);
20346
20347 while (insn != NULL_RTX)
20348 {
20349 can_issue_more =
20350 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20351
20352 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20353
20354 if (next_insn == NULL_RTX)
c4ad648e 20355 break;
cbe26ab8
DN
20356
20357 if (group_end)
c4ad648e
AM
20358 {
20359 /* If the scheduler had marked group termination at this location
e855c69d 20360 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20361 force group termination, pad the group with nops to force group
20362 termination. */
20363 if (can_issue_more
20364 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20365 && !insn_terminates_group_p (insn, current_group)
20366 && !insn_terminates_group_p (next_insn, previous_group))
20367 {
9390387d 20368 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20369 can_issue_more--;
20370
20371 while (can_issue_more)
20372 {
20373 nop = gen_nop ();
20374 emit_insn_before (nop, next_insn);
20375 can_issue_more--;
20376 }
20377 }
20378
20379 can_issue_more = issue_rate;
20380 group_count++;
20381 }
cbe26ab8
DN
20382
20383 insn = next_insn;
20384 next_insn = get_next_active_insn (insn, tail);
20385 }
20386
20387 return group_count;
20388}
20389
44cd321e
PS
20390/* We're beginning a new block. Initialize data structures as necessary. */
20391
20392static void
20393rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20394 int sched_verbose ATTRIBUTE_UNUSED,
20395 int max_ready ATTRIBUTE_UNUSED)
982afe02 20396{
44cd321e
PS
20397 last_scheduled_insn = NULL_RTX;
20398 load_store_pendulum = 0;
20399}
20400
cbe26ab8
DN
20401/* The following function is called at the end of scheduling BB.
20402 After reload, it inserts nops to enforce insn group bundling. */
20403
20404static void
38f391a5 20405rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20406{
20407 int n_groups;
20408
20409 if (sched_verbose)
20410 fprintf (dump, "=== Finishing schedule.\n");
20411
ec507f2d 20412 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20413 {
e855c69d
AB
20414 /* Do not run sched_finish hook when selective scheduling enabled. */
20415 if (sel_sched_p ())
20416 return;
20417
cbe26ab8 20418 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20419 return;
cbe26ab8
DN
20420
20421 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20422 n_groups = pad_groups (dump, sched_verbose,
20423 current_sched_info->prev_head,
20424 current_sched_info->next_tail);
cbe26ab8 20425 else
c4ad648e
AM
20426 n_groups = redefine_groups (dump, sched_verbose,
20427 current_sched_info->prev_head,
20428 current_sched_info->next_tail);
cbe26ab8
DN
20429
20430 if (sched_verbose >= 6)
20431 {
20432 fprintf (dump, "ngroups = %d\n", n_groups);
20433 print_rtl (dump, current_sched_info->prev_head);
20434 fprintf (dump, "Done finish_sched\n");
20435 }
20436 }
20437}
e855c69d
AB
20438
20439struct _rs6000_sched_context
20440{
20441 short cached_can_issue_more;
20442 rtx last_scheduled_insn;
20443 int load_store_pendulum;
20444};
20445
20446typedef struct _rs6000_sched_context rs6000_sched_context_def;
20447typedef rs6000_sched_context_def *rs6000_sched_context_t;
20448
20449/* Allocate storage for a new scheduling context. */
20450static void *
20451rs6000_alloc_sched_context (void)
20452{
20453 return xmalloc (sizeof (rs6000_sched_context_def));
20454}
20455
20456/* If CLEAN_P is true, initialize _SC with clean data;
20457 otherwise initialize it from the global context. */
20458static void
20459rs6000_init_sched_context (void *_sc, bool clean_p)
20460{
20461 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20462
20463 if (clean_p)
20464 {
20465 sc->cached_can_issue_more = 0;
20466 sc->last_scheduled_insn = NULL_RTX;
20467 sc->load_store_pendulum = 0;
20468 }
20469 else
20470 {
20471 sc->cached_can_issue_more = cached_can_issue_more;
20472 sc->last_scheduled_insn = last_scheduled_insn;
20473 sc->load_store_pendulum = load_store_pendulum;
20474 }
20475}
20476
20477/* Sets the global scheduling context to the one pointed to by _SC. */
20478static void
20479rs6000_set_sched_context (void *_sc)
20480{
20481 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20482
20483 gcc_assert (sc != NULL);
20484
20485 cached_can_issue_more = sc->cached_can_issue_more;
20486 last_scheduled_insn = sc->last_scheduled_insn;
20487 load_store_pendulum = sc->load_store_pendulum;
20488}
20489
20490/* Free _SC. */
20491static void
20492rs6000_free_sched_context (void *_sc)
20493{
20494 gcc_assert (_sc != NULL);
20495
20496 free (_sc);
20497}
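/* Illustrative sketch, not taken from rs6000.c: the save/restore pattern the
   four hooks above implement, shrunk to two plain ints and standalone
   functions.  All names are invented; it only shows that a context object
   mirrors the global state and can be swapped in and out.  */

#include <stdlib.h>

struct sched_ctx_model { int can_issue_more; int pendulum; };

static struct sched_ctx_model model_global;

static void *model_alloc_ctx (void)   { return malloc (sizeof (struct sched_ctx_model)); }
static void  model_free_ctx (void *p) { free (p); }

static void
model_init_ctx (void *p, int clean)
{
  struct sched_ctx_model *c = (struct sched_ctx_model *) p;
  if (clean)
    {
      c->can_issue_more = 0;
      c->pendulum = 0;
    }
  else
    *c = model_global;                 /* capture the current global state */
}

static void
model_set_ctx (const void *p)
{
  model_global = *(const struct sched_ctx_model *) p;   /* restore it */
}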
20498
b6c9286a 20499\f
b6c9286a
MM
20500/* Length in units of the trampoline for entering a nested function. */
20501
20502int
863d938c 20503rs6000_trampoline_size (void)
b6c9286a
MM
20504{
20505 int ret = 0;
20506
20507 switch (DEFAULT_ABI)
20508 {
20509 default:
37409796 20510 gcc_unreachable ();
b6c9286a
MM
20511
20512 case ABI_AIX:
8f802bfb 20513 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20514 break;
20515
4dabc42d 20516 case ABI_DARWIN:
b6c9286a 20517 case ABI_V4:
03a7e1a5 20518 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20519 break;
b6c9286a
MM
20520 }
20521
20522 return ret;
20523}
20524
20525/* Emit RTL insns to initialize the variable parts of a trampoline.
20526 FNADDR is an RTX for the address of the function's pure code.
20527 CXT is an RTX for the static chain value for the function. */
20528
20529void
a2369ed3 20530rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20531{
8bd04c56 20532 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20533 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20534
20535 switch (DEFAULT_ABI)
20536 {
20537 default:
37409796 20538 gcc_unreachable ();
b6c9286a 20539
8bd04c56 20540/* Macros to shorten the code expansions below. */
9613eaff 20541#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20542#define MEM_PLUS(addr,offset) \
9613eaff 20543 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20544
b6c9286a
MM
20545 /* Under AIX, just build the 3-word function descriptor. */
20546 case ABI_AIX:
8bd04c56 20547 {
9613eaff
SH
20548 rtx fn_reg = gen_reg_rtx (Pmode);
20549 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20550 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20551 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20552 emit_move_insn (MEM_DEREF (addr), fn_reg);
20553 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20554 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20555 }
b6c9286a
MM
20556 break;
20557
4dabc42d
TC
20558 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20559 case ABI_DARWIN:
b6c9286a 20560 case ABI_V4:
9613eaff 20561 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
bbbbb16a 20562 LCT_NORMAL, VOIDmode, 4,
9613eaff 20563 addr, Pmode,
eaf1bcf1 20564 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20565 fnaddr, Pmode,
20566 ctx_reg, Pmode);
b6c9286a 20567 break;
b6c9286a
MM
20568 }
20569
20570 return;
20571}
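/* Illustrative sketch, not taken from rs6000.c: the layout the ABI_AIX case
   above stores at ADDR, shown as a struct for the 32-bit case (regsize == 4;
   for 64-bit every field is 8 bytes).  The struct name is invented.  */

struct aix_func_desc_model_32
{
  unsigned int entry;          /* offset 0:           function's code address */
  unsigned int toc;            /* offset regsize:     TOC pointer from FNADDR */
  unsigned int static_chain;   /* offset 2 * regsize: static chain value CXT  */
};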
7509c759
MM
20572
20573\f
91d231cb 20574/* Table of valid machine attributes. */
a4f6c312 20575
91d231cb 20576const struct attribute_spec rs6000_attribute_table[] =
7509c759 20577{
91d231cb 20578 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20579 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20580 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20581 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20582 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20583 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20584#ifdef SUBTARGET_ATTRIBUTE_TABLE
20585 SUBTARGET_ATTRIBUTE_TABLE,
20586#endif
a5c76ee6 20587 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20588};
7509c759 20589
8bb418a3
ZL
20590/* Handle the "altivec" attribute. The attribute may have
20591 arguments as follows:
f676971a 20592
8bb418a3
ZL
20593 __attribute__((altivec(vector__)))
20594 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20595 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20596
20597 and may appear more than once (e.g., 'vector bool char') in a
20598 given declaration. */
20599
20600static tree
f90ac3f0
UP
20601rs6000_handle_altivec_attribute (tree *node,
20602 tree name ATTRIBUTE_UNUSED,
20603 tree args,
8bb418a3
ZL
20604 int flags ATTRIBUTE_UNUSED,
20605 bool *no_add_attrs)
20606{
20607 tree type = *node, result = NULL_TREE;
20608 enum machine_mode mode;
20609 int unsigned_p;
20610 char altivec_type
20611 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20612 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20613 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20614 : '?');
8bb418a3
ZL
20615
20616 while (POINTER_TYPE_P (type)
20617 || TREE_CODE (type) == FUNCTION_TYPE
20618 || TREE_CODE (type) == METHOD_TYPE
20619 || TREE_CODE (type) == ARRAY_TYPE)
20620 type = TREE_TYPE (type);
20621
20622 mode = TYPE_MODE (type);
20623
f90ac3f0
UP
20624 /* Check for invalid AltiVec type qualifiers. */
20625 if (type == long_unsigned_type_node || type == long_integer_type_node)
20626 {
20627 if (TARGET_64BIT)
20628 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20629 else if (rs6000_warn_altivec_long)
d4ee4d25 20630 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20631 }
20632 else if (type == long_long_unsigned_type_node
20633 || type == long_long_integer_type_node)
20634 error ("use of %<long long%> in AltiVec types is invalid");
20635 else if (type == double_type_node)
20636 error ("use of %<double%> in AltiVec types is invalid");
20637 else if (type == long_double_type_node)
20638 error ("use of %<long double%> in AltiVec types is invalid");
20639 else if (type == boolean_type_node)
20640 error ("use of boolean types in AltiVec types is invalid");
20641 else if (TREE_CODE (type) == COMPLEX_TYPE)
20642 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20643 else if (DECIMAL_FLOAT_MODE_P (mode))
20644 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20645
20646 switch (altivec_type)
20647 {
20648 case 'v':
8df83eae 20649 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20650 switch (mode)
20651 {
c4ad648e
AM
20652 case SImode:
20653 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20654 break;
20655 case HImode:
20656 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20657 break;
20658 case QImode:
20659 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20660 break;
20661 case SFmode: result = V4SF_type_node; break;
20662 /* If the user says 'vector int bool', we may be handed the 'bool'
20663 attribute _before_ the 'vector' attribute, and so select the
20664 proper type in the 'b' case below. */
20665 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20666 result = type;
20667 default: break;
8bb418a3
ZL
20668 }
20669 break;
20670 case 'b':
20671 switch (mode)
20672 {
c4ad648e
AM
20673 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20674 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20675 case QImode: case V16QImode: result = bool_V16QI_type_node;
20676 default: break;
8bb418a3
ZL
20677 }
20678 break;
20679 case 'p':
20680 switch (mode)
20681 {
c4ad648e
AM
20682 case V8HImode: result = pixel_V8HI_type_node;
20683 default: break;
8bb418a3
ZL
20684 }
20685 default: break;
20686 }
20687
4f538d42
UW
20688 /* Propagate qualifiers attached to the element type
20689 onto the vector type. */
20690 if (result && result != type && TYPE_QUALS (type))
20691 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20692
8bb418a3
ZL
20693 *no_add_attrs = true; /* No need to hang on to the attribute. */
20694
f90ac3f0 20695 if (result)
5dc11954 20696 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20697
20698 return NULL_TREE;
20699}
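/* Illustrative sketch, not taken from rs6000.c: the internal spellings handled
   above, written out directly.  This only compiles for a PowerPC target with
   -maltivec; user code normally spells these "vector unsigned int",
   "vector bool int" and "vector pixel".  The typedef names are invented.  */

typedef __attribute__ ((altivec (vector__))) unsigned int   model_vui;  /* vector unsigned int */
typedef __attribute__ ((altivec (bool__)))   unsigned int   model_vbi;  /* vector bool int     */
typedef __attribute__ ((altivec (pixel__)))  unsigned short model_vp;   /* vector pixel        */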
20700
f18eca82
ZL
20701/* AltiVec defines four built-in scalar types that serve as vector
20702 elements; we must teach the compiler how to mangle them. */
20703
20704static const char *
3101faab 20705rs6000_mangle_type (const_tree type)
f18eca82 20706{
608063c3
JB
20707 type = TYPE_MAIN_VARIANT (type);
20708
20709 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20710 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20711 return NULL;
20712
f18eca82
ZL
20713 if (type == bool_char_type_node) return "U6__boolc";
20714 if (type == bool_short_type_node) return "U6__bools";
20715 if (type == pixel_type_node) return "u7__pixel";
20716 if (type == bool_int_type_node) return "U6__booli";
20717
337bde91
DE
20718 /* Mangle IBM extended float long double as `g' (__float128) on
20719 powerpc*-linux where long-double-64 previously was the default. */
20720 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20721 && TARGET_ELF
20722 && TARGET_LONG_DOUBLE_128
20723 && !TARGET_IEEEQUAD)
20724 return "g";
20725
f18eca82
ZL
20726 /* For all other types, use normal C++ mangling. */
20727 return NULL;
20728}
20729
a5c76ee6
ZW
20730/* Handle a "longcall" or "shortcall" attribute; arguments as in
20731 struct attribute_spec.handler. */
a4f6c312 20732
91d231cb 20733static tree
f676971a
EC
20734rs6000_handle_longcall_attribute (tree *node, tree name,
20735 tree args ATTRIBUTE_UNUSED,
20736 int flags ATTRIBUTE_UNUSED,
a2369ed3 20737 bool *no_add_attrs)
91d231cb
JM
20738{
20739 if (TREE_CODE (*node) != FUNCTION_TYPE
20740 && TREE_CODE (*node) != FIELD_DECL
20741 && TREE_CODE (*node) != TYPE_DECL)
20742 {
29d08eba
JM
20743 warning (OPT_Wattributes, "%qE attribute only applies to functions",
20744 name);
91d231cb
JM
20745 *no_add_attrs = true;
20746 }
6a4cee5f 20747
91d231cb 20748 return NULL_TREE;
7509c759
MM
20749}
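/* Illustrative sketch, not taken from rs6000.c: the two attributes handled
   above as they appear in user code.  "longcall" marks a function as possibly
   too far away for a direct bl, so the longer calling sequence is used;
   "shortcall" asserts the target is close enough for a direct call even under
   -mlongcall.  The declarations are invented examples.  */

void model_far_helper (void) __attribute__ ((longcall));
void model_near_helper (void) __attribute__ ((shortcall));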
20750
a5c76ee6
ZW
20751/* Set longcall attributes on all functions declared when
20752 rs6000_default_long_calls is true. */
20753static void
a2369ed3 20754rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20755{
20756 if (rs6000_default_long_calls
20757 && (TREE_CODE (type) == FUNCTION_TYPE
20758 || TREE_CODE (type) == METHOD_TYPE))
20759 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20760 NULL_TREE,
20761 TYPE_ATTRIBUTES (type));
16d6f994
EC
20762
20763#if TARGET_MACHO
20764 darwin_set_default_type_attributes (type);
20765#endif
a5c76ee6
ZW
20766}
20767
3cb999d8
DE
20768/* Return a reference suitable for calling a function with the
20769 longcall attribute. */
a4f6c312 20770
9390387d 20771rtx
a2369ed3 20772rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20773{
d330fd93 20774 const char *call_name;
6a4cee5f
MM
20775 tree node;
20776
20777 if (GET_CODE (call_ref) != SYMBOL_REF)
20778 return call_ref;
20779
20780 /* System V adds '.' to the internal name, so skip any leading dots. */
20781 call_name = XSTR (call_ref, 0);
20782 if (*call_name == '.')
20783 {
20784 while (*call_name == '.')
20785 call_name++;
20786
20787 node = get_identifier (call_name);
39403d82 20788 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20789 }
20790
20791 return force_reg (Pmode, call_ref);
20792}
7509c759 20793\f
77ccdfed
EC
20794#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20795#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20796#endif
20797
20798/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20799 struct attribute_spec.handler. */
20800static tree
20801rs6000_handle_struct_attribute (tree *node, tree name,
20802 tree args ATTRIBUTE_UNUSED,
20803 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20804{
20805 tree *type = NULL;
20806 if (DECL_P (*node))
20807 {
20808 if (TREE_CODE (*node) == TYPE_DECL)
20809 type = &TREE_TYPE (*node);
20810 }
20811 else
20812 type = node;
20813
20814 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20815 || TREE_CODE (*type) == UNION_TYPE)))
20816 {
29d08eba 20817 warning (OPT_Wattributes, "%qE attribute ignored", name);
77ccdfed
EC
20818 *no_add_attrs = true;
20819 }
20820
20821 else if ((is_attribute_p ("ms_struct", name)
20822 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20823 || ((is_attribute_p ("gcc_struct", name)
20824 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20825 {
29d08eba
JM
20826 warning (OPT_Wattributes, "%qE incompatible attribute ignored",
20827 name);
77ccdfed
EC
20828 *no_add_attrs = true;
20829 }
20830
20831 return NULL_TREE;
20832}
20833
20834static bool
3101faab 20835rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20836{
20837 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20838 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20839 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20840}
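/* Illustrative sketch, not taken from rs6000.c: the attributes handled above
   pick the record/bit-field layout per type, overriding the
   TARGET_USE_MS_BITFIELD_LAYOUT default.  The struct tags are invented.  */

struct __attribute__ ((ms_struct))  model_ms_layout  { char c; int bits : 7; };
struct __attribute__ ((gcc_struct)) model_gcc_layout { char c; int bits : 7; };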
20841\f
b64a1b53
RH
20842#ifdef USING_ELFOS_H
20843
d6b5193b 20844/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20845
d6b5193b
RS
20846static void
20847rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20848{
20849 if (DEFAULT_ABI == ABI_AIX
20850 && TARGET_MINIMAL_TOC
20851 && !TARGET_RELOCATABLE)
20852 {
20853 if (!toc_initialized)
20854 {
20855 toc_initialized = 1;
20856 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20857 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20858 fprintf (asm_out_file, "\t.tc ");
20859 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20860 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20861 fprintf (asm_out_file, "\n");
20862
20863 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20864 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20865 fprintf (asm_out_file, " = .+32768\n");
20866 }
20867 else
20868 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20869 }
20870 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20871 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20872 else
20873 {
20874 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20875 if (!toc_initialized)
20876 {
20877 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20878 fprintf (asm_out_file, " = .+32768\n");
20879 toc_initialized = 1;
20880 }
20881 }
20882}
20883
20884/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20885
b64a1b53 20886static void
d6b5193b
RS
20887rs6000_elf_asm_init_sections (void)
20888{
20889 toc_section
20890 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20891
20892 sdata2_section
20893 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20894 SDATA2_SECTION_ASM_OP);
20895}
20896
20897/* Implement TARGET_SELECT_RTX_SECTION. */
20898
20899static section *
f676971a 20900rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20901 unsigned HOST_WIDE_INT align)
7509c759 20902{
a9098fd0 20903 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20904 return toc_section;
7509c759 20905 else
d6b5193b 20906 return default_elf_select_rtx_section (mode, x, align);
7509c759 20907}
d9407988 20908\f
d1908feb
JJ
20909/* For a SYMBOL_REF, set generic flags and then perform some
20910 target-specific processing.
20911
d1908feb
JJ
20912 When the AIX ABI is requested on a non-AIX system, replace the
20913 function name with the real name (with a leading .) rather than the
20914 function descriptor name. This saves a lot of overriding code to
20915 read the prefixes. */
d9407988 20916
fb49053f 20917static void
a2369ed3 20918rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20919{
d1908feb 20920 default_encode_section_info (decl, rtl, first);
b2003250 20921
d1908feb
JJ
20922 if (first
20923 && TREE_CODE (decl) == FUNCTION_DECL
20924 && !TARGET_AIX
20925 && DEFAULT_ABI == ABI_AIX)
d9407988 20926 {
c6a2438a 20927 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20928 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20929 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20930 str[0] = '.';
20931 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20932 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20933 }
d9407988
MM
20934}
20935
21d9bb3f 20936static inline bool
0a2aaacc 20937compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20938{
20939 int len;
20940
0a2aaacc
KG
20941 len = strlen (templ);
20942 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20943 && (section[len] == 0 || section[len] == '.'));
20944}
20945
c1b7d95a 20946bool
3101faab 20947rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20948{
20949 if (rs6000_sdata == SDATA_NONE)
20950 return false;
20951
7482ad25
AF
20952 /* We want to merge strings, so we never consider them small data. */
20953 if (TREE_CODE (decl) == STRING_CST)
20954 return false;
20955
20956 /* Functions are never in the small data area. */
20957 if (TREE_CODE (decl) == FUNCTION_DECL)
20958 return false;
20959
0e5dbd9b
DE
20960 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20961 {
20962 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20963 if (compare_section_name (section, ".sdata")
20964 || compare_section_name (section, ".sdata2")
20965 || compare_section_name (section, ".gnu.linkonce.s")
20966 || compare_section_name (section, ".sbss")
20967 || compare_section_name (section, ".sbss2")
20968 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20969 || strcmp (section, ".PPC.EMB.sdata0") == 0
20970 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20971 return true;
20972 }
20973 else
20974 {
20975 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20976
20977 if (size > 0
307b599c 20978 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20979 /* If it's not public, and we're not going to reference it there,
20980 there's no need to put it in the small data section. */
0e5dbd9b
DE
20981 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20982 return true;
20983 }
20984
20985 return false;
20986}
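/* Illustrative sketch, not taken from rs6000.c: what the size test above means
   in practice.  Assuming -msdata and -G 8 (g_switch_value == 8), the first
   object is within the threshold and may be placed in small data, while the
   second exceeds it.  The variable names are invented.  */

int model_small_counter;      /* 4 bytes  <= 8: small-data candidate */
int model_big_table[4];       /* 16 bytes >  8: not a candidate      */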
20987
b91da81f 20988#endif /* USING_ELFOS_H */
aacd3885
RS
20989\f
20990/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20991
aacd3885 20992static bool
3101faab 20993rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20994{
20995 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20996}
a6c2a102 20997\f
000034eb 20998/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20999 ADDR can be effectively incremented by incrementing REG.
21000
21001 r0 is special and we must not select it as an address
21002 register by this routine since our caller will try to
21003 increment the returned register via an "la" instruction. */
000034eb 21004
9390387d 21005rtx
a2369ed3 21006find_addr_reg (rtx addr)
000034eb
DE
21007{
21008 while (GET_CODE (addr) == PLUS)
21009 {
02441cd6
JL
21010 if (GET_CODE (XEXP (addr, 0)) == REG
21011 && REGNO (XEXP (addr, 0)) != 0)
000034eb 21012 addr = XEXP (addr, 0);
02441cd6
JL
21013 else if (GET_CODE (XEXP (addr, 1)) == REG
21014 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
21015 addr = XEXP (addr, 1);
21016 else if (CONSTANT_P (XEXP (addr, 0)))
21017 addr = XEXP (addr, 1);
21018 else if (CONSTANT_P (XEXP (addr, 1)))
21019 addr = XEXP (addr, 0);
21020 else
37409796 21021 gcc_unreachable ();
000034eb 21022 }
37409796
NS
21023 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
21024 return addr;
000034eb
DE
21025}
21026
a6c2a102 21027void
a2369ed3 21028rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
21029{
21030 fatal_insn ("bad address", op);
21031}
c8023011 21032
ee890fe2
SS
21033#if TARGET_MACHO
21034
efdba735 21035static tree branch_island_list = 0;
ee890fe2 21036
efdba735
SH
21037/* Remember to generate a branch island for far calls to the given
21038 function. */
ee890fe2 21039
f676971a 21040static void
c4ad648e
AM
21041add_compiler_branch_island (tree label_name, tree function_name,
21042 int line_number)
ee890fe2 21043{
efdba735 21044 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 21045 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
21046 TREE_CHAIN (branch_island) = branch_island_list;
21047 branch_island_list = branch_island;
ee890fe2
SS
21048}
21049
efdba735
SH
21050#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
21051#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
21052#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
21053 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 21054
efdba735
SH
21055/* Generate far-jump branch islands for everything on the
21056 branch_island_list. Invoked immediately after the last instruction
21057 of the epilogue has been emitted; the branch-islands must be
21058 appended to, and contiguous with, the function body. Mach-O stubs
21059 are generated in machopic_output_stub(). */
ee890fe2 21060
efdba735
SH
21061static void
21062macho_branch_islands (void)
21063{
21064 char tmp_buf[512];
21065 tree branch_island;
21066
21067 for (branch_island = branch_island_list;
21068 branch_island;
21069 branch_island = TREE_CHAIN (branch_island))
21070 {
21071 const char *label =
21072 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
21073 const char *name =
11abc112 21074 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
21075 char name_buf[512];
21076 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
21077 if (name[0] == '*' || name[0] == '&')
21078 strcpy (name_buf, name+1);
21079 else
21080 {
21081 name_buf[0] = '_';
21082 strcpy (name_buf+1, name);
21083 }
21084 strcpy (tmp_buf, "\n");
21085 strcat (tmp_buf, label);
ee890fe2 21086#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21087 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21088 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21089#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
21090 if (flag_pic)
21091 {
21092 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21093 strcat (tmp_buf, label);
21094 strcat (tmp_buf, "_pic\n");
21095 strcat (tmp_buf, label);
21096 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 21097
efdba735
SH
21098 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21099 strcat (tmp_buf, name_buf);
21100 strcat (tmp_buf, " - ");
21101 strcat (tmp_buf, label);
21102 strcat (tmp_buf, "_pic)\n");
f676971a 21103
efdba735 21104 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 21105
efdba735
SH
21106 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21107 strcat (tmp_buf, name_buf);
21108 strcat (tmp_buf, " - ");
21109 strcat (tmp_buf, label);
21110 strcat (tmp_buf, "_pic)\n");
f676971a 21111
efdba735
SH
21112 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21113 }
21114 else
21115 {
21116 strcat (tmp_buf, ":\nlis r12,hi16(");
21117 strcat (tmp_buf, name_buf);
21118 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21119 strcat (tmp_buf, name_buf);
21120 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21121 }
21122 output_asm_insn (tmp_buf, 0);
ee890fe2 21123#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21124 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21125 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21126#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21127 }
ee890fe2 21128
efdba735 21129 branch_island_list = 0;
ee890fe2
SS
21130}
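/* Illustrative note, not taken from rs6000.c: for a non-PIC island whose label
   is "L42" and whose target is "_foo" (both names invented), the strcat
   sequence above assembles to roughly

       L42:
       lis r12,hi16(_foo)
               ori r12,r12,lo16(_foo)
               mtctr r12
               bctr

   i.e. the island materializes the callee's address in r12 and branches
   through the count register; the PIC variant computes the address relative
   to a local label instead.  */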
21131
21132/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
21133 already there or not. */
21134
efdba735 21135static int
a2369ed3 21136no_previous_def (tree function_name)
ee890fe2 21137{
efdba735
SH
21138 tree branch_island;
21139 for (branch_island = branch_island_list;
21140 branch_island;
21141 branch_island = TREE_CHAIN (branch_island))
21142 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21143 return 0;
21144 return 1;
21145}
21146
21147/* GET_PREV_LABEL gets the label name from the previous definition of
21148 the function. */
21149
efdba735 21150static tree
a2369ed3 21151get_prev_label (tree function_name)
ee890fe2 21152{
efdba735
SH
21153 tree branch_island;
21154 for (branch_island = branch_island_list;
21155 branch_island;
21156 branch_island = TREE_CHAIN (branch_island))
21157 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21158 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21159 return 0;
21160}
21161
75b1b789
MS
21162#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21163#define DARWIN_LINKER_GENERATES_ISLANDS 0
21164#endif
21165
21166/* KEXTs still need branch islands. */
21167#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21168 || flag_mkernel || flag_apple_kext)
21169
ee890fe2 21170/* INSN is either a function call or a millicode call. It may have an
f676971a 21171 unconditional jump in its delay slot.
ee890fe2
SS
21172
21173 CALL_DEST is the routine we are calling. */
21174
21175char *
c4ad648e
AM
21176output_call (rtx insn, rtx *operands, int dest_operand_number,
21177 int cookie_operand_number)
ee890fe2
SS
21178{
21179 static char buf[256];
75b1b789
MS
21180 if (DARWIN_GENERATE_ISLANDS
21181 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21182 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21183 {
21184 tree labelname;
efdba735 21185 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21186
ee890fe2
SS
21187 if (no_previous_def (funname))
21188 {
ee890fe2
SS
21189 rtx label_rtx = gen_label_rtx ();
21190 char *label_buf, temp_buf[256];
21191 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21192 CODE_LABEL_NUMBER (label_rtx));
21193 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21194 labelname = get_identifier (label_buf);
a38e7aa5 21195 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21196 }
21197 else
21198 labelname = get_prev_label (funname);
21199
efdba735
SH
21200 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21201 instruction will reach 'foo', otherwise link as 'bl L42'".
21202 "L42" should be a 'branch island', that will do a far jump to
21203 'foo'. Branch islands are generated in
21204 macho_branch_islands(). */
ee890fe2 21205 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21206 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21207 }
21208 else
efdba735
SH
21209 sprintf (buf, "bl %%z%d", dest_operand_number);
21210 return buf;
ee890fe2
SS
21211}
21212
ee890fe2
SS
21213/* Generate PIC and indirect symbol stubs. */
21214
21215void
a2369ed3 21216machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21217{
21218 unsigned int length;
a4f6c312
SS
21219 char *symbol_name, *lazy_ptr_name;
21220 char *local_label_0;
ee890fe2
SS
21221 static int label = 0;
21222
df56a27f 21223 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21224 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21225
ee890fe2 21226
ee890fe2 21227 length = strlen (symb);
5ead67f6 21228 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21229 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21230
5ead67f6 21231 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21232 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21233
ee890fe2 21234 if (flag_pic == 2)
56c779bc 21235 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21236 else
56c779bc 21237 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21238
21239 if (flag_pic == 2)
21240 {
d974312d
DJ
21241 fprintf (file, "\t.align 5\n");
21242
21243 fprintf (file, "%s:\n", stub);
21244 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21245
876455fa 21246 label++;
5ead67f6 21247 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21248 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21249
ee890fe2
SS
21250 fprintf (file, "\tmflr r0\n");
21251 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21252 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21253 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21254 lazy_ptr_name, local_label_0);
21255 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21256 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21257 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21258 lazy_ptr_name, local_label_0);
21259 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21260 fprintf (file, "\tbctr\n");
21261 }
21262 else
d974312d
DJ
21263 {
21264 fprintf (file, "\t.align 4\n");
21265
21266 fprintf (file, "%s:\n", stub);
21267 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21268
21269 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21270 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21271 (TARGET_64BIT ? "ldu" : "lwzu"),
21272 lazy_ptr_name);
d974312d
DJ
21273 fprintf (file, "\tmtctr r12\n");
21274 fprintf (file, "\tbctr\n");
21275 }
f676971a 21276
56c779bc 21277 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21278 fprintf (file, "%s:\n", lazy_ptr_name);
21279 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21280 fprintf (file, "%sdyld_stub_binding_helper\n",
21281 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21282}
21283
21284/* Legitimize PIC addresses. If the address is already
21285 position-independent, we return ORIG. Newly generated
21286 position-independent addresses go into a reg. This is REG if
21287 nonzero; otherwise we allocate register(s) as necessary. */
21288
4fbbe694 21289#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21290
21291rtx
f676971a 21292rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21293 rtx reg)
ee890fe2
SS
21294{
21295 rtx base, offset;
21296
21297 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21298 reg = gen_reg_rtx (Pmode);
21299
21300 if (GET_CODE (orig) == CONST)
21301 {
37409796
NS
21302 rtx reg_temp;
21303
ee890fe2
SS
21304 if (GET_CODE (XEXP (orig, 0)) == PLUS
21305 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21306 return orig;
21307
37409796 21308 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21309
37409796
NS
21310 /* Use a different reg for the intermediate value, as
21311 it will be marked UNCHANGING. */
b3a13419 21312 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21313 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21314 Pmode, reg_temp);
21315 offset =
21316 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21317 Pmode, reg);
bb8df8a6 21318
ee890fe2
SS
21319 if (GET_CODE (offset) == CONST_INT)
21320 {
21321 if (SMALL_INT (offset))
ed8908e7 21322 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21323 else if (! reload_in_progress && ! reload_completed)
21324 offset = force_reg (Pmode, offset);
21325 else
c859cda6
DJ
21326 {
21327 rtx mem = force_const_mem (Pmode, orig);
21328 return machopic_legitimize_pic_address (mem, Pmode, reg);
21329 }
ee890fe2 21330 }
f1c25d3b 21331 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21332 }
21333
21334 /* Fall back on generic machopic code. */
21335 return machopic_legitimize_pic_address (orig, mode, reg);
21336}
21337
c4e18b1c
GK
21338/* Output a .machine directive for the Darwin assembler, and call
21339 the generic start_file routine. */
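/* For example, -mcpu=G5 emits "\t.machine ppc970"; with no -mcpu given,
   the first mapping entry whose IF_SET bits are present in target_flags
   (or the final "ppc" fallback) is used.  */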
21340
21341static void
21342rs6000_darwin_file_start (void)
21343{
94ff898d 21344 static const struct
c4e18b1c
GK
21345 {
21346 const char *arg;
21347 const char *name;
21348 int if_set;
21349 } mapping[] = {
55dbfb48 21350 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21351 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21352 { "power4", "ppc970", 0 },
21353 { "G5", "ppc970", 0 },
21354 { "7450", "ppc7450", 0 },
21355 { "7400", "ppc7400", MASK_ALTIVEC },
21356 { "G4", "ppc7400", 0 },
21357 { "750", "ppc750", 0 },
21358 { "740", "ppc750", 0 },
21359 { "G3", "ppc750", 0 },
21360 { "604e", "ppc604e", 0 },
21361 { "604", "ppc604", 0 },
21362 { "603e", "ppc603", 0 },
21363 { "603", "ppc603", 0 },
21364 { "601", "ppc601", 0 },
21365 { NULL, "ppc", 0 } };
21366 const char *cpu_id = "";
21367 size_t i;
94ff898d 21368
9390387d 21369 rs6000_file_start ();
192d0f89 21370 darwin_file_start ();
c4e18b1c
GK
21371
21372 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21373 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21374 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21375 && rs6000_select[i].string[0] != '\0')
21376 cpu_id = rs6000_select[i].string;
21377
21378 /* Look through the mapping array. Pick the first name that either
21379 matches the argument, has a bit set in IF_SET that is also set
21380 in the target flags, or has a NULL name. */
21381
21382 i = 0;
21383 while (mapping[i].arg != NULL
21384 && strcmp (mapping[i].arg, cpu_id) != 0
21385 && (mapping[i].if_set & target_flags) == 0)
21386 i++;
21387
21388 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21389}
21390
ee890fe2 21391#endif /* TARGET_MACHO */
7c262518
RH
21392
21393#if TARGET_ELF
9b580a0b
RH
21394static int
21395rs6000_elf_reloc_rw_mask (void)
7c262518 21396{
9b580a0b
RH
21397 if (flag_pic)
21398 return 3;
21399 else if (DEFAULT_ABI == ABI_AIX)
21400 return 2;
21401 else
21402 return 0;
7c262518 21403}
d9f6800d
RH
21404
21405/* Record an element in the table of global constructors. SYMBOL is
21406 a SYMBOL_REF of the function to be called; PRIORITY is a number
21407 between 0 and MAX_INIT_PRIORITY.
21408
21409 This differs from default_named_section_asm_out_constructor in
21410 that we have special handling for -mrelocatable. */
21411
21412static void
a2369ed3 21413rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21414{
21415 const char *section = ".ctors";
21416 char buf[16];
21417
21418 if (priority != DEFAULT_INIT_PRIORITY)
21419 {
21420 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21421 /* Invert the numbering so the linker puts us in the proper
21422 order; constructors are run from right to left, and the
21423 linker sorts in increasing order. */
21424 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21425 section = buf;
21426 }
21427
d6b5193b 21428 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21429 assemble_align (POINTER_SIZE);
d9f6800d
RH
21430
21431 if (TARGET_RELOCATABLE)
21432 {
21433 fputs ("\t.long (", asm_out_file);
21434 output_addr_const (asm_out_file, symbol);
21435 fputs (")@fixup\n", asm_out_file);
21436 }
21437 else
c8af3574 21438 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21439}
21440
21441static void
a2369ed3 21442rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21443{
21444 const char *section = ".dtors";
21445 char buf[16];
21446
21447 if (priority != DEFAULT_INIT_PRIORITY)
21448 {
21449 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21450 /* Invert the numbering so the linker puts us in the proper
21451 order; constructors are run from right to left, and the
21452 linker sorts in increasing order. */
21453 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21454 section = buf;
21455 }
21456
d6b5193b 21457 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21458 assemble_align (POINTER_SIZE);
d9f6800d
RH
21459
21460 if (TARGET_RELOCATABLE)
21461 {
21462 fputs ("\t.long (", asm_out_file);
21463 output_addr_const (asm_out_file, symbol);
21464 fputs (")@fixup\n", asm_out_file);
21465 }
21466 else
c8af3574 21467 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21468}
9739c90c
JJ
21469
21470void
a2369ed3 21471rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21472{
21473 if (TARGET_64BIT)
21474 {
21475 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21476 ASM_OUTPUT_LABEL (file, name);
21477 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21478 rs6000_output_function_entry (file, name);
21479 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21480 if (DOT_SYMBOLS)
9739c90c 21481 {
85b776df 21482 fputs ("\t.size\t", file);
9739c90c 21483 assemble_name (file, name);
85b776df
AM
21484 fputs (",24\n\t.type\t.", file);
21485 assemble_name (file, name);
21486 fputs (",@function\n", file);
21487 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21488 {
21489 fputs ("\t.globl\t.", file);
21490 assemble_name (file, name);
21491 putc ('\n', file);
21492 }
9739c90c 21493 }
85b776df
AM
21494 else
21495 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21496 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21497 rs6000_output_function_entry (file, name);
21498 fputs (":\n", file);
9739c90c
JJ
21499 return;
21500 }
21501
21502 if (TARGET_RELOCATABLE
7f970b70 21503 && !TARGET_SECURE_PLT
e3b5732b 21504 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21505 && uses_TOC ())
9739c90c
JJ
21506 {
21507 char buf[256];
21508
21509 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21510
21511 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21512 fprintf (file, "\t.long ");
21513 assemble_name (file, buf);
21514 putc ('-', file);
21515 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21516 assemble_name (file, buf);
21517 putc ('\n', file);
21518 }
21519
21520 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21521 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21522
21523 if (DEFAULT_ABI == ABI_AIX)
21524 {
21525 const char *desc_name, *orig_name;
21526
21527 orig_name = (*targetm.strip_name_encoding) (name);
21528 desc_name = orig_name;
21529 while (*desc_name == '.')
21530 desc_name++;
21531
21532 if (TREE_PUBLIC (decl))
21533 fprintf (file, "\t.globl %s\n", desc_name);
21534
21535 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21536 fprintf (file, "%s:\n", desc_name);
21537 fprintf (file, "\t.long %s\n", orig_name);
21538 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21539 if (DEFAULT_ABI == ABI_AIX)
21540 fputs ("\t.long 0\n", file);
21541 fprintf (file, "\t.previous\n");
21542 }
21543 ASM_OUTPUT_LABEL (file, name);
21544}
1334b570
AM
21545
21546static void
21547rs6000_elf_end_indicate_exec_stack (void)
21548{
21549 if (TARGET_32BIT)
21550 file_end_indicate_exec_stack ();
21551}
7c262518
RH
21552#endif
21553
cbaaba19 21554#if TARGET_XCOFF
0d5817b2
DE
21555static void
21556rs6000_xcoff_asm_output_anchor (rtx symbol)
21557{
21558 char buffer[100];
21559
21560 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21561 SYMBOL_REF_BLOCK_OFFSET (symbol));
21562 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21563}
21564
7c262518 21565static void
a2369ed3 21566rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21567{
21568 fputs (GLOBAL_ASM_OP, stream);
21569 RS6000_OUTPUT_BASENAME (stream, name);
21570 putc ('\n', stream);
21571}
21572
d6b5193b
RS
21573/* A get_unnamed_section callback, used for read-only sections. DIRECTIVE
21574 points to the section string variable. */
21575
21576static void
21577rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21578{
890f9edf
OH
21579 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21580 *(const char *const *) directive,
21581 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21582}
21583
21584/* Likewise for read-write sections. */
21585
21586static void
21587rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21588{
890f9edf
OH
21589 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21590 *(const char *const *) directive,
21591 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21592}
21593
21594/* A get_unnamed_section callback, used for switching to toc_section. */
21595
21596static void
21597rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21598{
21599 if (TARGET_MINIMAL_TOC)
21600 {
21601 /* toc_section is always selected at least once from
21602 rs6000_xcoff_file_start, so the TOC label below is guaranteed
21603 to be defined exactly once in each file. */
21604 if (!toc_initialized)
21605 {
21606 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21607 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21608 toc_initialized = 1;
21609 }
21610 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21611 (TARGET_32BIT ? "" : ",3"));
21612 }
21613 else
21614 fputs ("\t.toc\n", asm_out_file);
21615}
21616
21617/* Implement TARGET_ASM_INIT_SECTIONS. */
21618
21619static void
21620rs6000_xcoff_asm_init_sections (void)
21621{
21622 read_only_data_section
21623 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21624 &xcoff_read_only_section_name);
21625
21626 private_data_section
21627 = get_unnamed_section (SECTION_WRITE,
21628 rs6000_xcoff_output_readwrite_section_asm_op,
21629 &xcoff_private_data_section_name);
21630
21631 read_only_private_data_section
21632 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21633 &xcoff_private_data_section_name);
21634
21635 toc_section
21636 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21637
21638 readonly_data_section = read_only_data_section;
21639 exception_section = data_section;
21640}
21641
9b580a0b
RH
21642static int
21643rs6000_xcoff_reloc_rw_mask (void)
21644{
21645 return 3;
21646}
21647
b275d088 21648static void
c18a5b6c
MM
21649rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21650 tree decl ATTRIBUTE_UNUSED)
7c262518 21651{
0e5dbd9b
DE
21652 int smclass;
21653 static const char * const suffix[3] = { "PR", "RO", "RW" };
21654
21655 if (flags & SECTION_CODE)
21656 smclass = 0;
21657 else if (flags & SECTION_WRITE)
21658 smclass = 2;
21659 else
21660 smclass = 1;
21661
5b5198f7 21662 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21663 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21664 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21665}
ae46c4e0 21666
d6b5193b 21667static section *
f676971a 21668rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21669 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21670{
9b580a0b 21671 if (decl_readonly_section (decl, reloc))
ae46c4e0 21672 {
0e5dbd9b 21673 if (TREE_PUBLIC (decl))
d6b5193b 21674 return read_only_data_section;
ae46c4e0 21675 else
d6b5193b 21676 return read_only_private_data_section;
ae46c4e0
RH
21677 }
21678 else
21679 {
0e5dbd9b 21680 if (TREE_PUBLIC (decl))
d6b5193b 21681 return data_section;
ae46c4e0 21682 else
d6b5193b 21683 return private_data_section;
ae46c4e0
RH
21684 }
21685}
21686
21687static void
a2369ed3 21688rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21689{
21690 const char *name;
ae46c4e0 21691
5b5198f7
DE
21692 /* Use select_section for private and uninitialized data. */
21693 if (!TREE_PUBLIC (decl)
21694 || DECL_COMMON (decl)
0e5dbd9b
DE
21695 || DECL_INITIAL (decl) == NULL_TREE
21696 || DECL_INITIAL (decl) == error_mark_node
21697 || (flag_zero_initialized_in_bss
21698 && initializer_zerop (DECL_INITIAL (decl))))
21699 return;
21700
21701 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21702 name = (*targetm.strip_name_encoding) (name);
21703 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21704}
b64a1b53 21705
fb49053f
RH
21706/* Select section for constant in constant pool.
21707
21708 On RS/6000, all constants are in the private read-only data area.
21709 However, if this is being placed in the TOC it must be output as a
21710 toc entry. */
21711
d6b5193b 21712static section *
f676971a 21713rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21714 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21715{
21716 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21717 return toc_section;
b64a1b53 21718 else
d6b5193b 21719 return read_only_private_data_section;
b64a1b53 21720}
772c5265
RH
21721
21722/* Remove any trailing [DS] or the like from the symbol name. */
21723
21724static const char *
a2369ed3 21725rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21726{
21727 size_t len;
21728 if (*name == '*')
21729 name++;
21730 len = strlen (name);
21731 if (name[len - 1] == ']')
21732 return ggc_alloc_string (name, len - 4);
21733 else
21734 return name;
21735}
21736
5add3202
DE
21737/* Section attributes. AIX is always PIC. */
21738
21739static unsigned int
a2369ed3 21740rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21741{
5b5198f7 21742 unsigned int align;
9b580a0b 21743 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21744
21745 /* Align to at least UNIT size. */
21746 if (flags & SECTION_CODE)
21747 align = MIN_UNITS_PER_WORD;
21748 else
21749 /* Increase alignment of large objects if not already stricter. */
21750 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21751 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21752 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21753
21754 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21755}
a5fe455b 21756
1bc7c5b6
ZW
21757/* Output at beginning of assembler file.
21758
21759 Initialize the section names for the RS/6000 at this point.
21760
21761 Specify filename, including full path, to assembler.
21762
21763 We want to go into the TOC section so at least one .toc will be emitted.
21764 Also, in order to output proper .bs/.es pairs, we need at least one static
21765 [RW] section emitted.
21766
21767 Finally, declare mcount when profiling to make the assembler happy. */
21768
21769static void
863d938c 21770rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21771{
21772 rs6000_gen_section_name (&xcoff_bss_section_name,
21773 main_input_filename, ".bss_");
21774 rs6000_gen_section_name (&xcoff_private_data_section_name,
21775 main_input_filename, ".rw_");
21776 rs6000_gen_section_name (&xcoff_read_only_section_name,
21777 main_input_filename, ".ro_");
21778
21779 fputs ("\t.file\t", asm_out_file);
21780 output_quoted_string (asm_out_file, main_input_filename);
21781 fputc ('\n', asm_out_file);
1bc7c5b6 21782 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21783 switch_to_section (private_data_section);
21784 switch_to_section (text_section);
1bc7c5b6
ZW
21785 if (profile_flag)
21786 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21787 rs6000_file_start ();
21788}
21789
a5fe455b
ZW
21790/* Output at end of assembler file.
21791 On the RS/6000, referencing data should automatically pull in text. */
21792
21793static void
863d938c 21794rs6000_xcoff_file_end (void)
a5fe455b 21795{
d6b5193b 21796 switch_to_section (text_section);
a5fe455b 21797 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21798 switch_to_section (data_section);
a5fe455b
ZW
21799 fputs (TARGET_32BIT
21800 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21801 asm_out_file);
21802}
f1384257 21803#endif /* TARGET_XCOFF */
0e5dbd9b 21804
3c50106f
RH
21805/* Compute a (partial) cost for rtx X. Return true if the complete
21806 cost has been computed, and false if subexpressions should be
21807 scanned. In either case, *TOTAL contains the cost result. */
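/* For example, a CONST_INT that is directly usable as an immediate
   operand of the containing insn gets *TOTAL = 0 and returns true, so
   its subexpressions are not scanned further.  */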
21808
21809static bool
f40751dd
JH
21810rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21811 bool speed)
3c50106f 21812{
f0517163
RS
21813 enum machine_mode mode = GET_MODE (x);
21814
3c50106f
RH
21815 switch (code)
21816 {
30a555d9 21817 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21818 case CONST_INT:
066cd967
DE
21819 if (((outer_code == SET
21820 || outer_code == PLUS
21821 || outer_code == MINUS)
279bb624
DE
21822 && (satisfies_constraint_I (x)
21823 || satisfies_constraint_L (x)))
066cd967 21824 || (outer_code == AND
279bb624
DE
21825 && (satisfies_constraint_K (x)
21826 || (mode == SImode
21827 ? satisfies_constraint_L (x)
21828 : satisfies_constraint_J (x))
1990cd79
AM
21829 || mask_operand (x, mode)
21830 || (mode == DImode
21831 && mask64_operand (x, DImode))))
22e54023 21832 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21833 && (satisfies_constraint_K (x)
21834 || (mode == SImode
21835 ? satisfies_constraint_L (x)
21836 : satisfies_constraint_J (x))))
066cd967
DE
21837 || outer_code == ASHIFT
21838 || outer_code == ASHIFTRT
21839 || outer_code == LSHIFTRT
21840 || outer_code == ROTATE
21841 || outer_code == ROTATERT
d5861a7a 21842 || outer_code == ZERO_EXTRACT
066cd967 21843 || (outer_code == MULT
279bb624 21844 && satisfies_constraint_I (x))
22e54023
DE
21845 || ((outer_code == DIV || outer_code == UDIV
21846 || outer_code == MOD || outer_code == UMOD)
21847 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21848 || (outer_code == COMPARE
279bb624
DE
21849 && (satisfies_constraint_I (x)
21850 || satisfies_constraint_K (x)))
22e54023 21851 || (outer_code == EQ
279bb624
DE
21852 && (satisfies_constraint_I (x)
21853 || satisfies_constraint_K (x)
21854 || (mode == SImode
21855 ? satisfies_constraint_L (x)
21856 : satisfies_constraint_J (x))))
22e54023 21857 || (outer_code == GTU
279bb624 21858 && satisfies_constraint_I (x))
22e54023 21859 || (outer_code == LTU
279bb624 21860 && satisfies_constraint_P (x)))
066cd967
DE
21861 {
21862 *total = 0;
21863 return true;
21864 }
21865 else if ((outer_code == PLUS
4ae234b0 21866 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21867 || (outer_code == MINUS
4ae234b0 21868 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21869 || ((outer_code == SET
21870 || outer_code == IOR
21871 || outer_code == XOR)
21872 && (INTVAL (x)
21873 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21874 {
21875 *total = COSTS_N_INSNS (1);
21876 return true;
21877 }
21878 /* FALLTHRU */
21879
21880 case CONST_DOUBLE:
f6fe3a22 21881 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21882 {
f6fe3a22
DE
21883 if ((outer_code == IOR || outer_code == XOR)
21884 && CONST_DOUBLE_HIGH (x) == 0
21885 && (CONST_DOUBLE_LOW (x)
21886 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21887 {
21888 *total = 0;
21889 return true;
21890 }
21891 else if ((outer_code == AND && and64_2_operand (x, DImode))
21892 || ((outer_code == SET
21893 || outer_code == IOR
21894 || outer_code == XOR)
21895 && CONST_DOUBLE_HIGH (x) == 0))
21896 {
21897 *total = COSTS_N_INSNS (1);
21898 return true;
21899 }
066cd967
DE
21900 }
21901 /* FALLTHRU */
21902
3c50106f 21903 case CONST:
066cd967 21904 case HIGH:
3c50106f 21905 case SYMBOL_REF:
066cd967
DE
21906 case MEM:
21907 /* When optimizing for size, MEM should be slightly more expensive
21908 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 21909 L1 cache latency is about two instructions. */
f40751dd 21910 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21911 return true;
21912
30a555d9
DE
21913 case LABEL_REF:
21914 *total = 0;
21915 return true;
21916
3c50106f 21917 case PLUS:
f0517163 21918 if (mode == DFmode)
066cd967
DE
21919 {
21920 if (GET_CODE (XEXP (x, 0)) == MULT)
21921 {
21922 /* FNMA accounted in outer NEG. */
21923 if (outer_code == NEG)
21924 *total = rs6000_cost->dmul - rs6000_cost->fp;
21925 else
21926 *total = rs6000_cost->dmul;
21927 }
21928 else
21929 *total = rs6000_cost->fp;
21930 }
f0517163 21931 else if (mode == SFmode)
066cd967
DE
21932 {
21933 /* FNMA accounted in outer NEG. */
21934 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21935 *total = 0;
21936 else
21937 *total = rs6000_cost->fp;
21938 }
f0517163 21939 else
066cd967
DE
21940 *total = COSTS_N_INSNS (1);
21941 return false;
3c50106f 21942
52190329 21943 case MINUS:
f0517163 21944 if (mode == DFmode)
066cd967 21945 {
762c919f
JM
21946 if (GET_CODE (XEXP (x, 0)) == MULT
21947 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21948 {
21949 /* FNMA accounted in outer NEG. */
21950 if (outer_code == NEG)
762c919f 21951 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21952 else
21953 *total = rs6000_cost->dmul;
21954 }
21955 else
21956 *total = rs6000_cost->fp;
21957 }
f0517163 21958 else if (mode == SFmode)
066cd967
DE
21959 {
21960 /* FNMA accounted in outer NEG. */
21961 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21962 *total = 0;
21963 else
21964 *total = rs6000_cost->fp;
21965 }
f0517163 21966 else
c4ad648e 21967 *total = COSTS_N_INSNS (1);
066cd967 21968 return false;
3c50106f
RH
21969
21970 case MULT:
c9dbf840 21971 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21972 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21973 {
8b897cfa
RS
21974 if (INTVAL (XEXP (x, 1)) >= -256
21975 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21976 *total = rs6000_cost->mulsi_const9;
8b897cfa 21977 else
06a67bdd 21978 *total = rs6000_cost->mulsi_const;
3c50106f 21979 }
066cd967
DE
21980 /* FMA accounted in outer PLUS/MINUS. */
21981 else if ((mode == DFmode || mode == SFmode)
21982 && (outer_code == PLUS || outer_code == MINUS))
21983 *total = 0;
f0517163 21984 else if (mode == DFmode)
06a67bdd 21985 *total = rs6000_cost->dmul;
f0517163 21986 else if (mode == SFmode)
06a67bdd 21987 *total = rs6000_cost->fp;
f0517163 21988 else if (mode == DImode)
06a67bdd 21989 *total = rs6000_cost->muldi;
8b897cfa 21990 else
06a67bdd 21991 *total = rs6000_cost->mulsi;
066cd967 21992 return false;
3c50106f
RH
21993
21994 case DIV:
21995 case MOD:
f0517163
RS
21996 if (FLOAT_MODE_P (mode))
21997 {
06a67bdd
RS
21998 *total = mode == DFmode ? rs6000_cost->ddiv
21999 : rs6000_cost->sdiv;
066cd967 22000 return false;
f0517163 22001 }
5efb1046 22002 /* FALLTHRU */
3c50106f
RH
22003
22004 case UDIV:
22005 case UMOD:
627b6fe2
DJ
22006 if (GET_CODE (XEXP (x, 1)) == CONST_INT
22007 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
22008 {
22009 if (code == DIV || code == MOD)
22010 /* Shift, addze */
22011 *total = COSTS_N_INSNS (2);
22012 else
22013 /* Shift */
22014 *total = COSTS_N_INSNS (1);
22015 }
c4ad648e 22016 else
627b6fe2
DJ
22017 {
22018 if (GET_MODE (XEXP (x, 1)) == DImode)
22019 *total = rs6000_cost->divdi;
22020 else
22021 *total = rs6000_cost->divsi;
22022 }
22023 /* Add in shift and subtract for MOD. */
22024 if (code == MOD || code == UMOD)
22025 *total += COSTS_N_INSNS (2);
066cd967 22026 return false;
3c50106f 22027
32f56aad 22028 case CTZ:
3c50106f
RH
22029 case FFS:
22030 *total = COSTS_N_INSNS (4);
066cd967 22031 return false;
3c50106f 22032
32f56aad
DE
22033 case POPCOUNT:
22034 *total = COSTS_N_INSNS (6);
22035 return false;
22036
06a67bdd 22037 case NOT:
066cd967
DE
22038 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
22039 {
22040 *total = 0;
22041 return false;
22042 }
22043 /* FALLTHRU */
22044
22045 case AND:
32f56aad 22046 case CLZ:
066cd967
DE
22047 case IOR:
22048 case XOR:
d5861a7a
DE
22049 case ZERO_EXTRACT:
22050 *total = COSTS_N_INSNS (1);
22051 return false;
22052
066cd967
DE
22053 case ASHIFT:
22054 case ASHIFTRT:
22055 case LSHIFTRT:
22056 case ROTATE:
22057 case ROTATERT:
d5861a7a 22058 /* Handle mul_highpart. */
066cd967
DE
22059 if (outer_code == TRUNCATE
22060 && GET_CODE (XEXP (x, 0)) == MULT)
22061 {
22062 if (mode == DImode)
22063 *total = rs6000_cost->muldi;
22064 else
22065 *total = rs6000_cost->mulsi;
22066 return true;
22067 }
d5861a7a
DE
22068 else if (outer_code == AND)
22069 *total = 0;
22070 else
22071 *total = COSTS_N_INSNS (1);
22072 return false;
22073
22074 case SIGN_EXTEND:
22075 case ZERO_EXTEND:
22076 if (GET_CODE (XEXP (x, 0)) == MEM)
22077 *total = 0;
22078 else
22079 *total = COSTS_N_INSNS (1);
066cd967 22080 return false;
06a67bdd 22081
066cd967
DE
22082 case COMPARE:
22083 case NEG:
22084 case ABS:
22085 if (!FLOAT_MODE_P (mode))
22086 {
22087 *total = COSTS_N_INSNS (1);
22088 return false;
22089 }
22090 /* FALLTHRU */
22091
22092 case FLOAT:
22093 case UNSIGNED_FLOAT:
22094 case FIX:
22095 case UNSIGNED_FIX:
06a67bdd
RS
22096 case FLOAT_TRUNCATE:
22097 *total = rs6000_cost->fp;
066cd967 22098 return false;
06a67bdd 22099
a2af5043
DJ
22100 case FLOAT_EXTEND:
22101 if (mode == DFmode)
22102 *total = 0;
22103 else
22104 *total = rs6000_cost->fp;
22105 return false;
22106
06a67bdd
RS
22107 case UNSPEC:
22108 switch (XINT (x, 1))
22109 {
22110 case UNSPEC_FRSP:
22111 *total = rs6000_cost->fp;
22112 return true;
22113
22114 default:
22115 break;
22116 }
22117 break;
22118
22119 case CALL:
22120 case IF_THEN_ELSE:
f40751dd 22121 if (!speed)
06a67bdd
RS
22122 {
22123 *total = COSTS_N_INSNS (1);
22124 return true;
22125 }
066cd967
DE
22126 else if (FLOAT_MODE_P (mode)
22127 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22128 {
22129 *total = rs6000_cost->fp;
22130 return false;
22131 }
06a67bdd
RS
22132 break;
22133
c0600ecd
DE
22134 case EQ:
22135 case GTU:
22136 case LTU:
22e54023
DE
22137 /* Carry bit requires mode == Pmode.
22138 NEG or PLUS already counted so only add one. */
22139 if (mode == Pmode
22140 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22141 {
22e54023
DE
22142 *total = COSTS_N_INSNS (1);
22143 return true;
22144 }
22145 if (outer_code == SET)
22146 {
22147 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22148 {
22e54023 22149 *total = COSTS_N_INSNS (2);
c0600ecd 22150 return true;
c0600ecd 22151 }
22e54023
DE
22152 else if (mode == Pmode)
22153 {
22154 *total = COSTS_N_INSNS (3);
22155 return false;
22156 }
22157 }
22158 /* FALLTHRU */
22159
22160 case GT:
22161 case LT:
22162 case UNORDERED:
22163 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22164 {
22165 *total = COSTS_N_INSNS (2);
22166 return true;
c0600ecd 22167 }
22e54023
DE
22168 /* CC COMPARE. */
22169 if (outer_code == COMPARE)
22170 {
22171 *total = 0;
22172 return true;
22173 }
22174 break;
c0600ecd 22175
3c50106f 22176 default:
06a67bdd 22177 break;
3c50106f 22178 }
06a67bdd
RS
22179
22180 return false;
3c50106f
RH
22181}
22182
34bb030a
DE
22183/* A C expression returning the cost of moving data from a register of class
22184 CLASS1 to one of CLASS2. */
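/* For example, a move between a GPR and a floating-point or AltiVec
   register is costed as a store plus a load, i.e. the sum of the two
   memory move costs computed below.  */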
22185
22186int
f676971a 22187rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22188 enum reg_class from, enum reg_class to)
34bb030a
DE
22189{
22190 /* Moves from/to GENERAL_REGS. */
22191 if (reg_classes_intersect_p (to, GENERAL_REGS)
22192 || reg_classes_intersect_p (from, GENERAL_REGS))
22193 {
22194 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22195 from = to;
22196
22197 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22198 return (rs6000_memory_move_cost (mode, from, 0)
22199 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22200
c4ad648e
AM
22201 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22202 shift. */
34bb030a
DE
22203 else if (from == CR_REGS)
22204 return 4;
22205
aafc759a
PH
22206 /* Power6 has slower LR/CTR moves so make them more expensive than
22207 memory in order to bias spills to memory.  */
22208 else if (rs6000_cpu == PROCESSOR_POWER6
22209 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22210 return 6 * hard_regno_nregs[0][mode];
22211
34bb030a 22212 else
c4ad648e 22213 /* A move will cost one instruction per GPR moved. */
c8b622ff 22214 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22215 }
22216
c4ad648e 22217 /* Moving between two similar registers is just one instruction. */
34bb030a 22218 else if (reg_classes_intersect_p (to, from))
7393f7f8 22219 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22220
c4ad648e 22221 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22222 else
f676971a 22223 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22224 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22225}
22226
22227/* A C expression returning the cost of moving data of MODE from a register to
22228 or from memory. */
22229
22230int
0a2aaacc 22231rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22232 int in ATTRIBUTE_UNUSED)
34bb030a 22233{
0a2aaacc 22234 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22235 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22236 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22237 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22238 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22239 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22240 else
0a2aaacc 22241 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22242}
22243
9c78b944
DE
22244/* Returns a code for a target-specific builtin that implements the
22245 reciprocal of the function, or NULL_TREE if not available. */
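/* In practice this lets a reciprocal of sqrtf, e.g. 1.0f / sqrtf (x),
   be expanded through the RS6000_BUILTIN_RSQRTF builtin (see
   rs6000_emit_swrsqrtsf below) when TARGET_RECIP and the finite/unsafe
   math flags checked here are in effect.  */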
22246
22247static tree
22248rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22249 bool sqrt ATTRIBUTE_UNUSED)
22250{
22251 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22252 && flag_finite_math_only && !flag_trapping_math
22253 && flag_unsafe_math_optimizations))
22254 return NULL_TREE;
22255
22256 if (md_fn)
22257 return NULL_TREE;
22258 else
22259 switch (fn)
22260 {
22261 case BUILT_IN_SQRTF:
22262 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22263
22264 default:
22265 return NULL_TREE;
22266 }
22267}
22268
ef765ea9
DE
22269/* Newton-Raphson approximation of single-precision floating point divide n/d.
22270 Assumes no trapping math and finite arguments. */
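/* The sequence below refines a hardware fres reciprocal estimate twice
   (e1 = e0 + e0*e0 folds two correction terms into y1 = x0 + e1*x0),
   forms u0 = n * y1, and finishes with one residual step
   dst = u0 + (n - d*u0) * y1.  */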
22271
22272void
9c78b944 22273rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22274{
22275 rtx x0, e0, e1, y1, u0, v0, one;
22276
22277 x0 = gen_reg_rtx (SFmode);
22278 e0 = gen_reg_rtx (SFmode);
22279 e1 = gen_reg_rtx (SFmode);
22280 y1 = gen_reg_rtx (SFmode);
22281 u0 = gen_reg_rtx (SFmode);
22282 v0 = gen_reg_rtx (SFmode);
22283 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22284
22285 /* x0 = 1./d estimate */
22286 emit_insn (gen_rtx_SET (VOIDmode, x0,
22287 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22288 UNSPEC_FRES)));
22289 /* e0 = 1. - d * x0 */
22290 emit_insn (gen_rtx_SET (VOIDmode, e0,
22291 gen_rtx_MINUS (SFmode, one,
22292 gen_rtx_MULT (SFmode, d, x0))));
22293 /* e1 = e0 + e0 * e0 */
22294 emit_insn (gen_rtx_SET (VOIDmode, e1,
22295 gen_rtx_PLUS (SFmode,
22296 gen_rtx_MULT (SFmode, e0, e0), e0)));
22297 /* y1 = x0 + e1 * x0 */
22298 emit_insn (gen_rtx_SET (VOIDmode, y1,
22299 gen_rtx_PLUS (SFmode,
22300 gen_rtx_MULT (SFmode, e1, x0), x0)));
22301 /* u0 = n * y1 */
22302 emit_insn (gen_rtx_SET (VOIDmode, u0,
22303 gen_rtx_MULT (SFmode, n, y1)));
22304 /* v0 = n - d * u0 */
22305 emit_insn (gen_rtx_SET (VOIDmode, v0,
22306 gen_rtx_MINUS (SFmode, n,
22307 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22308 /* dst = u0 + v0 * y1 */
22309 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22310 gen_rtx_PLUS (SFmode,
22311 gen_rtx_MULT (SFmode, v0, y1), u0)));
22312}
22313
22314/* Newton-Raphson approximation of double-precision floating point divide n/d.
22315 Assumes no trapping math and finite arguments. */
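/* Same scheme as rs6000_emit_swdivsf above, but the reciprocal estimate
   is refined three times (y1, y2, y3) before the final residual step,
   since double precision needs the extra accuracy.  */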
22316
22317void
9c78b944 22318rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22319{
22320 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22321
22322 x0 = gen_reg_rtx (DFmode);
22323 e0 = gen_reg_rtx (DFmode);
22324 e1 = gen_reg_rtx (DFmode);
22325 e2 = gen_reg_rtx (DFmode);
22326 y1 = gen_reg_rtx (DFmode);
22327 y2 = gen_reg_rtx (DFmode);
22328 y3 = gen_reg_rtx (DFmode);
22329 u0 = gen_reg_rtx (DFmode);
22330 v0 = gen_reg_rtx (DFmode);
22331 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22332
22333 /* x0 = 1./d estimate */
22334 emit_insn (gen_rtx_SET (VOIDmode, x0,
22335 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22336 UNSPEC_FRES)));
22337 /* e0 = 1. - d * x0 */
22338 emit_insn (gen_rtx_SET (VOIDmode, e0,
22339 gen_rtx_MINUS (DFmode, one,
22340 gen_rtx_MULT (DFmode, d, x0))));
22341 /* y1 = x0 + e0 * x0 */
22342 emit_insn (gen_rtx_SET (VOIDmode, y1,
22343 gen_rtx_PLUS (DFmode,
22344 gen_rtx_MULT (DFmode, e0, x0), x0)));
22345 /* e1 = e0 * e0 */
22346 emit_insn (gen_rtx_SET (VOIDmode, e1,
22347 gen_rtx_MULT (DFmode, e0, e0)));
22348 /* y2 = y1 + e1 * y1 */
22349 emit_insn (gen_rtx_SET (VOIDmode, y2,
22350 gen_rtx_PLUS (DFmode,
22351 gen_rtx_MULT (DFmode, e1, y1), y1)));
22352 /* e2 = e1 * e1 */
22353 emit_insn (gen_rtx_SET (VOIDmode, e2,
22354 gen_rtx_MULT (DFmode, e1, e1)));
22355 /* y3 = y2 + e2 * y2 */
22356 emit_insn (gen_rtx_SET (VOIDmode, y3,
22357 gen_rtx_PLUS (DFmode,
22358 gen_rtx_MULT (DFmode, e2, y2), y2)));
22359 /* u0 = n * y3 */
22360 emit_insn (gen_rtx_SET (VOIDmode, u0,
22361 gen_rtx_MULT (DFmode, n, y3)));
22362 /* v0 = n - d * u0 */
22363 emit_insn (gen_rtx_SET (VOIDmode, v0,
22364 gen_rtx_MINUS (DFmode, n,
22365 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22366 /* dst = u0 + v0 * y3 */
22367 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22368 gen_rtx_PLUS (DFmode,
22369 gen_rtx_MULT (DFmode, v0, y3), u0)));
22370}
22371
565ef4ba 22372
9c78b944
DE
22373/* Newton-Raphson approximation of single-precision floating point rsqrt.
22374 Assumes no trapping math and finite arguments. */
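/* After guarding the 0.0/1.0/NaN/Inf cases, the code below applies three
   Newton steps x' = x * (1.5 - (0.5 * src) * x * x) to the hardware
   rsqrt estimate, with y1 holding the precomputed 0.5 * src.  */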
22375
22376void
22377rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22378{
22379 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22380 half, one, halfthree, c1, cond, label;
22381
22382 x0 = gen_reg_rtx (SFmode);
22383 x1 = gen_reg_rtx (SFmode);
22384 x2 = gen_reg_rtx (SFmode);
22385 y1 = gen_reg_rtx (SFmode);
22386 u0 = gen_reg_rtx (SFmode);
22387 u1 = gen_reg_rtx (SFmode);
22388 u2 = gen_reg_rtx (SFmode);
22389 v0 = gen_reg_rtx (SFmode);
22390 v1 = gen_reg_rtx (SFmode);
22391 v2 = gen_reg_rtx (SFmode);
22392 t0 = gen_reg_rtx (SFmode);
22393 halfthree = gen_reg_rtx (SFmode);
22394 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22395 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22396
22397 /* Check 0.0, 1.0, NaN and Inf by testing whether src * src == src. */
22398 emit_insn (gen_rtx_SET (VOIDmode, t0,
22399 gen_rtx_MULT (SFmode, src, src)));
22400
22401 emit_insn (gen_rtx_SET (VOIDmode, cond,
22402 gen_rtx_COMPARE (CCFPmode, t0, src)));
22403 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22404 emit_unlikely_jump (c1, label);
22405
22406 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22407 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22408
22409 /* halfthree = 1.5 = 1.0 + 0.5 */
22410 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22411 gen_rtx_PLUS (SFmode, one, half)));
22412
22413 /* x0 = rsqrt estimate */
22414 emit_insn (gen_rtx_SET (VOIDmode, x0,
22415 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22416 UNSPEC_RSQRT)));
22417
22418 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22419 emit_insn (gen_rtx_SET (VOIDmode, y1,
22420 gen_rtx_MINUS (SFmode,
22421 gen_rtx_MULT (SFmode, src, halfthree),
22422 src)));
22423
22424 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22425 emit_insn (gen_rtx_SET (VOIDmode, u0,
22426 gen_rtx_MULT (SFmode, x0, x0)));
22427 emit_insn (gen_rtx_SET (VOIDmode, v0,
22428 gen_rtx_MINUS (SFmode,
22429 halfthree,
22430 gen_rtx_MULT (SFmode, y1, u0))));
22431 emit_insn (gen_rtx_SET (VOIDmode, x1,
22432 gen_rtx_MULT (SFmode, x0, v0)));
22433
22434 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22435 emit_insn (gen_rtx_SET (VOIDmode, u1,
22436 gen_rtx_MULT (SFmode, x1, x1)));
22437 emit_insn (gen_rtx_SET (VOIDmode, v1,
22438 gen_rtx_MINUS (SFmode,
22439 halfthree,
22440 gen_rtx_MULT (SFmode, y1, u1))));
22441 emit_insn (gen_rtx_SET (VOIDmode, x2,
22442 gen_rtx_MULT (SFmode, x1, v1)));
22443
22444 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22445 emit_insn (gen_rtx_SET (VOIDmode, u2,
22446 gen_rtx_MULT (SFmode, x2, x2)));
22447 emit_insn (gen_rtx_SET (VOIDmode, v2,
22448 gen_rtx_MINUS (SFmode,
22449 halfthree,
22450 gen_rtx_MULT (SFmode, y1, u2))));
22451 emit_insn (gen_rtx_SET (VOIDmode, dst,
22452 gen_rtx_MULT (SFmode, x2, v2)));
22453
22454 emit_label (XEXP (label, 0));
22455}
22456
565ef4ba
RS
22457/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22458 target, and SRC is the argument operand. */
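/* popcntb leaves a population count in each byte of TMP1; multiplying by
   0x01010101 (or its 64-bit counterpart) sums those byte counts into the
   most significant byte, which the final right shift by 24 (or 56)
   extracts.  */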
22459
22460void
22461rs6000_emit_popcount (rtx dst, rtx src)
22462{
22463 enum machine_mode mode = GET_MODE (dst);
22464 rtx tmp1, tmp2;
22465
22466 tmp1 = gen_reg_rtx (mode);
22467
22468 if (mode == SImode)
22469 {
22470 emit_insn (gen_popcntbsi2 (tmp1, src));
22471 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22472 NULL_RTX, 0);
22473 tmp2 = force_reg (SImode, tmp2);
22474 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22475 }
22476 else
22477 {
22478 emit_insn (gen_popcntbdi2 (tmp1, src));
22479 tmp2 = expand_mult (DImode, tmp1,
22480 GEN_INT ((HOST_WIDE_INT)
22481 0x01010101 << 32 | 0x01010101),
22482 NULL_RTX, 0);
22483 tmp2 = force_reg (DImode, tmp2);
22484 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22485 }
22486}
22487
22488
22489/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22490 target, and SRC is the argument operand. */
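/* Parity is popcount modulo 2.  When the multiply used by
   rs6000_emit_popcount is cheap enough it is reused and masked with 1;
   otherwise successive shift/xor steps fold the popcntb byte counts
   together before the final AND with const1_rtx.  */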
22491
22492void
22493rs6000_emit_parity (rtx dst, rtx src)
22494{
22495 enum machine_mode mode = GET_MODE (dst);
22496 rtx tmp;
22497
22498 tmp = gen_reg_rtx (mode);
22499 if (mode == SImode)
22500 {
22501 /* Is mult+shift >= shift+xor+shift+xor? */
22502 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22503 {
22504 rtx tmp1, tmp2, tmp3, tmp4;
22505
22506 tmp1 = gen_reg_rtx (SImode);
22507 emit_insn (gen_popcntbsi2 (tmp1, src));
22508
22509 tmp2 = gen_reg_rtx (SImode);
22510 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22511 tmp3 = gen_reg_rtx (SImode);
22512 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22513
22514 tmp4 = gen_reg_rtx (SImode);
22515 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22516 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22517 }
22518 else
22519 rs6000_emit_popcount (tmp, src);
22520 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22521 }
22522 else
22523 {
22524 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22525 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22526 {
22527 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22528
22529 tmp1 = gen_reg_rtx (DImode);
22530 emit_insn (gen_popcntbdi2 (tmp1, src));
22531
22532 tmp2 = gen_reg_rtx (DImode);
22533 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22534 tmp3 = gen_reg_rtx (DImode);
22535 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22536
22537 tmp4 = gen_reg_rtx (DImode);
22538 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22539 tmp5 = gen_reg_rtx (DImode);
22540 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22541
22542 tmp6 = gen_reg_rtx (DImode);
22543 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22544 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22545 }
22546 else
22547 rs6000_emit_popcount (tmp, src);
22548 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22549 }
22550}
22551
ded9bf77
AH
22552/* Return an RTX representing where to find the function value of a
22553 function returning MODE. */
22554static rtx
22555rs6000_complex_function_value (enum machine_mode mode)
22556{
22557 unsigned int regno;
22558 rtx r1, r2;
22559 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22560 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22561
18f63bfa
AH
22562 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22563 regno = FP_ARG_RETURN;
354ed18f
AH
22564 else
22565 {
18f63bfa 22566 regno = GP_ARG_RETURN;
ded9bf77 22567
18f63bfa
AH
22568 /* 32-bit is OK since it'll go in r3/r4. */
22569 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22570 return gen_rtx_REG (mode, regno);
22571 }
22572
18f63bfa
AH
22573 if (inner_bytes >= 8)
22574 return gen_rtx_REG (mode, regno);
22575
ded9bf77
AH
22576 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22577 const0_rtx);
22578 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22579 GEN_INT (inner_bytes));
ded9bf77
AH
22580 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22581}
22582
a6ebc39a
AH
22583/* Define how to find the value returned by a function.
22584 VALTYPE is the data type of the value (as a tree).
22585 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22586 otherwise, FUNC is 0.
22587
22588 On the SPE, both FPs and vectors are returned in r3.
22589
22590 On RS/6000 an integer value is in r3 and a floating-point value is in
22591 fp1, unless -msoft-float. */
22592
22593rtx
586de218 22594rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22595{
22596 enum machine_mode mode;
2a8fa26c 22597 unsigned int regno;
a6ebc39a 22598
594a51fe
SS
22599 /* Special handling for structs in darwin64. */
22600 if (rs6000_darwin64_abi
22601 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22602 && TREE_CODE (valtype) == RECORD_TYPE
22603 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22604 {
22605 CUMULATIVE_ARGS valcum;
22606 rtx valret;
22607
0b5383eb 22608 valcum.words = 0;
594a51fe
SS
22609 valcum.fregno = FP_ARG_MIN_REG;
22610 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22611 /* Do a trial code generation as if this were going to be passed as
22612 an argument; if any part goes in memory, we return NULL. */
22613 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22614 if (valret)
22615 return valret;
22616 /* Otherwise fall through to standard ABI rules. */
22617 }
22618
0e67400a
FJ
22619 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22620 {
22621 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22622 return gen_rtx_PARALLEL (DImode,
22623 gen_rtvec (2,
22624 gen_rtx_EXPR_LIST (VOIDmode,
22625 gen_rtx_REG (SImode, GP_ARG_RETURN),
22626 const0_rtx),
22627 gen_rtx_EXPR_LIST (VOIDmode,
22628 gen_rtx_REG (SImode,
22629 GP_ARG_RETURN + 1),
22630 GEN_INT (4))));
22631 }
0f086e42
FJ
22632 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22633 {
22634 return gen_rtx_PARALLEL (DCmode,
22635 gen_rtvec (4,
22636 gen_rtx_EXPR_LIST (VOIDmode,
22637 gen_rtx_REG (SImode, GP_ARG_RETURN),
22638 const0_rtx),
22639 gen_rtx_EXPR_LIST (VOIDmode,
22640 gen_rtx_REG (SImode,
22641 GP_ARG_RETURN + 1),
22642 GEN_INT (4)),
22643 gen_rtx_EXPR_LIST (VOIDmode,
22644 gen_rtx_REG (SImode,
22645 GP_ARG_RETURN + 2),
22646 GEN_INT (8)),
22647 gen_rtx_EXPR_LIST (VOIDmode,
22648 gen_rtx_REG (SImode,
22649 GP_ARG_RETURN + 3),
22650 GEN_INT (12))));
22651 }
602ea4d3 22652
7348aa7f
FXC
22653 mode = TYPE_MODE (valtype);
22654 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22655 || POINTER_TYPE_P (valtype))
b78d48dd 22656 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22657
e41b2a33
PB
22658 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22659 /* _Decimal128 must use an even/odd register pair. */
22660 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
2c5cac98
ME
22661 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS
22662 && ((TARGET_SINGLE_FLOAT && (mode == SFmode)) || TARGET_DOUBLE_FLOAT))
2a8fa26c 22663 regno = FP_ARG_RETURN;
ded9bf77 22664 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22665 && targetm.calls.split_complex_arg)
ded9bf77 22666 return rs6000_complex_function_value (mode);
44688022 22667 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22668 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22669 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22670 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22671 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22672 && (mode == DFmode || mode == DCmode
22673 || mode == TFmode || mode == TCmode))
18f63bfa 22674 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22675 else
22676 regno = GP_ARG_RETURN;
22677
22678 return gen_rtx_REG (mode, regno);
22679}
22680
ded9bf77
AH
22681/* Define how to find the value returned by a library function
22682 assuming the value has mode MODE. */
22683rtx
22684rs6000_libcall_value (enum machine_mode mode)
22685{
22686 unsigned int regno;
22687
2e6c9641
FJ
22688 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22689 {
22690 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22691 return gen_rtx_PARALLEL (DImode,
22692 gen_rtvec (2,
22693 gen_rtx_EXPR_LIST (VOIDmode,
22694 gen_rtx_REG (SImode, GP_ARG_RETURN),
22695 const0_rtx),
22696 gen_rtx_EXPR_LIST (VOIDmode,
22697 gen_rtx_REG (SImode,
22698 GP_ARG_RETURN + 1),
22699 GEN_INT (4))));
22700 }
22701
e41b2a33
PB
22702 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22703 /* _Decimal128 must use an even/odd register pair. */
22704 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22705 else if (SCALAR_FLOAT_MODE_P (mode)
d083fbba
ME
22706 && TARGET_HARD_FLOAT && TARGET_FPRS
22707 && ((TARGET_SINGLE_FLOAT && mode == SFmode) || TARGET_DOUBLE_FLOAT))
ded9bf77 22708 regno = FP_ARG_RETURN;
44688022
AM
22709 else if (ALTIVEC_VECTOR_MODE (mode)
22710 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22711 regno = ALTIVEC_ARG_RETURN;
42ba5130 22712 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22713 return rs6000_complex_function_value (mode);
18f63bfa 22714 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22715 && (mode == DFmode || mode == DCmode
22716 || mode == TFmode || mode == TCmode))
18f63bfa 22717 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22718 else
22719 regno = GP_ARG_RETURN;
22720
22721 return gen_rtx_REG (mode, regno);
22722}
22723
d1d0c603
JJ
22724/* Define the offset between two registers, FROM to be eliminated and its
22725 replacement TO, at the start of a routine. */
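/* For example, once the prologue has pushed a frame, replacing the
   argument pointer by the stack pointer adds info->total_size, while
   replacing the hard frame pointer by the stack pointer adds nothing.  */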
22726HOST_WIDE_INT
22727rs6000_initial_elimination_offset (int from, int to)
22728{
22729 rs6000_stack_t *info = rs6000_stack_info ();
22730 HOST_WIDE_INT offset;
22731
7d5175e1 22732 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22733 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22734 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22735 {
22736 offset = info->push_p ? 0 : -info->total_size;
22737 if (FRAME_GROWS_DOWNWARD)
5b667039 22738 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22739 }
22740 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22741 offset = FRAME_GROWS_DOWNWARD
5b667039 22742 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22743 : 0;
22744 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22745 offset = info->total_size;
22746 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22747 offset = info->push_p ? info->total_size : 0;
22748 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22749 offset = 0;
22750 else
37409796 22751 gcc_unreachable ();
d1d0c603
JJ
22752
22753 return offset;
22754}
22755
96714395 22756static rtx
a2369ed3 22757rs6000_dwarf_register_span (rtx reg)
96714395 22758{
6cd1d2e2
DJ
22759 rtx parts[8];
22760 int i, words;
22761 unsigned regno = REGNO (reg);
22762 enum machine_mode mode = GET_MODE (reg);
96714395 22763
4d4cbc0e 22764 if (TARGET_SPE
6cd1d2e2 22765 && regno < 32
4d4cbc0e 22766 && (SPE_VECTOR_MODE (GET_MODE (reg))
6cd1d2e2
DJ
22767 || (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode)
22768 && mode != SFmode && mode != SDmode && mode != SCmode)))
4d4cbc0e
AH
22769 ;
22770 else
96714395
AH
22771 return NULL_RTX;
22772
22773 regno = REGNO (reg);
22774
22775 /* The duality of the SPE register size wreaks all kinds of havoc.
22776 This is a way of distinguishing r0 in 32-bits from r0 in
22777 64-bits. */
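  /* Each 64-bit SPE value is therefore described to DWARF as a pair of
     SImode pieces per word: the ordinary GPR plus a phantom high-part
     register numbered REGNO + 1200, ordered according to endianness.  */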
6cd1d2e2
DJ
22778 words = (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
22779 gcc_assert (words <= 4);
22780 for (i = 0; i < words; i++, regno++)
22781 {
22782 if (BYTES_BIG_ENDIAN)
22783 {
22784 parts[2 * i] = gen_rtx_REG (SImode, regno + 1200);
22785 parts[2 * i + 1] = gen_rtx_REG (SImode, regno);
22786 }
22787 else
22788 {
22789 parts[2 * i] = gen_rtx_REG (SImode, regno);
22790 parts[2 * i + 1] = gen_rtx_REG (SImode, regno + 1200);
22791 }
22792 }
22793
22794 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (words * 2, parts));
96714395
AH
22795}
22796
37ea0b7e
JM
22797/* Fill in sizes for the SPE register high parts in the table used by the unwinder. */
22798
22799static void
22800rs6000_init_dwarf_reg_sizes_extra (tree address)
22801{
22802 if (TARGET_SPE)
22803 {
22804 int i;
22805 enum machine_mode mode = TYPE_MODE (char_type_node);
bbbbb16a 22806 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
37ea0b7e
JM
22807 rtx mem = gen_rtx_MEM (BLKmode, addr);
22808 rtx value = gen_int_mode (4, mode);
22809
22810 for (i = 1201; i < 1232; i++)
22811 {
22812 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22813 HOST_WIDE_INT offset
22814 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22815
22816 emit_move_insn (adjust_address (mem, mode, offset), value);
22817 }
22818 }
22819}
22820
93c9d1ba
AM
22821/* Map internal gcc register numbers to DWARF2 register numbers. */
22822
22823unsigned int
22824rs6000_dbx_register_number (unsigned int regno)
22825{
22826 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22827 return regno;
22828 if (regno == MQ_REGNO)
22829 return 100;
1de43f85 22830 if (regno == LR_REGNO)
93c9d1ba 22831 return 108;
1de43f85 22832 if (regno == CTR_REGNO)
93c9d1ba
AM
22833 return 109;
22834 if (CR_REGNO_P (regno))
22835 return regno - CR0_REGNO + 86;
22836 if (regno == XER_REGNO)
22837 return 101;
22838 if (ALTIVEC_REGNO_P (regno))
22839 return regno - FIRST_ALTIVEC_REGNO + 1124;
22840 if (regno == VRSAVE_REGNO)
22841 return 356;
22842 if (regno == VSCR_REGNO)
22843 return 67;
22844 if (regno == SPE_ACC_REGNO)
22845 return 99;
22846 if (regno == SPEFSCR_REGNO)
22847 return 612;
22848 /* SPE high reg number. We get these values of regno from
22849 rs6000_dwarf_register_span. */
37409796
NS
22850 gcc_assert (regno >= 1200 && regno < 1232);
22851 return regno;
93c9d1ba
AM
22852}
22853
93f90be6 22854/* target hook eh_return_filter_mode */
f676971a 22855static enum machine_mode
93f90be6
FJ
22856rs6000_eh_return_filter_mode (void)
22857{
22858 return TARGET_32BIT ? SImode : word_mode;
22859}
22860
00b79d54
BE
22861/* Target hook for scalar_mode_supported_p. */
22862static bool
22863rs6000_scalar_mode_supported_p (enum machine_mode mode)
22864{
22865 if (DECIMAL_FLOAT_MODE_P (mode))
22866 return true;
22867 else
22868 return default_scalar_mode_supported_p (mode);
22869}
22870
f676971a
EC
22871/* Target hook for vector_mode_supported_p. */
22872static bool
22873rs6000_vector_mode_supported_p (enum machine_mode mode)
22874{
22875
96038623
DE
22876 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22877 return true;
22878
f676971a
EC
22879 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22880 return true;
22881
22882 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22883 return true;
22884
22885 else
22886 return false;
22887}
22888
bb8df8a6
EC
22889/* Target hook for invalid_arg_for_unprototyped_fn. */
22890static const char *
3101faab 22891invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22892{
22893 return (!rs6000_darwin64_abi
22894 && typelist == 0
22895 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22896 && (funcdecl == NULL_TREE
22897 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22898 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22899 ? N_("AltiVec argument passed to unprototyped function")
22900 : NULL;
22901}
22902
3aebbe5f
JJ
22903/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22904 setup by using the hidden __stack_chk_fail_local function instead of
22905 calling __stack_chk_fail directly. Otherwise it is better to call
22906 __stack_chk_fail directly. */
22907
22908static tree
22909rs6000_stack_protect_fail (void)
22910{
22911 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22912 ? default_hidden_stack_protect_fail ()
22913 : default_external_stack_protect_fail ();
22914}
22915
c921bad8
AP
22916void
22917rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
22918 int num_operands ATTRIBUTE_UNUSED)
22919{
22920 if (rs6000_warn_cell_microcode)
22921 {
22922 const char *temp;
22923 int insn_code_number = recog_memoized (insn);
22924 location_t location = locator_location (INSN_LOCATOR (insn));
22925
22926 /* Punt on insns we cannot recognize. */
22927 if (insn_code_number < 0)
22928 return;
22929
22930 temp = get_insn_template (insn_code_number, insn);
22931
22932 if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
22933 warning_at (location, OPT_mwarn_cell_microcode,
22934 "emitting microcode insn %s\t[%s] #%d",
22935 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22936 else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
22937 warning_at (location, OPT_mwarn_cell_microcode,
22938 "emitting conditional microcode insn %s\t[%s] #%d",
22939 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22940 }
22941}
22942
17211ab5 22943#include "gt-rs6000.h"