/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct GTY(()) machine_function
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */
int rs6000_debug_reg;		/* debug register classes */
int rs6000_debug_addr;		/* debug memory addressing */
int rs6000_debug_cost;		/* debug rtx_costs */

/* Specify the machine mode that pointers have.  After generation of rtl, the
   compiler makes no further distinction between pointers and any other objects
   of this machine mode.  The type is unsigned since not all things that
   include rs6000.h also include machmode.h.  */
unsigned rs6000_pmode;

/* Width in bits of a pointer.  */
unsigned rs6000_pointer_size;

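/* Illustrative sketch, not part of the original file: the option-override
   code later in this file is expected to initialize these two globals
   roughly as

     rs6000_pmode = (unsigned) (TARGET_64BIT ? DImode : SImode);
     rs6000_pointer_size = (TARGET_64BIT ? 64 : 32);

   so the rest of the backend can test them rather than TARGET_64BIT
   directly.  The exact assignments are outside this excerpt.  */
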
/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Maximum number of registers needed for a given register class and mode.  */
unsigned char rs6000_class_max_nregs[NUM_MACHINE_MODES][LIM_REG_CLASSES];

/* How many registers are needed for a given register and mode.  */
unsigned char rs6000_hard_regno_nregs[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Map register number to register class.  */
enum reg_class rs6000_regno_regclass[FIRST_PSEUDO_REGISTER];

/* Built in types.  */
tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
static struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool spe_abi;			/* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;		/* True if -mabi=altivec/no-altivec used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;			/* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Describe the vector unit used for modes.  */
enum rs6000_vector rs6000_vector_unit[NUM_MACHINE_MODES];
enum rs6000_vector rs6000_vector_mem[NUM_MACHINE_MODES];
enum reg_class rs6000_vector_reg_class[NUM_MACHINE_MODES];

/* Describe the alignment of a vector.  */
int rs6000_vector_align[NUM_MACHINE_MODES];

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
  const int cache_line_size;	/* cache line size in bytes.  */
  const int l1_cache_size;	/* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;	/* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
					operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

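/* Added note, for illustration: every entry in the tables below is
   expressed with COSTS_N_INSNS, i.e. as a multiple of the cost of one
   simple fast instruction (an add).  For example, a divsi entry of
   COSTS_N_INSNS (19) models an SImode divide as roughly nineteen times
   as expensive as an add on that processor.  */
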
/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  512,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,			/* cache line size */
  256,			/* l1 cache */
  1024,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,			/* cache line size */
  128,			/* l1 cache */
  2048,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  128,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  8,			/* l1 cache */
  64,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  512,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),    /* mulsi_const */
  COSTS_N_INSNS (6/2),    /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),   /* divsi */
  COSTS_N_INSNS (70/2),   /* divdi */
  COSTS_N_INSNS (10/2),   /* fp */
  COSTS_N_INSNS (10/2),   /* dmul */
  COSTS_N_INSNS (74/2),   /* sdiv */
  COSTS_N_INSNS (74/2),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  512,			/* l2 cache */
  6,			/* streams */
};

RS
639/* Instruction costs on PPC750 and PPC7400 processors. */
640static const
641struct processor_costs ppc750_cost = {
06a67bdd
RS
642 COSTS_N_INSNS (5), /* mulsi */
643 COSTS_N_INSNS (3), /* mulsi_const */
644 COSTS_N_INSNS (2), /* mulsi_const9 */
645 COSTS_N_INSNS (5), /* muldi */
646 COSTS_N_INSNS (17), /* divsi */
647 COSTS_N_INSNS (17), /* divdi */
648 COSTS_N_INSNS (3), /* fp */
649 COSTS_N_INSNS (3), /* dmul */
650 COSTS_N_INSNS (17), /* sdiv */
651 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 652 32, /* cache line size */
5f732aba
DE
653 32, /* l1 cache */
654 512, /* l2 cache */
0b11da67 655 1, /* streams */
8b897cfa
RS
656};
657
658/* Instruction costs on PPC7450 processors. */
659static const
660struct processor_costs ppc7450_cost = {
06a67bdd
RS
661 COSTS_N_INSNS (4), /* mulsi */
662 COSTS_N_INSNS (3), /* mulsi_const */
663 COSTS_N_INSNS (3), /* mulsi_const9 */
664 COSTS_N_INSNS (4), /* muldi */
665 COSTS_N_INSNS (23), /* divsi */
666 COSTS_N_INSNS (23), /* divdi */
667 COSTS_N_INSNS (5), /* fp */
668 COSTS_N_INSNS (5), /* dmul */
669 COSTS_N_INSNS (21), /* sdiv */
670 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 671 32, /* cache line size */
5f732aba
DE
672 32, /* l1 cache */
673 1024, /* l2 cache */
0b11da67 674 1, /* streams */
8b897cfa 675};
a3170dc6 676
8b897cfa
RS
677/* Instruction costs on PPC8540 processors. */
678static const
679struct processor_costs ppc8540_cost = {
06a67bdd
RS
680 COSTS_N_INSNS (4), /* mulsi */
681 COSTS_N_INSNS (4), /* mulsi_const */
682 COSTS_N_INSNS (4), /* mulsi_const9 */
683 COSTS_N_INSNS (4), /* muldi */
684 COSTS_N_INSNS (19), /* divsi */
685 COSTS_N_INSNS (19), /* divdi */
686 COSTS_N_INSNS (4), /* fp */
687 COSTS_N_INSNS (4), /* dmul */
688 COSTS_N_INSNS (29), /* sdiv */
689 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 690 32, /* cache line size */
5f732aba
DE
691 32, /* l1 cache */
692 256, /* l2 cache */
0b11da67 693 1, /* prefetch streams /*/
8b897cfa
RS
694};
695
fa41c305
EW
696/* Instruction costs on E300C2 and E300C3 cores. */
697static const
698struct processor_costs ppce300c2c3_cost = {
699 COSTS_N_INSNS (4), /* mulsi */
700 COSTS_N_INSNS (4), /* mulsi_const */
701 COSTS_N_INSNS (4), /* mulsi_const9 */
702 COSTS_N_INSNS (4), /* muldi */
703 COSTS_N_INSNS (19), /* divsi */
704 COSTS_N_INSNS (19), /* divdi */
705 COSTS_N_INSNS (3), /* fp */
706 COSTS_N_INSNS (4), /* dmul */
707 COSTS_N_INSNS (18), /* sdiv */
708 COSTS_N_INSNS (33), /* ddiv */
642639ce 709 32,
a19b7d46
EW
710 16, /* l1 cache */
711 16, /* l2 cache */
642639ce 712 1, /* prefetch streams /*/
fa41c305
EW
713};
714
edae5fe3
DE
715/* Instruction costs on PPCE500MC processors. */
716static const
717struct processor_costs ppce500mc_cost = {
718 COSTS_N_INSNS (4), /* mulsi */
719 COSTS_N_INSNS (4), /* mulsi_const */
720 COSTS_N_INSNS (4), /* mulsi_const9 */
721 COSTS_N_INSNS (4), /* muldi */
722 COSTS_N_INSNS (14), /* divsi */
723 COSTS_N_INSNS (14), /* divdi */
724 COSTS_N_INSNS (8), /* fp */
725 COSTS_N_INSNS (10), /* dmul */
726 COSTS_N_INSNS (36), /* sdiv */
727 COSTS_N_INSNS (66), /* ddiv */
728 64, /* cache line size */
729 32, /* l1 cache */
730 128, /* l2 cache */
731 1, /* prefetch streams /*/
732};
733
8b897cfa
RS
734/* Instruction costs on POWER4 and POWER5 processors. */
735static const
736struct processor_costs power4_cost = {
06a67bdd
RS
737 COSTS_N_INSNS (3), /* mulsi */
738 COSTS_N_INSNS (2), /* mulsi_const */
739 COSTS_N_INSNS (2), /* mulsi_const9 */
740 COSTS_N_INSNS (4), /* muldi */
741 COSTS_N_INSNS (18), /* divsi */
742 COSTS_N_INSNS (34), /* divdi */
743 COSTS_N_INSNS (3), /* fp */
744 COSTS_N_INSNS (3), /* dmul */
745 COSTS_N_INSNS (17), /* sdiv */
746 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 747 128, /* cache line size */
5f732aba
DE
748 32, /* l1 cache */
749 1024, /* l2 cache */
0b11da67 750 8, /* prefetch streams /*/
8b897cfa
RS
751};
752
44cd321e
PS
753/* Instruction costs on POWER6 processors. */
754static const
755struct processor_costs power6_cost = {
756 COSTS_N_INSNS (8), /* mulsi */
757 COSTS_N_INSNS (8), /* mulsi_const */
758 COSTS_N_INSNS (8), /* mulsi_const9 */
759 COSTS_N_INSNS (8), /* muldi */
760 COSTS_N_INSNS (22), /* divsi */
761 COSTS_N_INSNS (28), /* divdi */
762 COSTS_N_INSNS (3), /* fp */
763 COSTS_N_INSNS (3), /* dmul */
764 COSTS_N_INSNS (13), /* sdiv */
765 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 766 128, /* cache line size */
5f732aba
DE
767 64, /* l1 cache */
768 2048, /* l2 cache */
0b11da67 769 16, /* prefetch streams */
44cd321e
PS
770};
771
cacf1ca8
MM
772/* Instruction costs on POWER7 processors. */
773static const
774struct processor_costs power7_cost = {
775 COSTS_N_INSNS (2), /* mulsi */
776 COSTS_N_INSNS (2), /* mulsi_const */
777 COSTS_N_INSNS (2), /* mulsi_const9 */
778 COSTS_N_INSNS (2), /* muldi */
779 COSTS_N_INSNS (18), /* divsi */
780 COSTS_N_INSNS (34), /* divdi */
781 COSTS_N_INSNS (3), /* fp */
782 COSTS_N_INSNS (3), /* dmul */
783 COSTS_N_INSNS (13), /* sdiv */
784 COSTS_N_INSNS (16), /* ddiv */
785 128, /* cache line size */
786 32, /* l1 cache */
787 256, /* l2 cache */
788 12, /* prefetch streams */
789};
790
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static bool rs6000_legitimate_address_p (enum machine_mode, rtx, bool);
static rtx rs6000_generate_compare (rtx, enum machine_mode);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static rtx rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static void * rs6000_alloc_sched_context (void);
static void rs6000_init_sched_context (void *, bool);
static void rs6000_set_sched_context (void *);
static void rs6000_free_sched_context (void *);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (unsigned int, tree);
static tree rs6000_builtin_vec_perm (tree, tree *);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_address (rtx, rtx, enum machine_mode);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct GTY(()) toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

/* Table of valid machine attributes.  */

static const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
  SUBTARGET_ATTRIBUTE_TABLE,
#endif
  { NULL,        0, 0, false, false, false, NULL }
};
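
/* Illustrative only (these declarations are not part of this file): the
   "longcall" and "shortcall" attributes above are attached to function
   types by users, e.g.

     void far_away (void) __attribute__ ((longcall));

   while the "altivec" attribute is normally reached indirectly through
   the AltiVec vector/pixel/bool keywords rather than written by hand.  */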

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#define SET_PROFILE_KERNEL(N)
#else
#define SET_PROFILE_KERNEL(N) TARGET_PROFILE_KERNEL = (N)
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
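/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 (%v0)
   and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001 (%v31).  */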

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS rs6000_legitimize_address

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
#undef TARGET_SCHED_INIT_SCHED_CONTEXT
#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
#undef TARGET_SCHED_SET_SCHED_CONTEXT
#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
#undef TARGET_SCHED_FREE_SCHED_CONTEXT
#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P rs6000_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

cacf1ca8
MM
1362/* Return number of consecutive hard regs needed starting at reg REGNO
1363 to hold something of mode MODE.
1364 This is ordinarily the length in words of a value of mode MODE
1365 but can be less for certain modes in special long registers.
1366
1367 For the SPE, GPRs are 64 bits but only 32 bits are visible in
1368 scalar instructions. The upper 32 bits are only available to the
1369 SIMD instructions.
1370
1371 POWER and PowerPC GPRs hold 32 bits worth;
1372 PowerPC64 GPRs and FPRs hold 64 bits worth. */
1373
1374static int
1375rs6000_hard_regno_nregs_internal (int regno, enum machine_mode mode)
1376{
1377 unsigned HOST_WIDE_INT reg_size;
1378
1379 if (FP_REGNO_P (regno))
1380 reg_size = UNITS_PER_FP_WORD;
1381
1382 else if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1383 reg_size = UNITS_PER_SPE_WORD;
1384
1385 else if (ALTIVEC_REGNO_P (regno))
1386 reg_size = UNITS_PER_ALTIVEC_WORD;
1387
1388 /* The value returned for SCmode in the E500 double case is 2 for
1389 ABI compatibility; storing an SCmode value in a single register
1390 would require function_arg and rs6000_spe_function_arg to handle
1391 SCmode so as to pass the value correctly in a pair of
1392 registers. */
1393 else if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
1394 && !DECIMAL_FLOAT_MODE_P (mode))
1395 reg_size = UNITS_PER_FP_WORD;
1396
1397 else
1398 reg_size = UNITS_PER_WORD;
1399
1400 return (GET_MODE_SIZE (mode) + reg_size - 1) / reg_size;
1401}
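/* Editorial illustration (not part of the original source): assuming a
   32-bit target where UNITS_PER_WORD is 4, UNITS_PER_FP_WORD is 8 and
   UNITS_PER_ALTIVEC_WORD is 16, the rounding-up division above gives
   DFmode (8 bytes) = (8 + 4 - 1) / 4 = 2 GPRs but (8 + 8 - 1) / 8 = 1 FPR,
   and V4SImode (16 bytes) = (16 + 16 - 1) / 16 = 1 AltiVec register.  */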
0d1fbc8c
AH
1402
1403/* Value is 1 if hard register REGNO can hold a value of machine-mode
1404 MODE. */
1405static int
1406rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1407{
1408 /* The GPRs can hold any mode, but values bigger than one register
1409 cannot go past R31. */
1410 if (INT_REGNO_P (regno))
1411 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1412
a5a97921 1413 /* The float registers can only hold floating modes and DImode.
7393f7f8 1414 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1415 if (FP_REGNO_P (regno))
1416 return
96038623 1417 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1418 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1419 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1420 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1421 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1422 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1423 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1424
1425 /* The CR register can only hold CC modes. */
1426 if (CR_REGNO_P (regno))
1427 return GET_MODE_CLASS (mode) == MODE_CC;
1428
1429 if (XER_REGNO_P (regno))
1430 return mode == PSImode;
1431
1432 /* AltiVec modes only in AltiVec registers. */
1433 if (ALTIVEC_REGNO_P (regno))
1434 return ALTIVEC_VECTOR_MODE (mode);
1435
1436 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1437 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1438 return 1;
1439
1440 /* We cannot put TImode anywhere except the general registers, and it must
1441 fit within the register set. */
1442
1443 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1444}
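/* Editorial examples of the checks above (not part of the original source):
   CCmode values are accepted only in a CR field, V4SImode only in an
   AltiVec register, and a 64-bit integer mode such as DImode in an FPR
   (assuming UNITS_PER_FP_WORD is 8); the remaining special registers only
   accept modes no wider than a word via the final size test.  */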
1445
cacf1ca8 1446/* Print interesting facts about registers. */
0d1fbc8c 1447static void
cacf1ca8 1448rs6000_debug_reg_print (int first_regno, int last_regno, const char *reg_name)
0d1fbc8c
AH
1449{
1450 int r, m;
1451
cacf1ca8
MM
1452 for (r = first_regno; r <= last_regno; ++r)
1453 {
1454 const char *comma = "";
1455 int len;
1456
1457 if (first_regno == last_regno)
1458 fprintf (stderr, "%s:\t", reg_name);
1459 else
1460 fprintf (stderr, "%s%d:\t", reg_name, r - first_regno);
1461
1462 len = 8;
1463 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1464 if (rs6000_hard_regno_mode_ok_p[m][r] && rs6000_hard_regno_nregs[m][r])
1465 {
1466 if (len > 70)
1467 {
1468 fprintf (stderr, ",\n\t");
1469 len = 8;
1470 comma = "";
1471 }
1472
1473 if (rs6000_hard_regno_nregs[m][r] > 1)
1474 len += fprintf (stderr, "%s%s/%d", comma, GET_MODE_NAME (m),
1475 rs6000_hard_regno_nregs[m][r]);
1476 else
1477 len += fprintf (stderr, "%s%s", comma, GET_MODE_NAME (m));
1478
1479 comma = ", ";
1480 }
1481
1482 if (call_used_regs[r])
1483 {
1484 if (len > 70)
1485 {
1486 fprintf (stderr, ",\n\t");
1487 len = 8;
1488 comma = "";
1489 }
1490
1491 len += fprintf (stderr, "%s%s", comma, "call-used");
1492 comma = ", ";
1493 }
1494
1495 if (fixed_regs[r])
1496 {
1497 if (len > 70)
1498 {
1499 fprintf (stderr, ",\n\t");
1500 len = 8;
1501 comma = "";
1502 }
1503
1504 len += fprintf (stderr, "%s%s", comma, "fixed");
1505 comma = ", ";
1506 }
1507
1508 if (len > 70)
1509 {
1510 fprintf (stderr, ",\n\t");
1511 comma = "";
1512 }
1513
1514 fprintf (stderr, "%sregno = %d\n", comma, r);
1515 }
1516}
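/* Editorial note (not part of the original source): with -mdebug=reg each
   register produces one line of roughly this (abbreviated, illustrative)
   form:
     gr3:	SI, DI/2, SF, DF/2, ..., call-used, regno = 3
   where "/2" means the mode occupies two consecutive hard registers.  */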
1517
1518/* Map enum rs6000_vector to string. */
1519static const char *
1520rs6000_debug_vector_unit[] = {
1521 "none",
1522 "altivec",
1523 "vsx",
1524 "paired",
1525 "spe",
1526 "other"
1527};
1528
1529/* Initialize the various global tables that are based on register size. */
1530static void
1531rs6000_init_hard_regno_mode_ok (void)
1532{
1533 int r, m, c;
1534 bool float_p = (TARGET_HARD_FLOAT && TARGET_FPRS);
1535
1536 /* Precalculate REGNO_REG_CLASS. */
1537 rs6000_regno_regclass[0] = GENERAL_REGS;
1538 for (r = 1; r < 32; ++r)
1539 rs6000_regno_regclass[r] = BASE_REGS;
1540
1541 for (r = 32; r < 64; ++r)
1542 rs6000_regno_regclass[r] = FLOAT_REGS;
1543
1544 for (r = 64; r < FIRST_PSEUDO_REGISTER; ++r)
1545 rs6000_regno_regclass[r] = NO_REGS;
1546
1547 for (r = FIRST_ALTIVEC_REGNO; r <= LAST_ALTIVEC_REGNO; ++r)
1548 rs6000_regno_regclass[r] = ALTIVEC_REGS;
1549
1550 rs6000_regno_regclass[CR0_REGNO] = CR0_REGS;
1551 for (r = CR1_REGNO; r <= CR7_REGNO; ++r)
1552 rs6000_regno_regclass[r] = CR_REGS;
1553
1554 rs6000_regno_regclass[MQ_REGNO] = MQ_REGS;
1555 rs6000_regno_regclass[LR_REGNO] = LINK_REGS;
1556 rs6000_regno_regclass[CTR_REGNO] = CTR_REGS;
1557 rs6000_regno_regclass[XER_REGNO] = XER_REGS;
1558 rs6000_regno_regclass[VRSAVE_REGNO] = VRSAVE_REGS;
1559 rs6000_regno_regclass[VSCR_REGNO] = VRSAVE_REGS;
1560 rs6000_regno_regclass[SPE_ACC_REGNO] = SPE_ACC_REGS;
1561 rs6000_regno_regclass[SPEFSCR_REGNO] = SPEFSCR_REGS;
1562 rs6000_regno_regclass[ARG_POINTER_REGNUM] = BASE_REGS;
1563 rs6000_regno_regclass[FRAME_POINTER_REGNUM] = BASE_REGS;
1564
1565 /* Precalculate vector information; this must be set up before
1566 rs6000_hard_regno_nregs_internal below. */
1567 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1568 {
1569 rs6000_vector_unit[m] = rs6000_vector_mem[m] = VECTOR_NONE;
1570 rs6000_vector_reg_class[m] = NO_REGS;
1571 }
1572
1573 /* V4SF mode, Altivec only. */
1574 if (float_p && TARGET_ALTIVEC)
1575 {
1576 rs6000_vector_unit[V4SFmode] = VECTOR_ALTIVEC;
1577 rs6000_vector_mem[V4SFmode] = VECTOR_ALTIVEC;
1578 rs6000_vector_align[V4SFmode] = 128;
1579 }
1580
1581 /* V16QImode, V8HImode, V4SImode are Altivec only. */
1582 if (TARGET_ALTIVEC)
1583 {
1584 rs6000_vector_unit[V4SImode] = VECTOR_ALTIVEC;
1585 rs6000_vector_unit[V8HImode] = VECTOR_ALTIVEC;
1586 rs6000_vector_unit[V16QImode] = VECTOR_ALTIVEC;
1587
1588 rs6000_vector_reg_class[V16QImode] = ALTIVEC_REGS;
1589 rs6000_vector_reg_class[V8HImode] = ALTIVEC_REGS;
1590 rs6000_vector_reg_class[V4SImode] = ALTIVEC_REGS;
1591
1592 rs6000_vector_mem[V4SImode] = VECTOR_ALTIVEC;
1593 rs6000_vector_mem[V8HImode] = VECTOR_ALTIVEC;
1594 rs6000_vector_mem[V16QImode] = VECTOR_ALTIVEC;
1595 rs6000_vector_align[V4SImode] = 128;
1596 rs6000_vector_align[V8HImode] = 128;
1597 rs6000_vector_align[V16QImode] = 128;
1598 }
1599
1600 /* For V2DImode, prefer VSX over AltiVec, since the main use will be for
1601 vectorized floating-point conversions. */
1602 if (TARGET_ALTIVEC)
1603 {
1604 rs6000_vector_mem[V2DImode] = VECTOR_ALTIVEC;
1605 rs6000_vector_unit[V2DImode] = VECTOR_NONE;
1606 rs6000_vector_reg_class[V2DImode] = ALTIVEC_REGS;
1607 rs6000_vector_align[V2DImode] = 128;
1608 }
1609
1610 /* TODO add SPE and paired floating point vector support. */
1611
1612 /* Set the VSX register classes. */
1613 rs6000_vector_reg_class[V4SFmode]
1614 = (VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)
1615 ? ALTIVEC_REGS
1616 : NO_REGS);
1617
1618 rs6000_vector_reg_class[V2DFmode] = NO_REGS;
1619
1620 rs6000_vector_reg_class[DFmode] = (!float_p ? NO_REGS : FLOAT_REGS);
1621
1622 /* Precalculate HARD_REGNO_NREGS. */
0d1fbc8c
AH
1623 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1624 for (m = 0; m < NUM_MACHINE_MODES; ++m)
cacf1ca8
MM
1625 rs6000_hard_regno_nregs[m][r]
1626 = rs6000_hard_regno_nregs_internal (r, (enum machine_mode)m);
1627
1628 /* Precalculate HARD_REGNO_MODE_OK. */
1629 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1630 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1631 if (rs6000_hard_regno_mode_ok (r, (enum machine_mode)m))
0d1fbc8c 1632 rs6000_hard_regno_mode_ok_p[m][r] = true;
cacf1ca8
MM
1633
1634 /* Precalculate CLASS_MAX_NREGS sizes. */
1635 for (c = 0; c < LIM_REG_CLASSES; ++c)
1636 {
1637 int reg_size;
1638
1639 if (c == ALTIVEC_REGS)
1640 reg_size = UNITS_PER_ALTIVEC_WORD;
1641
1642 else if (c == FLOAT_REGS)
1643 reg_size = UNITS_PER_FP_WORD;
1644
1645 else
1646 reg_size = UNITS_PER_WORD;
1647
1648 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1649 rs6000_class_max_nregs[m][c]
1650 = (GET_MODE_SIZE (m) + reg_size - 1) / reg_size;
1651 }
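  /* Editorial illustration (not part of the original source): with the
     register sizes chosen above (and UNITS_PER_FP_WORD assumed to be 8),
     a 16-byte V4SImode value needs (16 + 16 - 1) / 16 = 1 register of
     ALTIVEC_REGS, (16 + 8 - 1) / 8 = 2 of FLOAT_REGS, and on a 32-bit
     target (16 + 4 - 1) / 4 = 4 of GENERAL_REGS.  */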
1652
1653 if (TARGET_E500_DOUBLE)
1654 rs6000_class_max_nregs[DFmode][GENERAL_REGS] = 1;
1655
1656 if (TARGET_DEBUG_REG)
1657 {
1658 const char *nl = (const char *)0;
1659
1660 fprintf (stderr, "Register information: (last virtual reg = %d)\n",
1661 LAST_VIRTUAL_REGISTER);
1662 rs6000_debug_reg_print (0, 31, "gr");
1663 rs6000_debug_reg_print (32, 63, "fp");
1664 rs6000_debug_reg_print (FIRST_ALTIVEC_REGNO,
1665 LAST_ALTIVEC_REGNO,
1666 "vs");
1667 rs6000_debug_reg_print (LR_REGNO, LR_REGNO, "lr");
1668 rs6000_debug_reg_print (CTR_REGNO, CTR_REGNO, "ctr");
1669 rs6000_debug_reg_print (CR0_REGNO, CR7_REGNO, "cr");
1670 rs6000_debug_reg_print (MQ_REGNO, MQ_REGNO, "mq");
1671 rs6000_debug_reg_print (XER_REGNO, XER_REGNO, "xer");
1672 rs6000_debug_reg_print (VRSAVE_REGNO, VRSAVE_REGNO, "vrsave");
1673 rs6000_debug_reg_print (VSCR_REGNO, VSCR_REGNO, "vscr");
1674 rs6000_debug_reg_print (SPE_ACC_REGNO, SPE_ACC_REGNO, "spe_a");
1675 rs6000_debug_reg_print (SPEFSCR_REGNO, SPEFSCR_REGNO, "spe_f");
1676
1677 fprintf (stderr,
1678 "\n"
1679 "V16QI reg_class = %s\n"
1680 "V8HI reg_class = %s\n"
1681 "V4SI reg_class = %s\n"
1682 "V2DI reg_class = %s\n"
1683 "V4SF reg_class = %s\n"
1684 "V2DF reg_class = %s\n"
1685 "DF reg_class = %s\n\n",
1686 reg_class_names[rs6000_vector_reg_class[V16QImode]],
1687 reg_class_names[rs6000_vector_reg_class[V8HImode]],
1688 reg_class_names[rs6000_vector_reg_class[V4SImode]],
1689 reg_class_names[rs6000_vector_reg_class[V2DImode]],
1690 reg_class_names[rs6000_vector_reg_class[V4SFmode]],
1691 reg_class_names[rs6000_vector_reg_class[V2DFmode]],
1692 reg_class_names[rs6000_vector_reg_class[DFmode]]);
1693
1694 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1695 if (rs6000_vector_unit[m] || rs6000_vector_mem[m])
1696 {
1697 nl = "\n";
1698 fprintf (stderr, "Vector mode: %-5s arithmetic: %-8s move: %-8s\n",
1699 GET_MODE_NAME (m),
1700 rs6000_debug_vector_unit[ rs6000_vector_unit[m] ],
1701 rs6000_debug_vector_unit[ rs6000_vector_mem[m] ]);
1702 }
1703
1704 if (nl)
1705 fputs (nl, stderr);
1706 }
0d1fbc8c
AH
1707}
1708
e4cad568
GK
1709#if TARGET_MACHO
1710/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1711
1712static void
1713darwin_rs6000_override_options (void)
1714{
1715 /* The Darwin ABI always includes AltiVec, so it can't be (validly) turned
1716 off. */
1717 rs6000_altivec_abi = 1;
1718 TARGET_ALTIVEC_VRSAVE = 1;
1719 if (DEFAULT_ABI == ABI_DARWIN)
1720 {
1721 if (MACHO_DYNAMIC_NO_PIC_P)
1722 {
1723 if (flag_pic)
1724 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1725 flag_pic = 0;
1726 }
1727 else if (flag_pic == 1)
1728 {
1729 flag_pic = 2;
1730 }
1731 }
1732 if (TARGET_64BIT && ! TARGET_POWERPC64)
1733 {
1734 target_flags |= MASK_POWERPC64;
1735 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1736 }
1737 if (flag_mkernel)
1738 {
1739 rs6000_default_long_calls = 1;
1740 target_flags |= MASK_SOFT_FLOAT;
1741 }
1742
1743 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1744 Altivec. */
1745 if (!flag_mkernel && !flag_apple_kext
1746 && TARGET_64BIT
1747 && ! (target_flags_explicit & MASK_ALTIVEC))
1748 target_flags |= MASK_ALTIVEC;
1749
1750 /* Unless the user (not the configurer) has explicitly overridden
1751 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
1752 G4 unless targeting the kernel. */
1753 if (!flag_mkernel
1754 && !flag_apple_kext
1755 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1756 && ! (target_flags_explicit & MASK_ALTIVEC)
1757 && ! rs6000_select[1].string)
1758 {
1759 target_flags |= MASK_ALTIVEC;
1760 }
1761}
1762#endif
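/* Editorial note (not part of the original source): as a combined effect
   of the code above, "-m64" on a 10.5 or later deployment target normally
   ends up with both MASK_POWERPC64 and MASK_ALTIVEC set, unless the user
   explicitly passed -mno-altivec or selected a CPU with -mcpu=.  */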
1763
c1e55850
GK
1764/* If not otherwise specified by a target, make 'long double' equivalent to
1765 'double'. */
1766
1767#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1768#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1769#endif
1770
5248c961
RK
1771/* Override command line options. Mostly we process the processor
1772 type and sometimes adjust other TARGET_ options. */
1773
1774void
d779d0dc 1775rs6000_override_options (const char *default_cpu)
5248c961 1776{
c4d38ccb 1777 size_t i, j;
8e3f41e7 1778 struct rs6000_cpu_select *ptr;
66188a7e 1779 int set_masks;
5248c961 1780
66188a7e 1781 /* Simplifications for entries below. */
85638c0d 1782
66188a7e
GK
1783 enum {
1784 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1785 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1786 };
85638c0d 1787
66188a7e
GK
1788 /* This table occasionally claims that a processor does not support
1789 a particular feature even though it does, but the feature is slower
1790 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1791 complete description of the processor's support.
66188a7e
GK
1792
1793 Please keep this list in order, and don't forget to update the
1794 documentation in invoke.texi when adding a new processor or
1795 flag. */
5248c961
RK
1796 static struct ptt
1797 {
8b60264b
KG
1798 const char *const name; /* Canonical processor name. */
1799 const enum processor_type processor; /* Processor type enum value. */
1800 const int target_enable; /* Target flags to enable. */
8b60264b 1801 } const processor_target_table[]
66188a7e 1802 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1803 {"403", PROCESSOR_PPC403,
66188a7e 1804 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1805 {"405", PROCESSOR_PPC405,
716019c0
JM
1806 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1807 {"405fp", PROCESSOR_PPC405,
1808 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1809 {"440", PROCESSOR_PPC440,
716019c0
JM
1810 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1811 {"440fp", PROCESSOR_PPC440,
1812 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1813 {"464", PROCESSOR_PPC440,
1814 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1815 {"464fp", PROCESSOR_PPC440,
1816 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1817 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1818 {"601", PROCESSOR_PPC601,
66188a7e
GK
1819 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1820 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1821 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1822 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1823 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1824 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1825 {"620", PROCESSOR_PPC620,
1826 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1827 {"630", PROCESSOR_PPC630,
1828 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1829 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1830 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1831 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1832 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1833 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1834 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1835 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
cacf1ca8
MM
1836 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
1837 | MASK_ISEL},
4d4cbc0e 1838 /* 8548 has a dummy entry for now. */
cacf1ca8
MM
1839 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
1840 | MASK_ISEL},
fa41c305
EW
1841 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1842 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
cacf1ca8
MM
1843 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT
1844 | MASK_ISEL},
66188a7e 1845 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1846 {"970", PROCESSOR_POWER4,
66188a7e 1847 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1848 {"cell", PROCESSOR_CELL,
1849 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1850 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1851 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1852 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1853 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1854 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1855 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1856 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1857 {"power2", PROCESSOR_POWER,
1858 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1859 {"power3", PROCESSOR_PPC630,
1860 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1861 {"power4", PROCESSOR_POWER4,
9a8d7941 1862 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1863 | MASK_MFCRF},
ec507f2d 1864 {"power5", PROCESSOR_POWER5,
9a8d7941 1865 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1866 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1867 {"power5+", PROCESSOR_POWER5,
9a8d7941 1868 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1869 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1870 {"power6", PROCESSOR_POWER6,
0783d48d
DE
1871 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1872 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1873 {"power6x", PROCESSOR_POWER6,
0783d48d
DE
1874 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1875 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1876 | MASK_MFPGPR},
cacf1ca8 1877 {"power7", PROCESSOR_POWER7,
d40c9e33 1878 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
cacf1ca8
MM
1879 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP | MASK_POPCNTD
1880 | MASK_VSX}, /* Don't add MASK_ISEL by default */
66188a7e
GK
1881 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1882 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1883 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1884 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1885 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1886 {"rios2", PROCESSOR_RIOS2,
1887 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1888 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1889 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1890 {"rs64", PROCESSOR_RS64A,
1891 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1892 };
5248c961 1893
ca7558fc 1894 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
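  /* Editorial example (not part of the original source): "-mcpu=power7"
     matches the "power7" entry above, so rs6000_cpu becomes
     PROCESSOR_POWER7 and, filtered through set_masks below, target_flags
     picks up POWERPC_7400_MASK, MASK_POWERPC64, MASK_VSX and friends.  */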
5248c961 1895
66188a7e
GK
1896 /* Some OSs don't support saving the high part of 64-bit registers on
1897 context switch. Other OSs don't support saving Altivec registers.
1898 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1899 settings; if the user wants either, the user must explicitly specify
1900 them and we won't interfere with the user's specification. */
1901
1902 enum {
1903 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1904 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1905 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1906 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
cacf1ca8
MM
1907 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP
1908 | MASK_POPCNTD | MASK_VSX | MASK_ISEL)
66188a7e 1909 };
0d1fbc8c 1910
cacf1ca8
MM
1911 /* Set the pointer size. */
1912 if (TARGET_POWERPC64)
1913 {
1914 rs6000_pmode = (int)DImode;
1915 rs6000_pointer_size = 64;
1916 }
1917 else
1918 {
1919 rs6000_pmode = (int)SImode;
1920 rs6000_pointer_size = 32;
1921 }
1922
c4ad648e 1923 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1924#ifdef OS_MISSING_POWERPC64
1925 if (OS_MISSING_POWERPC64)
1926 set_masks &= ~MASK_POWERPC64;
1927#endif
1928#ifdef OS_MISSING_ALTIVEC
1929 if (OS_MISSING_ALTIVEC)
1930 set_masks &= ~MASK_ALTIVEC;
1931#endif
1932
768875a8
AM
1933 /* Don't let the processor default override flags given explicitly. */
1934 set_masks &= ~target_flags_explicit;
957211c3 1935
a4f6c312 1936 /* Identify the processor type. */
8e3f41e7 1937 rs6000_select[0].string = default_cpu;
3cb999d8 1938 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1939
b6a1cbae 1940 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1941 {
8e3f41e7
MM
1942 ptr = &rs6000_select[i];
1943 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1944 {
8e3f41e7
MM
1945 for (j = 0; j < ptt_size; j++)
1946 if (! strcmp (ptr->string, processor_target_table[j].name))
1947 {
1948 if (ptr->set_tune_p)
1949 rs6000_cpu = processor_target_table[j].processor;
1950
1951 if (ptr->set_arch_p)
1952 {
66188a7e
GK
1953 target_flags &= ~set_masks;
1954 target_flags |= (processor_target_table[j].target_enable
1955 & set_masks);
8e3f41e7
MM
1956 }
1957 break;
1958 }
1959
4406229e 1960 if (j == ptt_size)
8e3f41e7 1961 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1962 }
1963 }
8a61d227 1964
edae5fe3
DE
1965 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1966 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1967 {
1968 if (TARGET_ALTIVEC)
1969 error ("AltiVec not supported in this target");
1970 if (TARGET_SPE)
1971 error ("Spe not supported in this target");
1972 }
1973
25696a75 1974 /* Disable Cell microcode if we are optimizing for the Cell
c921bad8
AP
1975 and not optimizing for size. */
1976 if (rs6000_gen_cell_microcode == -1)
1977 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1978 && !optimize_size);
1979
dff9f1b6 1980 /* If we are optimizing big endian systems for space, use the load/store
c921bad8
AP
1981 multiple and string instructions, provided we are allowed to generate
1982 Cell microcode (these instructions are microcoded on the Cell). */
1983 if (BYTES_BIG_ENDIAN && optimize_size && rs6000_gen_cell_microcode)
957211c3 1984 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1985
a4f6c312
SS
1986 /* Don't allow -mmultiple or -mstring on little endian systems
1987 unless the cpu is a 750, because the hardware doesn't support the
1988 instructions used in little endian mode, and they cause an alignment
1989 trap. The 750 does not cause an alignment trap (except when the
1990 target is unaligned). */
bef84347 1991
b21fb038 1992 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1993 {
1994 if (TARGET_MULTIPLE)
1995 {
1996 target_flags &= ~MASK_MULTIPLE;
b21fb038 1997 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1998 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1999 }
2000
2001 if (TARGET_STRING)
2002 {
2003 target_flags &= ~MASK_STRING;
b21fb038 2004 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 2005 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
2006 }
2007 }
3933e0e1 2008
cacf1ca8
MM
2009 /* Add some warnings for VSX. Enable -maltivec unless the user explicitly
2010 used -mno-altivec. */
2011 if (TARGET_VSX)
2012 {
2013 const char *msg = NULL;
2014 if (!TARGET_HARD_FLOAT || !TARGET_FPRS
2015 || !TARGET_SINGLE_FLOAT || !TARGET_DOUBLE_FLOAT)
2016 {
2017 if (target_flags_explicit & MASK_VSX)
2018 msg = N_("-mvsx requires hardware floating point");
2019 else
2020 target_flags &= ~ MASK_VSX;
2021 }
2022 else if (TARGET_PAIRED_FLOAT)
2023 msg = N_("-mvsx and -mpaired are incompatible");
2024 /* The hardware will allow VSX and little endian, but until we make sure
2025 things like vector select, etc. work, don't allow VSX on little endian
2026 systems at this point. */
2027 else if (!BYTES_BIG_ENDIAN)
2028 msg = N_("-mvsx used with little endian code");
2029 else if (TARGET_AVOID_XFORM > 0)
2030 msg = N_("-mvsx needs indexed addressing");
2031
2032 if (msg)
2033 {
2034 warning (0, msg);
2035 target_flags &= ~ MASK_VSX;
2036 }
2037 else if (TARGET_VSX && !TARGET_ALTIVEC
2038 && (target_flags_explicit & MASK_ALTIVEC) == 0)
2039 target_flags |= MASK_ALTIVEC;
2040 }
2041
38c1f2d7
MM
2042 /* Set debug flags */
2043 if (rs6000_debug_name)
2044 {
bfc79d3b 2045 if (! strcmp (rs6000_debug_name, "all"))
cacf1ca8
MM
2046 rs6000_debug_stack = rs6000_debug_arg = rs6000_debug_reg
2047 = rs6000_debug_addr = rs6000_debug_cost = 1;
bfc79d3b 2048 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 2049 rs6000_debug_stack = 1;
bfc79d3b 2050 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7 2051 rs6000_debug_arg = 1;
cacf1ca8
MM
2052 else if (! strcmp (rs6000_debug_name, "reg"))
2053 rs6000_debug_reg = 1;
2054 else if (! strcmp (rs6000_debug_name, "addr"))
2055 rs6000_debug_addr = 1;
2056 else if (! strcmp (rs6000_debug_name, "cost"))
2057 rs6000_debug_cost = 1;
38c1f2d7 2058 else
c725bd79 2059 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
2060 }
2061
57ac7be9
AM
2062 if (rs6000_traceback_name)
2063 {
2064 if (! strncmp (rs6000_traceback_name, "full", 4))
2065 rs6000_traceback = traceback_full;
2066 else if (! strncmp (rs6000_traceback_name, "part", 4))
2067 rs6000_traceback = traceback_part;
2068 else if (! strncmp (rs6000_traceback_name, "no", 2))
2069 rs6000_traceback = traceback_none;
2070 else
9e637a26 2071 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
2072 rs6000_traceback_name);
2073 }
2074
78f5898b
AH
2075 if (!rs6000_explicit_options.long_double)
2076 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 2077
602ea4d3 2078#ifndef POWERPC_LINUX
d3603e8c 2079 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
2080 rs6000_ieeequad = 1;
2081#endif
2082
0db747be 2083 /* Enable Altivec ABI for AIX -maltivec. */
cacf1ca8 2084 if (TARGET_XCOFF && (TARGET_ALTIVEC || TARGET_VSX))
0db747be
DE
2085 rs6000_altivec_abi = 1;
2086
a2db2771
JJ
2087 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
2088 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
2089 be explicitly overridden in either case. */
2090 if (TARGET_ELF)
6d0ef01e 2091 {
a2db2771 2092 if (!rs6000_explicit_options.altivec_abi
cacf1ca8 2093 && (TARGET_64BIT || TARGET_ALTIVEC || TARGET_VSX))
a2db2771
JJ
2094 rs6000_altivec_abi = 1;
2095
2096 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
2097 if (!rs6000_explicit_options.vrsave)
2098 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
2099 }
2100
594a51fe
SS
2101 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
2102 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2103 {
2104 rs6000_darwin64_abi = 1;
9c7956fd 2105#if TARGET_MACHO
6ac49599 2106 darwin_one_byte_bool = 1;
9c7956fd 2107#endif
d9168963
SS
2108 /* Default to natural alignment, for better performance. */
2109 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
2110 }
2111
194c524a
DE
2112 /* Place FP constants in the constant pool instead of TOC
2113 if section anchors are enabled. */
2114 if (flag_section_anchors)
2115 TARGET_NO_FP_IN_TOC = 1;
2116
c4501e62
JJ
2117 /* Handle -mtls-size option. */
2118 rs6000_parse_tls_size_option ();
2119
a7ae18e2
AH
2120#ifdef SUBTARGET_OVERRIDE_OPTIONS
2121 SUBTARGET_OVERRIDE_OPTIONS;
2122#endif
2123#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
2124 SUBSUBTARGET_OVERRIDE_OPTIONS;
2125#endif
4d4cbc0e
AH
2126#ifdef SUB3TARGET_OVERRIDE_OPTIONS
2127 SUB3TARGET_OVERRIDE_OPTIONS;
2128#endif
a7ae18e2 2129
edae5fe3 2130 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 2131 {
edae5fe3 2132 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
2133 MASK_STRING above when optimizing for size. */
2134 if ((target_flags & MASK_STRING) != 0)
2135 target_flags = target_flags & ~MASK_STRING;
2136 }
2137 else if (rs6000_select[1].string != NULL)
2138 {
2139 /* For the powerpc-eabispe configuration, we set all these by
2140 default, so let's unset them if we manually set another
2141 CPU that is not the E500. */
a2db2771 2142 if (!rs6000_explicit_options.spe_abi)
5da702b1 2143 rs6000_spe_abi = 0;
78f5898b 2144 if (!rs6000_explicit_options.spe)
5da702b1 2145 rs6000_spe = 0;
78f5898b 2146 if (!rs6000_explicit_options.float_gprs)
5da702b1 2147 rs6000_float_gprs = 0;
cacf1ca8
MM
2148 if (!(target_flags_explicit & MASK_ISEL))
2149 target_flags &= ~MASK_ISEL;
5da702b1 2150 }
b5044283 2151
eca0d5e8
JM
2152 /* Detect invalid option combinations with E500. */
2153 CHECK_E500_OPTIONS;
2154
ec507f2d 2155 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 2156 && rs6000_cpu != PROCESSOR_POWER5
cacf1ca8
MM
2157 && rs6000_cpu != PROCESSOR_POWER6
2158 && rs6000_cpu != PROCESSOR_POWER7
d296e02e 2159 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
2160 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
2161 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e 2162 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
cacf1ca8
MM
2163 || rs6000_cpu == PROCESSOR_POWER5
2164 || rs6000_cpu == PROCESSOR_POWER6
2165 || rs6000_cpu == PROCESSOR_POWER7);
ec507f2d 2166
ec507f2d
DE
2167 rs6000_sched_restricted_insns_priority
2168 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 2169
569fa502 2170 /* Handle -msched-costly-dep option. */
ec507f2d
DE
2171 rs6000_sched_costly_dep
2172 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 2173
569fa502
DN
2174 if (rs6000_sched_costly_dep_str)
2175 {
f676971a 2176 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 2177 rs6000_sched_costly_dep = no_dep_costly;
569fa502 2178 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 2179 rs6000_sched_costly_dep = all_deps_costly;
569fa502 2180 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 2181 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 2182 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 2183 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 2184 else
32e8bb8e
ILT
2185 rs6000_sched_costly_dep = ((enum rs6000_dependence_cost)
2186 atoi (rs6000_sched_costly_dep_str));
cbe26ab8
DN
2187 }
2188
2189 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
2190 rs6000_sched_insert_nops
2191 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 2192
cbe26ab8
DN
2193 if (rs6000_sched_insert_nops_str)
2194 {
2195 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 2196 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 2197 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 2198 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 2199 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 2200 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 2201 else
32e8bb8e
ILT
2202 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion)
2203 atoi (rs6000_sched_insert_nops_str));
569fa502
DN
2204 }
2205
c81bebd7 2206#ifdef TARGET_REGNAMES
a4f6c312
SS
2207 /* If the user desires alternate register names, copy in the
2208 alternate names now. */
c81bebd7 2209 if (TARGET_REGNAMES)
4e135bdd 2210 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
2211#endif
2212
df01da37 2213 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
2214 If -maix-struct-return or -msvr4-struct-return was explicitly
2215 used, don't override with the ABI default. */
df01da37
DE
2216 if (!rs6000_explicit_options.aix_struct_ret)
2217 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 2218
602ea4d3 2219 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 2220 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 2221
f676971a 2222 if (TARGET_TOC)
9ebbca7d 2223 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 2224
301d03af
RS
2225 /* We can only guarantee the availability of DI pseudo-ops when
2226 assembling for 64-bit targets. */
ae6c1efd 2227 if (!TARGET_64BIT)
301d03af
RS
2228 {
2229 targetm.asm_out.aligned_op.di = NULL;
2230 targetm.asm_out.unaligned_op.di = NULL;
2231 }
2232
1494c534
DE
2233 /* Set branch target alignment, if not optimizing for size. */
2234 if (!optimize_size)
2235 {
d296e02e
AP
2236 /* Cell wants to be aligned to 8 bytes for dual issue. */
2237 if (rs6000_cpu == PROCESSOR_CELL)
2238 {
2239 if (align_functions <= 0)
2240 align_functions = 8;
2241 if (align_jumps <= 0)
2242 align_jumps = 8;
2243 if (align_loops <= 0)
2244 align_loops = 8;
2245 }
44cd321e 2246 if (rs6000_align_branch_targets)
1494c534
DE
2247 {
2248 if (align_functions <= 0)
2249 align_functions = 16;
2250 if (align_jumps <= 0)
2251 align_jumps = 16;
2252 if (align_loops <= 0)
2253 align_loops = 16;
2254 }
2255 if (align_jumps_max_skip <= 0)
2256 align_jumps_max_skip = 15;
2257 if (align_loops_max_skip <= 0)
2258 align_loops_max_skip = 15;
2259 }
2792d578 2260
71f123ca
FS
2261 /* Arrange to save and restore machine status around nested functions. */
2262 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
2263
2264 /* We should always be splitting complex arguments, but we can't break
2265 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 2266 if (DEFAULT_ABI != ABI_AIX)
42ba5130 2267 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
2268
2269 /* Initialize rs6000_cost with the appropriate target costs. */
2270 if (optimize_size)
2271 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
2272 else
2273 switch (rs6000_cpu)
2274 {
2275 case PROCESSOR_RIOS1:
2276 rs6000_cost = &rios1_cost;
2277 break;
2278
2279 case PROCESSOR_RIOS2:
2280 rs6000_cost = &rios2_cost;
2281 break;
2282
2283 case PROCESSOR_RS64A:
2284 rs6000_cost = &rs64a_cost;
2285 break;
2286
2287 case PROCESSOR_MPCCORE:
2288 rs6000_cost = &mpccore_cost;
2289 break;
2290
2291 case PROCESSOR_PPC403:
2292 rs6000_cost = &ppc403_cost;
2293 break;
2294
2295 case PROCESSOR_PPC405:
2296 rs6000_cost = &ppc405_cost;
2297 break;
2298
2299 case PROCESSOR_PPC440:
2300 rs6000_cost = &ppc440_cost;
2301 break;
2302
2303 case PROCESSOR_PPC601:
2304 rs6000_cost = &ppc601_cost;
2305 break;
2306
2307 case PROCESSOR_PPC603:
2308 rs6000_cost = &ppc603_cost;
2309 break;
2310
2311 case PROCESSOR_PPC604:
2312 rs6000_cost = &ppc604_cost;
2313 break;
2314
2315 case PROCESSOR_PPC604e:
2316 rs6000_cost = &ppc604e_cost;
2317 break;
2318
2319 case PROCESSOR_PPC620:
8b897cfa
RS
2320 rs6000_cost = &ppc620_cost;
2321 break;
2322
f0517163
RS
2323 case PROCESSOR_PPC630:
2324 rs6000_cost = &ppc630_cost;
2325 break;
2326
982afe02 2327 case PROCESSOR_CELL:
d296e02e
AP
2328 rs6000_cost = &ppccell_cost;
2329 break;
2330
8b897cfa
RS
2331 case PROCESSOR_PPC750:
2332 case PROCESSOR_PPC7400:
2333 rs6000_cost = &ppc750_cost;
2334 break;
2335
2336 case PROCESSOR_PPC7450:
2337 rs6000_cost = &ppc7450_cost;
2338 break;
2339
2340 case PROCESSOR_PPC8540:
2341 rs6000_cost = &ppc8540_cost;
2342 break;
2343
fa41c305
EW
2344 case PROCESSOR_PPCE300C2:
2345 case PROCESSOR_PPCE300C3:
2346 rs6000_cost = &ppce300c2c3_cost;
2347 break;
2348
edae5fe3
DE
2349 case PROCESSOR_PPCE500MC:
2350 rs6000_cost = &ppce500mc_cost;
2351 break;
2352
8b897cfa
RS
2353 case PROCESSOR_POWER4:
2354 case PROCESSOR_POWER5:
2355 rs6000_cost = &power4_cost;
2356 break;
2357
44cd321e
PS
2358 case PROCESSOR_POWER6:
2359 rs6000_cost = &power6_cost;
2360 break;
2361
cacf1ca8
MM
2362 case PROCESSOR_POWER7:
2363 rs6000_cost = &power7_cost;
2364 break;
2365
8b897cfa 2366 default:
37409796 2367 gcc_unreachable ();
8b897cfa 2368 }
0b11da67
DE
2369
2370 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
2371 set_param_value ("simultaneous-prefetches",
2372 rs6000_cost->simultaneous_prefetches);
2373 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 2374 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
2375 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
2376 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
2377 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
2378 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
2379
2380 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
2381 can be optimized to ap = __builtin_next_arg (0). */
2382 if (DEFAULT_ABI != ABI_V4)
2383 targetm.expand_builtin_va_start = NULL;
696e45ba
ME
2384
2385 /* Set up single/double float flags.
2386 If TARGET_HARD_FLOAT is set, but neither single nor double is set,
2387 then set both flags. */
2388 if (TARGET_HARD_FLOAT && TARGET_FPRS
2389 && rs6000_single_float == 0 && rs6000_double_float == 0)
2390 rs6000_single_float = rs6000_double_float = 1;
2391
2392 /* Reset single and double FP flags if target is E500. */
2393 if (TARGET_E500)
2394 {
2395 rs6000_single_float = rs6000_double_float = 0;
2396 if (TARGET_E500_SINGLE)
2397 rs6000_single_float = 1;
2398 if (TARGET_E500_DOUBLE)
2399 rs6000_single_float = rs6000_double_float = 1;
2400 }
2401
001b9eb6
PH
2402 /* If not explicitly specified via option, decide whether to generate indexed
2403 load/store instructions. */
2404 if (TARGET_AVOID_XFORM == -1)
2405 /* Avoid indexed addressing when targeting Power6 in order to avoid
2406 the DERAT mispredict penalty. */
2407 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
2408
696e45ba 2409 rs6000_init_hard_regno_mode_ok ();
5248c961 2410}
5accd822 2411
7ccf35ed
DN
2412/* Implement targetm.vectorize.builtin_mask_for_load. */
2413static tree
2414rs6000_builtin_mask_for_load (void)
2415{
cacf1ca8 2416 if (TARGET_ALTIVEC || TARGET_VSX)
7ccf35ed
DN
2417 return altivec_builtin_mask_for_load;
2418 else
2419 return 0;
2420}
2421
7910ae0c
DN
2422/* Implement targetm.vectorize.builtin_conversion.
2423 Returns a decl of a function that implements conversion of an integer vector
2424 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2425 side of the conversion.
2426 Return NULL_TREE if it is not available. */
f57d17f1 2427static tree
744aa42f 2428rs6000_builtin_conversion (unsigned int tcode, tree type)
f57d17f1 2429{
744aa42f
ILT
2430 enum tree_code code = (enum tree_code) tcode;
2431
f57d17f1
TM
2432 if (!TARGET_ALTIVEC)
2433 return NULL_TREE;
982afe02 2434
f57d17f1
TM
2435 switch (code)
2436 {
7910ae0c
DN
2437 case FIX_TRUNC_EXPR:
2438 switch (TYPE_MODE (type))
2439 {
2440 case V4SImode:
2441 return TYPE_UNSIGNED (type)
2442 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2443 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2444 default:
2445 return NULL_TREE;
2446 }
2447
f57d17f1
TM
2448 case FLOAT_EXPR:
2449 switch (TYPE_MODE (type))
2450 {
2451 case V4SImode:
7910ae0c
DN
2452 return TYPE_UNSIGNED (type)
2453 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2454 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2455 default:
2456 return NULL_TREE;
2457 }
7910ae0c 2458
f57d17f1
TM
2459 default:
2460 return NULL_TREE;
2461 }
2462}
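/* Editorial example (not part of the original source): for a FLOAT_EXPR
   whose integer side is an unsigned V4SI type, the function above returns
   the ALTIVEC_BUILTIN_VCFUX decl; a signed V4SI FIX_TRUNC_EXPR yields
   ALTIVEC_BUILTIN_VCTSXS.  */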
2463
89d67cca
DN
2464/* Implement targetm.vectorize.builtin_mul_widen_even. */
2465static tree
2466rs6000_builtin_mul_widen_even (tree type)
2467{
2468 if (!TARGET_ALTIVEC)
2469 return NULL_TREE;
2470
2471 switch (TYPE_MODE (type))
2472 {
2473 case V8HImode:
7910ae0c
DN
2474 return TYPE_UNSIGNED (type)
2475 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2476 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2477
2478 case V16QImode:
7910ae0c
DN
2479 return TYPE_UNSIGNED (type)
2480 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2481 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2482 default:
2483 return NULL_TREE;
2484 }
2485}
2486
2487/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2488static tree
2489rs6000_builtin_mul_widen_odd (tree type)
2490{
2491 if (!TARGET_ALTIVEC)
2492 return NULL_TREE;
2493
2494 switch (TYPE_MODE (type))
2495 {
2496 case V8HImode:
7910ae0c
DN
2497 return TYPE_UNSIGNED (type)
2498 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2499 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2500
2501 case V16QImode:
7910ae0c
DN
2502 return TYPE_UNSIGNED (type)
2503 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2504 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2505 default:
2506 return NULL_TREE;
2507 }
2508}
2509
5b900a4c
DN
2510
2511/* Return true iff a data reference of TYPE can reach vector alignment (16)
2512 after applying N iterations. This routine does not determine
2513 how many iterations are required to reach the desired alignment. */
2514
2515static bool
3101faab 2516rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2517{
2518 if (is_packed)
2519 return false;
2520
2521 if (TARGET_32BIT)
2522 {
2523 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2524 return true;
2525
2526 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2527 return true;
2528
2529 return false;
2530 }
2531 else
2532 {
2533 if (TARGET_MACHO)
2534 return false;
2535
2536 /* Assuming that all other types are naturally aligned. CHECKME! */
2537 return true;
2538 }
2539}
2540
0fca40f5
IR
2541/* Implement targetm.vectorize.builtin_vec_perm. */
2542tree
2543rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2544{
2545 tree d;
2546
2547 *mask_element_type = unsigned_char_type_node;
2548
2549 switch (TYPE_MODE (type))
2550 {
2551 case V16QImode:
2552 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2553 break;
2554
2555 case V8HImode:
2556 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2557 break;
2558
2559 case V4SImode:
2560 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2561 break;
2562
2563 case V4SFmode:
2564 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2565 break;
2566
2567 default:
2568 return NULL_TREE;
2569 }
2570
2571 gcc_assert (d);
2572 return d;
2573}
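/* Editorial example (not part of the original source): for a V4SF vector
   type this returns the ALTIVEC_BUILTIN_VPERM_4SF decl and reports that
   the permute mask is a vector of unsigned chars.  */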
2574
5da702b1
AH
2575/* Handle generic options of the form -mfoo=yes/no.
2576 NAME is the option name.
2577 VALUE is the option value.
2578 FLAG points to the flag in which to store 1 or 0, depending on
2579 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2580static void
5da702b1 2581rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2582{
5da702b1 2583 if (value == 0)
993f19a8 2584 return;
5da702b1
AH
2585 else if (!strcmp (value, "yes"))
2586 *flag = 1;
2587 else if (!strcmp (value, "no"))
2588 *flag = 0;
08b57fb3 2589 else
5da702b1 2590 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2591}
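/* Editorial example (not part of the original source): for "-mvrsave=yes"
   the caller passes NAME "vrsave" and VALUE "yes", so *FLAG is set to 1;
   any VALUE other than "yes" or "no" trips the error above.  */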
2592
c4501e62
JJ
2593/* Validate and record the size specified with the -mtls-size option. */
2594
2595static void
863d938c 2596rs6000_parse_tls_size_option (void)
c4501e62
JJ
2597{
2598 if (rs6000_tls_size_string == 0)
2599 return;
2600 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2601 rs6000_tls_size = 16;
2602 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2603 rs6000_tls_size = 32;
2604 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2605 rs6000_tls_size = 64;
2606 else
9e637a26 2607 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2608}
2609
5accd822 2610void
a2369ed3 2611optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2612{
2e3f0db6
DJ
2613 if (DEFAULT_ABI == ABI_DARWIN)
2614 /* The Darwin libraries never set errno, so we might as well
2615 avoid calling them when that's the only reason we would. */
2616 flag_errno_math = 0;
59d6560b
DE
2617
2618 /* Double growth factor to counter reduced min jump length. */
2619 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2620
2621 /* Enable section anchors by default.
2622 Skip section anchors for Objective C and Objective C++
2f3b0d4a
ST
2623 until the front ends are fixed. */
2624 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2625 flag_section_anchors = 2;
5accd822 2626}
78f5898b 2627
0bb7b92e
ME
2628static enum fpu_type_t
2629rs6000_parse_fpu_option (const char *option)
2630{
2631 if (!strcmp("none", option)) return FPU_NONE;
2632 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2633 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2634 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2635 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2636 error("unknown value %s for -mfpu", option);
2637 return FPU_NONE;
2638}
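/* Editorial example (not part of the original source): "-mfpu=sp_lite"
   maps to FPU_SF_LITE here; the OPT_mfpu_ handler below then clears
   MASK_SOFT_FLOAT and sets rs6000_single_float and rs6000_simple_fpu.  */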
2639
78f5898b
AH
2640/* Implement TARGET_HANDLE_OPTION. */
2641
2642static bool
2643rs6000_handle_option (size_t code, const char *arg, int value)
2644{
0bb7b92e 2645 enum fpu_type_t fpu_type = FPU_NONE;
cacf1ca8 2646 int isel;
0bb7b92e 2647
78f5898b
AH
2648 switch (code)
2649 {
2650 case OPT_mno_power:
2651 target_flags &= ~(MASK_POWER | MASK_POWER2
2652 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2653 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2654 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2655 break;
2656 case OPT_mno_powerpc:
2657 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2658 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2659 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2660 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2661 break;
2662 case OPT_mfull_toc:
d2894ab5
DE
2663 target_flags &= ~MASK_MINIMAL_TOC;
2664 TARGET_NO_FP_IN_TOC = 0;
2665 TARGET_NO_SUM_IN_TOC = 0;
2666 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2667#ifdef TARGET_USES_SYSV4_OPT
2668 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
2669 just the same as -mminimal-toc. */
2670 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2671 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2672#endif
2673 break;
2674
2675#ifdef TARGET_USES_SYSV4_OPT
2676 case OPT_mtoc:
2677 /* Make -mtoc behave like -mminimal-toc. */
2678 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2679 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2680 break;
2681#endif
2682
2683#ifdef TARGET_USES_AIX64_OPT
2684 case OPT_maix64:
2685#else
2686 case OPT_m64:
2687#endif
2c9c9afd
AM
2688 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2689 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2690 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2691 break;
2692
2693#ifdef TARGET_USES_AIX64_OPT
2694 case OPT_maix32:
2695#else
2696 case OPT_m32:
2697#endif
2698 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2699 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2700 break;
2701
2702 case OPT_minsert_sched_nops_:
2703 rs6000_sched_insert_nops_str = arg;
2704 break;
2705
2706 case OPT_mminimal_toc:
2707 if (value == 1)
2708 {
d2894ab5
DE
2709 TARGET_NO_FP_IN_TOC = 0;
2710 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2711 }
2712 break;
2713
2714 case OPT_mpower:
2715 if (value == 1)
c2dba4ab
AH
2716 {
2717 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2718 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2719 }
78f5898b
AH
2720 break;
2721
2722 case OPT_mpower2:
2723 if (value == 1)
c2dba4ab
AH
2724 {
2725 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2726 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2727 }
78f5898b
AH
2728 break;
2729
2730 case OPT_mpowerpc_gpopt:
2731 case OPT_mpowerpc_gfxopt:
2732 if (value == 1)
c2dba4ab
AH
2733 {
2734 target_flags |= MASK_POWERPC;
2735 target_flags_explicit |= MASK_POWERPC;
2736 }
78f5898b
AH
2737 break;
2738
df01da37
DE
2739 case OPT_maix_struct_return:
2740 case OPT_msvr4_struct_return:
2741 rs6000_explicit_options.aix_struct_ret = true;
2742 break;
2743
b5e3caf2
BE
2744 case OPT_mvrsave:
2745 rs6000_explicit_options.vrsave = true;
2746 TARGET_ALTIVEC_VRSAVE = value;
2747 break;
2748
78f5898b 2749 case OPT_mvrsave_:
a2db2771 2750 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2751 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2752 break;
78f5898b
AH
2753
2754 case OPT_misel_:
cacf1ca8
MM
2755 target_flags_explicit |= MASK_ISEL;
2756 isel = 0;
2757 rs6000_parse_yes_no_option ("isel", arg, &isel);
2758 if (isel)
2759 target_flags |= MASK_ISEL;
2760 else
2761 target_flags &= ~MASK_ISEL;
78f5898b
AH
2762 break;
2763
94f4765c
NF
2764 case OPT_mspe:
2765 rs6000_explicit_options.spe = true;
2766 rs6000_spe = value;
2767 break;
2768
78f5898b
AH
2769 case OPT_mspe_:
2770 rs6000_explicit_options.spe = true;
2771 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2772 break;
2773
2774 case OPT_mdebug_:
2775 rs6000_debug_name = arg;
2776 break;
2777
2778#ifdef TARGET_USES_SYSV4_OPT
2779 case OPT_mcall_:
2780 rs6000_abi_name = arg;
2781 break;
2782
2783 case OPT_msdata_:
2784 rs6000_sdata_name = arg;
2785 break;
2786
2787 case OPT_mtls_size_:
2788 rs6000_tls_size_string = arg;
2789 break;
2790
2791 case OPT_mrelocatable:
2792 if (value == 1)
c2dba4ab 2793 {
e0bf274f
AM
2794 target_flags |= MASK_MINIMAL_TOC;
2795 target_flags_explicit |= MASK_MINIMAL_TOC;
2796 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2797 }
78f5898b
AH
2798 break;
2799
2800 case OPT_mrelocatable_lib:
2801 if (value == 1)
c2dba4ab 2802 {
e0bf274f
AM
2803 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2804 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2805 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2806 }
78f5898b 2807 else
c2dba4ab
AH
2808 {
2809 target_flags &= ~MASK_RELOCATABLE;
2810 target_flags_explicit |= MASK_RELOCATABLE;
2811 }
78f5898b
AH
2812 break;
2813#endif
2814
2815 case OPT_mabi_:
78f5898b
AH
2816 if (!strcmp (arg, "altivec"))
2817 {
a2db2771 2818 rs6000_explicit_options.altivec_abi = true;
78f5898b 2819 rs6000_altivec_abi = 1;
a2db2771
JJ
2820
2821 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2822 rs6000_spe_abi = 0;
2823 }
2824 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2825 {
a2db2771 2826 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2827 rs6000_altivec_abi = 0;
2828 }
78f5898b
AH
2829 else if (! strcmp (arg, "spe"))
2830 {
a2db2771 2831 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2832 rs6000_spe_abi = 1;
2833 rs6000_altivec_abi = 0;
2834 if (!TARGET_SPE_ABI)
2835 error ("not configured for ABI: '%s'", arg);
2836 }
2837 else if (! strcmp (arg, "no-spe"))
d3603e8c 2838 {
a2db2771 2839 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2840 rs6000_spe_abi = 0;
2841 }
78f5898b
AH
2842
2843 /* These are here for testing during development only, do not
2844 document in the manual please. */
2845 else if (! strcmp (arg, "d64"))
2846 {
2847 rs6000_darwin64_abi = 1;
2848 warning (0, "Using darwin64 ABI");
2849 }
2850 else if (! strcmp (arg, "d32"))
2851 {
2852 rs6000_darwin64_abi = 0;
2853 warning (0, "Using old darwin ABI");
2854 }
2855
602ea4d3
JJ
2856 else if (! strcmp (arg, "ibmlongdouble"))
2857 {
d3603e8c 2858 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2859 rs6000_ieeequad = 0;
2860 warning (0, "Using IBM extended precision long double");
2861 }
2862 else if (! strcmp (arg, "ieeelongdouble"))
2863 {
d3603e8c 2864 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2865 rs6000_ieeequad = 1;
2866 warning (0, "Using IEEE extended precision long double");
2867 }
2868
78f5898b
AH
2869 else
2870 {
2871 error ("unknown ABI specified: '%s'", arg);
2872 return false;
2873 }
2874 break;
2875
2876 case OPT_mcpu_:
2877 rs6000_select[1].string = arg;
2878 break;
2879
2880 case OPT_mtune_:
2881 rs6000_select[2].string = arg;
2882 break;
2883
2884 case OPT_mtraceback_:
2885 rs6000_traceback_name = arg;
2886 break;
2887
2888 case OPT_mfloat_gprs_:
2889 rs6000_explicit_options.float_gprs = true;
2890 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2891 rs6000_float_gprs = 1;
2892 else if (! strcmp (arg, "double"))
2893 rs6000_float_gprs = 2;
2894 else if (! strcmp (arg, "no"))
2895 rs6000_float_gprs = 0;
2896 else
2897 {
2898 error ("invalid option for -mfloat-gprs: '%s'", arg);
2899 return false;
2900 }
2901 break;
2902
2903 case OPT_mlong_double_:
2904 rs6000_explicit_options.long_double = true;
2905 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2906 if (value != 64 && value != 128)
2907 {
2908 error ("Unknown switch -mlong-double-%s", arg);
2909 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2910 return false;
2911 }
2912 else
2913 rs6000_long_double_type_size = value;
2914 break;
2915
2916 case OPT_msched_costly_dep_:
2917 rs6000_sched_costly_dep_str = arg;
2918 break;
2919
2920 case OPT_malign_:
2921 rs6000_explicit_options.alignment = true;
2922 if (! strcmp (arg, "power"))
2923 {
2924 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2925 some C library functions, so warn about it. The flag may be
2926 useful for performance studies from time to time though, so
2927 don't disable it entirely. */
2928 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2929 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2930 " it is incompatible with the installed C and C++ libraries");
2931 rs6000_alignment_flags = MASK_ALIGN_POWER;
2932 }
2933 else if (! strcmp (arg, "natural"))
2934 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2935 else
2936 {
2937 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2938 return false;
2939 }
2940 break;
696e45ba
ME
2941
2942 case OPT_msingle_float:
2943 if (!TARGET_SINGLE_FPU)
2944 warning (0, "-msingle-float option equivalent to -mhard-float");
2945 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2946 rs6000_double_float = 0;
2947 target_flags &= ~MASK_SOFT_FLOAT;
2948 target_flags_explicit |= MASK_SOFT_FLOAT;
2949 break;
2950
2951 case OPT_mdouble_float:
2952 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2953 rs6000_single_float = 1;
2954 target_flags &= ~MASK_SOFT_FLOAT;
2955 target_flags_explicit |= MASK_SOFT_FLOAT;
2956 break;
2957
2958 case OPT_msimple_fpu:
2959 if (!TARGET_SINGLE_FPU)
2960 warning (0, "-msimple-fpu option ignored");
2961 break;
2962
2963 case OPT_mhard_float:
2964 /* -mhard-float implies -msingle-float and -mdouble-float. */
2965 rs6000_single_float = rs6000_double_float = 1;
2966 break;
2967
2968 case OPT_msoft_float:
2969 /* -msoft-float implies -mno-single-float and -mno-double-float. */
2970 rs6000_single_float = rs6000_double_float = 0;
2971 break;
0bb7b92e
ME
2972
2973 case OPT_mfpu_:
2974 fpu_type = rs6000_parse_fpu_option(arg);
2975 if (fpu_type != FPU_NONE)
2976 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2977 {
2978 target_flags &= ~MASK_SOFT_FLOAT;
2979 target_flags_explicit |= MASK_SOFT_FLOAT;
2980 rs6000_xilinx_fpu = 1;
2981 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2982 rs6000_single_float = 1;
2983 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2984 rs6000_single_float = rs6000_double_float = 1;
2985 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2986 rs6000_simple_fpu = 1;
2987 }
2988 else
2989 {
2990 /* -mfpu=none is equivalent to -msoft-float */
2991 target_flags |= MASK_SOFT_FLOAT;
2992 target_flags_explicit |= MASK_SOFT_FLOAT;
2993 rs6000_single_float = rs6000_double_float = 0;
2994 }
2995 break;
78f5898b
AH
2996 }
2997 return true;
2998}
3cfa4909
MM
2999\f
3000/* Do anything needed at the start of the asm file. */
3001
1bc7c5b6 3002static void
863d938c 3003rs6000_file_start (void)
3cfa4909 3004{
c4d38ccb 3005 size_t i;
3cfa4909 3006 char buffer[80];
d330fd93 3007 const char *start = buffer;
3cfa4909 3008 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
3009 const char *default_cpu = TARGET_CPU_DEFAULT;
3010 FILE *file = asm_out_file;
3011
3012 default_file_start ();
3013
3014#ifdef TARGET_BI_ARCH
3015 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
3016 default_cpu = 0;
3017#endif
3cfa4909
MM
3018
3019 if (flag_verbose_asm)
3020 {
3021 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
3022 rs6000_select[0].string = default_cpu;
3023
b6a1cbae 3024 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
3025 {
3026 ptr = &rs6000_select[i];
3027 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
3028 {
3029 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
3030 start = "";
3031 }
3032 }
3033
9c6b4ed9 3034 if (PPC405_ERRATUM77)
b0bfee6e 3035 {
9c6b4ed9 3036 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
3037 start = "";
3038 }
b0bfee6e 3039
b91da81f 3040#ifdef USING_ELFOS_H
3cfa4909
MM
3041 switch (rs6000_sdata)
3042 {
3043 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
3044 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
3045 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
3046 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
3047 }
3048
3049 if (rs6000_sdata && g_switch_value)
3050 {
307b599c
MK
3051 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
3052 g_switch_value);
3cfa4909
MM
3053 start = "";
3054 }
3055#endif
3056
3057 if (*start == '\0')
949ea356 3058 putc ('\n', file);
3cfa4909 3059 }
b723e82f 3060
e51917ae
JM
3061#ifdef HAVE_AS_GNU_ATTRIBUTE
3062 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
3063 {
3064 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
3065 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
3066 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
3067 : 2));
aaa42494
DJ
3068 fprintf (file, "\t.gnu_attribute 8, %d\n",
3069 (TARGET_ALTIVEC_ABI ? 2
3070 : TARGET_SPE_ABI ? 3
3071 : 1));
f9fd1e77
NF
3072 fprintf (file, "\t.gnu_attribute 12, %d\n",
3073 aix_struct_return ? 2 : 1);
3074
aaa42494 3075 }
e51917ae
JM
3076#endif
3077
b723e82f
JJ
3078 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
3079 {
d6b5193b
RS
3080 switch_to_section (toc_section);
3081 switch_to_section (text_section);
b723e82f 3082 }
3cfa4909 3083}
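/* Editorial sketch, not part of the original source: for a 32-bit SVR4
   (ABI_V4) compile using hard float with double-precision FPRs and the
   AltiVec ABI, the HAVE_AS_GNU_ATTRIBUTE block above would emit roughly:

	.gnu_attribute 4, 1     # FP ABI: double-precision hard float
	.gnu_attribute 8, 2     # vector ABI: AltiVec
	.gnu_attribute 12, 1    # struct return convention (aix_struct_return == 0)

   preceded, under -fverbose-asm, by a comment line listing the selected
   rs6000/powerpc options.  */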
c4e18b1c 3084
5248c961 3085\f
a0ab749a 3086/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
3087
3088int
863d938c 3089direct_return (void)
9878760c 3090{
4697a36c
MM
3091 if (reload_completed)
3092 {
3093 rs6000_stack_t *info = rs6000_stack_info ();
3094
3095 if (info->first_gp_reg_save == 32
3096 && info->first_fp_reg_save == 64
00b960c7 3097 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
3098 && ! info->lr_save_p
3099 && ! info->cr_save_p
00b960c7 3100 && info->vrsave_mask == 0
c81fc13e 3101 && ! info->push_p)
4697a36c
MM
3102 return 1;
3103 }
3104
3105 return 0;
9878760c
RK
3106}
3107
4e74d8ec
MM
3108/* Return the number of instructions it takes to form a constant in an
3109 integer register. */
3110
48d72335 3111int
a2369ed3 3112num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
3113{
3114 /* signed constant loadable with {cal|addi} */
547b216d 3115 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
3116 return 1;
3117
4e74d8ec 3118 /* constant loadable with {cau|addis} */
547b216d
DE
3119 else if ((value & 0xffff) == 0
3120 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
3121 return 1;
3122
5f59ecb7 3123#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 3124 else if (TARGET_POWERPC64)
4e74d8ec 3125 {
a65c591c
DE
3126 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
3127 HOST_WIDE_INT high = value >> 31;
4e74d8ec 3128
a65c591c 3129 if (high == 0 || high == -1)
4e74d8ec
MM
3130 return 2;
3131
a65c591c 3132 high >>= 1;
4e74d8ec 3133
a65c591c 3134 if (low == 0)
4e74d8ec 3135 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
3136 else
3137 return (num_insns_constant_wide (high)
e396202a 3138 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
3139 }
3140#endif
3141
3142 else
3143 return 2;
3144}
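/* Editorial worked examples, not part of the original source, for
   num_insns_constant_wide:

     0x7fff              -> 1  (fits the signed 16-bit addi/li case)
     -0x8000             -> 1  (likewise)
     0x12340000          -> 1  (low 16 bits zero: addis/lis)
     0x12345678          -> 2  (lis + ori style pair)

   On TARGET_POWERPC64 a general 64-bit constant recurses on its high and
   low halves and adds one combining insn, so the worst case is 5.  */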
3145
3146int
a2369ed3 3147num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 3148{
37409796 3149 HOST_WIDE_INT low, high;
bb8df8a6 3150
37409796 3151 switch (GET_CODE (op))
0d30d435 3152 {
37409796 3153 case CONST_INT:
0d30d435 3154#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 3155 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 3156 && mask64_operand (op, mode))
c4ad648e 3157 return 2;
0d30d435
DE
3158 else
3159#endif
3160 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 3161
37409796 3162 case CONST_DOUBLE:
e41b2a33 3163 if (mode == SFmode || mode == SDmode)
37409796
NS
3164 {
3165 long l;
3166 REAL_VALUE_TYPE rv;
bb8df8a6 3167
37409796 3168 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
3169 if (DECIMAL_FLOAT_MODE_P (mode))
3170 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
3171 else
3172 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
3173 return num_insns_constant_wide ((HOST_WIDE_INT) l);
3174 }
a260abc9 3175
37409796
NS
3176 if (mode == VOIDmode || mode == DImode)
3177 {
3178 high = CONST_DOUBLE_HIGH (op);
3179 low = CONST_DOUBLE_LOW (op);
3180 }
3181 else
3182 {
3183 long l[2];
3184 REAL_VALUE_TYPE rv;
bb8df8a6 3185
37409796 3186 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
3187 if (DECIMAL_FLOAT_MODE_P (mode))
3188 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
3189 else
3190 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
3191 high = l[WORDS_BIG_ENDIAN == 0];
3192 low = l[WORDS_BIG_ENDIAN != 0];
3193 }
47ad8c61 3194
37409796
NS
3195 if (TARGET_32BIT)
3196 return (num_insns_constant_wide (low)
3197 + num_insns_constant_wide (high));
3198 else
3199 {
3200 if ((high == 0 && low >= 0)
3201 || (high == -1 && low < 0))
3202 return num_insns_constant_wide (low);
bb8df8a6 3203
1990cd79 3204 else if (mask64_operand (op, mode))
37409796 3205 return 2;
bb8df8a6 3206
37409796
NS
3207 else if (low == 0)
3208 return num_insns_constant_wide (high) + 1;
bb8df8a6 3209
37409796
NS
3210 else
3211 return (num_insns_constant_wide (high)
3212 + num_insns_constant_wide (low) + 1);
3213 }
bb8df8a6 3214
37409796
NS
3215 default:
3216 gcc_unreachable ();
4e74d8ec 3217 }
4e74d8ec
MM
3218}
3219
0972012c
RS
3220/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
3221 If the mode of OP is MODE_VECTOR_INT, this simply returns the
3222 corresponding element of the vector, but for V4SFmode and V2SFmode,
3223 the corresponding "float" is interpreted as an SImode integer. */
3224
847535b6 3225HOST_WIDE_INT
0972012c
RS
3226const_vector_elt_as_int (rtx op, unsigned int elt)
3227{
3228 rtx tmp = CONST_VECTOR_ELT (op, elt);
3229 if (GET_MODE (op) == V4SFmode
3230 || GET_MODE (op) == V2SFmode)
3231 tmp = gen_lowpart (SImode, tmp);
3232 return INTVAL (tmp);
3233}
452a7d36 3234
77ccdfed 3235/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
3236 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
3237 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
3238 all items are set to the same value and contain COPIES replicas of the
3239 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
3240 operand and the others are set to the value of the operand's msb. */
3241
3242static bool
3243vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 3244{
66180ff3
PB
3245 enum machine_mode mode = GET_MODE (op);
3246 enum machine_mode inner = GET_MODE_INNER (mode);
3247
3248 unsigned i;
3249 unsigned nunits = GET_MODE_NUNITS (mode);
3250 unsigned bitsize = GET_MODE_BITSIZE (inner);
3251 unsigned mask = GET_MODE_MASK (inner);
3252
0972012c 3253 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
3254 HOST_WIDE_INT splat_val = val;
3255 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
3256
3257 /* Construct the value to be splatted, if possible. If not, return 0. */
3258 for (i = 2; i <= copies; i *= 2)
452a7d36 3259 {
66180ff3
PB
3260 HOST_WIDE_INT small_val;
3261 bitsize /= 2;
3262 small_val = splat_val >> bitsize;
3263 mask >>= bitsize;
3264 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
3265 return false;
3266 splat_val = small_val;
3267 }
c4ad648e 3268
66180ff3
PB
3269 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
3270 if (EASY_VECTOR_15 (splat_val))
3271 ;
3272
3273 /* Also check if we can splat, and then add the result to itself. Do so if
3274 the value is positive, or if the splat instruction is using OP's mode;
3275 for splat_val < 0, the splat and the add should use the same mode. */
3276 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
3277 && (splat_val >= 0 || (step == 1 && copies == 1)))
3278 ;
3279
3280 else
3281 return false;
3282
3283 /* Check if VAL is present in every STEP-th element, and the
3284 other elements are filled with its most significant bit. */
3285 for (i = 0; i < nunits - 1; ++i)
3286 {
3287 HOST_WIDE_INT desired_val;
3288 if (((i + 1) & (step - 1)) == 0)
3289 desired_val = val;
3290 else
3291 desired_val = msb_val;
3292
0972012c 3293 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 3294 return false;
452a7d36 3295 }
66180ff3
PB
3296
3297 return true;
452a7d36
HP
3298}
3299
69ef87e2 3300
77ccdfed 3301/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
3302 with a vspltisb, vspltish or vspltisw. */
3303
3304bool
3305easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 3306{
66180ff3 3307 unsigned step, copies;
d744e06e 3308
66180ff3
PB
3309 if (mode == VOIDmode)
3310 mode = GET_MODE (op);
3311 else if (mode != GET_MODE (op))
3312 return false;
d744e06e 3313
66180ff3
PB
3314 /* Start with a vspltisw. */
3315 step = GET_MODE_NUNITS (mode) / 4;
3316 copies = 1;
3317
3318 if (vspltis_constant (op, step, copies))
3319 return true;
3320
3321 /* Then try with a vspltish. */
3322 if (step == 1)
3323 copies <<= 1;
3324 else
3325 step >>= 1;
3326
3327 if (vspltis_constant (op, step, copies))
3328 return true;
3329
3330 /* And finally a vspltisb. */
3331 if (step == 1)
3332 copies <<= 1;
3333 else
3334 step >>= 1;
3335
3336 if (vspltis_constant (op, step, copies))
3337 return true;
3338
3339 return false;
d744e06e
AH
3340}
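/* Editorial walk-through, not part of the original source, of the
   step/copies progression tried by easy_altivec_constant above.

   For a V16QImode operand (16 units):
     try 1: step = 16/4 = 4, copies = 1   (vspltisw)
     try 2: step = 2,        copies = 1   (vspltish)
     try 3: step = 1,        copies = 1   (vspltisb)

   For a V4SImode operand (4 units):
     try 1: step = 1, copies = 1          (vspltisw)
     try 2: step = 1, copies = 2          (vspltish)
     try 3: step = 1, copies = 4          (vspltisb)

   Example: the V4SImode vector with every element 0x00050005 fails the
   vspltisw test but passes with copies = 2, so it can be loaded with a
   single "vspltish vD,5".  */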
3341
66180ff3
PB
3342/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
3343 result is OP. Abort if it is not possible. */
d744e06e 3344
f676971a 3345rtx
66180ff3 3346gen_easy_altivec_constant (rtx op)
452a7d36 3347{
66180ff3
PB
3348 enum machine_mode mode = GET_MODE (op);
3349 int nunits = GET_MODE_NUNITS (mode);
3350 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
3351 unsigned step = nunits / 4;
3352 unsigned copies = 1;
3353
3354 /* Start with a vspltisw. */
3355 if (vspltis_constant (op, step, copies))
3356 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
3357
3358 /* Then try with a vspltish. */
3359 if (step == 1)
3360 copies <<= 1;
3361 else
3362 step >>= 1;
3363
3364 if (vspltis_constant (op, step, copies))
3365 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
3366
3367 /* And finally a vspltisb. */
3368 if (step == 1)
3369 copies <<= 1;
3370 else
3371 step >>= 1;
3372
3373 if (vspltis_constant (op, step, copies))
3374 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
3375
3376 gcc_unreachable ();
d744e06e
AH
3377}
3378
3379const char *
a2369ed3 3380output_vec_const_move (rtx *operands)
d744e06e
AH
3381{
3382 int cst, cst2;
3383 enum machine_mode mode;
3384 rtx dest, vec;
3385
3386 dest = operands[0];
3387 vec = operands[1];
d744e06e 3388 mode = GET_MODE (dest);
69ef87e2 3389
d744e06e
AH
3390 if (TARGET_ALTIVEC)
3391 {
66180ff3 3392 rtx splat_vec;
d744e06e
AH
3393 if (zero_constant (vec, mode))
3394 return "vxor %0,%0,%0";
37409796 3395
66180ff3
PB
3396 splat_vec = gen_easy_altivec_constant (vec);
3397 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
3398 operands[1] = XEXP (splat_vec, 0);
3399 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
3400 return "#";
bb8df8a6 3401
66180ff3 3402 switch (GET_MODE (splat_vec))
98ef3137 3403 {
37409796 3404 case V4SImode:
66180ff3 3405 return "vspltisw %0,%1";
c4ad648e 3406
37409796 3407 case V8HImode:
66180ff3 3408 return "vspltish %0,%1";
c4ad648e 3409
37409796 3410 case V16QImode:
66180ff3 3411 return "vspltisb %0,%1";
bb8df8a6 3412
37409796
NS
3413 default:
3414 gcc_unreachable ();
98ef3137 3415 }
69ef87e2
AH
3416 }
3417
37409796 3418 gcc_assert (TARGET_SPE);
bb8df8a6 3419
37409796
NS
3420 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3421 pattern of V1DI, V4HI, and V2SF.
3422
3423 FIXME: We should probably return # and add post reload
3424 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
3425 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3426 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3427 operands[1] = CONST_VECTOR_ELT (vec, 0);
3428 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
3429 if (cst == cst2)
3430 return "li %0,%1\n\tevmergelo %0,%0,%0";
3431 else
3432 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
3433}
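/* Editorial examples, not part of the original source, of the templates
   output_vec_const_move returns:

     V4SImode { 0, 0, 0, 0 }        ->  "vxor %0,%0,%0"
     V4SImode { 5, 5, 5, 5 }        ->  "vspltisw %0,%1" with operands[1] = 5
     V8HImode, every element -3     ->  "vspltish %0,%1" with operands[1] = -3
     SPE V2SImode { 7, 7 }          ->  "li %0,%1\n\tevmergelo %0,%0,%0"  */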
3434
f5027409
RE
3435/* Initialize TARGET of vector PAIRED to VALS. */
3436
3437void
3438paired_expand_vector_init (rtx target, rtx vals)
3439{
3440 enum machine_mode mode = GET_MODE (target);
3441 int n_elts = GET_MODE_NUNITS (mode);
3442 int n_var = 0;
0a2aaacc 3443 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3444 int i;
3445
3446 for (i = 0; i < n_elts; ++i)
3447 {
3448 x = XVECEXP (vals, 0, i);
3449 if (!CONSTANT_P (x))
3450 ++n_var;
3451 }
3452 if (n_var == 0)
3453 {
3454 /* Load from constant pool. */
3455 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3456 return;
3457 }
3458
3459 if (n_var == 2)
3460 {
3461 /* The vector is initialized only with non-constants. */
0a2aaacc 3462 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3463 XVECEXP (vals, 0, 1));
3464
0a2aaacc 3465 emit_move_insn (target, new_rtx);
f5027409
RE
3466 return;
3467 }
3468
3469 /* One field is non-constant and the other one is a constant. Load the
3470 constant from the constant pool and use ps_merge instruction to
3471 construct the whole vector. */
3472 op1 = XVECEXP (vals, 0, 0);
3473 op2 = XVECEXP (vals, 0, 1);
3474
3475 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3476
3477 tmp = gen_reg_rtx (GET_MODE (constant_op));
3478 emit_move_insn (tmp, constant_op);
3479
3480 if (CONSTANT_P (op1))
0a2aaacc 3481 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3482 else
0a2aaacc 3483 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3484
0a2aaacc 3485 emit_move_insn (target, new_rtx);
f5027409
RE
3486}
3487
e2e95f45
RE
3488void
3489paired_expand_vector_move (rtx operands[])
3490{
3491 rtx op0 = operands[0], op1 = operands[1];
3492
3493 emit_move_insn (op0, op1);
3494}
3495
3496/* Emit vector compare for code RCODE. DEST is destination, OP1 and
3497 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3498 operands for the relation operation COND. This is a recursive
3499 function. */
3500
3501static void
3502paired_emit_vector_compare (enum rtx_code rcode,
3503 rtx dest, rtx op0, rtx op1,
3504 rtx cc_op0, rtx cc_op1)
3505{
3506 rtx tmp = gen_reg_rtx (V2SFmode);
3507 rtx tmp1, max, min, equal_zero;
3508
3509 gcc_assert (TARGET_PAIRED_FLOAT);
3510 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3511
3512 switch (rcode)
3513 {
3514 case LT:
3515 case LTU:
3516 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3517 return;
3518 case GE:
3519 case GEU:
3520 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3521 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3522 return;
3523 case LE:
3524 case LEU:
3525 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3526 return;
3527 case GT:
3528 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3529 return;
3530 case EQ:
3531 tmp1 = gen_reg_rtx (V2SFmode);
3532 max = gen_reg_rtx (V2SFmode);
3533 min = gen_reg_rtx (V2SFmode);
3534 equal_zero = gen_reg_rtx (V2SFmode);
3535
3536 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3537 emit_insn (gen_selv2sf4
3538 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3539 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3540 emit_insn (gen_selv2sf4
3541 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3542 emit_insn (gen_subv2sf3 (tmp1, min, max));
3543 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3544 return;
3545 case NE:
3546 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3547 return;
3548 case UNLE:
3549 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3550 return;
3551 case UNLT:
3552 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3553 return;
3554 case UNGE:
3555 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3556 return;
3557 case UNGT:
3558 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3559 return;
3560 default:
3561 gcc_unreachable ();
3562 }
3563
3564 return;
3565}
3566
3567/* Emit vector conditional expression.
3568 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3569 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3570
3571int
3572paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3573 rtx cond, rtx cc_op0, rtx cc_op1)
3574{
3575 enum rtx_code rcode = GET_CODE (cond);
3576
3577 if (!TARGET_PAIRED_FLOAT)
3578 return 0;
3579
3580 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3581
3582 return 1;
3583}
3584
7a4eca66
DE
3585/* Initialize vector TARGET to VALS. */
3586
3587void
3588rs6000_expand_vector_init (rtx target, rtx vals)
3589{
3590 enum machine_mode mode = GET_MODE (target);
3591 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3592 int n_elts = GET_MODE_NUNITS (mode);
3593 int n_var = 0, one_var = -1;
3594 bool all_same = true, all_const_zero = true;
3595 rtx x, mem;
3596 int i;
3597
3598 for (i = 0; i < n_elts; ++i)
3599 {
3600 x = XVECEXP (vals, 0, i);
3601 if (!CONSTANT_P (x))
3602 ++n_var, one_var = i;
3603 else if (x != CONST0_RTX (inner_mode))
3604 all_const_zero = false;
3605
3606 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3607 all_same = false;
3608 }
3609
3610 if (n_var == 0)
3611 {
501fb355 3612 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3613 if (mode != V4SFmode && all_const_zero)
3614 {
3615 /* Zero register. */
3616 emit_insn (gen_rtx_SET (VOIDmode, target,
3617 gen_rtx_XOR (mode, target, target)));
3618 return;
3619 }
501fb355 3620 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3621 {
3622 /* Splat immediate. */
501fb355 3623 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3624 return;
3625 }
3626 else if (all_same)
3627 ; /* Splat vector element. */
3628 else
3629 {
3630 /* Load from constant pool. */
501fb355 3631 emit_move_insn (target, const_vec);
7a4eca66
DE
3632 return;
3633 }
3634 }
3635
3636 /* Store value to stack temp. Load vector element. Splat. */
3637 if (all_same)
3638 {
3639 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3640 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3641 XVECEXP (vals, 0, 0));
3642 x = gen_rtx_UNSPEC (VOIDmode,
3643 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3644 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3645 gen_rtvec (2,
3646 gen_rtx_SET (VOIDmode,
3647 target, mem),
3648 x)));
3649 x = gen_rtx_VEC_SELECT (inner_mode, target,
3650 gen_rtx_PARALLEL (VOIDmode,
3651 gen_rtvec (1, const0_rtx)));
3652 emit_insn (gen_rtx_SET (VOIDmode, target,
3653 gen_rtx_VEC_DUPLICATE (mode, x)));
3654 return;
3655 }
3656
3657 /* One field is non-constant. Load constant then overwrite
3658 varying field. */
3659 if (n_var == 1)
3660 {
3661 rtx copy = copy_rtx (vals);
3662
57b51d4d 3663 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3664 varying element. */
3665 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3666 rs6000_expand_vector_init (target, copy);
3667
3668 /* Insert variable. */
3669 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3670 return;
3671 }
3672
3673 /* Construct the vector in memory one field at a time
3674 and load the whole vector. */
3675 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3676 for (i = 0; i < n_elts; i++)
3677 emit_move_insn (adjust_address_nv (mem, inner_mode,
3678 i * GET_MODE_SIZE (inner_mode)),
3679 XVECEXP (vals, 0, i));
3680 emit_move_insn (target, mem);
3681}
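/* Editorial summary, not part of the original source, of which path
   rs6000_expand_vector_init takes for a V4SImode target:

     { 0, 0, 0, 0 }   all constant, all zero   -> xor the register with itself
     { 5, 5, 5, 5 }   easy vector constant     -> splat immediate
     { x, x, x, x }   all same, variable       -> store x to a stack temp, load
                                                  one element, VEC_DUPLICATE it
     { 1, 2, 3, x }   one variable element     -> load the constant part, then
                                                  rs6000_expand_vector_set
     { a, b, c, d }   several variable fields  -> build the vector in memory
                                                  and load it whole  */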
3682
3683/* Set field ELT of TARGET to VAL. */
3684
3685void
3686rs6000_expand_vector_set (rtx target, rtx val, int elt)
3687{
3688 enum machine_mode mode = GET_MODE (target);
3689 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3690 rtx reg = gen_reg_rtx (mode);
3691 rtx mask, mem, x;
3692 int width = GET_MODE_SIZE (inner_mode);
3693 int i;
3694
3695 /* Load single variable value. */
3696 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3697 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3698 x = gen_rtx_UNSPEC (VOIDmode,
3699 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3700 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3701 gen_rtvec (2,
3702 gen_rtx_SET (VOIDmode,
3703 reg, mem),
3704 x)));
3705
3706 /* Linear sequence. */
3707 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3708 for (i = 0; i < 16; ++i)
3709 XVECEXP (mask, 0, i) = GEN_INT (i);
3710
3711 /* Set permute mask to insert element into target. */
3712 for (i = 0; i < width; ++i)
3713 XVECEXP (mask, 0, elt*width + i)
3714 = GEN_INT (i + 0x10);
3715 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3716 x = gen_rtx_UNSPEC (mode,
3717 gen_rtvec (3, target, reg,
3718 force_reg (V16QImode, x)),
3719 UNSPEC_VPERM);
3720 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3721}
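/* Editorial worked example, not part of the original source: setting
   element 2 of a V4SImode target gives inner_mode = SImode, width = 4.
   The permute mask starts as the identity bytes 0..15 and bytes
   elt*width .. elt*width+3 (here 8..11) are replaced by 0x10..0x13:

     mask = { 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 12, 13, 14, 15 }

   In a vperm, selector values 16..31 pick bytes from the second input, so
   the result keeps the original target everywhere except word 2, which is
   taken from the register holding the new value.  */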
3722
3723/* Extract field ELT from VEC into TARGET. */
3724
3725void
3726rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3727{
3728 enum machine_mode mode = GET_MODE (vec);
3729 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3730 rtx mem, x;
3731
3732 /* Allocate mode-sized buffer. */
3733 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3734
3735 /* Add offset to field within buffer matching vector element. */
3736 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3737
3738 /* Store single field into mode-sized buffer. */
3739 x = gen_rtx_UNSPEC (VOIDmode,
3740 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3741 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3742 gen_rtvec (2,
3743 gen_rtx_SET (VOIDmode,
3744 mem, vec),
3745 x)));
3746 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3747}
3748
0ba1b2ff
AM
3749/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3750 implement ANDing by the mask IN. */
3751void
a2369ed3 3752build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3753{
3754#if HOST_BITS_PER_WIDE_INT >= 64
3755 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3756 int shift;
3757
37409796 3758 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3759
3760 c = INTVAL (in);
3761 if (c & 1)
3762 {
3763 /* Assume c initially something like 0x00fff000000fffff. The idea
3764 is to rotate the word so that the middle ^^^^^^ group of zeros
3765 is at the MS end and can be cleared with an rldicl mask. We then
3766 rotate back and clear off the MS ^^ group of zeros with a
3767 second rldicl. */
3768 c = ~c; /* c == 0xff000ffffff00000 */
3769 lsb = c & -c; /* lsb == 0x0000000000100000 */
3770 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3771 c = ~c; /* c == 0x00fff000000fffff */
3772 c &= -lsb; /* c == 0x00fff00000000000 */
3773 lsb = c & -c; /* lsb == 0x0000100000000000 */
3774 c = ~c; /* c == 0xff000fffffffffff */
3775 c &= -lsb; /* c == 0xff00000000000000 */
3776 shift = 0;
3777 while ((lsb >>= 1) != 0)
3778 shift++; /* shift == 44 on exit from loop */
3779 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3780 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3781 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3782 }
3783 else
0ba1b2ff
AM
3784 {
3785 /* Assume c initially something like 0xff000f0000000000. The idea
3786 is to rotate the word so that the ^^^ middle group of zeros
3787 is at the LS end and can be cleared with an rldicr mask. We then
3788 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3789 a second rldicr. */
3790 lsb = c & -c; /* lsb == 0x0000010000000000 */
3791 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3792 c = ~c; /* c == 0x00fff0ffffffffff */
3793 c &= -lsb; /* c == 0x00fff00000000000 */
3794 lsb = c & -c; /* lsb == 0x0000100000000000 */
3795 c = ~c; /* c == 0xff000fffffffffff */
3796 c &= -lsb; /* c == 0xff00000000000000 */
3797 shift = 0;
3798 while ((lsb >>= 1) != 0)
3799 shift++; /* shift == 44 on exit from loop */
3800 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3801 m1 >>= shift; /* m1 == 0x0000000000000fff */
3802 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3803 }
3804
3805 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3806 masks will be all 1's. We are guaranteed more than one transition. */
3807 out[0] = GEN_INT (64 - shift);
3808 out[1] = GEN_INT (m1);
3809 out[2] = GEN_INT (shift);
3810 out[3] = GEN_INT (m2);
3811#else
045572c7
GK
3812 (void)in;
3813 (void)out;
37409796 3814 gcc_unreachable ();
0ba1b2ff 3815#endif
a260abc9
DE
3816}
3817
54b695e7 3818/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3819
3820bool
54b695e7
AH
3821invalid_e500_subreg (rtx op, enum machine_mode mode)
3822{
61c76239
JM
3823 if (TARGET_E500_DOUBLE)
3824 {
17caeff2 3825 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3826 subreg:TI and reg:TF. Decimal float modes are like integer
3827 modes (only low part of each register used) for this
3828 purpose. */
61c76239 3829 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3830 && (mode == SImode || mode == DImode || mode == TImode
3831 || mode == DDmode || mode == TDmode)
61c76239 3832 && REG_P (SUBREG_REG (op))
17caeff2 3833 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3834 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3835 return true;
3836
17caeff2
JM
3837 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3838 reg:TI. */
61c76239 3839 if (GET_CODE (op) == SUBREG
4f011e1e 3840 && (mode == DFmode || mode == TFmode)
61c76239 3841 && REG_P (SUBREG_REG (op))
17caeff2 3842 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3843 || GET_MODE (SUBREG_REG (op)) == TImode
3844 || GET_MODE (SUBREG_REG (op)) == DDmode
3845 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3846 return true;
3847 }
54b695e7 3848
61c76239
JM
3849 if (TARGET_SPE
3850 && GET_CODE (op) == SUBREG
3851 && mode == SImode
54b695e7 3852 && REG_P (SUBREG_REG (op))
14502dad 3853 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3854 return true;
3855
3856 return false;
3857}
3858
58182de3 3859/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3860 field is an FP double while the FP fields remain word aligned. */
3861
19d66194 3862unsigned int
fa5b0972
AM
3863rs6000_special_round_type_align (tree type, unsigned int computed,
3864 unsigned int specified)
95727fb8 3865{
fa5b0972 3866 unsigned int align = MAX (computed, specified);
95727fb8 3867 tree field = TYPE_FIELDS (type);
95727fb8 3868
bb8df8a6 3869 /* Skip all non-field decls. */
85962ac8 3870 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3871 field = TREE_CHAIN (field);
3872
fa5b0972
AM
3873 if (field != NULL && field != type)
3874 {
3875 type = TREE_TYPE (field);
3876 while (TREE_CODE (type) == ARRAY_TYPE)
3877 type = TREE_TYPE (type);
3878
3879 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3880 align = MAX (align, 64);
3881 }
95727fb8 3882
fa5b0972 3883 return align;
95727fb8
AP
3884}
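/* Editorial example, not part of the original source, of the AIX rule
   implemented above (assuming the usual power alignment behaviour):

     struct s1 { double d; int i; };   alignment raised to 64 bits because
                                       the first field is a double
     struct s2 { int i; double d; };   alignment not raised; the double
                                       itself stays word aligned  */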
3885
58182de3
GK
3886/* Darwin increases record alignment to the natural alignment of
3887 the first field. */
3888
3889unsigned int
3890darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3891 unsigned int specified)
3892{
3893 unsigned int align = MAX (computed, specified);
3894
3895 if (TYPE_PACKED (type))
3896 return align;
3897
3898 /* Find the first field, looking down into aggregates. */
3899 do {
3900 tree field = TYPE_FIELDS (type);
3901 /* Skip all non-field decls. */
3902 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3903 field = TREE_CHAIN (field);
3904 if (! field)
3905 break;
3906 type = TREE_TYPE (field);
3907 while (TREE_CODE (type) == ARRAY_TYPE)
3908 type = TREE_TYPE (type);
3909 } while (AGGREGATE_TYPE_P (type));
3910
3911 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3912 align = MAX (align, TYPE_ALIGN (type));
3913
3914 return align;
3915}
3916
a4f6c312 3917/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3918
3919int
f676971a 3920small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3921 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3922{
38c1f2d7 3923#if TARGET_ELF
5f59ecb7 3924 rtx sym_ref;
7509c759 3925
d9407988 3926 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3927 return 0;
a54d04b7 3928
f607bc57 3929 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3930 return 0;
3931
2aa42e6e
NF
3932 /* Vector and float memory instructions have a limited offset on the
3933 SPE, so using a vector or float variable directly as an operand is
3934 not useful. */
3935 if (TARGET_SPE
3936 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3937 return 0;
3938
88228c4b
MM
3939 if (GET_CODE (op) == SYMBOL_REF)
3940 sym_ref = op;
3941
3942 else if (GET_CODE (op) != CONST
3943 || GET_CODE (XEXP (op, 0)) != PLUS
3944 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3945 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3946 return 0;
3947
88228c4b 3948 else
dbf55e53
MM
3949 {
3950 rtx sum = XEXP (op, 0);
3951 HOST_WIDE_INT summand;
3952
3953 /* We have to be careful here, because it is the referenced address
c4ad648e 3954 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3955 summand = INTVAL (XEXP (sum, 1));
307b599c 3956 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3957 return 0;
dbf55e53
MM
3958
3959 sym_ref = XEXP (sum, 0);
3960 }
88228c4b 3961
20bfcd69 3962 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3963#else
3964 return 0;
3965#endif
7509c759 3966}
46c07df8 3967
3a1f863f 3968/* Return true if either operand is a general purpose register. */
46c07df8 3969
3a1f863f
DE
3970bool
3971gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3972{
3a1f863f
DE
3973 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3974 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3975}
3976
9ebbca7d 3977\f
c6c3dba9 3978/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address_p. */
4d588c14 3979
4d588c14 3980static bool
a2369ed3 3981constant_pool_expr_p (rtx op)
9ebbca7d 3982{
2e4316da
RS
3983 rtx base, offset;
3984
3985 split_const (op, &base, &offset);
3986 return (GET_CODE (base) == SYMBOL_REF
3987 && CONSTANT_POOL_ADDRESS_P (base)
3988 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
9ebbca7d
GK
3989}
3990
48d72335 3991bool
a2369ed3 3992toc_relative_expr_p (rtx op)
9ebbca7d 3993{
2e4316da
RS
3994 rtx base, offset;
3995
3996 if (GET_CODE (op) != CONST)
3997 return false;
3998
3999 split_const (op, &base, &offset);
4000 return (GET_CODE (base) == UNSPEC
4001 && XINT (base, 1) == UNSPEC_TOCREL);
4d588c14
RH
4002}
4003
4d588c14 4004bool
a2369ed3 4005legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
4006{
4007 return (TARGET_TOC
4008 && GET_CODE (x) == PLUS
4009 && GET_CODE (XEXP (x, 0)) == REG
4010 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 4011 && toc_relative_expr_p (XEXP (x, 1)));
4d588c14
RH
4012}
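/* Editorial sketch, not part of the original source, of the RTL shape the
   two predicates above accept (the symbol name is illustrative):

     (plus (reg 2)
           (const (unspec [(symbol_ref "*.LC0")] UNSPEC_TOCREL)))

   i.e. the TOC base register (or any base register with -mminimal-toc)
   plus a TOC-relative UNSPEC wrapping a constant-pool symbol.  */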
4013
d04b6e6e
EB
4014static bool
4015legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
4016{
4017 return (DEFAULT_ABI == ABI_V4
4018 && !flag_pic && !TARGET_TOC
4019 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
4020 && small_data_operand (x, mode));
4021}
4022
60cdabab
DE
4023/* SPE offset addressing is limited to 5-bits worth of double words. */
4024#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
4025
76d2b81d
DJ
4026bool
4027rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
4028{
4029 unsigned HOST_WIDE_INT offset, extra;
4030
4031 if (GET_CODE (x) != PLUS)
4032 return false;
4033 if (GET_CODE (XEXP (x, 0)) != REG)
4034 return false;
4035 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
4036 return false;
60cdabab
DE
4037 if (legitimate_constant_pool_address_p (x))
4038 return true;
4d588c14
RH
4039 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4040 return false;
4041
4042 offset = INTVAL (XEXP (x, 1));
4043 extra = 0;
4044 switch (mode)
4045 {
4046 case V16QImode:
4047 case V8HImode:
4048 case V4SFmode:
4049 case V4SImode:
7a4eca66 4050 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
4051 constant offset zero should not occur due to canonicalization. */
4052 return false;
4d588c14
RH
4053
4054 case V4HImode:
4055 case V2SImode:
4056 case V1DImode:
4057 case V2SFmode:
d42a3bae 4058 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 4059 constant offset zero should not occur due to canonicalization. */
d42a3bae 4060 if (TARGET_PAIRED_FLOAT)
1a23970d 4061 return false;
4d588c14
RH
4062 /* SPE vector modes. */
4063 return SPE_CONST_OFFSET_OK (offset);
4064
4065 case DFmode:
4d4cbc0e
AH
4066 if (TARGET_E500_DOUBLE)
4067 return SPE_CONST_OFFSET_OK (offset);
4068
4f011e1e 4069 case DDmode:
4d588c14 4070 case DImode:
54b695e7
AH
4071 /* On e500v2, we may have:
4072
4073 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
4074
4075 Which gets addressed with evldd instructions. */
4076 if (TARGET_E500_DOUBLE)
4077 return SPE_CONST_OFFSET_OK (offset);
4078
7393f7f8 4079 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
4080 extra = 4;
4081 else if (offset & 3)
4082 return false;
4083 break;
4084
4085 case TFmode:
17caeff2
JM
4086 if (TARGET_E500_DOUBLE)
4087 return (SPE_CONST_OFFSET_OK (offset)
4088 && SPE_CONST_OFFSET_OK (offset + 8));
4089
4f011e1e 4090 case TDmode:
4d588c14 4091 case TImode:
7393f7f8 4092 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
4093 extra = 12;
4094 else if (offset & 3)
4095 return false;
4096 else
4097 extra = 8;
4098 break;
4099
4100 default:
4101 break;
4102 }
4103
b1917422
AM
4104 offset += 0x8000;
4105 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
4106}
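/* Editorial worked examples, not part of the original source, of the
   final range check above (offset + 0x8000 and offset + 0x8000 + extra
   must both be below 0x10000):

     SImode            extra = 0   ->  offsets -32768 .. 32767
     DFmode, 32-bit    extra = 4   ->  offsets -32768 .. 32763, so the
                                       second word at offset+4 still fits
     TImode, 32-bit    extra = 12  ->  offsets -32768 .. 32755

   AltiVec and paired-float vector modes never reach this check; they are
   rejected earlier because only reg+reg addressing is valid for them.  */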
4107
6fb5fa3c 4108bool
a2369ed3 4109legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
4110{
4111 rtx op0, op1;
4112
4113 if (GET_CODE (x) != PLUS)
4114 return false;
850e8d3d 4115
4d588c14
RH
4116 op0 = XEXP (x, 0);
4117 op1 = XEXP (x, 1);
4118
bf00cc0f 4119 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
4120 replaced with proper base and index regs. */
4121 if (!strict
4122 && reload_in_progress
4123 && (REG_P (op0) || GET_CODE (op0) == PLUS)
4124 && REG_P (op1))
4125 return true;
4126
4127 return (REG_P (op0) && REG_P (op1)
4128 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
4129 && INT_REG_OK_FOR_INDEX_P (op1, strict))
4130 || (INT_REG_OK_FOR_BASE_P (op1, strict)
4131 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
4132}
4133
001b9eb6
PH
4134bool
4135avoiding_indexed_address_p (enum machine_mode mode)
4136{
4137 /* Avoid indexed addressing for modes that have non-indexed
4138 load/store instruction forms. */
4139 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
4140}
4141
48d72335 4142inline bool
a2369ed3 4143legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
4144{
4145 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
4146}
4147
48d72335 4148bool
4c81e946
FJ
4149macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
4150{
c4ad648e 4151 if (!TARGET_MACHO || !flag_pic
9390387d 4152 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
4153 return false;
4154 x = XEXP (x, 0);
4c81e946
FJ
4155
4156 if (GET_CODE (x) != LO_SUM)
4157 return false;
4158 if (GET_CODE (XEXP (x, 0)) != REG)
4159 return false;
4160 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
4161 return false;
4162 x = XEXP (x, 1);
4163
4164 return CONSTANT_P (x);
4165}
4166
4d588c14 4167static bool
a2369ed3 4168legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
4169{
4170 if (GET_CODE (x) != LO_SUM)
4171 return false;
4172 if (GET_CODE (XEXP (x, 0)) != REG)
4173 return false;
4174 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
4175 return false;
54b695e7 4176 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 4177 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4178 || mode == DDmode || mode == TDmode
17caeff2 4179 || mode == DImode))
f82f556d 4180 return false;
4d588c14
RH
4181 x = XEXP (x, 1);
4182
8622e235 4183 if (TARGET_ELF || TARGET_MACHO)
4d588c14 4184 {
a29077da 4185 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
4186 return false;
4187 if (TARGET_TOC)
4188 return false;
4189 if (GET_MODE_NUNITS (mode) != 1)
4190 return false;
5e5f01b9 4191 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 4192 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 4193 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 4194 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
4195 return false;
4196
4197 return CONSTANT_P (x);
4198 }
4199
4200 return false;
4201}
4202
4203
9ebbca7d
GK
4204/* Try machine-dependent ways of modifying an illegitimate address
4205 to be legitimate. If we find one, return the new, valid address.
4206 This is used from only one place: `memory_address' in explow.c.
4207
a4f6c312
SS
4208 OLDX is the address as it was before break_out_memory_refs was
4209 called. In some cases it is useful to look at this to decide what
4210 needs to be done.
9ebbca7d 4211
a4f6c312
SS
4212 It is always safe for this function to do nothing. It exists to
4213 recognize opportunities to optimize the output.
9ebbca7d
GK
4214
4215 On RS/6000, first check for the sum of a register with a constant
4216 integer that is out of range. If so, generate code to add the
4217 constant with the low-order 16 bits masked to the register and force
4218 this result into another register (this can be done with `cau').
4219 Then generate an address of REG+(CONST&0xffff), allowing for the
4220 possibility of bit 16 being a one.
4221
4222 Then check for the sum of a register and something not constant, try to
4223 load the other things into a register and return the sum. */
4d588c14 4224
9ebbca7d 4225rtx
a2369ed3
DJ
4226rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
4227 enum machine_mode mode)
0ac081f6 4228{
c4501e62
JJ
4229 if (GET_CODE (x) == SYMBOL_REF)
4230 {
4231 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
4232 if (model != 0)
4233 return rs6000_legitimize_tls_address (x, model);
4234 }
4235
f676971a 4236 if (GET_CODE (x) == PLUS
9ebbca7d
GK
4237 && GET_CODE (XEXP (x, 0)) == REG
4238 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb 4239 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
7da13f1d
NF
4240 && !((TARGET_POWERPC64
4241 && (mode == DImode || mode == TImode)
4242 && (INTVAL (XEXP (x, 1)) & 3) != 0)
4243 || SPE_VECTOR_MODE (mode)
efc05e3c 4244 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 4245 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
4246 || mode == DImode || mode == DDmode
4247 || mode == TDmode))))
f676971a 4248 {
9ebbca7d
GK
4249 HOST_WIDE_INT high_int, low_int;
4250 rtx sum;
a65c591c
DE
4251 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
4252 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
4253 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
4254 GEN_INT (high_int)), 0);
4255 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
4256 }
f676971a 4257 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
4258 && GET_CODE (XEXP (x, 0)) == REG
4259 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 4260 && GET_MODE_NUNITS (mode) == 1
696e45ba 4261 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 4262 || TARGET_POWERPC64
efc05e3c 4263 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 4264 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 4265 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4266 && !avoiding_indexed_address_p (mode)
efc05e3c
PB
4267 && mode != TImode
4268 && mode != TFmode
4269 && mode != TDmode)
9ebbca7d
GK
4270 {
4271 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
4272 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
4273 }
0ac081f6
AH
4274 else if (ALTIVEC_VECTOR_MODE (mode))
4275 {
4276 rtx reg;
4277
4278 /* Make sure both operands are registers. */
4279 if (GET_CODE (x) == PLUS)
9f85ed45 4280 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
4281 force_reg (Pmode, XEXP (x, 1)));
4282
4283 reg = force_reg (Pmode, x);
4284 return reg;
4285 }
4d4cbc0e 4286 else if (SPE_VECTOR_MODE (mode)
17caeff2 4287 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 4288 || mode == DDmode || mode == TDmode
54b695e7 4289 || mode == DImode)))
a3170dc6 4290 {
54b695e7 4291 if (mode == DImode)
506d7b68 4292 return x;
a3170dc6
AH
4293 /* We accept [reg + reg] and [reg + OFFSET]. */
4294
4295 if (GET_CODE (x) == PLUS)
61dd226f
NF
4296 {
4297 rtx op1 = XEXP (x, 0);
4298 rtx op2 = XEXP (x, 1);
4299 rtx y;
4300
4301 op1 = force_reg (Pmode, op1);
4302
4303 if (GET_CODE (op2) != REG
4304 && (GET_CODE (op2) != CONST_INT
4305 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
4306 || (GET_MODE_SIZE (mode) > 8
4307 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
4308 op2 = force_reg (Pmode, op2);
4309
4310 /* We can't always do [reg + reg] for these, because [reg +
4311 reg + offset] is not a legitimate addressing mode. */
4312 y = gen_rtx_PLUS (Pmode, op1, op2);
4313
4f011e1e 4314 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
4315 return force_reg (Pmode, y);
4316 else
4317 return y;
4318 }
a3170dc6
AH
4319
4320 return force_reg (Pmode, x);
4321 }
f1384257
AM
4322 else if (TARGET_ELF
4323 && TARGET_32BIT
4324 && TARGET_NO_TOC
4325 && ! flag_pic
9ebbca7d 4326 && GET_CODE (x) != CONST_INT
f676971a 4327 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 4328 && CONSTANT_P (x)
6ac7bf2c
GK
4329 && GET_MODE_NUNITS (mode) == 1
4330 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 4331 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 4332 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
4333 {
4334 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
4335 emit_insn (gen_elf_high (reg, x));
4336 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 4337 }
ee890fe2
SS
4338 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
4339 && ! flag_pic
ab82a49f
AP
4340#if TARGET_MACHO
4341 && ! MACHO_DYNAMIC_NO_PIC_P
4342#endif
ee890fe2 4343 && GET_CODE (x) != CONST_INT
f676971a 4344 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 4345 && CONSTANT_P (x)
506a7bc8 4346 && GET_MODE_NUNITS (mode) == 1
696e45ba 4347 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 4348 || (mode != DFmode && mode != DDmode))
f676971a 4349 && mode != DImode
ee890fe2
SS
4350 && mode != TImode)
4351 {
4352 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
4353 emit_insn (gen_macho_high (reg, x));
4354 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 4355 }
f676971a 4356 else if (TARGET_TOC
0cdc04e8 4357 && GET_CODE (x) == SYMBOL_REF
4d588c14 4358 && constant_pool_expr_p (x)
a9098fd0 4359 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
4360 {
4361 return create_TOC_reference (x);
4362 }
4363 else
506d7b68 4364 return x;
9ebbca7d 4365}
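/* Editorial worked example, not part of the original source, of the
   high/low split performed above for (plus (reg R) (const_int 0x12348)):

     low_int  = ((0x12348 & 0xffff) ^ 0x8000) - 0x8000 = 0x2348
     high_int = 0x12348 - 0x2348                        = 0x10000

   The address becomes (plus (plus R 0x10000) 0x2348): the inner sum is
   materialized with an addis (cau) of 1, and 0x2348 fits in the 16-bit
   displacement of the memory instruction.  */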
258bfae2 4366
fdbe66f2 4367/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
4368 We need to emit DTP-relative relocations. */
4369
fdbe66f2 4370static void
c973d557
JJ
4371rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
4372{
4373 switch (size)
4374 {
4375 case 4:
4376 fputs ("\t.long\t", file);
4377 break;
4378 case 8:
4379 fputs (DOUBLE_INT_ASM_OP, file);
4380 break;
4381 default:
37409796 4382 gcc_unreachable ();
c973d557
JJ
4383 }
4384 output_addr_const (file, x);
4385 fputs ("@dtprel+0x8000", file);
4386}
4387
c4501e62
JJ
4388/* Construct the SYMBOL_REF for the tls_get_addr function. */
4389
4390static GTY(()) rtx rs6000_tls_symbol;
4391static rtx
863d938c 4392rs6000_tls_get_addr (void)
c4501e62
JJ
4393{
4394 if (!rs6000_tls_symbol)
4395 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
4396
4397 return rs6000_tls_symbol;
4398}
4399
4400/* Construct the SYMBOL_REF for TLS GOT references. */
4401
4402static GTY(()) rtx rs6000_got_symbol;
4403static rtx
863d938c 4404rs6000_got_sym (void)
c4501e62
JJ
4405{
4406 if (!rs6000_got_symbol)
4407 {
4408 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4409 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
4410 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 4411 }
c4501e62
JJ
4412
4413 return rs6000_got_symbol;
4414}
4415
4416/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4417 this (thread-local) address. */
4418
4419static rtx
a2369ed3 4420rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
4421{
4422 rtx dest, insn;
4423
4424 dest = gen_reg_rtx (Pmode);
4425 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4426 {
4427 rtx tlsreg;
4428
4429 if (TARGET_64BIT)
4430 {
4431 tlsreg = gen_rtx_REG (Pmode, 13);
4432 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4433 }
4434 else
4435 {
4436 tlsreg = gen_rtx_REG (Pmode, 2);
4437 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4438 }
4439 emit_insn (insn);
4440 }
4441 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4442 {
4443 rtx tlsreg, tmp;
4444
4445 tmp = gen_reg_rtx (Pmode);
4446 if (TARGET_64BIT)
4447 {
4448 tlsreg = gen_rtx_REG (Pmode, 13);
4449 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4450 }
4451 else
4452 {
4453 tlsreg = gen_rtx_REG (Pmode, 2);
4454 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4455 }
4456 emit_insn (insn);
4457 if (TARGET_64BIT)
4458 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4459 else
4460 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4461 emit_insn (insn);
4462 }
4463 else
4464 {
4465 rtx r3, got, tga, tmp1, tmp2, eqv;
4466
4fed8f8f
AM
4467 /* We currently use relocations like @got@tlsgd for tls, which
4468 means the linker will handle allocation of tls entries, placing
4469 them in the .got section. So use a pointer to the .got section,
4470 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4471 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4472 if (TARGET_64BIT)
972f427b 4473 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
4474 else
4475 {
4476 if (flag_pic == 1)
4477 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4478 else
4479 {
4480 rtx gsym = rs6000_got_sym ();
4481 got = gen_reg_rtx (Pmode);
4482 if (flag_pic == 0)
4483 rs6000_emit_move (got, gsym, Pmode);
4484 else
4485 {
e65a3857 4486 rtx tmp3, mem;
c4501e62
JJ
4487 rtx first, last;
4488
c4501e62
JJ
4489 tmp1 = gen_reg_rtx (Pmode);
4490 tmp2 = gen_reg_rtx (Pmode);
4491 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4492 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4493
e65a3857
DE
4494 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4495 emit_move_insn (tmp1,
1de43f85 4496 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
4497 emit_move_insn (tmp2, mem);
4498 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4499 last = emit_move_insn (got, tmp3);
bd94cb6e 4500 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4501 }
4502 }
4503 }
4504
4505 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4506 {
4507 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4508 tga = rs6000_tls_get_addr ();
4509
4510 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4511 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4512 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4513 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4514 else if (DEFAULT_ABI == ABI_V4)
4515 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4516 else
02135bc1
SB
4517 gcc_unreachable ();
4518
c4501e62 4519 start_sequence ();
c4501e62 4520 insn = emit_call_insn (insn);
becfd6e5 4521 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4522 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4523 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4524 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4525 insn = get_insns ();
4526 end_sequence ();
4527 emit_libcall_block (insn, dest, r3, addr);
4528 }
4529 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4530 {
4531 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4532 tga = rs6000_tls_get_addr ();
4533
4534 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4535 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4536 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4537 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4538 else if (DEFAULT_ABI == ABI_V4)
4539 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4540 else
02135bc1
SB
4541 gcc_unreachable ();
4542
c4501e62 4543 start_sequence ();
c4501e62 4544 insn = emit_call_insn (insn);
becfd6e5 4545 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4546 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4547 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4548 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4549 insn = get_insns ();
4550 end_sequence ();
4551 tmp1 = gen_reg_rtx (Pmode);
4552 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4553 UNSPEC_TLSLD);
4554 emit_libcall_block (insn, tmp1, r3, eqv);
4555 if (rs6000_tls_size == 16)
4556 {
4557 if (TARGET_64BIT)
4558 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4559 else
4560 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4561 }
4562 else if (rs6000_tls_size == 32)
4563 {
4564 tmp2 = gen_reg_rtx (Pmode);
4565 if (TARGET_64BIT)
4566 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4567 else
4568 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4569 emit_insn (insn);
4570 if (TARGET_64BIT)
4571 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4572 else
4573 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4574 }
4575 else
4576 {
4577 tmp2 = gen_reg_rtx (Pmode);
4578 if (TARGET_64BIT)
4579 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4580 else
4581 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4582 emit_insn (insn);
4583 insn = gen_rtx_SET (Pmode, dest,
4584 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4585 }
4586 emit_insn (insn);
4587 }
4588 else
4589 {
a7b376ee 4590 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4591 tmp2 = gen_reg_rtx (Pmode);
4592 if (TARGET_64BIT)
4593 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4594 else
4595 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4596 emit_insn (insn);
4597 if (TARGET_64BIT)
4598 insn = gen_tls_tls_64 (dest, tmp2, addr);
4599 else
4600 insn = gen_tls_tls_32 (dest, tmp2, addr);
4601 emit_insn (insn);
4602 }
4603 }
4604
4605 return dest;
4606}
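/* Editorial sketch, not part of the original source and only an
   approximation of the rs6000.md patterns used above: typical 64-bit ELF
   sequences for a thread-local symbol "x".

     local-exec, rs6000_tls_size == 16:
	addi  rD,r13,x@tprel

     local-exec, rs6000_tls_size == 32:
	addis rT,r13,x@tprel@ha
	addi  rD,rT,x@tprel@l

     initial-exec:
	ld    rT,x@got@tprel(r2)
	add   rD,rT,x@tls

     global-dynamic:
	addi  r3,r2,x@got@tlsgd
	bl    __tls_get_addr(x@tlsgd)
	nop  */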
4607
c4501e62
JJ
4608/* Return 1 if X contains a thread-local symbol. */
4609
4610bool
a2369ed3 4611rs6000_tls_referenced_p (rtx x)
c4501e62 4612{
cd413cab
AP
4613 if (! TARGET_HAVE_TLS)
4614 return false;
4615
c4501e62
JJ
4616 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4617}
4618
4619/* Return 1 if *X is a thread-local symbol. This is the same as
4620 rs6000_tls_symbol_ref except for the type of the unused argument. */
4621
9390387d 4622static int
a2369ed3 4623rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4624{
4625 return RS6000_SYMBOL_REF_TLS_P (*x);
4626}
4627
24ea750e
DJ
4628/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4629 replace the input X, or the original X if no replacement is called for.
4630 The output parameter *WIN is 1 if the calling macro should goto WIN,
4631 0 if it should not.
4632
4633 For RS/6000, we wish to handle large displacements off a base
4634 register by splitting the addend across an addi/addis pair and the mem insn.
4635 This cuts the number of extra insns needed from 3 to 1.
4636
4637 On Darwin, we use this to generate code for floating point constants.
4638 A movsf_low is generated so we wind up with 2 instructions rather than 3.
08a6a74b
RS
4639 The Darwin code is inside #if TARGET_MACHO because only then are the
4640 machopic_* functions defined. */
24ea750e 4641rtx
f676971a 4642rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4643 int opnum, int type,
4644 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4645{
f676971a 4646 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4647 if (GET_CODE (x) == PLUS
4648 && GET_CODE (XEXP (x, 0)) == PLUS
4649 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4650 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4651 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4652 {
4653 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4654 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4655 opnum, (enum reload_type)type);
24ea750e
DJ
4656 *win = 1;
4657 return x;
4658 }
3deb2758 4659
24ea750e
DJ
4660#if TARGET_MACHO
4661 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4662 && GET_CODE (x) == LO_SUM
4663 && GET_CODE (XEXP (x, 0)) == PLUS
4664 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4665 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4666 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4667 && machopic_operand_p (XEXP (x, 1)))
24ea750e
DJ
4668 {
4669 /* Result of previous invocation of this function on Darwin
6f317ef3 4670 floating point constant. */
24ea750e 4671 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4672 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4673 opnum, (enum reload_type)type);
24ea750e
DJ
4674 *win = 1;
4675 return x;
4676 }
4677#endif
4937d02d
DE
4678
4679 /* Force ld/std non-word aligned offset into base register by wrapping
4680 in offset 0. */
4681 if (GET_CODE (x) == PLUS
4682 && GET_CODE (XEXP (x, 0)) == REG
4683 && REGNO (XEXP (x, 0)) < 32
c6c3dba9 4684 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
4937d02d
DE
4685 && GET_CODE (XEXP (x, 1)) == CONST_INT
4686 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4687 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4688 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4689 && TARGET_POWERPC64)
4690 {
4691 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4692 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4693 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4694 opnum, (enum reload_type) type);
4695 *win = 1;
4696 return x;
4697 }
4698
24ea750e
DJ
4699 if (GET_CODE (x) == PLUS
4700 && GET_CODE (XEXP (x, 0)) == REG
4701 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
c6c3dba9 4702 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 1)
78c875e8 4703 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4704 && !SPE_VECTOR_MODE (mode)
17caeff2 4705 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4706 || mode == DDmode || mode == TDmode
54b695e7 4707 || mode == DImode))
78c875e8 4708 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4709 {
4710 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4711 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4712 HOST_WIDE_INT high
c4ad648e 4713 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4714
4715 /* Check for 32-bit overflow. */
4716 if (high + low != val)
c4ad648e 4717 {
24ea750e
DJ
4718 *win = 0;
4719 return x;
4720 }
4721
4722 /* Reload the high part into a base reg; leave the low part
c4ad648e 4723 in the mem directly. */
24ea750e
DJ
4724
4725 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4726 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4727 GEN_INT (high)),
4728 GEN_INT (low));
24ea750e
DJ
4729
4730 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4731 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4732 opnum, (enum reload_type)type);
24ea750e
DJ
4733 *win = 1;
4734 return x;
4735 }
4937d02d 4736
24ea750e 4737 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4738 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4739 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4740#if TARGET_MACHO
4741 && DEFAULT_ABI == ABI_DARWIN
a29077da 4742 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4743#else
4744 && DEFAULT_ABI == ABI_V4
4745 && !flag_pic
4746#endif
7393f7f8 4747 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4748 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4749 without fprs. */
0d8c1c97 4750 && mode != TFmode
7393f7f8 4751 && mode != TDmode
7b5d92b2 4752 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4753 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4754 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4755 {
8308679f 4756#if TARGET_MACHO
a29077da
GK
4757 if (flag_pic)
4758 {
08a6a74b 4759 rtx offset = machopic_gen_offset (x);
a29077da
GK
4760 x = gen_rtx_LO_SUM (GET_MODE (x),
4761 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4762 gen_rtx_HIGH (Pmode, offset)), offset);
4763 }
4764 else
8308679f 4765#endif
a29077da 4766 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4767 gen_rtx_HIGH (Pmode, x), x);
a29077da 4768
24ea750e 4769 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4770 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4771 opnum, (enum reload_type)type);
24ea750e
DJ
4772 *win = 1;
4773 return x;
4774 }
4937d02d 4775
dec1f3aa
DE
4776 /* Reload an offset address wrapped by an AND that represents the
4777 masking of the lower bits. Strip the outer AND and let reload
4778 convert the offset address into an indirect address. */
4779 if (TARGET_ALTIVEC
4780 && ALTIVEC_VECTOR_MODE (mode)
4781 && GET_CODE (x) == AND
4782 && GET_CODE (XEXP (x, 0)) == PLUS
4783 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4784 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4785 && GET_CODE (XEXP (x, 1)) == CONST_INT
4786 && INTVAL (XEXP (x, 1)) == -16)
4787 {
4788 x = XEXP (x, 0);
4789 *win = 1;
4790 return x;
4791 }
4792
24ea750e 4793 if (TARGET_TOC
0cdc04e8 4794 && GET_CODE (x) == SYMBOL_REF
4d588c14 4795 && constant_pool_expr_p (x)
c1f11548 4796 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4797 {
194c524a 4798 x = create_TOC_reference (x);
24ea750e
DJ
4799 *win = 1;
4800 return x;
4801 }
4802 *win = 0;
4803 return x;
f676971a 4804}
24ea750e 4805
331d9186 4806/* TARGET_LEGITIMATE_ADDRESS_P recognizes an RTL expression
258bfae2
FS
4807 that is a valid memory address for an instruction.
4808 The MODE argument is the machine mode for the MEM expression
4809 that wants to use this address.
4810
4811 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4812 refers to a constant pool entry of an address (or the sum of it
4813 plus a constant), a short (16-bit signed) constant plus a register,
4814 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4815 auto-increment. For DFmode, DDmode and DImode with a constant plus
4816 register, we must ensure that both words are addressable, or that we
4817 are using PowerPC64 with a word-aligned offset.
258bfae2 4818
4d4447b5 4819 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4820 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4821 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2 4822 during assembly output. */
c6c3dba9
PB
4823bool
4824rs6000_legitimate_address_p (enum machine_mode mode, rtx x, bool reg_ok_strict)
258bfae2 4825{
850e8d3d
DN
4826 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4827 if (TARGET_ALTIVEC
4828 && ALTIVEC_VECTOR_MODE (mode)
4829 && GET_CODE (x) == AND
4830 && GET_CODE (XEXP (x, 1)) == CONST_INT
4831 && INTVAL (XEXP (x, 1)) == -16)
4832 x = XEXP (x, 0);
4833
c4501e62
JJ
4834 if (RS6000_SYMBOL_REF_TLS_P (x))
4835 return 0;
4d588c14 4836 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4837 return 1;
4838 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4839 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4840 && !SPE_VECTOR_MODE (mode)
429ec7dc 4841 && mode != TFmode
7393f7f8 4842 && mode != TDmode
54b695e7 4843 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4844 && !(TARGET_E500_DOUBLE
4845 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4846 && TARGET_UPDATE
4d588c14 4847 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4848 return 1;
d04b6e6e 4849 if (legitimate_small_data_p (mode, x))
258bfae2 4850 return 1;
4d588c14 4851 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4852 return 1;
4853 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4854 if (! reg_ok_strict
4855 && GET_CODE (x) == PLUS
4856 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4857 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4858 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4859 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4860 return 1;
76d2b81d 4861 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4862 return 1;
4863 if (mode != TImode
76d2b81d 4864 && mode != TFmode
7393f7f8 4865 && mode != TDmode
960c5c79 4866 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 4867 || TARGET_POWERPC64
4f011e1e
JM
4868 || (mode != DFmode && mode != DDmode)
4869 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4870 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4871 && !avoiding_indexed_address_p (mode)
4d588c14 4872 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4873 return 1;
6fb5fa3c
DB
4874 if (GET_CODE (x) == PRE_MODIFY
4875 && mode != TImode
4876 && mode != TFmode
4877 && mode != TDmode
696e45ba 4878 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4879 || TARGET_POWERPC64
4d4447b5 4880 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4881 && (TARGET_POWERPC64 || mode != DImode)
4882 && !ALTIVEC_VECTOR_MODE (mode)
4883 && !SPE_VECTOR_MODE (mode)
4884 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4885 && !(TARGET_E500_DOUBLE
4886 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4887 && TARGET_UPDATE
4888 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4889 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
001b9eb6
PH
4890 || (!avoiding_indexed_address_p (mode)
4891 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
6fb5fa3c
DB
4892 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4893 return 1;
4d588c14 4894 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4895 return 1;
4896 return 0;
4897}
4d588c14
RH
4898
4899/* Return true if ADDR (a legitimate address expression)
4900 has an effect that depends on the machine mode it is used for.
4901
4902 On the RS/6000 this is true of all integral offsets (since AltiVec
4903 modes don't allow them) and of any pre-increment or decrement.
4904
4905 ??? Except that due to conceptual problems in offsettable_address_p
4906 we can't really report the problems of integral offsets. So leave
f676971a 4907 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4908 sub-words of a TFmode operand, which is what we had before. */
4909
4910bool
a2369ed3 4911rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4912{
4913 switch (GET_CODE (addr))
4914 {
4915 case PLUS:
4916 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4917 {
4918 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4919 return val + 12 + 0x8000 >= 0x10000;
4920 }
4921 break;
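      /* For illustration: the "+ 12" above assumes the widest access
         (16-byte TFmode) may touch words at offsets 0, 4, 8 and 12 from
         ADDR, so each of those offsets must still fit a signed 16-bit
         displacement.  E.g. val = 32756 (0x7ff4) gives
         0x7ff4 + 12 + 0x8000 = 0x10000, which is not below 0x10000, so
         the address is reported as mode dependent; val = 32752 (0x7ff0)
         still passes.  */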
4922
4923 case LO_SUM:
4924 return true;
4925
619fe064 4926 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4927 case PRE_MODIFY:
4928 return TARGET_UPDATE;
4d588c14
RH
4929
4930 default:
4931 break;
4932 }
4933
4934 return false;
4935}
d8ecbcdb 4936
944258eb
RS
4937/* Implement FIND_BASE_TERM. */
4938
4939rtx
4940rs6000_find_base_term (rtx op)
4941{
4942 rtx base, offset;
4943
4944 split_const (op, &base, &offset);
4945 if (GET_CODE (base) == UNSPEC)
4946 switch (XINT (base, 1))
4947 {
4948 case UNSPEC_TOCREL:
4949 case UNSPEC_MACHOPIC_OFFSET:
4950 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4951 for aliasing purposes. */
4952 return XVECEXP (base, 0, 0);
4953 }
4954
4955 return op;
4956}
4957
d04b6e6e
EB
4958/* More elaborate version of recog's offsettable_memref_p predicate
4959 that works around the ??? note of rs6000_mode_dependent_address.
4960 In particular it accepts
4961
4962 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4963
4964 in 32-bit mode, which the recog predicate rejects. */
4965
4966bool
4967rs6000_offsettable_memref_p (rtx op)
4968{
4969 if (!MEM_P (op))
4970 return false;
4971
4972 /* First mimic offsettable_memref_p. */
4973 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4974 return true;
4975
4976 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4977 the latter predicate knows nothing about the mode of the memory
4978 reference and, therefore, assumes that it is the largest supported
4979 mode (TFmode). As a consequence, legitimate offsettable memory
4980 references are rejected. rs6000_legitimate_offset_address_p contains
4981 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4982 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4983}
4984
2aa4498c
AH
4985/* Change register usage conditional on target flags. */
4986void
4987rs6000_conditional_register_usage (void)
4988{
4989 int i;
4990
4991 /* Set MQ register fixed (already call_used) if not POWER
4992 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4993 be allocated. */
4994 if (! TARGET_POWER)
4995 fixed_regs[64] = 1;
4996
7c9ac5c0 4997 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4998 if (TARGET_64BIT)
4999 fixed_regs[13] = call_used_regs[13]
5000 = call_really_used_regs[13] = 1;
5001
5002 /* Conditionally disable FPRs. */
5003 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
5004 for (i = 32; i < 64; i++)
5005 fixed_regs[i] = call_used_regs[i]
c4ad648e 5006 = call_really_used_regs[i] = 1;
2aa4498c 5007
7c9ac5c0
PH
5008 /* The TOC register is not killed across calls in a way that is
5009 visible to the compiler. */
5010 if (DEFAULT_ABI == ABI_AIX)
5011 call_really_used_regs[2] = 0;
5012
2aa4498c
AH
5013 if (DEFAULT_ABI == ABI_V4
5014 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5015 && flag_pic == 2)
5016 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
5017
5018 if (DEFAULT_ABI == ABI_V4
5019 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5020 && flag_pic == 1)
5021 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
5022 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
5023 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
5024
5025 if (DEFAULT_ABI == ABI_DARWIN
5026 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 5027 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
5028 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
5029 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
5030
b4db40bf
JJ
5031 if (TARGET_TOC && TARGET_MINIMAL_TOC)
5032 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
5033 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
5034
2aa4498c
AH
5035 if (TARGET_SPE)
5036 {
5037 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
5038 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
5039 registers in prologues and epilogues. We no longer use r14
5040 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
5041 pool for link-compatibility with older versions of GCC. Once
5042 "old" code has died out, we can return r14 to the allocation
5043 pool. */
5044 fixed_regs[14]
5045 = call_used_regs[14]
5046 = call_really_used_regs[14] = 1;
2aa4498c
AH
5047 }
5048
cacf1ca8 5049 if (!TARGET_ALTIVEC && !TARGET_VSX)
2aa4498c
AH
5050 {
5051 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
5052 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
5053 call_really_used_regs[VRSAVE_REGNO] = 1;
5054 }
5055
cacf1ca8 5056 if (TARGET_ALTIVEC || TARGET_VSX)
0db747be
DE
5057 global_regs[VSCR_REGNO] = 1;
5058
2aa4498c 5059 if (TARGET_ALTIVEC_ABI)
0db747be
DE
5060 {
5061 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
5062 call_used_regs[i] = call_really_used_regs[i] = 1;
5063
5064 /* AIX reserves VR20:31 in non-extended ABI mode. */
5065 if (TARGET_XCOFF)
5066 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
5067 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
5068 }
2aa4498c 5069}
fb4d4348 5070\f
a4f6c312
SS
5071/* Try to output insns to set TARGET equal to the constant C if it can
5072 be done in less than N insns. Do all computations in MODE.
5073 Returns the place where the output has been placed if it can be
5074 done and the insns have been emitted. If it would take more than N
5075 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
5076
5077rtx
f676971a 5078rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 5079 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 5080{
af8cb5c5 5081 rtx result, insn, set;
2bfcf297
DB
5082 HOST_WIDE_INT c0, c1;
5083
37409796 5084 switch (mode)
2bfcf297 5085 {
37409796
NS
5086 case QImode:
5087 case HImode:
2bfcf297 5088 if (dest == NULL)
c4ad648e 5089 dest = gen_reg_rtx (mode);
2bfcf297
DB
5090 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
5091 return dest;
bb8df8a6 5092
37409796 5093 case SImode:
b3a13419 5094 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 5095
d448860e 5096 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
5097 GEN_INT (INTVAL (source)
5098 & (~ (HOST_WIDE_INT) 0xffff))));
5099 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 5100 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
5101 GEN_INT (INTVAL (source) & 0xffff))));
5102 result = dest;
37409796
NS
5103 break;
5104
5105 case DImode:
5106 switch (GET_CODE (source))
af8cb5c5 5107 {
37409796 5108 case CONST_INT:
af8cb5c5
DE
5109 c0 = INTVAL (source);
5110 c1 = -(c0 < 0);
37409796 5111 break;
bb8df8a6 5112
37409796 5113 case CONST_DOUBLE:
2bfcf297 5114#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
5115 c0 = CONST_DOUBLE_LOW (source);
5116 c1 = -(c0 < 0);
2bfcf297 5117#else
af8cb5c5
DE
5118 c0 = CONST_DOUBLE_LOW (source);
5119 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 5120#endif
37409796
NS
5121 break;
5122
5123 default:
5124 gcc_unreachable ();
af8cb5c5 5125 }
af8cb5c5
DE
5126
5127 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
5128 break;
5129
5130 default:
5131 gcc_unreachable ();
2bfcf297 5132 }
2bfcf297 5133
af8cb5c5
DE
5134 insn = get_last_insn ();
5135 set = single_set (insn);
5136 if (! CONSTANT_P (SET_SRC (set)))
5137 set_unique_reg_note (insn, REG_EQUAL, source);
5138
5139 return result;
2bfcf297
DB
5140}
5141
5142/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
5143 fall back to a straightforward decomposition. We do this to avoid
5144 exponential run times encountered when looking for longer sequences
5145 with rs6000_emit_set_const. */
5146static rtx
a2369ed3 5147rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
5148{
5149 if (!TARGET_POWERPC64)
5150 {
5151 rtx operand1, operand2;
5152
5153 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
5154 DImode);
d448860e 5155 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
5156 DImode);
5157 emit_move_insn (operand1, GEN_INT (c1));
5158 emit_move_insn (operand2, GEN_INT (c2));
5159 }
5160 else
5161 {
bc06712d 5162 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 5163
bc06712d 5164 ud1 = c1 & 0xffff;
f921c9c9 5165 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 5166#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 5167 c2 = c1 >> 32;
2bfcf297 5168#endif
bc06712d 5169 ud3 = c2 & 0xffff;
f921c9c9 5170 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 5171
f676971a 5172 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 5173 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 5174 {
bc06712d 5175 if (ud1 & 0x8000)
b78d48dd 5176 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
5177 else
5178 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 5179 }
2bfcf297 5180
f676971a 5181 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 5182 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 5183 {
bc06712d 5184 if (ud2 & 0x8000)
f676971a 5185 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 5186 - 0x80000000));
252b88f7 5187 else
bc06712d
TR
5188 emit_move_insn (dest, GEN_INT (ud2 << 16));
5189 if (ud1 != 0)
d448860e
JH
5190 emit_move_insn (copy_rtx (dest),
5191 gen_rtx_IOR (DImode, copy_rtx (dest),
5192 GEN_INT (ud1)));
252b88f7 5193 }
f676971a 5194 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
5195 || (ud4 == 0 && ! (ud3 & 0x8000)))
5196 {
5197 if (ud3 & 0x8000)
f676971a 5198 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
5199 - 0x80000000));
5200 else
5201 emit_move_insn (dest, GEN_INT (ud3 << 16));
5202
5203 if (ud2 != 0)
d448860e
JH
5204 emit_move_insn (copy_rtx (dest),
5205 gen_rtx_IOR (DImode, copy_rtx (dest),
5206 GEN_INT (ud2)));
5207 emit_move_insn (copy_rtx (dest),
5208 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
5209 GEN_INT (16)));
bc06712d 5210 if (ud1 != 0)
d448860e
JH
5211 emit_move_insn (copy_rtx (dest),
5212 gen_rtx_IOR (DImode, copy_rtx (dest),
5213 GEN_INT (ud1)));
bc06712d 5214 }
f676971a 5215 else
bc06712d
TR
5216 {
5217 if (ud4 & 0x8000)
f676971a 5218 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
5219 - 0x80000000));
5220 else
5221 emit_move_insn (dest, GEN_INT (ud4 << 16));
5222
5223 if (ud3 != 0)
d448860e
JH
5224 emit_move_insn (copy_rtx (dest),
5225 gen_rtx_IOR (DImode, copy_rtx (dest),
5226 GEN_INT (ud3)));
2bfcf297 5227
d448860e
JH
5228 emit_move_insn (copy_rtx (dest),
5229 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
5230 GEN_INT (32)));
bc06712d 5231 if (ud2 != 0)
d448860e
JH
5232 emit_move_insn (copy_rtx (dest),
5233 gen_rtx_IOR (DImode, copy_rtx (dest),
5234 GEN_INT (ud2 << 16)));
bc06712d 5235 if (ud1 != 0)
d448860e
JH
5236 emit_move_insn (copy_rtx (dest),
5237 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
5238 }
5239 }
2bfcf297
DB
5240 return dest;
5241}
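/* For illustration, a worked example of the 64-bit path above:
   c = 0x123456789abcdef0 splits into ud4 = 0x1234, ud3 = 0x5678,
   ud2 = 0x9abc and ud1 = 0xdef0, and the emitted sequence is

     dest  = 0x12340000          (ud4 << 16)
     dest |= 0x5678              (dest is now 0x12345678)
     dest <<= 32                 (dest is now 0x1234567800000000)
     dest |= 0x9abc0000          (ud2 << 16)
     dest |= 0xdef0              (ud1)

   which typically assembles to lis, ori, sldi 32, oris, ori.  */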
5242
76d2b81d 5243/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 5244 in modes where indexed addressing won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
5245
5246static void
5247rs6000_eliminate_indexed_memrefs (rtx operands[2])
5248{
5249 if (GET_CODE (operands[0]) == MEM
5250 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 5251 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
5252 && ! reload_in_progress)
5253 operands[0]
5254 = replace_equiv_address (operands[0],
5255 copy_addr_to_reg (XEXP (operands[0], 0)));
5256
5257 if (GET_CODE (operands[1]) == MEM
5258 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 5259 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
5260 && ! reload_in_progress)
5261 operands[1]
5262 = replace_equiv_address (operands[1],
5263 copy_addr_to_reg (XEXP (operands[1], 0)));
5264}
5265
fb4d4348
GK
5266/* Emit a move from SOURCE to DEST in mode MODE. */
5267void
a2369ed3 5268rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
5269{
5270 rtx operands[2];
5271 operands[0] = dest;
5272 operands[1] = source;
f676971a 5273
fb4d4348
GK
5274 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
5275 if (GET_CODE (operands[1]) == CONST_DOUBLE
5276 && ! FLOAT_MODE_P (mode)
5277 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5278 {
5279 /* FIXME. This should never happen. */
5280 /* Since it seems that it does, do the safe thing and convert
5281 to a CONST_INT. */
2496c7bd 5282 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 5283 }
37409796
NS
5284 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
5285 || FLOAT_MODE_P (mode)
5286 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
5287 || CONST_DOUBLE_LOW (operands[1]) < 0)
5288 && (CONST_DOUBLE_HIGH (operands[1]) != -1
5289 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 5290
c9e8cb32
DD
5291 /* Check if GCC is setting up a block move that will end up using FP
5292 registers as temporaries. We must make sure this is acceptable. */
5293 if (GET_CODE (operands[0]) == MEM
5294 && GET_CODE (operands[1]) == MEM
5295 && mode == DImode
41543739
GK
5296 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
5297 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
5298 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
5299 ? 32 : MEM_ALIGN (operands[0])))
5300 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 5301 ? 32
41543739
GK
5302 : MEM_ALIGN (operands[1]))))
5303 && ! MEM_VOLATILE_P (operands [0])
5304 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 5305 {
41543739
GK
5306 emit_move_insn (adjust_address (operands[0], SImode, 0),
5307 adjust_address (operands[1], SImode, 0));
d448860e
JH
5308 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
5309 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
5310 return;
5311 }
630d42a0 5312
b3a13419 5313 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 5314 && !gpc_reg_operand (operands[1], mode))
f6219a5e 5315 operands[1] = force_reg (mode, operands[1]);
a9098fd0 5316
a3170dc6 5317 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 5318 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 5319 && GET_CODE (operands[0]) == MEM)
fb4d4348 5320 {
ffc14f31
GK
5321 int regnum;
5322
5323 if (reload_in_progress || reload_completed)
5324 regnum = true_regnum (operands[1]);
5325 else if (GET_CODE (operands[1]) == REG)
5326 regnum = REGNO (operands[1]);
5327 else
5328 regnum = -1;
f676971a 5329
fb4d4348
GK
5330 /* If operands[1] is a register, on POWER it may have
5331 double-precision data in it, so truncate it to single
5332 precision. */
5333 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
5334 {
5335 rtx newreg;
b3a13419 5336 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 5337 : gen_reg_rtx (mode));
fb4d4348
GK
5338 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
5339 operands[1] = newreg;
5340 }
5341 }
5342
c4501e62
JJ
5343 /* Recognize the case where operand[1] is a reference to thread-local
5344 data and load its address to a register. */
84f52ebd 5345 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 5346 {
84f52ebd
RH
5347 enum tls_model model;
5348 rtx tmp = operands[1];
5349 rtx addend = NULL;
5350
5351 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5352 {
5353 addend = XEXP (XEXP (tmp, 0), 1);
5354 tmp = XEXP (XEXP (tmp, 0), 0);
5355 }
5356
5357 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5358 model = SYMBOL_REF_TLS_MODEL (tmp);
5359 gcc_assert (model != 0);
5360
5361 tmp = rs6000_legitimize_tls_address (tmp, model);
5362 if (addend)
5363 {
5364 tmp = gen_rtx_PLUS (mode, tmp, addend);
5365 tmp = force_operand (tmp, operands[0]);
5366 }
5367 operands[1] = tmp;
c4501e62
JJ
5368 }
5369
8f4e6caf
RH
5370 /* Handle the case where reload calls us with an invalid address. */
5371 if (reload_in_progress && mode == Pmode
69ef87e2 5372 && (! general_operand (operands[1], mode)
8f4e6caf
RH
5373 || ! nonimmediate_operand (operands[0], mode)))
5374 goto emit_set;
5375
a9baceb1
GK
5376 /* 128-bit constant floating-point values on Darwin should really be
5377 loaded as two parts. */
8521c414 5378 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
5379 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5380 {
5381 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5382 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
5383 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5384 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5385 simplify_gen_subreg (imode, operands[1], mode, 0),
5386 imode);
5387 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5388 GET_MODE_SIZE (imode)),
5389 simplify_gen_subreg (imode, operands[1], mode,
5390 GET_MODE_SIZE (imode)),
5391 imode);
a9baceb1
GK
5392 return;
5393 }
5394
e41b2a33
PB
5395 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5396 cfun->machine->sdmode_stack_slot =
5397 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5398
5399 if (reload_in_progress
5400 && mode == SDmode
5401 && MEM_P (operands[0])
5402 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5403 && REG_P (operands[1]))
5404 {
5405 if (FP_REGNO_P (REGNO (operands[1])))
5406 {
5407 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5408 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5409 emit_insn (gen_movsd_store (mem, operands[1]));
5410 }
5411 else if (INT_REGNO_P (REGNO (operands[1])))
5412 {
5413 rtx mem = adjust_address_nv (operands[0], mode, 4);
5414 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5415 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5416 }
5417 else
5418 gcc_unreachable();
5419 return;
5420 }
5421 if (reload_in_progress
5422 && mode == SDmode
5423 && REG_P (operands[0])
5424 && MEM_P (operands[1])
5425 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5426 {
5427 if (FP_REGNO_P (REGNO (operands[0])))
5428 {
5429 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5430 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5431 emit_insn (gen_movsd_load (operands[0], mem));
5432 }
5433 else if (INT_REGNO_P (REGNO (operands[0])))
5434 {
5435 rtx mem = adjust_address_nv (operands[1], mode, 4);
5436 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5437 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5438 }
5439 else
5440 gcc_unreachable();
5441 return;
5442 }
5443
fb4d4348
GK
5444 /* FIXME: In the long term, this switch statement should go away
5445 and be replaced by a sequence of tests based on things like
5446 mode == Pmode. */
5447 switch (mode)
5448 {
5449 case HImode:
5450 case QImode:
5451 if (CONSTANT_P (operands[1])
5452 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5453 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5454 break;
5455
06f4e019 5456 case TFmode:
7393f7f8 5457 case TDmode:
76d2b81d
DJ
5458 rs6000_eliminate_indexed_memrefs (operands);
5459 /* fall through */
5460
fb4d4348 5461 case DFmode:
7393f7f8 5462 case DDmode:
fb4d4348 5463 case SFmode:
e41b2a33 5464 case SDmode:
f676971a 5465 if (CONSTANT_P (operands[1])
fb4d4348 5466 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5467 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5468 break;
f676971a 5469
0ac081f6
AH
5470 case V16QImode:
5471 case V8HImode:
5472 case V4SFmode:
5473 case V4SImode:
a3170dc6
AH
5474 case V4HImode:
5475 case V2SFmode:
5476 case V2SImode:
00a892b8 5477 case V1DImode:
69ef87e2 5478 if (CONSTANT_P (operands[1])
d744e06e 5479 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5480 operands[1] = force_const_mem (mode, operands[1]);
5481 break;
f676971a 5482
fb4d4348 5483 case SImode:
a9098fd0 5484 case DImode:
fb4d4348
GK
5485 /* Use default pattern for address of ELF small data */
5486 if (TARGET_ELF
a9098fd0 5487 && mode == Pmode
f607bc57 5488 && DEFAULT_ABI == ABI_V4
f676971a 5489 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5490 || GET_CODE (operands[1]) == CONST)
5491 && small_data_operand (operands[1], mode))
fb4d4348
GK
5492 {
5493 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5494 return;
5495 }
5496
f607bc57 5497 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5498 && mode == Pmode && mode == SImode
5499 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5500 {
5501 emit_insn (gen_movsi_got (operands[0], operands[1]));
5502 return;
5503 }
5504
ee890fe2 5505 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5506 && TARGET_NO_TOC
5507 && ! flag_pic
a9098fd0 5508 && mode == Pmode
fb4d4348
GK
5509 && CONSTANT_P (operands[1])
5510 && GET_CODE (operands[1]) != HIGH
5511 && GET_CODE (operands[1]) != CONST_INT)
5512 {
b3a13419
ILT
5513 rtx target = (!can_create_pseudo_p ()
5514 ? operands[0]
5515 : gen_reg_rtx (mode));
fb4d4348
GK
5516
5517 /* If this is a function address on -mcall-aixdesc,
5518 convert it to the address of the descriptor. */
5519 if (DEFAULT_ABI == ABI_AIX
5520 && GET_CODE (operands[1]) == SYMBOL_REF
5521 && XSTR (operands[1], 0)[0] == '.')
5522 {
5523 const char *name = XSTR (operands[1], 0);
5524 rtx new_ref;
5525 while (*name == '.')
5526 name++;
5527 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5528 CONSTANT_POOL_ADDRESS_P (new_ref)
5529 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5530 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5531 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5532 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5533 operands[1] = new_ref;
5534 }
7509c759 5535
ee890fe2
SS
5536 if (DEFAULT_ABI == ABI_DARWIN)
5537 {
ab82a49f
AP
5538#if TARGET_MACHO
5539 if (MACHO_DYNAMIC_NO_PIC_P)
5540 {
5541 /* Take care of any required data indirection. */
5542 operands[1] = rs6000_machopic_legitimize_pic_address (
5543 operands[1], mode, operands[0]);
5544 if (operands[0] != operands[1])
5545 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5546 operands[0], operands[1]));
ab82a49f
AP
5547 return;
5548 }
5549#endif
b8a55285
AP
5550 emit_insn (gen_macho_high (target, operands[1]));
5551 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5552 return;
5553 }
5554
fb4d4348
GK
5555 emit_insn (gen_elf_high (target, operands[1]));
5556 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5557 return;
5558 }
5559
a9098fd0
GK
5560 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5561 and we have put it in the TOC, we just need to make a TOC-relative
5562 reference to it. */
5563 if (TARGET_TOC
5564 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5565 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5566 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5567 get_pool_mode (operands[1])))
fb4d4348 5568 {
a9098fd0 5569 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5570 }
a9098fd0
GK
5571 else if (mode == Pmode
5572 && CONSTANT_P (operands[1])
38886f37
AO
5573 && ((GET_CODE (operands[1]) != CONST_INT
5574 && ! easy_fp_constant (operands[1], mode))
5575 || (GET_CODE (operands[1]) == CONST_INT
5576 && num_insns_constant (operands[1], mode) > 2)
5577 || (GET_CODE (operands[0]) == REG
5578 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5579 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5580 && ! legitimate_constant_pool_address_p (operands[1])
5581 && ! toc_relative_expr_p (operands[1]))
fb4d4348 5582 {
fb4d4348 5583
c859cda6 5584#if TARGET_MACHO
ee890fe2 5585 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5586 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5587 {
ee890fe2
SS
5588 operands[1] =
5589 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5590 operands[0]);
5591 if (operands[0] != operands[1])
5592 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5593 return;
5594 }
c859cda6 5595#endif
ee890fe2 5596
fb4d4348
GK
5597 /* If we are to limit the number of things we put in the TOC and
5598 this is a symbol plus a constant we can add in one insn,
5599 just put the symbol in the TOC and add the constant. Don't do
5600 this if reload is in progress. */
5601 if (GET_CODE (operands[1]) == CONST
5602 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5603 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5604 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5605 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5606 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5607 && ! side_effects_p (operands[0]))
5608 {
a4f6c312
SS
5609 rtx sym =
5610 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5611 rtx other = XEXP (XEXP (operands[1], 0), 1);
5612
a9098fd0
GK
5613 sym = force_reg (mode, sym);
5614 if (mode == SImode)
5615 emit_insn (gen_addsi3 (operands[0], sym, other));
5616 else
5617 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5618 return;
5619 }
5620
a9098fd0 5621 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5622
f676971a 5623 if (TARGET_TOC
0cdc04e8 5624 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5625 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5626 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5627 get_pool_constant (XEXP (operands[1], 0)),
5628 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5629 {
ba4828e0 5630 operands[1]
542a8afa 5631 = gen_const_mem (mode,
c4ad648e 5632 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5633 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5634 }
fb4d4348
GK
5635 }
5636 break;
a9098fd0 5637
fb4d4348 5638 case TImode:
76d2b81d
DJ
5639 rs6000_eliminate_indexed_memrefs (operands);
5640
27dc0551
DE
5641 if (TARGET_POWER)
5642 {
5643 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5644 gen_rtvec (2,
5645 gen_rtx_SET (VOIDmode,
5646 operands[0], operands[1]),
5647 gen_rtx_CLOBBER (VOIDmode,
5648 gen_rtx_SCRATCH (SImode)))));
5649 return;
5650 }
fb4d4348
GK
5651 break;
5652
5653 default:
37409796 5654 gcc_unreachable ();
fb4d4348
GK
5655 }
5656
a9098fd0
GK
5657 /* Above, we may have called force_const_mem which may have returned
5658 an invalid address. If we can, fix this up; otherwise, reload will
5659 have to deal with it. */
8f4e6caf
RH
5660 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5661 operands[1] = validize_mem (operands[1]);
a9098fd0 5662
8f4e6caf 5663 emit_set:
fb4d4348
GK
5664 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5665}
4697a36c 5666\f
2858f73a
GK
5667/* Nonzero if we can use a floating-point register to pass this arg. */
5668#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5669 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5670 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5671 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5672
5673/* Nonzero if we can use an AltiVec register to pass this arg. */
cacf1ca8
MM
5674#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5675 ((ALTIVEC_VECTOR_MODE (MODE) || VSX_VECTOR_MODE (MODE)) \
5676 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5677 && TARGET_ALTIVEC_ABI \
83953138 5678 && (NAMED))
2858f73a 5679
c6e8c921
GK
5680/* Return a nonzero value to say to return the function value in
5681 memory, just as large structures are always returned. TYPE will be
5682 the data type of the value, and FNTYPE will be the type of the
5683 function doing the returning, or @code{NULL} for libcalls.
5684
5685 The AIX ABI for the RS/6000 specifies that all structures are
5686 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5687 specifies that structures <= 8 bytes are returned in r3/r4, but a
5688 draft put them in memory, and GCC used to implement the draft
df01da37 5689 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5690 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5691 compatibility can change DRAFT_V4_STRUCT_RET to override the
5692 default, and -m switches get the final word. See
5693 rs6000_override_options for more details.
5694
5695 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5696 long double support is enabled. These values are returned in memory.
5697
5698 int_size_in_bytes returns -1 for variable size objects, which go in
5699 memory always. The cast to unsigned makes -1 > 8. */
5700
5701static bool
586de218 5702rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5703{
594a51fe
SS
5704 /* In the darwin64 abi, try to use registers for larger structs
5705 if possible. */
0b5383eb 5706 if (rs6000_darwin64_abi
594a51fe 5707 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5708 && int_size_in_bytes (type) > 0)
5709 {
5710 CUMULATIVE_ARGS valcum;
5711 rtx valret;
5712
5713 valcum.words = 0;
5714 valcum.fregno = FP_ARG_MIN_REG;
5715 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5716 /* Do a trial code generation as if this were going to be passed
5717 as an argument; if any part goes in memory, we return NULL. */
5718 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5719 if (valret)
5720 return false;
5721 /* Otherwise fall through to more conventional ABI rules. */
5722 }
594a51fe 5723
c6e8c921 5724 if (AGGREGATE_TYPE_P (type)
df01da37 5725 && (aix_struct_return
c6e8c921
GK
5726 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5727 return true;
b693336b 5728
bada2eb8
DE
5729 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5730 modes only exist for GCC vector types if -maltivec. */
5731 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5732 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5733 return false;
5734
b693336b
PB
5735 /* Return synthetic vectors in memory. */
5736 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5737 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5738 {
5739 static bool warned_for_return_big_vectors = false;
5740 if (!warned_for_return_big_vectors)
5741 {
d4ee4d25 5742 warning (0, "GCC vector returned by reference: "
b693336b
PB
5743 "non-standard ABI extension with no compatibility guarantee");
5744 warned_for_return_big_vectors = true;
5745 }
5746 return true;
5747 }
5748
602ea4d3 5749 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5750 return true;
ad630bef 5751
c6e8c921
GK
5752 return false;
5753}
5754
4697a36c
MM
5755/* Initialize a variable CUM of type CUMULATIVE_ARGS
5756 for a call to a function whose data type is FNTYPE.
5757 For a library call, FNTYPE is 0.
5758
5759 For incoming args we set the number of arguments in the prototype large
1c20ae99 5760 so we never return a PARALLEL. */
4697a36c
MM
5761
5762void
f676971a 5763init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5764 rtx libname ATTRIBUTE_UNUSED, int incoming,
5765 int libcall, int n_named_args)
4697a36c
MM
5766{
5767 static CUMULATIVE_ARGS zero_cumulative;
5768
5769 *cum = zero_cumulative;
5770 cum->words = 0;
5771 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5772 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5773 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5774 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5775 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5776 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5777 cum->stdarg = fntype
5778 && (TYPE_ARG_TYPES (fntype) != 0
5779 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5780 != void_type_node));
4697a36c 5781
0f6937fe
AM
5782 cum->nargs_prototype = 0;
5783 if (incoming || cum->prototype)
5784 cum->nargs_prototype = n_named_args;
4697a36c 5785
a5c76ee6 5786 /* Check for a longcall attribute. */
3eb4e360
AM
5787 if ((!fntype && rs6000_default_long_calls)
5788 || (fntype
5789 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5790 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5791 cum->call_cookie |= CALL_LONG;
6a4cee5f 5792
4697a36c
MM
5793 if (TARGET_DEBUG_ARG)
5794 {
5795 fprintf (stderr, "\ninit_cumulative_args:");
5796 if (fntype)
5797 {
5798 tree ret_type = TREE_TYPE (fntype);
5799 fprintf (stderr, " ret code = %s,",
5800 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5801 }
5802
6a4cee5f
MM
5803 if (cum->call_cookie & CALL_LONG)
5804 fprintf (stderr, " longcall,");
5805
4697a36c
MM
5806 fprintf (stderr, " proto = %d, nargs = %d\n",
5807 cum->prototype, cum->nargs_prototype);
5808 }
f676971a 5809
c4ad648e
AM
5810 if (fntype
5811 && !TARGET_ALTIVEC
5812 && TARGET_ALTIVEC_ABI
5813 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5814 {
c85ce869 5815 error ("cannot return value in vector register because"
c4ad648e 5816 " altivec instructions are disabled, use -maltivec"
c85ce869 5817 " to enable them");
c4ad648e 5818 }
4697a36c
MM
5819}
5820\f
fe984136
RH
5821/* Return true if TYPE must be passed on the stack and not in registers. */
5822
5823static bool
586de218 5824rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5825{
5826 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5827 return must_pass_in_stack_var_size (mode, type);
5828 else
5829 return must_pass_in_stack_var_size_or_pad (mode, type);
5830}
5831
c229cba9
DE
5832/* If defined, a C expression which determines whether, and in which
5833 direction, to pad out an argument with extra space. The value
5834 should be of type `enum direction': either `upward' to pad above
5835 the argument, `downward' to pad below, or `none' to inhibit
5836 padding.
5837
5838 For the AIX ABI, structs are always stored left-shifted in their
5839 argument slot. */
5840
9ebbca7d 5841enum direction
586de218 5842function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5843{
6e985040
AM
5844#ifndef AGGREGATE_PADDING_FIXED
5845#define AGGREGATE_PADDING_FIXED 0
5846#endif
5847#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5848#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5849#endif
5850
5851 if (!AGGREGATE_PADDING_FIXED)
5852 {
5853 /* GCC used to pass structures of the same size as integer types as
5854 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5855 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5856 passed padded downward, except that -mstrict-align further
5857 muddied the water in that multi-component structures of 2 and 4
5858 bytes in size were passed padded upward.
5859
5860 The following arranges for best compatibility with previous
5861 versions of gcc, but removes the -mstrict-align dependency. */
5862 if (BYTES_BIG_ENDIAN)
5863 {
5864 HOST_WIDE_INT size = 0;
5865
5866 if (mode == BLKmode)
5867 {
5868 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5869 size = int_size_in_bytes (type);
5870 }
5871 else
5872 size = GET_MODE_SIZE (mode);
5873
5874 if (size == 1 || size == 2 || size == 4)
5875 return downward;
5876 }
5877 return upward;
5878 }
5879
5880 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5881 {
5882 if (type != 0 && AGGREGATE_TYPE_P (type))
5883 return upward;
5884 }
c229cba9 5885
d3704c46
KH
5886 /* Fall back to the default. */
5887 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5888}
5889
b6c9286a 5890/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5891 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5892 PARM_BOUNDARY is used for all arguments.
f676971a 5893
84e9ad15
AM
5894 V.4 wants long longs and doubles to be double word aligned. Just
5895 testing the mode size is a boneheaded way to do this as it means
5896 that other types such as complex int are also double word aligned.
5897 However, we're stuck with this because changing the ABI might break
5898 existing library interfaces.
5899
b693336b
PB
5900 Doubleword align SPE vectors.
5901 Quadword align Altivec vectors.
5902 Quadword align large synthetic vector types. */
b6c9286a
MM
5903
5904int
b693336b 5905function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5906{
84e9ad15
AM
5907 if (DEFAULT_ABI == ABI_V4
5908 && (GET_MODE_SIZE (mode) == 8
5909 || (TARGET_HARD_FLOAT
5910 && TARGET_FPRS
7393f7f8 5911 && (mode == TFmode || mode == TDmode))))
4ed78545 5912 return 64;
ad630bef
DE
5913 else if (SPE_VECTOR_MODE (mode)
5914 || (type && TREE_CODE (type) == VECTOR_TYPE
5915 && int_size_in_bytes (type) >= 8
5916 && int_size_in_bytes (type) < 16))
e1f83b4d 5917 return 64;
cacf1ca8 5918 else if ((ALTIVEC_VECTOR_MODE (mode) || VSX_VECTOR_MODE (mode))
ad630bef
DE
5919 || (type && TREE_CODE (type) == VECTOR_TYPE
5920 && int_size_in_bytes (type) >= 16))
0ac081f6 5921 return 128;
0b5383eb
DJ
5922 else if (rs6000_darwin64_abi && mode == BLKmode
5923 && type && TYPE_ALIGN (type) > 64)
5924 return 128;
9ebbca7d 5925 else
b6c9286a 5926 return PARM_BOUNDARY;
b6c9286a 5927}
c53bdcf5 5928
294bd182
AM
5929/* For a function parm of MODE and TYPE, return the starting word in
5930 the parameter area. NWORDS of the parameter area are already used. */
5931
5932static unsigned int
5933rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5934{
5935 unsigned int align;
5936 unsigned int parm_offset;
5937
5938 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5939 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5940 return nwords + (-(parm_offset + nwords) & align);
5941}
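/* For illustration: ALIGN above is the required alignment in parameter
   words minus one, and PARM_OFFSET is the word offset of the parameter
   save area (2 for V.4, 6 for the other ABIs).  For a 16-byte-aligned
   argument in 64-bit mode (align = 1, parm_offset = 6) with nwords = 5
   already used, the result is 5 + (-(6 + 5) & 1) = 6, i.e. one padding
   word is skipped so the argument starts on an even doubleword.  */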
5942
c53bdcf5
AM
5943/* Compute the size (in words) of a function argument. */
5944
5945static unsigned long
5946rs6000_arg_size (enum machine_mode mode, tree type)
5947{
5948 unsigned long size;
5949
5950 if (mode != BLKmode)
5951 size = GET_MODE_SIZE (mode);
5952 else
5953 size = int_size_in_bytes (type);
5954
5955 if (TARGET_32BIT)
5956 return (size + 3) >> 2;
5957 else
5958 return (size + 7) >> 3;
5959}
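/* For illustration: the rounding above converts a byte size into
   parameter words, e.g. a 9-byte BLKmode aggregate occupies
   (9 + 3) >> 2 = 3 words when TARGET_32BIT and (9 + 7) >> 3 = 2 words
   otherwise.  */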
b6c9286a 5960\f
0b5383eb 5961/* Use this to flush pending int fields. */
594a51fe
SS
5962
5963static void
0b5383eb
DJ
5964rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5965 HOST_WIDE_INT bitpos)
594a51fe 5966{
0b5383eb
DJ
5967 unsigned int startbit, endbit;
5968 int intregs, intoffset;
5969 enum machine_mode mode;
594a51fe 5970
0b5383eb
DJ
5971 if (cum->intoffset == -1)
5972 return;
594a51fe 5973
0b5383eb
DJ
5974 intoffset = cum->intoffset;
5975 cum->intoffset = -1;
5976
5977 if (intoffset % BITS_PER_WORD != 0)
5978 {
5979 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5980 MODE_INT, 0);
5981 if (mode == BLKmode)
594a51fe 5982 {
0b5383eb
DJ
5983 /* We couldn't find an appropriate mode, which happens,
5984 e.g., in packed structs when there are 3 bytes to load.
5985 Move intoffset back to the beginning of the word in this
5986 case. */
5987 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5988 }
594a51fe 5989 }
0b5383eb
DJ
5990
5991 startbit = intoffset & -BITS_PER_WORD;
5992 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5993 intregs = (endbit - startbit) / BITS_PER_WORD;
5994 cum->words += intregs;
5995}
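/* For illustration, assuming 64-bit words: if cum->intoffset = 96 and
   the flush happens at bitpos = 192, then startbit = 96 & -64 = 64,
   endbit = (192 + 63) & -64 = 192 and intregs = (192 - 64) / 64 = 2,
   so two GPR-sized words are counted for the integer fields that
   preceded the current field.  */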
5996
5997/* The darwin64 ABI calls for us to recurse down through structs,
5998 looking for elements passed in registers. Unfortunately, we have
5999 to track int register count here also because of misalignments
6000 in powerpc alignment mode. */
6001
6002static void
6003rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
6004 tree type,
6005 HOST_WIDE_INT startbitpos)
6006{
6007 tree f;
6008
6009 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6010 if (TREE_CODE (f) == FIELD_DECL)
6011 {
6012 HOST_WIDE_INT bitpos = startbitpos;
6013 tree ftype = TREE_TYPE (f);
70fb00df
AP
6014 enum machine_mode mode;
6015 if (ftype == error_mark_node)
6016 continue;
6017 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6018
6019 if (DECL_SIZE (f) != 0
6020 && host_integerp (bit_position (f), 1))
6021 bitpos += int_bit_position (f);
6022
6023 /* ??? FIXME: else assume zero offset. */
6024
6025 if (TREE_CODE (ftype) == RECORD_TYPE)
6026 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
6027 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
6028 {
6029 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
6030 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
6031 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
6032 }
6033 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
6034 {
6035 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
6036 cum->vregno++;
6037 cum->words += 2;
6038 }
6039 else if (cum->intoffset == -1)
6040 cum->intoffset = bitpos;
6041 }
594a51fe
SS
6042}
6043
4697a36c
MM
6044/* Update the data in CUM to advance over an argument
6045 of mode MODE and data type TYPE.
b2d04ecf
AM
6046 (TYPE is null for libcalls where that information may not be available.)
6047
6048 Note that for args passed by reference, function_arg will be called
6049 with MODE and TYPE set to that of the pointer to the arg, not the arg
6050 itself. */
4697a36c
MM
6051
6052void
f676971a 6053function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 6054 tree type, int named, int depth)
4697a36c 6055{
0b5383eb
DJ
6056 int size;
6057
594a51fe
SS
6058 /* Only tick off an argument if we're not recursing. */
6059 if (depth == 0)
6060 cum->nargs_prototype--;
4697a36c 6061
ad630bef
DE
6062 if (TARGET_ALTIVEC_ABI
6063 && (ALTIVEC_VECTOR_MODE (mode)
cacf1ca8 6064 || VSX_VECTOR_MODE (mode)
ad630bef
DE
6065 || (type && TREE_CODE (type) == VECTOR_TYPE
6066 && int_size_in_bytes (type) == 16)))
0ac081f6 6067 {
4ed78545
AM
6068 bool stack = false;
6069
2858f73a 6070 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 6071 {
6d0ef01e
HP
6072 cum->vregno++;
6073 if (!TARGET_ALTIVEC)
c85ce869 6074 error ("cannot pass argument in vector register because"
6d0ef01e 6075 " altivec instructions are disabled, use -maltivec"
c85ce869 6076 " to enable them");
4ed78545
AM
6077
6078 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 6079 even if it is going to be passed in a vector register.
4ed78545
AM
6080 Darwin does the same for variable-argument functions. */
6081 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
6082 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
6083 stack = true;
6d0ef01e 6084 }
4ed78545
AM
6085 else
6086 stack = true;
6087
6088 if (stack)
c4ad648e 6089 {
a594a19c 6090 int align;
f676971a 6091
2858f73a
GK
6092 /* Vector parameters must be 16-byte aligned. This places
6093 them at 2 mod 4 in terms of words in 32-bit mode, since
6094 the parameter save area starts at offset 24 from the
6095 stack. In 64-bit mode, they just have to start on an
6096 even word, since the parameter save area is 16-byte
6097 aligned. Space for GPRs is reserved even if the argument
6098 will be passed in memory. */
6099 if (TARGET_32BIT)
4ed78545 6100 align = (2 - cum->words) & 3;
2858f73a
GK
6101 else
6102 align = cum->words & 1;
c53bdcf5 6103 cum->words += align + rs6000_arg_size (mode, type);
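          /* For illustration: in 32-bit mode the parameter save area
             begins 24 bytes (6 words) above the stack pointer, so a
             vector is 16-byte aligned exactly when its word offset is
             2 mod 4.  E.g. with cum->words = 3, align = (2 - 3) & 3 = 3
             and the vector starts at word 6, i.e. byte 24 + 4*6 = 48,
             which is 16-byte aligned.  */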
f676971a 6104
a594a19c
GK
6105 if (TARGET_DEBUG_ARG)
6106 {
f676971a 6107 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
6108 cum->words, align);
6109 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 6110 cum->nargs_prototype, cum->prototype,
2858f73a 6111 GET_MODE_NAME (mode));
a594a19c
GK
6112 }
6113 }
0ac081f6 6114 }
a4b0320c 6115 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
6116 && !cum->stdarg
6117 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 6118 cum->sysv_gregno++;
594a51fe
SS
6119
6120 else if (rs6000_darwin64_abi
6121 && mode == BLKmode
0b5383eb
DJ
6122 && TREE_CODE (type) == RECORD_TYPE
6123 && (size = int_size_in_bytes (type)) > 0)
6124 {
6125 /* Variable sized types have size == -1 and are
6126 treated as if consisting entirely of ints.
6127 Pad to 16 byte boundary if needed. */
6128 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6129 && (cum->words % 2) != 0)
6130 cum->words++;
6131 /* For varargs, we can just go up by the size of the struct. */
6132 if (!named)
6133 cum->words += (size + 7) / 8;
6134 else
6135 {
6136 /* It is tempting to say int register count just goes up by
6137 sizeof(type)/8, but this is wrong in a case such as
6138 { int; double; int; } [powerpc alignment]. We have to
6139 grovel through the fields for these too. */
6140 cum->intoffset = 0;
6141 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 6142 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
6143 size * BITS_PER_UNIT);
6144 }
6145 }
f607bc57 6146 else if (DEFAULT_ABI == ABI_V4)
4697a36c 6147 {
a3170dc6 6148 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6149 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6150 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
6151 || (mode == TFmode && !TARGET_IEEEQUAD)
6152 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 6153 {
2d83f070
JJ
6154 /* _Decimal128 must use an even/odd register pair. This assumes
6155 that the register number is odd when fregno is odd. */
6156 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6157 cum->fregno++;
6158
6159 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6160 <= FP_ARG_V4_MAX_REG)
602ea4d3 6161 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
6162 else
6163 {
602ea4d3 6164 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
6165 if (mode == DFmode || mode == TFmode
6166 || mode == DDmode || mode == TDmode)
c4ad648e 6167 cum->words += cum->words & 1;
c53bdcf5 6168 cum->words += rs6000_arg_size (mode, type);
4cc833b7 6169 }
4697a36c 6170 }
4cc833b7
RH
6171 else
6172 {
b2d04ecf 6173 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6174 int gregno = cum->sysv_gregno;
6175
4ed78545
AM
6176 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6177 (r7,r8) or (r9,r10). As does any other 2 word item such
6178 as complex int due to a historical mistake. */
6179 if (n_words == 2)
6180 gregno += (1 - gregno) & 1;
4cc833b7 6181
4ed78545 6182 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
6183 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
6184 {
4ed78545
AM
6185 /* Long long and SPE vectors are aligned on the stack.
6186 So are other 2 word items such as complex int due to
6187 a historical mistake. */
4cc833b7
RH
6188 if (n_words == 2)
6189 cum->words += cum->words & 1;
6190 cum->words += n_words;
6191 }
4697a36c 6192
4cc833b7
RH
6193 /* Note: continuing to accumulate gregno past when we've started
6194 spilling to the stack indicates the fact that we've started
6195 spilling to the stack to expand_builtin_saveregs. */
6196 cum->sysv_gregno = gregno + n_words;
6197 }
4697a36c 6198
4cc833b7
RH
6199 if (TARGET_DEBUG_ARG)
6200 {
6201 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
6202 cum->words, cum->fregno);
6203 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
6204 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
6205 fprintf (stderr, "mode = %4s, named = %d\n",
6206 GET_MODE_NAME (mode), named);
6207 }
4697a36c
MM
6208 }
6209 else
4cc833b7 6210 {
b2d04ecf 6211 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
6212 int start_words = cum->words;
6213 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 6214
294bd182 6215 cum->words = align_words + n_words;
4697a36c 6216
ebb109ad 6217 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 6218 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
6219 {
6220 /* _Decimal128 must be passed in an even/odd float register pair.
6221 This assumes that the register number is odd when fregno is
6222 odd. */
6223 if (mode == TDmode && (cum->fregno % 2) == 1)
6224 cum->fregno++;
6225 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
6226 }
4cc833b7
RH
6227
6228 if (TARGET_DEBUG_ARG)
6229 {
6230 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
6231 cum->words, cum->fregno);
6232 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
6233 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 6234 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 6235 named, align_words - start_words, depth);
4cc833b7
RH
6236 }
6237 }
4697a36c 6238}
a6c9bed4 6239
f82f556d
AH
6240static rtx
6241spe_build_register_parallel (enum machine_mode mode, int gregno)
6242{
17caeff2 6243 rtx r1, r3, r5, r7;
f82f556d 6244
37409796 6245 switch (mode)
f82f556d 6246 {
37409796 6247 case DFmode:
54b695e7
AH
6248 r1 = gen_rtx_REG (DImode, gregno);
6249 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
6250 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
6251
6252 case DCmode:
17caeff2 6253 case TFmode:
54b695e7
AH
6254 r1 = gen_rtx_REG (DImode, gregno);
6255 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
6256 r3 = gen_rtx_REG (DImode, gregno + 2);
6257 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
6258 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 6259
17caeff2
JM
6260 case TCmode:
6261 r1 = gen_rtx_REG (DImode, gregno);
6262 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
6263 r3 = gen_rtx_REG (DImode, gregno + 2);
6264 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
6265 r5 = gen_rtx_REG (DImode, gregno + 4);
6266 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
6267 r7 = gen_rtx_REG (DImode, gregno + 6);
6268 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
6269 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
6270
37409796
NS
6271 default:
6272 gcc_unreachable ();
f82f556d 6273 }
f82f556d 6274}
b78d48dd 6275
f82f556d 6276/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 6277static rtx
f676971a 6278rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6279 tree type)
a6c9bed4 6280{
f82f556d
AH
6281 int gregno = cum->sysv_gregno;
6282
6283 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 6284 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 6285 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 6286 || mode == DCmode || mode == TCmode))
f82f556d 6287 {
b5870bee
AH
6288 int n_words = rs6000_arg_size (mode, type);
6289
f82f556d 6290 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 6291 if (mode == DFmode)
b5870bee 6292 gregno += (1 - gregno) & 1;
f82f556d 6293
b5870bee
AH
6294 /* Multi-reg args are not split between registers and stack. */
6295 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
6296 return NULL_RTX;
6297
6298 return spe_build_register_parallel (mode, gregno);
6299 }
a6c9bed4
AH
6300 if (cum->stdarg)
6301 {
c53bdcf5 6302 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
6303
6304 /* SPE vectors are put in odd registers. */
6305 if (n_words == 2 && (gregno & 1) == 0)
6306 gregno += 1;
6307
6308 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
6309 {
6310 rtx r1, r2;
6311 enum machine_mode m = SImode;
6312
6313 r1 = gen_rtx_REG (m, gregno);
6314 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
6315 r2 = gen_rtx_REG (m, gregno + 1);
6316 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
6317 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
6318 }
6319 else
b78d48dd 6320 return NULL_RTX;
a6c9bed4
AH
6321 }
6322 else
6323 {
f82f556d
AH
6324 if (gregno <= GP_ARG_MAX_REG)
6325 return gen_rtx_REG (mode, gregno);
a6c9bed4 6326 else
b78d48dd 6327 return NULL_RTX;
a6c9bed4
AH
6328 }
6329}
6330
0b5383eb
DJ
6331/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
6332 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 6333
0b5383eb 6334static void
bb8df8a6 6335rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 6336 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 6337{
0b5383eb
DJ
6338 enum machine_mode mode;
6339 unsigned int regno;
6340 unsigned int startbit, endbit;
6341 int this_regno, intregs, intoffset;
6342 rtx reg;
594a51fe 6343
0b5383eb
DJ
6344 if (cum->intoffset == -1)
6345 return;
6346
6347 intoffset = cum->intoffset;
6348 cum->intoffset = -1;
6349
6350 /* If this is the trailing part of a word, try to only load that
6351 much into the register. Otherwise load the whole register. Note
6352 that in the latter case we may pick up unwanted bits. It's not a
 6353 problem at the moment, but we may wish to revisit this. */
6354
6355 if (intoffset % BITS_PER_WORD != 0)
594a51fe 6356 {
0b5383eb
DJ
6357 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
6358 MODE_INT, 0);
6359 if (mode == BLKmode)
6360 {
6361 /* We couldn't find an appropriate mode, which happens,
6362 e.g., in packed structs when there are 3 bytes to load.
 6363 Move intoffset back to the beginning of the word in this
6364 case. */
6365 intoffset = intoffset & -BITS_PER_WORD;
6366 mode = word_mode;
6367 }
6368 }
6369 else
6370 mode = word_mode;
6371
6372 startbit = intoffset & -BITS_PER_WORD;
6373 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
6374 intregs = (endbit - startbit) / BITS_PER_WORD;
6375 this_regno = cum->words + intoffset / BITS_PER_WORD;
6376
6377 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6378 cum->use_stack = 1;
bb8df8a6 6379
0b5383eb
DJ
6380 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6381 if (intregs <= 0)
6382 return;
6383
6384 intoffset /= BITS_PER_UNIT;
6385 do
6386 {
6387 regno = GP_ARG_MIN_REG + this_regno;
6388 reg = gen_rtx_REG (mode, regno);
6389 rvec[(*k)++] =
6390 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6391
6392 this_regno += 1;
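      /* Round intoffset (now in bytes) up to the start of the next word.  */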
6393 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6394 mode = word_mode;
6395 intregs -= 1;
6396 }
6397 while (intregs > 0);
6398}
6399
6400/* Recursive workhorse for the following. */
6401
6402static void
586de218 6403rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6404 HOST_WIDE_INT startbitpos, rtx rvec[],
6405 int *k)
6406{
6407 tree f;
6408
6409 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6410 if (TREE_CODE (f) == FIELD_DECL)
6411 {
6412 HOST_WIDE_INT bitpos = startbitpos;
6413 tree ftype = TREE_TYPE (f);
70fb00df
AP
6414 enum machine_mode mode;
6415 if (ftype == error_mark_node)
6416 continue;
6417 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6418
6419 if (DECL_SIZE (f) != 0
6420 && host_integerp (bit_position (f), 1))
6421 bitpos += int_bit_position (f);
6422
6423 /* ??? FIXME: else assume zero offset. */
6424
6425 if (TREE_CODE (ftype) == RECORD_TYPE)
6426 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6427 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6428 {
0b5383eb
DJ
6429#if 0
6430 switch (mode)
594a51fe 6431 {
0b5383eb
DJ
6432 case SCmode: mode = SFmode; break;
6433 case DCmode: mode = DFmode; break;
6434 case TCmode: mode = TFmode; break;
6435 default: break;
594a51fe 6436 }
0b5383eb
DJ
6437#endif
6438 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6439 rvec[(*k)++]
bb8df8a6 6440 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6441 gen_rtx_REG (mode, cum->fregno++),
6442 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6443 if (mode == TFmode || mode == TDmode)
0b5383eb 6444 cum->fregno++;
594a51fe 6445 }
0b5383eb
DJ
6446 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6447 {
6448 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6449 rvec[(*k)++]
bb8df8a6
EC
6450 = gen_rtx_EXPR_LIST (VOIDmode,
6451 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6452 GEN_INT (bitpos / BITS_PER_UNIT));
6453 }
6454 else if (cum->intoffset == -1)
6455 cum->intoffset = bitpos;
6456 }
6457}
594a51fe 6458
0b5383eb
DJ
6459/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6460 the register(s) to be used for each field and subfield of a struct
6461 being passed by value, along with the offset of where the
6462 register's value may be found in the block. FP fields go in FP
 6463 registers, vector fields go in vector registers, and everything
bb8df8a6 6464 else goes in int registers, packed as in memory.
8ff40a74 6465
0b5383eb
DJ
6466 This code is also used for function return values. RETVAL indicates
6467 whether this is the case.
8ff40a74 6468
a4d05547 6469 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6470 calling convention. */
594a51fe 6471
0b5383eb 6472static rtx
586de218 6473rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6474 int named, bool retval)
6475{
6476 rtx rvec[FIRST_PSEUDO_REGISTER];
6477 int k = 1, kbase = 1;
6478 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6479 /* This is a copy; modifications are not visible to our caller. */
6480 CUMULATIVE_ARGS copy_cum = *orig_cum;
6481 CUMULATIVE_ARGS *cum = &copy_cum;
6482
6483 /* Pad to 16 byte boundary if needed. */
6484 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6485 && (cum->words % 2) != 0)
6486 cum->words++;
6487
6488 cum->intoffset = 0;
6489 cum->use_stack = 0;
6490 cum->named = named;
6491
6492 /* Put entries into rvec[] for individual FP and vector fields, and
6493 for the chunks of memory that go in int regs. Note we start at
6494 element 1; 0 is reserved for an indication of using memory, and
6495 may or may not be filled in below. */
6496 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6497 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6498
6499 /* If any part of the struct went on the stack put all of it there.
6500 This hack is because the generic code for
6501 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6502 parts of the struct are not at the beginning. */
6503 if (cum->use_stack)
6504 {
6505 if (retval)
6506 return NULL_RTX; /* doesn't go in registers at all */
6507 kbase = 0;
6508 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6509 }
6510 if (k > 1 || cum->use_stack)
6511 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6512 else
6513 return NULL_RTX;
6514}
6515
b78d48dd
FJ
6516/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6517
6518static rtx
ec6376ab 6519rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6520{
ec6376ab
AM
6521 int n_units;
6522 int i, k;
6523 rtx rvec[GP_ARG_NUM_REG + 1];
6524
6525 if (align_words >= GP_ARG_NUM_REG)
6526 return NULL_RTX;
6527
6528 n_units = rs6000_arg_size (mode, type);
6529
6530 /* Optimize the simple case where the arg fits in one gpr, except in
6531 the case of BLKmode due to assign_parms assuming that registers are
6532 BITS_PER_WORD wide. */
6533 if (n_units == 0
6534 || (n_units == 1 && mode != BLKmode))
6535 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6536
6537 k = 0;
6538 if (align_words + n_units > GP_ARG_NUM_REG)
6539 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6540 using a magic NULL_RTX component.
79773478
AM
6541 This is not strictly correct. Only some of the arg belongs in
6542 memory, not all of it. However, the normal scheme using
6543 function_arg_partial_nregs can result in unusual subregs, eg.
6544 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6545 store the whole arg to memory is often more efficient than code
6546 to store pieces, and we know that space is available in the right
6547 place for the whole arg. */
ec6376ab
AM
6548 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6549
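  /* Describe, one SImode word at a time, the part of the argument that
     lives in GPRs.  */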
6550 i = 0;
6551 do
36a454e1 6552 {
ec6376ab
AM
6553 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6554 rtx off = GEN_INT (i++ * 4);
6555 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6556 }
ec6376ab
AM
6557 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6558
6559 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6560}
6561
4697a36c
MM
6562/* Determine where to put an argument to a function.
6563 Value is zero to push the argument on the stack,
6564 or a hard register in which to store the argument.
6565
6566 MODE is the argument's machine mode.
6567 TYPE is the data type of the argument (as a tree).
6568 This is null for libcalls where that information may
6569 not be available.
6570 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6571 the preceding args and about the function being called. It is
6572 not modified in this routine.
4697a36c
MM
6573 NAMED is nonzero if this argument is a named parameter
6574 (otherwise it is an extra parameter matching an ellipsis).
6575
6576 On RS/6000 the first eight words of non-FP are normally in registers
6577 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6578 Under V.4, the first 8 FP args are in registers.
6579
6580 If this is floating-point and no prototype is specified, we use
6581 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6582 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6583 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6584 doesn't support PARALLEL anyway.
6585
6586 Note that for args passed by reference, function_arg will be called
6587 with MODE and TYPE set to that of the pointer to the arg, not the arg
6588 itself. */
4697a36c 6589
9390387d 6590rtx
f676971a 6591function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6592 tree type, int named)
4697a36c 6593{
4cc833b7 6594 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6595
a4f6c312
SS
 6596 /* Return a marker to indicate whether the CR1 bit that V.4 uses to
 6597 say FP args were passed in registers needs to be set or cleared.
6598 Assume that we don't need the marker for software floating point,
6599 or compiler generated library calls. */
4697a36c
MM
6600 if (mode == VOIDmode)
6601 {
f607bc57 6602 if (abi == ABI_V4
b9599e46 6603 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6604 && (cum->stdarg
6605 || (cum->nargs_prototype < 0
6606 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6607 {
a3170dc6
AH
6608 /* For the SPE, we need to crxor CR6 always. */
6609 if (TARGET_SPE_ABI)
6610 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6611 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6612 return GEN_INT (cum->call_cookie
6613 | ((cum->fregno == FP_ARG_MIN_REG)
6614 ? CALL_V4_SET_FP_ARGS
6615 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6616 }
4697a36c 6617
7509c759 6618 return GEN_INT (cum->call_cookie);
4697a36c
MM
6619 }
6620
0b5383eb
DJ
6621 if (rs6000_darwin64_abi && mode == BLKmode
6622 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6623 {
0b5383eb 6624 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6625 if (rslt != NULL_RTX)
6626 return rslt;
6627 /* Else fall through to usual handling. */
6628 }
6629
2858f73a 6630 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6631 if (TARGET_64BIT && ! cum->prototype)
6632 {
c4ad648e
AM
 6633 /* Vector parameters get passed in a vector register
 6634 and also in GPRs or memory, in the absence of a prototype. */
6635 int align_words;
6636 rtx slot;
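	  /* Vector args are 16-byte aligned, so round the word count up to
	     an even number (a 16-byte boundary in 64-bit mode).  */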
6637 align_words = (cum->words + 1) & ~1;
6638
6639 if (align_words >= GP_ARG_NUM_REG)
6640 {
6641 slot = NULL_RTX;
6642 }
6643 else
6644 {
6645 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6646 }
6647 return gen_rtx_PARALLEL (mode,
6648 gen_rtvec (2,
6649 gen_rtx_EXPR_LIST (VOIDmode,
6650 slot, const0_rtx),
6651 gen_rtx_EXPR_LIST (VOIDmode,
6652 gen_rtx_REG (mode, cum->vregno),
6653 const0_rtx)));
c72d6c26
HP
6654 }
6655 else
6656 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6657 else if (TARGET_ALTIVEC_ABI
6658 && (ALTIVEC_VECTOR_MODE (mode)
cacf1ca8 6659 || VSX_VECTOR_MODE (mode)
ad630bef
DE
6660 || (type && TREE_CODE (type) == VECTOR_TYPE
6661 && int_size_in_bytes (type) == 16)))
0ac081f6 6662 {
2858f73a 6663 if (named || abi == ABI_V4)
a594a19c 6664 return NULL_RTX;
0ac081f6 6665 else
a594a19c
GK
6666 {
6667 /* Vector parameters to varargs functions under AIX or Darwin
6668 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6669 int align, align_words, n_words;
6670 enum machine_mode part_mode;
a594a19c
GK
6671
6672 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6673 2 mod 4 in terms of words in 32-bit mode, since the parameter
6674 save area starts at offset 24 from the stack. In 64-bit mode,
6675 they just have to start on an even word, since the parameter
6676 save area is 16-byte aligned. */
6677 if (TARGET_32BIT)
4ed78545 6678 align = (2 - cum->words) & 3;
2858f73a
GK
6679 else
6680 align = cum->words & 1;
a594a19c
GK
6681 align_words = cum->words + align;
6682
6683 /* Out of registers? Memory, then. */
6684 if (align_words >= GP_ARG_NUM_REG)
6685 return NULL_RTX;
ec6376ab
AM
6686
6687 if (TARGET_32BIT && TARGET_POWERPC64)
6688 return rs6000_mixed_function_arg (mode, type, align_words);
6689
2858f73a
GK
6690 /* The vector value goes in GPRs. Only the part of the
6691 value in GPRs is reported here. */
ec6376ab
AM
6692 part_mode = mode;
6693 n_words = rs6000_arg_size (mode, type);
6694 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6695 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6696 is either wholly in GPRs or half in GPRs and half not. */
6697 part_mode = DImode;
ec6376ab
AM
6698
6699 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6700 }
0ac081f6 6701 }
f82f556d
AH
6702 else if (TARGET_SPE_ABI && TARGET_SPE
6703 && (SPE_VECTOR_MODE (mode)
18f63bfa 6704 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6705 || mode == DCmode
6706 || mode == TFmode
6707 || mode == TCmode))))
a6c9bed4 6708 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6709
f607bc57 6710 else if (abi == ABI_V4)
4697a36c 6711 {
a3170dc6 6712 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6713 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6714 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
cf8e1455
DE
6715 || (mode == TFmode && !TARGET_IEEEQUAD)
6716 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6717 {
2d83f070
JJ
6718 /* _Decimal128 must use an even/odd register pair. This assumes
6719 that the register number is odd when fregno is odd. */
6720 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6721 cum->fregno++;
6722
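	  /* TFmode and TDmode occupy two FPRs, so the check below also
	     requires the second register of the pair to be available.  */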
6723 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6724 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6725 return gen_rtx_REG (mode, cum->fregno);
6726 else
b78d48dd 6727 return NULL_RTX;
4cc833b7
RH
6728 }
6729 else
6730 {
b2d04ecf 6731 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6732 int gregno = cum->sysv_gregno;
6733
4ed78545
AM
6734 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
 6735 (r7,r8) or (r9,r10). As is any other 2-word item such
6736 as complex int due to a historical mistake. */
6737 if (n_words == 2)
6738 gregno += (1 - gregno) & 1;
4cc833b7 6739
4ed78545 6740 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6741 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6742 return NULL_RTX;
ec6376ab
AM
6743
6744 if (TARGET_32BIT && TARGET_POWERPC64)
6745 return rs6000_mixed_function_arg (mode, type,
6746 gregno - GP_ARG_MIN_REG);
6747 return gen_rtx_REG (mode, gregno);
4cc833b7 6748 }
4697a36c 6749 }
4cc833b7
RH
6750 else
6751 {
294bd182 6752 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6753
2d83f070
JJ
6754 /* _Decimal128 must be passed in an even/odd float register pair.
6755 This assumes that the register number is odd when fregno is odd. */
6756 if (mode == TDmode && (cum->fregno % 2) == 1)
6757 cum->fregno++;
6758
2858f73a 6759 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6760 {
ec6376ab
AM
6761 rtx rvec[GP_ARG_NUM_REG + 1];
6762 rtx r;
6763 int k;
c53bdcf5
AM
6764 bool needs_psave;
6765 enum machine_mode fmode = mode;
c53bdcf5
AM
6766 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6767
6768 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6769 {
c53bdcf5
AM
6770 /* Currently, we only ever need one reg here because complex
6771 doubles are split. */
7393f7f8
BE
6772 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6773 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6774
7393f7f8
BE
6775 /* Long double or _Decimal128 split over regs and memory. */
6776 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6777 }
c53bdcf5
AM
6778
6779 /* Do we also need to pass this arg in the parameter save
6780 area? */
6781 needs_psave = (type
6782 && (cum->nargs_prototype <= 0
6783 || (DEFAULT_ABI == ABI_AIX
de17c25f 6784 && TARGET_XL_COMPAT
c53bdcf5
AM
6785 && align_words >= GP_ARG_NUM_REG)));
6786
6787 if (!needs_psave && mode == fmode)
ec6376ab 6788 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6789
ec6376ab 6790 k = 0;
c53bdcf5
AM
6791 if (needs_psave)
6792 {
ec6376ab 6793 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6794 This piece must come first, before the fprs. */
c53bdcf5
AM
6795 if (align_words < GP_ARG_NUM_REG)
6796 {
6797 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6798
6799 if (align_words + n_words > GP_ARG_NUM_REG
6800 || (TARGET_32BIT && TARGET_POWERPC64))
6801 {
6802 /* If this is partially on the stack, then we only
6803 include the portion actually in registers here. */
6804 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6805 rtx off;
79773478
AM
6806 int i = 0;
6807 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6808 /* Not all of the arg fits in gprs. Say that it
6809 goes in memory too, using a magic NULL_RTX
6810 component. Also see comment in
6811 rs6000_mixed_function_arg for why the normal
6812 function_arg_partial_nregs scheme doesn't work
6813 in this case. */
6814 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6815 const0_rtx);
ec6376ab
AM
6816 do
6817 {
6818 r = gen_rtx_REG (rmode,
6819 GP_ARG_MIN_REG + align_words);
2e6c9641 6820 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6821 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6822 }
6823 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6824 }
6825 else
6826 {
6827 /* The whole arg fits in gprs. */
6828 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6829 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6830 }
c53bdcf5 6831 }
ec6376ab
AM
6832 else
6833 /* It's entirely in memory. */
6834 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6835 }
6836
ec6376ab
AM
6837 /* Describe where this piece goes in the fprs. */
6838 r = gen_rtx_REG (fmode, cum->fregno);
6839 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6840
6841 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6842 }
6843 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6844 {
ec6376ab
AM
6845 if (TARGET_32BIT && TARGET_POWERPC64)
6846 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6847
4eeca74f
AM
6848 if (mode == BLKmode)
6849 mode = Pmode;
6850
b2d04ecf
AM
6851 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6852 }
4cc833b7
RH
6853 else
6854 return NULL_RTX;
4697a36c 6855 }
4697a36c
MM
6856}
6857\f
ec6376ab 6858/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6859 the number of bytes passed in registers. For args passed entirely in
6860 registers or entirely in memory, zero. When an arg is described by a
6861 PARALLEL, perhaps using more than one register type, this function
6862 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6863
78a52f11
RH
6864static int
6865rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6866 tree type, bool named)
4697a36c 6867{
c53bdcf5 6868 int ret = 0;
ec6376ab 6869 int align_words;
c53bdcf5 6870
f607bc57 6871 if (DEFAULT_ABI == ABI_V4)
4697a36c 6872 return 0;
4697a36c 6873
c53bdcf5
AM
6874 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6875 && cum->nargs_prototype >= 0)
6876 return 0;
6877
0b5383eb
DJ
6878 /* In this complicated case we just disable the partial_nregs code. */
6879 if (rs6000_darwin64_abi && mode == BLKmode
6880 && TREE_CODE (type) == RECORD_TYPE
6881 && int_size_in_bytes (type) > 0)
6882 return 0;
6883
294bd182 6884 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6885
79773478
AM
6886 if (USE_FP_FOR_ARG_P (cum, mode, type))
6887 {
fb63c729
AM
6888 /* If we are passing this arg in the fixed parameter save area
6889 (gprs or memory) as well as fprs, then this function should
79773478
AM
6890 return the number of partial bytes passed in the parameter
6891 save area rather than partial bytes passed in fprs. */
6892 if (type
6893 && (cum->nargs_prototype <= 0
6894 || (DEFAULT_ABI == ABI_AIX
6895 && TARGET_XL_COMPAT
6896 && align_words >= GP_ARG_NUM_REG)))
6897 return 0;
6898 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6899 > FP_ARG_MAX_REG + 1)
ac7e839c 6900 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6901 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6902 return 0;
6903 }
6904
ec6376ab
AM
6905 if (align_words < GP_ARG_NUM_REG
6906 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6907 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6908
c53bdcf5 6909 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6910 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6911
c53bdcf5 6912 return ret;
4697a36c
MM
6913}
6914\f
6915/* A C expression that indicates when an argument must be passed by
6916 reference. If nonzero for an argument, a copy of that argument is
6917 made in memory and a pointer to the argument is passed instead of
6918 the argument itself. The pointer is passed in whatever way is
6919 appropriate for passing a pointer to that type.
6920
b2d04ecf
AM
6921 Under V.4, aggregates and long double are passed by reference.
6922
6923 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6924 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6925
6926 As an extension to all ABIs, variable sized types are passed by
6927 reference. */
4697a36c 6928
8cd5a4e0 6929static bool
f676971a 6930rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6931 enum machine_mode mode, const_tree type,
bada2eb8 6932 bool named ATTRIBUTE_UNUSED)
4697a36c 6933{
602ea4d3 6934 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6935 {
6936 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6937 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6938 return 1;
6939 }
6940
6941 if (!type)
6942 return 0;
4697a36c 6943
bada2eb8
DE
6944 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6945 {
6946 if (TARGET_DEBUG_ARG)
6947 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6948 return 1;
6949 }
6950
6951 if (int_size_in_bytes (type) < 0)
6952 {
6953 if (TARGET_DEBUG_ARG)
6954 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6955 return 1;
6956 }
6957
6958 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6959 modes only exist for GCC vector types if -maltivec. */
6960 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6961 {
6962 if (TARGET_DEBUG_ARG)
6963 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6964 return 1;
6965 }
b693336b
PB
6966
6967 /* Pass synthetic vectors in memory. */
bada2eb8 6968 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6969 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6970 {
6971 static bool warned_for_pass_big_vectors = false;
6972 if (TARGET_DEBUG_ARG)
6973 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6974 if (!warned_for_pass_big_vectors)
6975 {
d4ee4d25 6976 warning (0, "GCC vector passed by reference: "
b693336b
PB
6977 "non-standard ABI extension with no compatibility guarantee");
6978 warned_for_pass_big_vectors = true;
6979 }
6980 return 1;
6981 }
6982
b2d04ecf 6983 return 0;
4697a36c 6984}
5985c7a6
FJ
6985
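/* Copy the NREGS consecutive argument registers starting at REGNO into
   the memory block X, one word (SImode or DImode) at a time.  */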
6986static void
2d9db8eb 6987rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6988{
6989 int i;
6990 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6991
6992 if (nregs == 0)
6993 return;
6994
c4ad648e 6995 for (i = 0; i < nregs; i++)
5985c7a6 6996 {
9390387d 6997 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6998 if (reload_completed)
c4ad648e
AM
6999 {
7000 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
7001 tem = NULL_RTX;
7002 else
7003 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 7004 i * GET_MODE_SIZE (reg_mode));
c4ad648e 7005 }
5985c7a6
FJ
7006 else
7007 tem = replace_equiv_address (tem, XEXP (tem, 0));
7008
37409796 7009 gcc_assert (tem);
5985c7a6
FJ
7010
7011 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
7012 }
7013}
4697a36c
MM
7014\f
7015/* Perform any actions needed for a function that is receiving a
f676971a 7016 variable number of arguments.
4697a36c
MM
7017
7018 CUM is as above.
7019
7020 MODE and TYPE are the mode and type of the current parameter.
7021
7022 PRETEND_SIZE is a variable that should be set to the amount of stack
7023 that must be pushed by the prolog to pretend that our caller pushed
7024 it.
7025
7026 Normally, this macro will push all remaining incoming registers on the
7027 stack and set PRETEND_SIZE to the length of the registers pushed. */
7028
c6e8c921 7029static void
f676971a 7030setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
7031 tree type, int *pretend_size ATTRIBUTE_UNUSED,
7032 int no_rtl)
4697a36c 7033{
4cc833b7
RH
7034 CUMULATIVE_ARGS next_cum;
7035 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 7036 rtx save_area = NULL_RTX, mem;
4862826d
ILT
7037 int first_reg_offset;
7038 alias_set_type set;
4697a36c 7039
f31bf321 7040 /* Skip the last named argument. */
d34c5b80 7041 next_cum = *cum;
594a51fe 7042 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 7043
f607bc57 7044 if (DEFAULT_ABI == ABI_V4)
d34c5b80 7045 {
5b667039
JJ
7046 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
7047
60e2d0ca 7048 if (! no_rtl)
5b667039
JJ
7049 {
7050 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
7051 HOST_WIDE_INT offset = 0;
7052
7053 /* Try to optimize the size of the varargs save area.
7054 The ABI requires that ap.reg_save_area is doubleword
7055 aligned, but we don't need to allocate space for all
 7056 the bytes, only those to which we will actually save
7057 anything. */
7058 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
7059 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
7060 if (TARGET_HARD_FLOAT && TARGET_FPRS
7061 && next_cum.fregno <= FP_ARG_V4_MAX_REG
7062 && cfun->va_list_fpr_size)
7063 {
7064 if (gpr_reg_num)
7065 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
7066 * UNITS_PER_FP_WORD;
7067 if (cfun->va_list_fpr_size
7068 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
7069 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
7070 else
7071 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
7072 * UNITS_PER_FP_WORD;
7073 }
7074 if (gpr_reg_num)
7075 {
7076 offset = -((first_reg_offset * reg_size) & ~7);
7077 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
7078 {
7079 gpr_reg_num = cfun->va_list_gpr_size;
7080 if (reg_size == 4 && (first_reg_offset & 1))
7081 gpr_reg_num++;
7082 }
7083 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
7084 }
7085 else if (fpr_size)
7086 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
7087 * UNITS_PER_FP_WORD
7088 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 7089
5b667039
JJ
7090 if (gpr_size + fpr_size)
7091 {
7092 rtx reg_save_area
7093 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
7094 gcc_assert (GET_CODE (reg_save_area) == MEM);
7095 reg_save_area = XEXP (reg_save_area, 0);
7096 if (GET_CODE (reg_save_area) == PLUS)
7097 {
7098 gcc_assert (XEXP (reg_save_area, 0)
7099 == virtual_stack_vars_rtx);
7100 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
7101 offset += INTVAL (XEXP (reg_save_area, 1));
7102 }
7103 else
7104 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
7105 }
7106
7107 cfun->machine->varargs_save_offset = offset;
7108 save_area = plus_constant (virtual_stack_vars_rtx, offset);
7109 }
4697a36c 7110 }
60e2d0ca 7111 else
4697a36c 7112 {
d34c5b80 7113 first_reg_offset = next_cum.words;
4cc833b7 7114 save_area = virtual_incoming_args_rtx;
4697a36c 7115
fe984136 7116 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 7117 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 7118 }
4697a36c 7119
dfafc897 7120 set = get_varargs_alias_set ();
9d30f3c1
JJ
7121 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
7122 && cfun->va_list_gpr_size)
4cc833b7 7123 {
9d30f3c1
JJ
7124 int nregs = GP_ARG_NUM_REG - first_reg_offset;
7125
7126 if (va_list_gpr_counter_field)
7127 {
7128 /* V4 va_list_gpr_size counts number of registers needed. */
7129 if (nregs > cfun->va_list_gpr_size)
7130 nregs = cfun->va_list_gpr_size;
7131 }
7132 else
7133 {
7134 /* char * va_list instead counts number of bytes needed. */
7135 if (nregs > cfun->va_list_gpr_size / reg_size)
7136 nregs = cfun->va_list_gpr_size / reg_size;
7137 }
7138
dfafc897 7139 mem = gen_rtx_MEM (BLKmode,
c4ad648e 7140 plus_constant (save_area,
13e2e16e
DE
7141 first_reg_offset * reg_size));
7142 MEM_NOTRAP_P (mem) = 1;
ba4828e0 7143 set_mem_alias_set (mem, set);
8ac61af7 7144 set_mem_align (mem, BITS_PER_WORD);
dfafc897 7145
f676971a 7146 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 7147 nregs);
4697a36c
MM
7148 }
7149
4697a36c 7150 /* Save FP registers if needed. */
f607bc57 7151 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
7152 && TARGET_HARD_FLOAT && TARGET_FPRS
7153 && ! no_rtl
9d30f3c1
JJ
7154 && next_cum.fregno <= FP_ARG_V4_MAX_REG
7155 && cfun->va_list_fpr_size)
4697a36c 7156 {
9d30f3c1 7157 int fregno = next_cum.fregno, nregs;
9ebbca7d 7158 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 7159 rtx lab = gen_label_rtx ();
5b667039
JJ
7160 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
7161 * UNITS_PER_FP_WORD);
4697a36c 7162
c4ad648e
AM
7163 emit_jump_insn
7164 (gen_rtx_SET (VOIDmode,
7165 pc_rtx,
7166 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 7167 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 7168 const0_rtx),
39403d82 7169 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
7170 pc_rtx)));
7171
9d30f3c1
JJ
7172 for (nregs = 0;
7173 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 7174 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 7175 {
696e45ba
ME
7176 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
7177 ? DFmode : SFmode,
7178 plus_constant (save_area, off));
7179 MEM_NOTRAP_P (mem) = 1;
7180 set_mem_alias_set (mem, set);
7181 set_mem_align (mem, GET_MODE_ALIGNMENT (
7182 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
7183 ? DFmode : SFmode));
7184 emit_move_insn (mem, gen_rtx_REG (
7185 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
7186 ? DFmode : SFmode, fregno));
4697a36c 7187 }
4cc833b7
RH
7188
7189 emit_label (lab);
4697a36c 7190 }
4697a36c 7191}
4697a36c 7192
dfafc897 7193/* Create the va_list data type. */
2c4974b7 7194
c35d187f
RH
7195static tree
7196rs6000_build_builtin_va_list (void)
dfafc897 7197{
64c2816f 7198 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 7199
9ebbca7d
GK
7200 /* For AIX, prefer 'char *' because that's what the system
7201 header files like. */
f607bc57 7202 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 7203 return build_pointer_type (char_type_node);
dfafc897 7204
f1e639b1 7205 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
71856685
AH
7206 type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
7207 get_identifier ("__va_list_tag"), record);
0f4fd75d 7208
71856685 7209 f_gpr = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 7210 unsigned_char_type_node);
71856685 7211 f_fpr = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 7212 unsigned_char_type_node);
64c2816f
DT
7213 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
7214 every user file. */
71856685
AH
7215 f_res = build_decl (BUILTINS_LOCATION, FIELD_DECL,
7216 get_identifier ("reserved"), short_unsigned_type_node);
7217 f_ovf = build_decl (BUILTINS_LOCATION, FIELD_DECL,
7218 get_identifier ("overflow_arg_area"),
dfafc897 7219 ptr_type_node);
71856685
AH
7220 f_sav = build_decl (BUILTINS_LOCATION, FIELD_DECL,
7221 get_identifier ("reg_save_area"),
dfafc897
FS
7222 ptr_type_node);
7223
9d30f3c1
JJ
7224 va_list_gpr_counter_field = f_gpr;
7225 va_list_fpr_counter_field = f_fpr;
7226
dfafc897
FS
7227 DECL_FIELD_CONTEXT (f_gpr) = record;
7228 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 7229 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
7230 DECL_FIELD_CONTEXT (f_ovf) = record;
7231 DECL_FIELD_CONTEXT (f_sav) = record;
7232
bab45a51
FS
7233 TREE_CHAIN (record) = type_decl;
7234 TYPE_NAME (record) = type_decl;
dfafc897
FS
7235 TYPE_FIELDS (record) = f_gpr;
7236 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
7237 TREE_CHAIN (f_fpr) = f_res;
7238 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
7239 TREE_CHAIN (f_ovf) = f_sav;
7240
7241 layout_type (record);
7242
7243 /* The correct type is an array type of one element. */
7244 return build_array_type (record, build_index_type (size_zero_node));
7245}
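/* The record built above corresponds roughly to the following C layout
   (a sketch for illustration, not a declaration the compiler emits):

     typedef struct __va_list_tag {
       unsigned char gpr;          number of GP argument registers used
       unsigned char fpr;          number of FP argument registers used
       unsigned short reserved;    padding, named only to quiet -Wpadded
       void *overflow_arg_area;    arguments that spilled onto the stack
       void *reg_save_area;        block holding the saved registers
     } __va_list_tag;

   and the va_list type itself is an array of one such element.  */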
7246
7247/* Implement va_start. */
7248
d7bd8aeb 7249static void
a2369ed3 7250rs6000_va_start (tree valist, rtx nextarg)
4697a36c 7251{
dfafc897 7252 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 7253 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 7254 tree gpr, fpr, ovf, sav, t;
2c4974b7 7255
dfafc897 7256 /* Only SVR4 needs something special. */
f607bc57 7257 if (DEFAULT_ABI != ABI_V4)
dfafc897 7258 {
e5faf155 7259 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
7260 return;
7261 }
7262
973a648b 7263 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 7264 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
7265 f_res = TREE_CHAIN (f_fpr);
7266 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
7267 f_sav = TREE_CHAIN (f_ovf);
7268
872a65b5 7269 valist = build_va_arg_indirect_ref (valist);
47a25a46 7270 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7271 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7272 f_fpr, NULL_TREE);
7273 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7274 f_ovf, NULL_TREE);
7275 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7276 f_sav, NULL_TREE);
dfafc897
FS
7277
7278 /* Count number of gp and fp argument registers used. */
38173d38
JH
7279 words = crtl->args.info.words;
7280 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 7281 GP_ARG_NUM_REG);
38173d38 7282 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 7283 FP_ARG_NUM_REG);
dfafc897
FS
7284
7285 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
7286 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
7287 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
7288 words, n_gpr, n_fpr);
dfafc897 7289
9d30f3c1
JJ
7290 if (cfun->va_list_gpr_size)
7291 {
726a989a 7292 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 7293 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
7294 TREE_SIDE_EFFECTS (t) = 1;
7295 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7296 }
58c8adc1 7297
9d30f3c1
JJ
7298 if (cfun->va_list_fpr_size)
7299 {
726a989a 7300 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 7301 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
7302 TREE_SIDE_EFFECTS (t) = 1;
7303 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7304 }
dfafc897
FS
7305
7306 /* Find the overflow area. */
7307 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7308 if (words != 0)
5be014d5
AP
7309 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
7310 size_int (words * UNITS_PER_WORD));
726a989a 7311 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
7312 TREE_SIDE_EFFECTS (t) = 1;
7313 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7314
9d30f3c1
JJ
7315 /* If there were no va_arg invocations, don't set up the register
7316 save area. */
7317 if (!cfun->va_list_gpr_size
7318 && !cfun->va_list_fpr_size
7319 && n_gpr < GP_ARG_NUM_REG
7320 && n_fpr < FP_ARG_V4_MAX_REG)
7321 return;
7322
dfafc897
FS
7323 /* Find the register save area. */
7324 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 7325 if (cfun->machine->varargs_save_offset)
5be014d5
AP
7326 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
7327 size_int (cfun->machine->varargs_save_offset));
726a989a 7328 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
7329 TREE_SIDE_EFFECTS (t) = 1;
7330 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7331}
7332
7333/* Implement va_arg. */
7334
23a60a04 7335tree
726a989a
RB
7336rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
7337 gimple_seq *post_p)
cd3ce9b4 7338{
cd3ce9b4
JM
7339 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
7340 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 7341 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
7342 tree lab_false, lab_over, addr;
7343 int align;
7344 tree ptrtype = build_pointer_type (type);
7393f7f8 7345 int regalign = 0;
726a989a 7346 gimple stmt;
cd3ce9b4 7347
08b0dc1b
RH
7348 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7349 {
7350 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 7351 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
7352 }
7353
cd3ce9b4
JM
7354 if (DEFAULT_ABI != ABI_V4)
7355 {
08b0dc1b 7356 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
7357 {
7358 tree elem_type = TREE_TYPE (type);
7359 enum machine_mode elem_mode = TYPE_MODE (elem_type);
7360 int elem_size = GET_MODE_SIZE (elem_mode);
7361
7362 if (elem_size < UNITS_PER_WORD)
7363 {
23a60a04 7364 tree real_part, imag_part;
726a989a 7365 gimple_seq post = NULL;
cd3ce9b4 7366
23a60a04
JM
7367 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7368 &post);
7369 /* Copy the value into a temporary, lest the formal temporary
7370 be reused out from under us. */
7371 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 7372 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 7373
23a60a04
JM
7374 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7375 post_p);
cd3ce9b4 7376
47a25a46 7377 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
7378 }
7379 }
7380
23a60a04 7381 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
7382 }
7383
7384 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7385 f_fpr = TREE_CHAIN (f_gpr);
7386 f_res = TREE_CHAIN (f_fpr);
7387 f_ovf = TREE_CHAIN (f_res);
7388 f_sav = TREE_CHAIN (f_ovf);
7389
872a65b5 7390 valist = build_va_arg_indirect_ref (valist);
47a25a46 7391 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7392 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7393 f_fpr, NULL_TREE);
7394 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7395 f_ovf, NULL_TREE);
7396 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7397 f_sav, NULL_TREE);
cd3ce9b4
JM
7398
7399 size = int_size_in_bytes (type);
7400 rsize = (size + 3) / 4;
7401 align = 1;
7402
08b0dc1b 7403 if (TARGET_HARD_FLOAT && TARGET_FPRS
696e45ba
ME
7404 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7405 || (TARGET_DOUBLE_FLOAT
7406 && (TYPE_MODE (type) == DFmode
7407 || TYPE_MODE (type) == TFmode
7408 || TYPE_MODE (type) == SDmode
7409 || TYPE_MODE (type) == DDmode
7410 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7411 {
7412 /* FP args go in FP registers, if present. */
cd3ce9b4 7413 reg = fpr;
602ea4d3 7414 n_reg = (size + 7) / 8;
696e45ba
ME
7415 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7416 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7417 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7418 align = 8;
7419 }
7420 else
7421 {
7422 /* Otherwise into GP registers. */
cd3ce9b4
JM
7423 reg = gpr;
7424 n_reg = rsize;
7425 sav_ofs = 0;
7426 sav_scale = 4;
7427 if (n_reg == 2)
7428 align = 8;
7429 }
7430
7431 /* Pull the value out of the saved registers.... */
7432
7433 lab_over = NULL;
7434 addr = create_tmp_var (ptr_type_node, "addr");
7435 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7436
7437 /* AltiVec vectors never go in registers when -mabi=altivec. */
7438 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7439 align = 16;
7440 else
7441 {
71856685
AH
7442 lab_false = create_artificial_label (input_location);
7443 lab_over = create_artificial_label (input_location);
cd3ce9b4
JM
7444
7445 /* Long long and SPE vectors are aligned in the registers.
 7446 As is any other 2-gpr item such as complex int, due to a
7447 historical mistake. */
7448 u = reg;
602ea4d3 7449 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7450 {
7393f7f8 7451 regalign = 1;
726a989a 7452 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7453 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7454 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7455 unshare_expr (reg), u);
cd3ce9b4 7456 }
7393f7f8
BE
7457 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7458 reg number is 0 for f1, so we want to make it odd. */
7459 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7460 {
726a989a 7461 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7462 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7463 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7464 }
cd3ce9b4 7465
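	  /* If fewer than n_reg registers remain, i.e. reg >= 8 - n_reg + 1,
	     branch to lab_false and take the argument from the overflow
	     area instead.  */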
95674810 7466 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7467 t = build2 (GE_EXPR, boolean_type_node, u, t);
7468 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7469 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7470 gimplify_and_add (t, pre_p);
7471
7472 t = sav;
7473 if (sav_ofs)
5be014d5 7474 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7475
726a989a 7476 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7477 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7478 u = fold_convert (sizetype, u);
7479 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7480 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7481
e41b2a33
PB
7482 /* _Decimal32 varargs are located in the second word of the 64-bit
7483 FP register for 32-bit binaries. */
4f011e1e
JM
7484 if (!TARGET_POWERPC64
7485 && TARGET_HARD_FLOAT && TARGET_FPRS
7486 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7487 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7488
726a989a 7489 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7490
726a989a 7491 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7492
726a989a
RB
7493 stmt = gimple_build_label (lab_false);
7494 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7495
7393f7f8 7496 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7497 {
7498 /* Ensure that we don't find any more args in regs.
7393f7f8 7499 Alignment has already been taken care of for the special cases. */
726a989a 7500 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7501 }
7502 }
7503
7504 /* ... otherwise out of the overflow area. */
7505
7506 /* Care for on-stack alignment if needed. */
7507 t = ovf;
7508 if (align != 1)
7509 {
5be014d5
AP
7510 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7511 t = fold_convert (sizetype, t);
4a90aeeb 7512 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7513 size_int (-align));
7514 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7515 }
7516 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7517
726a989a 7518 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7519
5be014d5 7520 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7521 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7522
7523 if (lab_over)
7524 {
726a989a
RB
7525 stmt = gimple_build_label (lab_over);
7526 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7527 }
7528
0cfbc62b
JM
7529 if (STRICT_ALIGNMENT
7530 && (TYPE_ALIGN (type)
7531 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7532 {
7533 /* The value (of type complex double, for example) may not be
7534 aligned in memory in the saved registers, so copy via a
7535 temporary. (This is the same code as used for SPARC.) */
7536 tree tmp = create_tmp_var (type, "va_arg_tmp");
7537 tree dest_addr = build_fold_addr_expr (tmp);
7538
5039610b
SL
7539 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7540 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7541
7542 gimplify_and_add (copy, pre_p);
7543 addr = dest_addr;
7544 }
7545
08b0dc1b 7546 addr = fold_convert (ptrtype, addr);
872a65b5 7547 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7548}
7549
0ac081f6
AH
7550/* Builtins. */
7551
58646b77
PB
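/* Register the machine-specific builtin NAME with the given TYPE and
   function CODE, but only when one of the target_flags bits in MASK is
   set or when paired-single floating point is enabled.  */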
7552static void
7553def_builtin (int mask, const char *name, tree type, int code)
7554{
96038623 7555 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7556 {
7557 if (rs6000_builtin_decls[code])
cacf1ca8
MM
 7558 fatal_error ("internal error: builtin function %s already processed.",
7559 name);
58646b77
PB
7560
7561 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7562 add_builtin_function (name, type, code, BUILT_IN_MD,
7563 NULL, NULL_TREE);
58646b77
PB
7564 }
7565}
0ac081f6 7566
24408032
AH
7567/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7568
2212663f 7569static const struct builtin_description bdesc_3arg[] =
24408032
AH
7570{
7571 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7572 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7573 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7574 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7575 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7576 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7577 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7578 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7579 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7580 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7581 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7582 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7583 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7584 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7585 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7586 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7587 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7588 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7589 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7590 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7591 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7592 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7593 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7594
7595 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7596 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7597 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7598 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7599 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7600 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7601 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7602 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7603 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7604 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7605 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7606 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7607 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7608 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7609 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7610
7611 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7612 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7613 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7614 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7615 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7616 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7617 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7618 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7619 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7620};
2212663f 7621
95385cbb
AH
7622/* DST operations: void foo (void *, const int, const char). */
7623
7624static const struct builtin_description bdesc_dst[] =
7625{
7626 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7627 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7628 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7629 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7630
7631 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7632 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7633 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7634 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7635};
7636
2212663f 7637/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7638
a3170dc6 7639static struct builtin_description bdesc_2arg[] =
0ac081f6 7640{
f18c054f
DB
7641 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7642 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7643 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7644 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7645 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7646 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7647 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7648 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7649 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7650 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7651 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7652 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7653 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7654 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7655 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7656 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7657 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7658 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7659 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7660 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7661 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7662 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7663 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7664 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7665 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7666 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7667 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7668 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7669 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7670 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7671 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7672 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7673 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7674 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7675 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7676 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7677 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7678 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
7679 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7680 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7681 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7682 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7683 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
7684 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7685 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7686 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7687 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7688 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7689 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
7690 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7691 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7692 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7693 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7694 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7695 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7696 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
7697 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7698 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7699 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7700 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7701 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7702 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7703 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7704 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7705 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7706 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
7707 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7708 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7709 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7710 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
7711 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7712 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7713 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7714 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7715 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7716 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7717 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7718 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
7719 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7720 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7721 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
7722 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7723 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
7724 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7725 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7726 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
7727 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7728 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7729 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7730 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7731 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7732 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
7733 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7734 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
7735 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7736 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7737 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7738 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
7739 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7740 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7741 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7742 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7743 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7744 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7745 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7746 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7747 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7748 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7749 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7750 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7751 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7752
7753 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7754 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7755 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7756 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7757 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7758 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7759 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7760 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7761 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7762 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7763 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7764 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7765 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7766 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7767 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7768 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7769 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7770 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7771 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7772 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7773 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7774 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7775 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7776 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7777 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7778 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7880
7881 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7882 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7883 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7884 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7885 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7886 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7887 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7888 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7889 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7890 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7891
 7892 /* Place-holder.  Leave as first SPE builtin.  */
7893 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7894 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7895 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7896 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7897 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7898 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7899 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7900 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7901 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7902 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7903 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7904 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7905 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7906 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7907 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7908 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7909 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7910 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7911 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7912 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7913 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7914 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7915 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7916 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7917 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7918 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7919 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7920 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7921 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7922 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7923 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7924 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7925 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7926 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7927 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7928 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7929 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7930 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7931 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7932 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7933 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7934 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7935 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7936 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7937 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7938 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7939 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7940 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7941 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7942 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7943 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7944 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7945 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7946 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7947 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7948 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7949 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7950 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7951 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7952 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7953 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7954 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7955 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7956 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7957 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7958 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7959 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7960 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7961 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7962 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7963 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7964 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7965 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7966 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
7967 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7968 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
7969 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7970 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7971 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7972 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7973 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7974 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7975 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7976 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7977 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7978 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7979 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7980 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7981 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7982 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7983 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7984 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7985 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7986 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7987 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7988 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7989 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7990 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7991 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7992 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7993 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7994 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7995 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7996 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7997 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7998 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7999 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
8000 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
8001 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
8002
8003 /* SPE binary operations expecting a 5-bit unsigned literal. */
8004 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
8005
8006 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
8007 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
8008 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
8009 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
8010 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
8011 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
8012 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
8013 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
8014 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
8015 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
8016 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
8017 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
8018 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
8019 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
8020 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
8021 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
8022 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
8023 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
8024 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
8025 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
8026 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
8027 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
8028 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
8029 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
8030 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
8031 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
8032
8033 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 8034 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
8035};
8036
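/* Illustrative sketch (editorial, not part of the original source): each row
   above binds a target mask, a named insn pattern, the user-visible builtin
   name and its rs6000_builtins enum value.  Assuming the usual AltiVec vector
   types, one of the binary entries is reached from user code roughly as:

       vector float a, b, c;
       c = __builtin_altivec_vaddfp (a, b);      expanded through CODE_FOR_addv4sf3

   The overloaded "__builtin_vec_*" rows carry CODE_FOR_nothing because the
   front end first resolves them to a type-specific builtin instead of
   expanding them directly from this table.  */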
8037/* AltiVec predicates. */
8038
8039struct builtin_description_predicates
8040{
8041 const unsigned int mask;
8042 const enum insn_code icode;
8043 const char *opcode;
8044 const char *const name;
8045 const enum rs6000_builtins code;
8046};
8047
8048static const struct builtin_description_predicates bdesc_altivec_preds[] =
8049{
8050 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
8051 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
8052 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
8053 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
8054 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
8055 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
8056 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
8057 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
8058 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
8059 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
8060 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
8061 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
8062 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
8063
8064 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
8065 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
8066 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 8067};
24408032 8068
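/* Illustrative sketch (editorial): each predicate entry above carries an
   extra opcode string such as "*vcmpequw." naming the dot-form (CR6
   recording) compare; altivec_expand_predicate_builtin below hands that
   string to the predicate pattern as a SYMBOL_REF so the right mnemonic is
   emitted.  Assuming the __CR6_* selectors from <altivec.h>, a typical call
   looks like:

       int any_greater = __builtin_altivec_vcmpgtsw_p (__CR6_EQ_REV, a, b);
*/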
8069/* SPE predicates. */
8070static struct builtin_description bdesc_spe_predicates[] =
8071{
8072 /* Place-holder. Leave as first. */
8073 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
8074 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
8075 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
8076 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
8077 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
8078 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
8079 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
8080 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
8081 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
8082 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
8083 /* Place-holder. Leave as last. */
8084 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
8085};
8086
8087/* SPE evsel predicates. */
8088static struct builtin_description bdesc_spe_evsel[] =
8089{
8090 /* Place-holder. Leave as first. */
8091 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
8092 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
8093 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
8094 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
8095 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
8096 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
8097 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
8098 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
8099 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
8100 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
8101 /* Place-holder. Leave as last. */
8102 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
8103};
8104
8105/* PAIRED predicates. */
8106static const struct builtin_description bdesc_paired_preds[] =
8107{
8108 /* Place-holder. Leave as first. */
8109 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
8110 /* Place-holder. Leave as last. */
8111 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
8112};
8113
b6d08ca1 8114/* ABS* operations. */
8115
8116static const struct builtin_description bdesc_abs[] =
8117{
8118 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
8119 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
8120 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
8121 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
8122 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
8123 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
8124 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
8125};
8126
8127/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
8128 foo (VECa). */
24408032 8129
a3170dc6 8130static struct builtin_description bdesc_1arg[] =
2212663f 8131{
8132 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
8133 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
8134 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
8135 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
8136 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
8137 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
8138 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
8139 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
8140 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
8141 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
8142 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
8143 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
8144 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
8145 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
8146 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
8147 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
8148 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 8149
8150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
8151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
8152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
8153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
8154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
8155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
8156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
8157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
8158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
8159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
8160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
8161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
8162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
8163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
8164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
8165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
8166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
8167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
8168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
8169
8170 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
8171 end with SPE_BUILTIN_EVSUBFUSIAAW. */
8172 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
8173 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
8174 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
8175 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
8176 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
8177 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
8178 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
8179 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
8180 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
8181 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
8182 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
8183 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
8184 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
8185 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
8186 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
8187 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
8188 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
8189 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
8190 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
8191 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
8192 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
8193 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
8194 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 8195 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
8196 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
8197 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
8198 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
8199 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
8200
8201 /* Place-holder. Leave as last unary SPE builtin. */
8202 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
8203
8204 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
8205 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
8206 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
8207 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
8208 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
8209};
8210
8211static rtx
5039610b 8212rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
8213{
8214 rtx pat;
5039610b 8215 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8216 rtx op0 = expand_normal (arg0);
8217 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8218 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8219
8220 if (icode == CODE_FOR_nothing)
8221 /* Builtin not supported on this processor. */
8222 return 0;
8223
 8224   /* If we got invalid arguments, bail out before generating bad rtl.  */
8225 if (arg0 == error_mark_node)
9a171fcd 8226 return const0_rtx;
20e26713 8227
8228 if (icode == CODE_FOR_altivec_vspltisb
8229 || icode == CODE_FOR_altivec_vspltish
8230 || icode == CODE_FOR_altivec_vspltisw
8231 || icode == CODE_FOR_spe_evsplatfi
8232 || icode == CODE_FOR_spe_evsplati)
8233 {
8234 /* Only allow 5-bit *signed* literals. */
b44140e7 8235 if (GET_CODE (op0) != CONST_INT
8236 || INTVAL (op0) > 15
8237 || INTVAL (op0) < -16)
8238 {
8239 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 8240 return const0_rtx;
b44140e7 8241 }
8242 }
8243
c62f2db5 8244 if (target == 0
8245 || GET_MODE (target) != tmode
8246 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8247 target = gen_reg_rtx (tmode);
8248
8249 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8250 op0 = copy_to_mode_reg (mode0, op0);
8251
8252 pat = GEN_FCN (icode) (target, op0);
8253 if (! pat)
8254 return 0;
8255 emit_insn (pat);
0ac081f6 8256
8257 return target;
8258}
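/* Illustrative sketch (editorial): the vspltis* and evsplat* patterns encode
   their operand directly in the instruction, which is what the range check
   above enforces.  Assumed user-level behaviour:

       vector signed char x = __builtin_altivec_vspltisb (-16);    accepted
       vector signed char y = __builtin_altivec_vspltisb (16);     rejected with
           "argument 1 must be a 5-bit signed literal"
*/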
ae4b4a02 8259
100c4561 8260static rtx
5039610b 8261altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
8262{
8263 rtx pat, scratch1, scratch2;
5039610b 8264 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8265 rtx op0 = expand_normal (arg0);
8266 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8267 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8268
8269 /* If we have invalid arguments, bail out before generating bad rtl. */
8270 if (arg0 == error_mark_node)
9a171fcd 8271 return const0_rtx;
8272
8273 if (target == 0
8274 || GET_MODE (target) != tmode
8275 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8276 target = gen_reg_rtx (tmode);
8277
8278 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8279 op0 = copy_to_mode_reg (mode0, op0);
8280
8281 scratch1 = gen_reg_rtx (mode0);
8282 scratch2 = gen_reg_rtx (mode0);
8283
8284 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
8285 if (! pat)
8286 return 0;
8287 emit_insn (pat);
8288
8289 return target;
8290}
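/* Editorial note (hedged): none of the bdesc_abs entries correspond to a
   single AltiVec instruction; the referenced patterns synthesize the
   absolute value from a short sequence of simpler operations, and the two
   scratch vector registers allocated above hold the intermediates.  That
   extra operand shape is why these builtins get their own expander instead
   of going through rs6000_expand_unop_builtin.  */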
8291
0ac081f6 8292static rtx
5039610b 8293rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
8294{
8295 rtx pat;
8296 tree arg0 = CALL_EXPR_ARG (exp, 0);
8297 tree arg1 = CALL_EXPR_ARG (exp, 1);
8298 rtx op0 = expand_normal (arg0);
8299 rtx op1 = expand_normal (arg1);
8300 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8301 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8302 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8303
8304 if (icode == CODE_FOR_nothing)
8305 /* Builtin not supported on this processor. */
8306 return 0;
8307
 8308   /* If we got invalid arguments, bail out before generating bad rtl.  */
8309 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8310 return const0_rtx;
20e26713 8311
8312 if (icode == CODE_FOR_altivec_vcfux
8313 || icode == CODE_FOR_altivec_vcfsx
8314 || icode == CODE_FOR_altivec_vctsxs
8315 || icode == CODE_FOR_altivec_vctuxs
8316 || icode == CODE_FOR_altivec_vspltb
8317 || icode == CODE_FOR_altivec_vsplth
8318 || icode == CODE_FOR_altivec_vspltw
8319 || icode == CODE_FOR_spe_evaddiw
8320 || icode == CODE_FOR_spe_evldd
8321 || icode == CODE_FOR_spe_evldh
8322 || icode == CODE_FOR_spe_evldw
8323 || icode == CODE_FOR_spe_evlhhesplat
8324 || icode == CODE_FOR_spe_evlhhossplat
8325 || icode == CODE_FOR_spe_evlhhousplat
8326 || icode == CODE_FOR_spe_evlwhe
8327 || icode == CODE_FOR_spe_evlwhos
8328 || icode == CODE_FOR_spe_evlwhou
8329 || icode == CODE_FOR_spe_evlwhsplat
8330 || icode == CODE_FOR_spe_evlwwsplat
8331 || icode == CODE_FOR_spe_evrlwi
8332 || icode == CODE_FOR_spe_evslwi
8333 || icode == CODE_FOR_spe_evsrwis
f5119d10 8334 || icode == CODE_FOR_spe_evsubifw
0559cc77 8335 || icode == CODE_FOR_spe_evsrwiu)
8336 {
8337 /* Only allow 5-bit unsigned literals. */
8bb418a3 8338 STRIP_NOPS (arg1);
8339 if (TREE_CODE (arg1) != INTEGER_CST
8340 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8341 {
8342 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 8343 return const0_rtx;
b44140e7 8344 }
8345 }
8346
c62f2db5 8347 if (target == 0
8348 || GET_MODE (target) != tmode
8349 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8350 target = gen_reg_rtx (tmode);
8351
8352 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8353 op0 = copy_to_mode_reg (mode0, op0);
8354 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8355 op1 = copy_to_mode_reg (mode1, op1);
8356
8357 pat = GEN_FCN (icode) (target, op0, op1);
8358 if (! pat)
8359 return 0;
8360 emit_insn (pat);
8361
8362 return target;
8363}
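/* Illustrative sketch (editorial): the builtins singled out above fold their
   second argument into an immediate field, so anything outside 0..31 is
   rejected before any RTL is generated.  Assumed user-level behaviour, with
   u taken to be a vector unsigned int:

       vector float f = __builtin_altivec_vcfux (u, 3);     accepted, scales by 2**-3
       vector float g = __builtin_altivec_vcfux (u, 99);    rejected with
           "argument 2 must be a 5-bit unsigned literal"
*/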
6525c0e7 8364
ae4b4a02 8365static rtx
f676971a 8366altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 8367 tree exp, rtx target)
8368{
8369 rtx pat, scratch;
8370 tree cr6_form = CALL_EXPR_ARG (exp, 0);
8371 tree arg0 = CALL_EXPR_ARG (exp, 1);
8372 tree arg1 = CALL_EXPR_ARG (exp, 2);
8373 rtx op0 = expand_normal (arg0);
8374 rtx op1 = expand_normal (arg1);
8375 enum machine_mode tmode = SImode;
8376 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8377 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8378 int cr6_form_int;
8379
8380 if (TREE_CODE (cr6_form) != INTEGER_CST)
8381 {
8382 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 8383 return const0_rtx;
8384 }
8385 else
8386 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8387
37409796 8388 gcc_assert (mode0 == mode1);
8389
8390 /* If we have invalid arguments, bail out before generating bad rtl. */
8391 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8392 return const0_rtx;
8393
8394 if (target == 0
8395 || GET_MODE (target) != tmode
8396 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8397 target = gen_reg_rtx (tmode);
8398
8399 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8400 op0 = copy_to_mode_reg (mode0, op0);
8401 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8402 op1 = copy_to_mode_reg (mode1, op1);
8403
8404 scratch = gen_reg_rtx (mode0);
8405
8406 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8407 gen_rtx_SYMBOL_REF (Pmode, opcode));
8408 if (! pat)
8409 return 0;
8410 emit_insn (pat);
8411
8412 /* The vec_any* and vec_all* predicates use the same opcodes for two
8413 different operations, but the bits in CR6 will be different
8414 depending on what information we want. So we have to play tricks
8415 with CR6 to get the right bits out.
8416
8417 If you think this is disgusting, look at the specs for the
8418 AltiVec predicates. */
8419
8420 switch (cr6_form_int)
8421 {
8422 case 0:
8423 emit_insn (gen_cr6_test_for_zero (target));
8424 break;
8425 case 1:
8426 emit_insn (gen_cr6_test_for_zero_reverse (target));
8427 break;
8428 case 2:
8429 emit_insn (gen_cr6_test_for_lt (target));
8430 break;
8431 case 3:
8432 emit_insn (gen_cr6_test_for_lt_reverse (target));
8433 break;
8434 default:
8435 error ("argument 1 of __builtin_altivec_predicate is out of range");
8436 break;
8437 }
8438
8439 return target;
8440}
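/* Illustrative sketch (editorial): the four cr6_form values accepted by the
   switch above select which CR6 bit, and in which sense, the caller wants to
   test.  Assuming the __CR6_EQ (0), __CR6_EQ_REV (1), __CR6_LT (2) and
   __CR6_LT_REV (3) selectors from <altivec.h>, the overloaded predicates
   come out roughly as:

       vec_all_eq (a, b)   ->   __builtin_altivec_vcmpequw_p (__CR6_LT, a, b)
       vec_any_eq (a, b)   ->   __builtin_altivec_vcmpequw_p (__CR6_EQ_REV, a, b)
*/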
8441
8442static rtx
8443paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8444{
8445 rtx pat, addr;
8446 tree arg0 = CALL_EXPR_ARG (exp, 0);
8447 tree arg1 = CALL_EXPR_ARG (exp, 1);
8448 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8449 enum machine_mode mode0 = Pmode;
8450 enum machine_mode mode1 = Pmode;
8451 rtx op0 = expand_normal (arg0);
8452 rtx op1 = expand_normal (arg1);
8453
8454 if (icode == CODE_FOR_nothing)
8455 /* Builtin not supported on this processor. */
8456 return 0;
8457
 8458   /* If we got invalid arguments, bail out before generating bad rtl.  */
8459 if (arg0 == error_mark_node || arg1 == error_mark_node)
8460 return const0_rtx;
8461
8462 if (target == 0
8463 || GET_MODE (target) != tmode
8464 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8465 target = gen_reg_rtx (tmode);
8466
8467 op1 = copy_to_mode_reg (mode1, op1);
8468
8469 if (op0 == const0_rtx)
8470 {
8471 addr = gen_rtx_MEM (tmode, op1);
8472 }
8473 else
8474 {
8475 op0 = copy_to_mode_reg (mode0, op0);
8476 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8477 }
8478
8479 pat = GEN_FCN (icode) (target, addr);
8480
8481 if (! pat)
8482 return 0;
8483 emit_insn (pat);
8484
8485 return target;
8486}
8487
b4a62fa0 8488static rtx
0b61703c 8489altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
8490{
8491 rtx pat, addr;
8492 tree arg0 = CALL_EXPR_ARG (exp, 0);
8493 tree arg1 = CALL_EXPR_ARG (exp, 1);
8494 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8495 enum machine_mode mode0 = Pmode;
8496 enum machine_mode mode1 = Pmode;
8497 rtx op0 = expand_normal (arg0);
8498 rtx op1 = expand_normal (arg1);
8499
8500 if (icode == CODE_FOR_nothing)
8501 /* Builtin not supported on this processor. */
8502 return 0;
8503
8504 /* If we got invalid arguments bail out before generating bad rtl. */
8505 if (arg0 == error_mark_node || arg1 == error_mark_node)
8506 return const0_rtx;
8507
8508 if (target == 0
8509 || GET_MODE (target) != tmode
8510 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8511 target = gen_reg_rtx (tmode);
8512
f676971a 8513 op1 = copy_to_mode_reg (mode1, op1);
8514
8515 if (op0 == const0_rtx)
8516 {
0b61703c 8517 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
8518 }
8519 else
8520 {
8521 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8522 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
8523 }
8524
8525 pat = GEN_FCN (icode) (target, addr);
8526
8527 if (! pat)
8528 return 0;
8529 emit_insn (pat);
8530
8531 return target;
8532}
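/* Editorial note (hedged): the AltiVec loads handled here are indexed-form,
   so the expander above only ever builds (mem (reg)) when the offset
   argument is literally zero, or (mem (plus reg reg)) otherwise; a nonzero
   constant offset is first forced into a register.  Assumed user-level
   shape:

       vector float v0 = __builtin_altivec_lvx (0, p);     address is just p
       vector float v1 = __builtin_altivec_lvx (16, p);    16 goes into a GPR
                                                           and is added to p
*/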
8533
61bea3b0 8534static rtx
5039610b 8535spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8536{
8537 tree arg0 = CALL_EXPR_ARG (exp, 0);
8538 tree arg1 = CALL_EXPR_ARG (exp, 1);
8539 tree arg2 = CALL_EXPR_ARG (exp, 2);
8540 rtx op0 = expand_normal (arg0);
8541 rtx op1 = expand_normal (arg1);
8542 rtx op2 = expand_normal (arg2);
8543 rtx pat;
8544 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8545 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8546 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8547
8548 /* Invalid arguments. Bail before doing anything stoopid! */
8549 if (arg0 == error_mark_node
8550 || arg1 == error_mark_node
8551 || arg2 == error_mark_node)
8552 return const0_rtx;
8553
8554 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8555 op0 = copy_to_mode_reg (mode2, op0);
8556 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8557 op1 = copy_to_mode_reg (mode0, op1);
8558 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8559 op2 = copy_to_mode_reg (mode1, op2);
8560
8561 pat = GEN_FCN (icode) (op1, op2, op0);
8562 if (pat)
8563 emit_insn (pat);
8564 return NULL_RTX;
8565}
8566
96038623
DE
8567static rtx
8568paired_expand_stv_builtin (enum insn_code icode, tree exp)
8569{
8570 tree arg0 = CALL_EXPR_ARG (exp, 0);
8571 tree arg1 = CALL_EXPR_ARG (exp, 1);
8572 tree arg2 = CALL_EXPR_ARG (exp, 2);
8573 rtx op0 = expand_normal (arg0);
8574 rtx op1 = expand_normal (arg1);
8575 rtx op2 = expand_normal (arg2);
8576 rtx pat, addr;
8577 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8578 enum machine_mode mode1 = Pmode;
8579 enum machine_mode mode2 = Pmode;
8580
8581 /* Invalid arguments. Bail before doing anything stoopid! */
8582 if (arg0 == error_mark_node
8583 || arg1 == error_mark_node
8584 || arg2 == error_mark_node)
8585 return const0_rtx;
8586
8587 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8588 op0 = copy_to_mode_reg (tmode, op0);
8589
8590 op2 = copy_to_mode_reg (mode2, op2);
8591
8592 if (op1 == const0_rtx)
8593 {
8594 addr = gen_rtx_MEM (tmode, op2);
8595 }
8596 else
8597 {
8598 op1 = copy_to_mode_reg (mode1, op1);
8599 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8600 }
8601
8602 pat = GEN_FCN (icode) (addr, op0);
8603 if (pat)
8604 emit_insn (pat);
8605 return NULL_RTX;
8606}
8607
6525c0e7 8608static rtx
5039610b 8609altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8610{
5039610b
SL
8611 tree arg0 = CALL_EXPR_ARG (exp, 0);
8612 tree arg1 = CALL_EXPR_ARG (exp, 1);
8613 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8614 rtx op0 = expand_normal (arg0);
8615 rtx op1 = expand_normal (arg1);
8616 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8617 rtx pat, addr;
8618 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8619 enum machine_mode mode1 = Pmode;
8620 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8621
8622 /* Invalid arguments. Bail before doing anything stoopid! */
8623 if (arg0 == error_mark_node
8624 || arg1 == error_mark_node
8625 || arg2 == error_mark_node)
9a171fcd 8626 return const0_rtx;
6525c0e7 8627
b4a62fa0
SB
8628 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8629 op0 = copy_to_mode_reg (tmode, op0);
8630
f676971a 8631 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8632
8633 if (op1 == const0_rtx)
8634 {
8635 addr = gen_rtx_MEM (tmode, op2);
8636 }
8637 else
8638 {
8639 op1 = copy_to_mode_reg (mode1, op1);
8640 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8641 }
6525c0e7 8642
b4a62fa0 8643 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8644 if (pat)
8645 emit_insn (pat);
8646 return NULL_RTX;
8647}
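/* Illustrative sketch, not part of the original source: the store counterpart,
   assuming vec_st resolves to ALTIVEC_BUILTIN_STVX and so is handled by this
   routine via the dispatch below:

       void
       store_block (vector signed int v, vector signed int *p, int off)
       {
         vec_st (v, off, p);
       }

   The value to store becomes op0 above, and the address is formed from the
   offset/pointer pair exactly as in the load case.  */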
8648
2212663f 8649static rtx
5039610b 8650rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8651{
8652 rtx pat;
5039610b
SL
8653 tree arg0 = CALL_EXPR_ARG (exp, 0);
8654 tree arg1 = CALL_EXPR_ARG (exp, 1);
8655 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8656 rtx op0 = expand_normal (arg0);
8657 rtx op1 = expand_normal (arg1);
8658 rtx op2 = expand_normal (arg2);
2212663f
DB
8659 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8660 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8661 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8662 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8663
774b5662
DE
8664 if (icode == CODE_FOR_nothing)
8665 /* Builtin not supported on this processor. */
8666 return 0;
8667
20e26713
AH
8668 /* If we got invalid arguments, bail out before generating bad rtl. */
8669 if (arg0 == error_mark_node
8670 || arg1 == error_mark_node
8671 || arg2 == error_mark_node)
9a171fcd 8672 return const0_rtx;
20e26713 8673
aba5fb01
NS
8674 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8675 || icode == CODE_FOR_altivec_vsldoi_v4si
8676 || icode == CODE_FOR_altivec_vsldoi_v8hi
8677 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8678 {
8679 /* Only allow 4-bit unsigned literals. */
8bb418a3 8680 STRIP_NOPS (arg2);
b44140e7
AH
8681 if (TREE_CODE (arg2) != INTEGER_CST
8682 || TREE_INT_CST_LOW (arg2) & ~0xf)
8683 {
8684 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8685 return const0_rtx;
b44140e7 8686 }
b44140e7
AH
8687 }
8688
c62f2db5 8689 if (target == 0
2212663f
DB
8690 || GET_MODE (target) != tmode
8691 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8692 target = gen_reg_rtx (tmode);
8693
8694 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8695 op0 = copy_to_mode_reg (mode0, op0);
8696 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8697 op1 = copy_to_mode_reg (mode1, op1);
8698 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8699 op2 = copy_to_mode_reg (mode2, op2);
8700
49e39588
RE
8701 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8702 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8703 else
8704 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8705 if (! pat)
8706 return 0;
8707 emit_insn (pat);
8708
8709 return target;
8710}
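/* Illustrative sketch, not part of the original source: the vsldoi check above
   is what rejects a non-literal shift amount in user code such as

       vector signed int
       rotate_bytes (vector signed int a, vector signed int b)
       {
         return vec_sld (a, b, 4);
       }

   where the third operand of vec_sld must be a literal in 0..15; anything
   else reaches the "argument 3 must be a 4-bit unsigned literal" error.  */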
92898235 8711
3a9b8c7e 8712/* Expand the lvx builtins. */
0ac081f6 8713static rtx
a2369ed3 8714altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8715{
5039610b 8716 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8717 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8718 tree arg0;
8719 enum machine_mode tmode, mode0;
7c3abc73 8720 rtx pat, op0;
3a9b8c7e 8721 enum insn_code icode;
92898235 8722
0ac081f6
AH
8723 switch (fcode)
8724 {
f18c054f 8725 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8726 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8727 break;
f18c054f 8728 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8729 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8730 break;
8731 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8732 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8733 break;
8734 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8735 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8736 break;
8737 default:
8738 *expandedp = false;
8739 return NULL_RTX;
8740 }
0ac081f6 8741
3a9b8c7e 8742 *expandedp = true;
f18c054f 8743
5039610b 8744 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8745 op0 = expand_normal (arg0);
3a9b8c7e
AH
8746 tmode = insn_data[icode].operand[0].mode;
8747 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8748
3a9b8c7e
AH
8749 if (target == 0
8750 || GET_MODE (target) != tmode
8751 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8752 target = gen_reg_rtx (tmode);
24408032 8753
3a9b8c7e
AH
8754 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8755 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8756
3a9b8c7e
AH
8757 pat = GEN_FCN (icode) (target, op0);
8758 if (! pat)
8759 return 0;
8760 emit_insn (pat);
8761 return target;
8762}
f18c054f 8763
3a9b8c7e
AH
8764/* Expand the stvx builtins. */
8765static rtx
f676971a 8766altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8767 bool *expandedp)
3a9b8c7e 8768{
5039610b 8769 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8770 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8771 tree arg0, arg1;
8772 enum machine_mode mode0, mode1;
7c3abc73 8773 rtx pat, op0, op1;
3a9b8c7e 8774 enum insn_code icode;
f18c054f 8775
3a9b8c7e
AH
8776 switch (fcode)
8777 {
8778 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8779 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8780 break;
8781 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8782 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8783 break;
8784 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8785 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8786 break;
8787 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8788 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8789 break;
8790 default:
8791 *expandedp = false;
8792 return NULL_RTX;
8793 }
24408032 8794
5039610b
SL
8795 arg0 = CALL_EXPR_ARG (exp, 0);
8796 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8797 op0 = expand_normal (arg0);
8798 op1 = expand_normal (arg1);
3a9b8c7e
AH
8799 mode0 = insn_data[icode].operand[0].mode;
8800 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8801
3a9b8c7e
AH
8802 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8803 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8804 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8805 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8806
3a9b8c7e
AH
8807 pat = GEN_FCN (icode) (op0, op1);
8808 if (pat)
8809 emit_insn (pat);
f18c054f 8810
3a9b8c7e
AH
8811 *expandedp = true;
8812 return NULL_RTX;
8813}
f18c054f 8814
3a9b8c7e
AH
8815/* Expand the dst builtins. */
8816static rtx
f676971a 8817altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8818 bool *expandedp)
3a9b8c7e 8819{
5039610b 8820 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8821 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8822 tree arg0, arg1, arg2;
8823 enum machine_mode mode0, mode1, mode2;
7c3abc73 8824 rtx pat, op0, op1, op2;
586de218 8825 const struct builtin_description *d;
a3170dc6 8826 size_t i;
f18c054f 8827
3a9b8c7e 8828 *expandedp = false;
f18c054f 8829
3a9b8c7e 8830 /* Handle DST variants. */
586de218 8831 d = bdesc_dst;
3a9b8c7e
AH
8832 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8833 if (d->code == fcode)
8834 {
5039610b
SL
8835 arg0 = CALL_EXPR_ARG (exp, 0);
8836 arg1 = CALL_EXPR_ARG (exp, 1);
8837 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8838 op0 = expand_normal (arg0);
8839 op1 = expand_normal (arg1);
8840 op2 = expand_normal (arg2);
3a9b8c7e
AH
8841 mode0 = insn_data[d->icode].operand[0].mode;
8842 mode1 = insn_data[d->icode].operand[1].mode;
8843 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8844
3a9b8c7e
AH
8845 /* Invalid arguments, bail out before generating bad rtl. */
8846 if (arg0 == error_mark_node
8847 || arg1 == error_mark_node
8848 || arg2 == error_mark_node)
8849 return const0_rtx;
f18c054f 8850
86e7df90 8851 *expandedp = true;
8bb418a3 8852 STRIP_NOPS (arg2);
3a9b8c7e
AH
8853 if (TREE_CODE (arg2) != INTEGER_CST
8854 || TREE_INT_CST_LOW (arg2) & ~0x3)
8855 {
9e637a26 8856 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8857 return const0_rtx;
8858 }
f18c054f 8859
3a9b8c7e 8860 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8861 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8862 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8863 op1 = copy_to_mode_reg (mode1, op1);
24408032 8864
3a9b8c7e
AH
8865 pat = GEN_FCN (d->icode) (op0, op1, op2);
8866 if (pat != 0)
8867 emit_insn (pat);
f18c054f 8868
3a9b8c7e
AH
8869 return NULL_RTX;
8870 }
f18c054f 8871
3a9b8c7e
AH
8872 return NULL_RTX;
8873}
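/* Illustrative sketch, not part of the original source: the data-stream touch
   builtins take a 2-bit tag as their last operand, so user code looks like

       void
       prefetch_rows (const float *p, int ctl)
       {
         vec_dst (p, ctl, 0);
       }

   with the tag restricted to the literals 0..3 (the check above); vec_dss (0)
   and vec_dssall () later stop the stream.  */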
24408032 8874
7a4eca66
DE
8875/* Expand vec_init builtin. */
8876static rtx
5039610b 8877altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8878{
8879 enum machine_mode tmode = TYPE_MODE (type);
8880 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8881 int i, n_elt = GET_MODE_NUNITS (tmode);
8882 rtvec v = rtvec_alloc (n_elt);
8883
8884 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8885 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8886
5039610b 8887 for (i = 0; i < n_elt; ++i)
7a4eca66 8888 {
5039610b 8889 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8890 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8891 }
8892
7a4eca66
DE
8893 if (!target || !register_operand (target, tmode))
8894 target = gen_reg_rtx (tmode);
8895
8896 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8897 return target;
8898}
8899
8900/* Return the integer constant in ARG. Constrain it to be in the range
8901 of the subparts of VEC_TYPE; issue an error if not. */
8902
8903static int
8904get_element_number (tree vec_type, tree arg)
8905{
8906 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8907
8908 if (!host_integerp (arg, 1)
8909 || (elt = tree_low_cst (arg, 1), elt > max))
8910 {
8911 error ("selector must be an integer constant in the range 0..%wi", max);
8912 return 0;
8913 }
8914
8915 return elt;
8916}
8917
8918/* Expand vec_set builtin. */
8919static rtx
5039610b 8920altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8921{
8922 enum machine_mode tmode, mode1;
8923 tree arg0, arg1, arg2;
8924 int elt;
8925 rtx op0, op1;
8926
5039610b
SL
8927 arg0 = CALL_EXPR_ARG (exp, 0);
8928 arg1 = CALL_EXPR_ARG (exp, 1);
8929 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8930
8931 tmode = TYPE_MODE (TREE_TYPE (arg0));
8932 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8933 gcc_assert (VECTOR_MODE_P (tmode));
8934
bbbbb16a
ILT
8935 op0 = expand_expr (arg0, NULL_RTX, tmode, EXPAND_NORMAL);
8936 op1 = expand_expr (arg1, NULL_RTX, mode1, EXPAND_NORMAL);
7a4eca66
DE
8937 elt = get_element_number (TREE_TYPE (arg0), arg2);
8938
8939 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8940 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8941
8942 op0 = force_reg (tmode, op0);
8943 op1 = force_reg (mode1, op1);
8944
8945 rs6000_expand_vector_set (op0, op1, elt);
8946
8947 return op0;
8948}
8949
8950/* Expand vec_ext builtin. */
8951static rtx
5039610b 8952altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8953{
8954 enum machine_mode tmode, mode0;
8955 tree arg0, arg1;
8956 int elt;
8957 rtx op0;
8958
5039610b
SL
8959 arg0 = CALL_EXPR_ARG (exp, 0);
8960 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8961
84217346 8962 op0 = expand_normal (arg0);
7a4eca66
DE
8963 elt = get_element_number (TREE_TYPE (arg0), arg1);
8964
8965 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8966 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8967 gcc_assert (VECTOR_MODE_P (mode0));
8968
8969 op0 = force_reg (mode0, op0);
8970
8971 if (optimize || !target || !register_operand (target, tmode))
8972 target = gen_reg_rtx (tmode);
8973
8974 rs6000_expand_vector_extract (target, op0, elt);
8975
8976 return target;
8977}
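/* Illustrative sketch, not part of the original source: these VEC_SET / VEC_EXT
   builtins sit behind the PIM-style vec_insert and vec_extract operations, so
   (assuming those <altivec.h> spellings) element access looks like

       int
       third_element (vector signed int v)
       {
         return vec_extract (v, 2);
       }

   where get_element_number above constrains the selector to 0 .. nunits-1
   (0..3 for a V4SI vector) and reports "selector must be an integer constant
   in the range 0..%wi" otherwise.  */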
8978
3a9b8c7e
AH
8979/* Expand the builtin in EXP and store the result in TARGET. Store
8980 true in *EXPANDEDP if we found a builtin to expand. */
8981static rtx
a2369ed3 8982altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8983{
586de218
KG
8984 const struct builtin_description *d;
8985 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8986 size_t i;
8987 enum insn_code icode;
5039610b 8988 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8989 tree arg0;
8990 rtx op0, pat;
8991 enum machine_mode tmode, mode0;
3a9b8c7e 8992 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8993
58646b77
PB
8994 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8995 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8996 {
8997 *expandedp = true;
ea40ba9c 8998 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8999 return const0_rtx;
9000 }
9001
3a9b8c7e
AH
9002 target = altivec_expand_ld_builtin (exp, target, expandedp);
9003 if (*expandedp)
9004 return target;
0ac081f6 9005
3a9b8c7e
AH
9006 target = altivec_expand_st_builtin (exp, target, expandedp);
9007 if (*expandedp)
9008 return target;
9009
9010 target = altivec_expand_dst_builtin (exp, target, expandedp);
9011 if (*expandedp)
9012 return target;
9013
9014 *expandedp = true;
95385cbb 9015
3a9b8c7e
AH
9016 switch (fcode)
9017 {
6525c0e7 9018 case ALTIVEC_BUILTIN_STVX:
5039610b 9019 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 9020 case ALTIVEC_BUILTIN_STVEBX:
5039610b 9021 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 9022 case ALTIVEC_BUILTIN_STVEHX:
5039610b 9023 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 9024 case ALTIVEC_BUILTIN_STVEWX:
5039610b 9025 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 9026 case ALTIVEC_BUILTIN_STVXL:
5039610b 9027 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 9028
0b61703c
AP
9029 case ALTIVEC_BUILTIN_STVLX:
9030 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
9031 case ALTIVEC_BUILTIN_STVLXL:
9032 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
9033 case ALTIVEC_BUILTIN_STVRX:
9034 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
9035 case ALTIVEC_BUILTIN_STVRXL:
9036 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
9037
95385cbb
AH
9038 case ALTIVEC_BUILTIN_MFVSCR:
9039 icode = CODE_FOR_altivec_mfvscr;
9040 tmode = insn_data[icode].operand[0].mode;
9041
9042 if (target == 0
9043 || GET_MODE (target) != tmode
9044 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9045 target = gen_reg_rtx (tmode);
f676971a 9046
95385cbb 9047 pat = GEN_FCN (icode) (target);
0ac081f6
AH
9048 if (! pat)
9049 return 0;
9050 emit_insn (pat);
95385cbb
AH
9051 return target;
9052
9053 case ALTIVEC_BUILTIN_MTVSCR:
9054 icode = CODE_FOR_altivec_mtvscr;
5039610b 9055 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 9056 op0 = expand_normal (arg0);
95385cbb
AH
9057 mode0 = insn_data[icode].operand[0].mode;
9058
9059 /* If we got invalid arguments, bail out before generating bad rtl. */
9060 if (arg0 == error_mark_node)
9a171fcd 9061 return const0_rtx;
95385cbb
AH
9062
9063 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
9064 op0 = copy_to_mode_reg (mode0, op0);
9065
9066 pat = GEN_FCN (icode) (op0);
9067 if (pat)
9068 emit_insn (pat);
9069 return NULL_RTX;
3a9b8c7e 9070
95385cbb
AH
9071 case ALTIVEC_BUILTIN_DSSALL:
9072 emit_insn (gen_altivec_dssall ());
9073 return NULL_RTX;
9074
9075 case ALTIVEC_BUILTIN_DSS:
9076 icode = CODE_FOR_altivec_dss;
5039610b 9077 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 9078 STRIP_NOPS (arg0);
84217346 9079 op0 = expand_normal (arg0);
95385cbb
AH
9080 mode0 = insn_data[icode].operand[0].mode;
9081
9083 /* If we got invalid arguments, bail out before generating bad rtl. */
9083 if (arg0 == error_mark_node)
9a171fcd 9084 return const0_rtx;
95385cbb 9085
b44140e7
AH
9086 if (TREE_CODE (arg0) != INTEGER_CST
9087 || TREE_INT_CST_LOW (arg0) & ~0x3)
9088 {
9089 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 9090 return const0_rtx;
b44140e7
AH
9091 }
9092
95385cbb
AH
9093 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
9094 op0 = copy_to_mode_reg (mode0, op0);
9095
9096 emit_insn (gen_altivec_dss (op0));
0ac081f6 9097 return NULL_RTX;
7a4eca66
DE
9098
9099 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
9100 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
9101 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
9102 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 9103 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
9104
9105 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
9106 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
9107 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
9108 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 9109 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
9110
9111 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
9112 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
9113 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
9114 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 9115 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
9116
9117 default:
9118 break;
9119 /* Fall through. */
0ac081f6 9120 }
24408032 9121
100c4561 9122 /* Expand abs* operations. */
586de218 9123 d = bdesc_abs;
ca7558fc 9124 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 9125 if (d->code == fcode)
5039610b 9126 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 9127
ae4b4a02 9128 /* Expand the AltiVec predicates. */
586de218 9129 dp = bdesc_altivec_preds;
ca7558fc 9130 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 9131 if (dp->code == fcode)
c4ad648e 9132 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 9133 exp, target);
ae4b4a02 9134
6525c0e7
AH
9135 /* LV* are funky. We initialized them differently. */
9136 switch (fcode)
9137 {
9138 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 9139 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 9140 exp, target, false);
6525c0e7 9141 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 9142 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 9143 exp, target, false);
6525c0e7 9144 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 9145 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 9146 exp, target, false);
6525c0e7 9147 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 9148 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 9149 exp, target, false);
6525c0e7 9150 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 9151 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 9152 exp, target, false);
6525c0e7 9153 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 9154 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 9155 exp, target, false);
6525c0e7 9156 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 9157 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
9158 exp, target, false);
9159 case ALTIVEC_BUILTIN_LVLX:
9160 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
9161 exp, target, true);
9162 case ALTIVEC_BUILTIN_LVLXL:
9163 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
9164 exp, target, true);
9165 case ALTIVEC_BUILTIN_LVRX:
9166 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
9167 exp, target, true);
9168 case ALTIVEC_BUILTIN_LVRXL:
9169 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
9170 exp, target, true);
6525c0e7
AH
9171 default:
9172 break;
9173 /* Fall through. */
9174 }
95385cbb 9175
92898235 9176 *expandedp = false;
0ac081f6
AH
9177 return NULL_RTX;
9178}
9179
96038623
DE
9180/* Expand the builtin in EXP and store the result in TARGET. Store
9181 true in *EXPANDEDP if we found a builtin to expand. */
9182static rtx
9183paired_expand_builtin (tree exp, rtx target, bool * expandedp)
9184{
9185 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9186 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 9187 const struct builtin_description *d;
96038623
DE
9188 size_t i;
9189
9190 *expandedp = true;
9191
9192 switch (fcode)
9193 {
9194 case PAIRED_BUILTIN_STX:
9195 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
9196 case PAIRED_BUILTIN_LX:
9197 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
9198 default:
9199 break;
9200 /* Fall through. */
9201 }
9202
9203 /* Expand the paired predicates. */
23a651fc 9204 d = bdesc_paired_preds;
96038623
DE
9205 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
9206 if (d->code == fcode)
9207 return paired_expand_predicate_builtin (d->icode, exp, target);
9208
9209 *expandedp = false;
9210 return NULL_RTX;
9211}
9212
a3170dc6
AH
9213/* Binops that need to be initialized manually, but can be expanded
9214 automagically by rs6000_expand_binop_builtin. */
9215static struct builtin_description bdesc_2arg_spe[] =
9216{
9217 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
9218 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
9219 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
9220 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
9221 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
9222 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
9223 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
9224 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
9225 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
9226 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
9227 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
9228 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
9229 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
9230 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
9231 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
9232 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
9233 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
9234 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
9235 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
9236 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
9237 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
9238 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
9239};
9240
9241/* Expand the builtin in EXP and store the result in TARGET. Store
9242 true in *EXPANDEDP if we found a builtin to expand.
9243
9244 This expands the SPE builtins that are not simple unary and binary
9245 operations. */
9246static rtx
a2369ed3 9247spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 9248{
5039610b 9249 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
9250 tree arg1, arg0;
9251 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
9252 enum insn_code icode;
9253 enum machine_mode tmode, mode0;
9254 rtx pat, op0;
9255 struct builtin_description *d;
9256 size_t i;
9257
9258 *expandedp = true;
9259
9260 /* Syntax check for a 5-bit unsigned immediate. */
9261 switch (fcode)
9262 {
9263 case SPE_BUILTIN_EVSTDD:
9264 case SPE_BUILTIN_EVSTDH:
9265 case SPE_BUILTIN_EVSTDW:
9266 case SPE_BUILTIN_EVSTWHE:
9267 case SPE_BUILTIN_EVSTWHO:
9268 case SPE_BUILTIN_EVSTWWE:
9269 case SPE_BUILTIN_EVSTWWO:
5039610b 9270 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
9271 if (TREE_CODE (arg1) != INTEGER_CST
9272 || TREE_INT_CST_LOW (arg1) & ~0x1f)
9273 {
9274 error ("argument 2 must be a 5-bit unsigned literal");
9275 return const0_rtx;
9276 }
9277 break;
9278 default:
9279 break;
9280 }
9281
00332c9f
AH
9282 /* The evsplat*i instructions are not quite generic. */
9283 switch (fcode)
9284 {
9285 case SPE_BUILTIN_EVSPLATFI:
9286 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 9287 exp, target);
00332c9f
AH
9288 case SPE_BUILTIN_EVSPLATI:
9289 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 9290 exp, target);
00332c9f
AH
9291 default:
9292 break;
9293 }
9294
a3170dc6
AH
9295 d = (struct builtin_description *) bdesc_2arg_spe;
9296 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
9297 if (d->code == fcode)
5039610b 9298 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
9299
9300 d = (struct builtin_description *) bdesc_spe_predicates;
9301 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
9302 if (d->code == fcode)
5039610b 9303 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
9304
9305 d = (struct builtin_description *) bdesc_spe_evsel;
9306 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
9307 if (d->code == fcode)
5039610b 9308 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
9309
9310 switch (fcode)
9311 {
9312 case SPE_BUILTIN_EVSTDDX:
5039610b 9313 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 9314 case SPE_BUILTIN_EVSTDHX:
5039610b 9315 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 9316 case SPE_BUILTIN_EVSTDWX:
5039610b 9317 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 9318 case SPE_BUILTIN_EVSTWHEX:
5039610b 9319 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 9320 case SPE_BUILTIN_EVSTWHOX:
5039610b 9321 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 9322 case SPE_BUILTIN_EVSTWWEX:
5039610b 9323 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 9324 case SPE_BUILTIN_EVSTWWOX:
5039610b 9325 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 9326 case SPE_BUILTIN_EVSTDD:
5039610b 9327 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 9328 case SPE_BUILTIN_EVSTDH:
5039610b 9329 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 9330 case SPE_BUILTIN_EVSTDW:
5039610b 9331 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 9332 case SPE_BUILTIN_EVSTWHE:
5039610b 9333 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 9334 case SPE_BUILTIN_EVSTWHO:
5039610b 9335 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 9336 case SPE_BUILTIN_EVSTWWE:
5039610b 9337 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 9338 case SPE_BUILTIN_EVSTWWO:
5039610b 9339 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
9340 case SPE_BUILTIN_MFSPEFSCR:
9341 icode = CODE_FOR_spe_mfspefscr;
9342 tmode = insn_data[icode].operand[0].mode;
9343
9344 if (target == 0
9345 || GET_MODE (target) != tmode
9346 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9347 target = gen_reg_rtx (tmode);
f676971a 9348
a3170dc6
AH
9349 pat = GEN_FCN (icode) (target);
9350 if (! pat)
9351 return 0;
9352 emit_insn (pat);
9353 return target;
9354 case SPE_BUILTIN_MTSPEFSCR:
9355 icode = CODE_FOR_spe_mtspefscr;
5039610b 9356 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 9357 op0 = expand_normal (arg0);
a3170dc6
AH
9358 mode0 = insn_data[icode].operand[0].mode;
9359
9360 if (arg0 == error_mark_node)
9361 return const0_rtx;
9362
9363 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
9364 op0 = copy_to_mode_reg (mode0, op0);
9365
9366 pat = GEN_FCN (icode) (op0);
9367 if (pat)
9368 emit_insn (pat);
9369 return NULL_RTX;
9370 default:
9371 break;
9372 }
9373
9374 *expandedp = false;
9375 return NULL_RTX;
9376}
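/* Illustrative sketch, not part of the original source: the 5-bit immediate
   check above applies to the non-indexed SPE stores declared in
   spe_init_builtins below, e.g.

       void
       save_pair (__ev64_opaque__ v, __ev64_opaque__ *p)
       {
         __builtin_spe_evstdd (v, p, 8);
       }

   where the last operand must be an unsigned literal in 0..31; a variable
   there triggers "argument 2 must be a 5-bit unsigned literal".  */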
9377
96038623
DE
9378static rtx
9379paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9380{
9381 rtx pat, scratch, tmp;
9382 tree form = CALL_EXPR_ARG (exp, 0);
9383 tree arg0 = CALL_EXPR_ARG (exp, 1);
9384 tree arg1 = CALL_EXPR_ARG (exp, 2);
9385 rtx op0 = expand_normal (arg0);
9386 rtx op1 = expand_normal (arg1);
9387 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9388 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9389 int form_int;
9390 enum rtx_code code;
9391
9392 if (TREE_CODE (form) != INTEGER_CST)
9393 {
9394 error ("argument 1 of __builtin_paired_predicate must be a constant");
9395 return const0_rtx;
9396 }
9397 else
9398 form_int = TREE_INT_CST_LOW (form);
9399
9400 gcc_assert (mode0 == mode1);
9401
9402 if (arg0 == error_mark_node || arg1 == error_mark_node)
9403 return const0_rtx;
9404
9405 if (target == 0
9406 || GET_MODE (target) != SImode
9407 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9408 target = gen_reg_rtx (SImode);
9409 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9410 op0 = copy_to_mode_reg (mode0, op0);
9411 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9412 op1 = copy_to_mode_reg (mode1, op1);
9413
9414 scratch = gen_reg_rtx (CCFPmode);
9415
9416 pat = GEN_FCN (icode) (scratch, op0, op1);
9417 if (!pat)
9418 return const0_rtx;
9419
9420 emit_insn (pat);
9421
9422 switch (form_int)
9423 {
9424 /* LT bit. */
9425 case 0:
9426 code = LT;
9427 break;
9428 /* GT bit. */
9429 case 1:
9430 code = GT;
9431 break;
9432 /* EQ bit. */
9433 case 2:
9434 code = EQ;
9435 break;
9436 /* UN bit. */
9437 case 3:
9438 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9439 return target;
9440 default:
9441 error ("argument 1 of __builtin_paired_predicate is out of range");
9442 return const0_rtx;
9443 }
9444
9445 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9446 emit_move_insn (target, tmp);
9447 return target;
9448}
9449
a3170dc6 9450static rtx
5039610b 9451spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9452{
9453 rtx pat, scratch, tmp;
5039610b
SL
9454 tree form = CALL_EXPR_ARG (exp, 0);
9455 tree arg0 = CALL_EXPR_ARG (exp, 1);
9456 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9457 rtx op0 = expand_normal (arg0);
9458 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9459 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9460 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9461 int form_int;
9462 enum rtx_code code;
9463
9464 if (TREE_CODE (form) != INTEGER_CST)
9465 {
9466 error ("argument 1 of __builtin_spe_predicate must be a constant");
9467 return const0_rtx;
9468 }
9469 else
9470 form_int = TREE_INT_CST_LOW (form);
9471
37409796 9472 gcc_assert (mode0 == mode1);
a3170dc6
AH
9473
9474 if (arg0 == error_mark_node || arg1 == error_mark_node)
9475 return const0_rtx;
9476
9477 if (target == 0
9478 || GET_MODE (target) != SImode
9479 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9480 target = gen_reg_rtx (SImode);
9481
9482 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9483 op0 = copy_to_mode_reg (mode0, op0);
9484 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9485 op1 = copy_to_mode_reg (mode1, op1);
9486
9487 scratch = gen_reg_rtx (CCmode);
9488
9489 pat = GEN_FCN (icode) (scratch, op0, op1);
9490 if (! pat)
9491 return const0_rtx;
9492 emit_insn (pat);
9493
9494 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9495 _lower_. We use one compare, but look in different bits of the
9496 CR for each variant.
9497
9498 There are 2 elements in each SPE simd type (upper/lower). The CR
9499 bits are set as follows:
9500
9501 BIT0 | BIT 1 | BIT 2 | BIT 3
9502 U | L | (U | L) | (U & L)
9503
9504 So, for an "all" relationship, BIT 3 would be set.
9505 For an "any" relationship, BIT 2 would be set. Etc.
9506
9507 Following traditional nomenclature, these bits map to:
9508
9509 BIT0 | BIT 1 | BIT 2 | BIT 3
9510 LT | GT | EQ | OV
9511
9512 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
9513 */
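/* Worked example (added for illustration, not part of the original source):
   if the upper halves compare true but the lower halves do not, the compare
   sets BIT0 = 1 (U), BIT1 = 0 (L), BIT2 = 1 (U | L), BIT3 = 0 (U & L); the
   "any" form (EQ bit) therefore yields 1 while the "all" form (OV bit)
   yields 0, and the "upper"/"lower" forms read BIT0/BIT1 directly.  */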
9514
9515 switch (form_int)
9516 {
9517 /* All variant. OV bit. */
9518 case 0:
9519 /* We need to get to the OV bit, which is the ORDERED bit. We
9520 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9521 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9522 So let's just use another pattern. */
9523 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9524 return target;
9525 /* Any variant. EQ bit. */
9526 case 1:
9527 code = EQ;
9528 break;
9529 /* Upper variant. LT bit. */
9530 case 2:
9531 code = LT;
9532 break;
9533 /* Lower variant. GT bit. */
9534 case 3:
9535 code = GT;
9536 break;
9537 default:
9538 error ("argument 1 of __builtin_spe_predicate is out of range");
9539 return const0_rtx;
9540 }
9541
9542 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9543 emit_move_insn (target, tmp);
9544
9545 return target;
9546}
9547
9548/* The evsel builtins look like this:
9549
9550 e = __builtin_spe_evsel_OP (a, b, c, d);
9551
9552 and work like this:
9553
9554 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9555 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9556*/
9557
9558static rtx
5039610b 9559spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9560{
9561 rtx pat, scratch;
5039610b
SL
9562 tree arg0 = CALL_EXPR_ARG (exp, 0);
9563 tree arg1 = CALL_EXPR_ARG (exp, 1);
9564 tree arg2 = CALL_EXPR_ARG (exp, 2);
9565 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9566 rtx op0 = expand_normal (arg0);
9567 rtx op1 = expand_normal (arg1);
9568 rtx op2 = expand_normal (arg2);
9569 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9570 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9571 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9572
37409796 9573 gcc_assert (mode0 == mode1);
a3170dc6
AH
9574
9575 if (arg0 == error_mark_node || arg1 == error_mark_node
9576 || arg2 == error_mark_node || arg3 == error_mark_node)
9577 return const0_rtx;
9578
9579 if (target == 0
9580 || GET_MODE (target) != mode0
9581 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9582 target = gen_reg_rtx (mode0);
9583
9584 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9585 op0 = copy_to_mode_reg (mode0, op0);
9586 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9587 op1 = copy_to_mode_reg (mode0, op1);
9588 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9589 op2 = copy_to_mode_reg (mode0, op2);
9590 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9591 op3 = copy_to_mode_reg (mode0, op3);
9592
9593 /* Generate the compare. */
9594 scratch = gen_reg_rtx (CCmode);
9595 pat = GEN_FCN (icode) (scratch, op0, op1);
9596 if (! pat)
9597 return const0_rtx;
9598 emit_insn (pat);
9599
9600 if (mode0 == V2SImode)
9601 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9602 else
9603 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9604
9605 return target;
9606}
9607
0ac081f6
AH
9608/* Expand an expression EXP that calls a built-in function,
9609 with result going to TARGET if that's convenient
9610 (and in mode MODE if that's convenient).
9611 SUBTARGET may be used as the target for computing one of EXP's operands.
9612 IGNORE is nonzero if the value is to be ignored. */
9613
9614static rtx
a2369ed3 9615rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9616 enum machine_mode mode ATTRIBUTE_UNUSED,
9617 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9618{
5039610b 9619 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9620 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9621 const struct builtin_description *d;
92898235
AH
9622 size_t i;
9623 rtx ret;
9624 bool success;
f676971a 9625
9c78b944
DE
9626 if (fcode == RS6000_BUILTIN_RECIP)
9627 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9628
9629 if (fcode == RS6000_BUILTIN_RECIPF)
9630 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9631
9632 if (fcode == RS6000_BUILTIN_RSQRTF)
9633 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9634
7ccf35ed
DN
9635 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9636 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9637 {
9638 int icode = (int) CODE_FOR_altivec_lvsr;
9639 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9640 enum machine_mode mode = insn_data[icode].operand[1].mode;
9641 tree arg;
9642 rtx op, addr, pat;
9643
37409796 9644 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9645
5039610b 9646 arg = CALL_EXPR_ARG (exp, 0);
37409796 9647 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9648 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9649 addr = memory_address (mode, op);
9650 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9651 op = addr;
9652 else
9653 {
9654 /* For the load case, we need to negate the address. */
9655 op = gen_reg_rtx (GET_MODE (addr));
9656 emit_insn (gen_rtx_SET (VOIDmode, op,
9657 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9658 }
7ccf35ed
DN
9659 op = gen_rtx_MEM (mode, op);
9660
9661 if (target == 0
9662 || GET_MODE (target) != tmode
9663 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9664 target = gen_reg_rtx (tmode);
9665
9666 /*pat = gen_altivec_lvsr (target, op);*/
9667 pat = GEN_FCN (icode) (target, op);
9668 if (!pat)
9669 return 0;
9670 emit_insn (pat);
9671
9672 return target;
9673 }
5039610b
SL
9674
9675 /* FIXME: There's got to be a nicer way to handle this case than
9676 constructing a new CALL_EXPR. */
f57d17f1 9677 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9678 || fcode == ALTIVEC_BUILTIN_VCFSX
9679 || fcode == ALTIVEC_BUILTIN_VCTUXS
9680 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9681 {
5039610b
SL
9682 if (call_expr_nargs (exp) == 1)
9683 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9684 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9685 }
7ccf35ed 9686
0ac081f6 9687 if (TARGET_ALTIVEC)
92898235
AH
9688 {
9689 ret = altivec_expand_builtin (exp, target, &success);
9690
a3170dc6
AH
9691 if (success)
9692 return ret;
9693 }
9694 if (TARGET_SPE)
9695 {
9696 ret = spe_expand_builtin (exp, target, &success);
9697
92898235
AH
9698 if (success)
9699 return ret;
9700 }
96038623
DE
9701 if (TARGET_PAIRED_FLOAT)
9702 {
9703 ret = paired_expand_builtin (exp, target, &success);
9704
9705 if (success)
9706 return ret;
9707 }
92898235 9708
cacf1ca8 9709 gcc_assert (TARGET_ALTIVEC || TARGET_VSX || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9710
37409796
NS
9711 /* Handle simple unary operations. */
9712 d = (struct builtin_description *) bdesc_1arg;
9713 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9714 if (d->code == fcode)
5039610b 9715 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9716
37409796
NS
9717 /* Handle simple binary operations. */
9718 d = (struct builtin_description *) bdesc_2arg;
9719 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9720 if (d->code == fcode)
5039610b 9721 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9722
37409796 9723 /* Handle simple ternary operations. */
586de218 9724 d = bdesc_3arg;
37409796
NS
9725 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9726 if (d->code == fcode)
5039610b 9727 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9728
37409796 9729 gcc_unreachable ();
0ac081f6
AH
9730}
9731
9732static void
863d938c 9733rs6000_init_builtins (void)
0ac081f6 9734{
5afaa917
NS
9735 tree tdecl;
9736
4a5eab38
PB
9737 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9738 V2SF_type_node = build_vector_type (float_type_node, 2);
9739 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9740 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9741 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9742 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9743 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9744
9745 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9746 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9747 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9748
7c62e993
PB
9749 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9750 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9751 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
b6fc2cdb 9752 opaque_V4SI_type_node = build_opaque_vector_type (intSI_type_node, 4);
3fdaa45a 9753
8bb418a3
ZL
9754 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9755 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9756 'vector unsigned short'. */
9757
8dd16ecc
NS
9758 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9759 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9760 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9761 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9762
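/* Illustrative note, not part of the original source: because these are
   distinct type copies rather than typedefs, C++ overload resolution can tell
   the flavours apart, e.g.

       void f (vector bool char);
       void f (vector unsigned char);
       void f (vector pixel);

   are three different overloads, which is the "especially in C++ land"
   point of the comment above.  */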
58646b77
PB
9763 long_integer_type_internal_node = long_integer_type_node;
9764 long_unsigned_type_internal_node = long_unsigned_type_node;
9765 intQI_type_internal_node = intQI_type_node;
9766 uintQI_type_internal_node = unsigned_intQI_type_node;
9767 intHI_type_internal_node = intHI_type_node;
9768 uintHI_type_internal_node = unsigned_intHI_type_node;
9769 intSI_type_internal_node = intSI_type_node;
9770 uintSI_type_internal_node = unsigned_intSI_type_node;
9771 float_type_internal_node = float_type_node;
9772 void_type_internal_node = void_type_node;
9773
71856685
AH
9774 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9775 get_identifier ("__bool char"),
5afaa917
NS
9776 bool_char_type_node);
9777 TYPE_NAME (bool_char_type_node) = tdecl;
9778 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9779 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9780 get_identifier ("__bool short"),
5afaa917
NS
9781 bool_short_type_node);
9782 TYPE_NAME (bool_short_type_node) = tdecl;
9783 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9784 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9785 get_identifier ("__bool int"),
5afaa917
NS
9786 bool_int_type_node);
9787 TYPE_NAME (bool_int_type_node) = tdecl;
9788 (*lang_hooks.decls.pushdecl) (tdecl);
71856685 9789 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("__pixel"),
5afaa917
NS
9790 pixel_type_node);
9791 TYPE_NAME (pixel_type_node) = tdecl;
9792 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9793
4a5eab38
PB
9794 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9795 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9796 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9797 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3 9798
71856685
AH
9799 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9800 get_identifier ("__vector unsigned char"),
5afaa917
NS
9801 unsigned_V16QI_type_node);
9802 TYPE_NAME (unsigned_V16QI_type_node) = tdecl;
9803 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9804 tdecl = build_decl (BUILTINS_LOCATION,
9805 TYPE_DECL, get_identifier ("__vector signed char"),
5afaa917
NS
9806 V16QI_type_node);
9807 TYPE_NAME (V16QI_type_node) = tdecl;
9808 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9809 tdecl = build_decl (BUILTINS_LOCATION,
9810 TYPE_DECL, get_identifier ("__vector __bool char"),
5afaa917
NS
9811 bool_V16QI_type_node);
9812 TYPE_NAME (bool_V16QI_type_node) = tdecl;
9813 (*lang_hooks.decls.pushdecl) (tdecl);
9814
71856685
AH
9815 tdecl = build_decl (BUILTINS_LOCATION,
9816 TYPE_DECL, get_identifier ("__vector unsigned short"),
5afaa917
NS
9817 unsigned_V8HI_type_node);
9818 TYPE_NAME (unsigned_V8HI_type_node) = tdecl;
9819 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9820 tdecl = build_decl (BUILTINS_LOCATION,
9821 TYPE_DECL, get_identifier ("__vector signed short"),
5afaa917
NS
9822 V8HI_type_node);
9823 TYPE_NAME (V8HI_type_node) = tdecl;
9824 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9825 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9826 get_identifier ("__vector __bool short"),
5afaa917
NS
9827 bool_V8HI_type_node);
9828 TYPE_NAME (bool_V8HI_type_node) = tdecl;
9829 (*lang_hooks.decls.pushdecl) (tdecl);
9830
71856685
AH
9831 tdecl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
9832 get_identifier ("__vector unsigned int"),
5afaa917
NS
9833 unsigned_V4SI_type_node);
9834 TYPE_NAME (unsigned_V4SI_type_node) = tdecl;
9835 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9836 tdecl = build_decl (BUILTINS_LOCATION,
9837 TYPE_DECL, get_identifier ("__vector signed int"),
5afaa917
NS
9838 V4SI_type_node);
9839 TYPE_NAME (V4SI_type_node) = tdecl;
9840 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9841 tdecl = build_decl (BUILTINS_LOCATION,
9842 TYPE_DECL, get_identifier ("__vector __bool int"),
5afaa917
NS
9843 bool_V4SI_type_node);
9844 TYPE_NAME (bool_V4SI_type_node) = tdecl;
9845 (*lang_hooks.decls.pushdecl) (tdecl);
9846
71856685
AH
9847 tdecl = build_decl (BUILTINS_LOCATION,
9848 TYPE_DECL, get_identifier ("__vector float"),
5afaa917
NS
9849 V4SF_type_node);
9850 TYPE_NAME (V4SF_type_node) = tdecl;
9851 (*lang_hooks.decls.pushdecl) (tdecl);
71856685
AH
9852 tdecl = build_decl (BUILTINS_LOCATION,
9853 TYPE_DECL, get_identifier ("__vector __pixel"),
5afaa917
NS
9854 pixel_V8HI_type_node);
9855 TYPE_NAME (pixel_V8HI_type_node) = tdecl;
9856 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9857
96038623
DE
9858 if (TARGET_PAIRED_FLOAT)
9859 paired_init_builtins ();
a3170dc6 9860 if (TARGET_SPE)
3fdaa45a 9861 spe_init_builtins ();
0ac081f6
AH
9862 if (TARGET_ALTIVEC)
9863 altivec_init_builtins ();
96038623 9864 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9865 rs6000_common_init_builtins ();
9c78b944
DE
9866 if (TARGET_PPC_GFXOPT)
9867 {
9868 tree ftype = build_function_type_list (float_type_node,
9869 float_type_node,
9870 float_type_node,
9871 NULL_TREE);
9872 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9873 RS6000_BUILTIN_RECIPF);
9874
9875 ftype = build_function_type_list (float_type_node,
9876 float_type_node,
9877 NULL_TREE);
9878 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9879 RS6000_BUILTIN_RSQRTF);
9880 }
9881 if (TARGET_POPCNTB)
9882 {
9883 tree ftype = build_function_type_list (double_type_node,
9884 double_type_node,
9885 double_type_node,
9886 NULL_TREE);
9887 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9888 RS6000_BUILTIN_RECIP);
9889
9890 }
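/* Illustrative sketch, not part of the original source: with the declarations
   above in effect, user code can request the reciprocal-estimate based
   division and reciprocal square root directly, e.g.

       float q = __builtin_recipdivf (x, y);
       float r = __builtin_rsqrtf (x);
       double d = __builtin_recipdiv (a, b);

   subject to the MASK_PPC_GFXOPT / MASK_POPCNTB target masks under which
   they were defined.  */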
69ca3549
DE
9891
9892#if TARGET_XCOFF
9893 /* AIX libm provides clog as __clog. */
9894 if (built_in_decls [BUILT_IN_CLOG])
9895 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9896#endif
fb220235
FXC
9897
9898#ifdef SUBTARGET_INIT_BUILTINS
9899 SUBTARGET_INIT_BUILTINS;
9900#endif
0ac081f6
AH
9901}
9902
a3170dc6
AH
9903/* Search through a set of builtins and enable the mask bits.
9904 DESC is an array of builtins.
b6d08ca1 9905 SIZE is the total number of builtins.
a3170dc6
AH
9906 START is the builtin enum at which to start.
9907 END is the builtin enum at which to end. */
0ac081f6 9908static void
a2369ed3 9909enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9910 enum rs6000_builtins start,
a2369ed3 9911 enum rs6000_builtins end)
a3170dc6
AH
9912{
9913 int i;
9914
9915 for (i = 0; i < size; ++i)
9916 if (desc[i].code == start)
9917 break;
9918
9919 if (i == size)
9920 return;
9921
9922 for (; i < size; ++i)
9923 {
9924 /* Flip all the bits on. */
9925 desc[i].mask = target_flags;
9926 if (desc[i].code == end)
9927 break;
9928 }
9929}
9930
9931static void
863d938c 9932spe_init_builtins (void)
0ac081f6 9933{
a3170dc6
AH
9934 tree endlink = void_list_node;
9935 tree puint_type_node = build_pointer_type (unsigned_type_node);
9936 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9937 struct builtin_description *d;
0ac081f6
AH
9938 size_t i;
9939
a3170dc6
AH
9940 tree v2si_ftype_4_v2si
9941 = build_function_type
3fdaa45a
AH
9942 (opaque_V2SI_type_node,
9943 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9944 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9945 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9946 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9947 endlink)))));
9948
9949 tree v2sf_ftype_4_v2sf
9950 = build_function_type
3fdaa45a
AH
9951 (opaque_V2SF_type_node,
9952 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9953 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9954 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9955 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9956 endlink)))));
9957
9958 tree int_ftype_int_v2si_v2si
9959 = build_function_type
9960 (integer_type_node,
9961 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9962 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9963 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9964 endlink))));
9965
9966 tree int_ftype_int_v2sf_v2sf
9967 = build_function_type
9968 (integer_type_node,
9969 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9970 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9971 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9972 endlink))));
9973
9974 tree void_ftype_v2si_puint_int
9975 = build_function_type (void_type_node,
3fdaa45a 9976 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9977 tree_cons (NULL_TREE, puint_type_node,
9978 tree_cons (NULL_TREE,
9979 integer_type_node,
9980 endlink))));
9981
9982 tree void_ftype_v2si_puint_char
9983 = build_function_type (void_type_node,
3fdaa45a 9984 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9985 tree_cons (NULL_TREE, puint_type_node,
9986 tree_cons (NULL_TREE,
9987 char_type_node,
9988 endlink))));
9989
9990 tree void_ftype_v2si_pv2si_int
9991 = build_function_type (void_type_node,
3fdaa45a 9992 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9993 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9994 tree_cons (NULL_TREE,
9995 integer_type_node,
9996 endlink))));
9997
9998 tree void_ftype_v2si_pv2si_char
9999 = build_function_type (void_type_node,
3fdaa45a 10000 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 10001 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
10002 tree_cons (NULL_TREE,
10003 char_type_node,
10004 endlink))));
10005
10006 tree void_ftype_int
10007 = build_function_type (void_type_node,
10008 tree_cons (NULL_TREE, integer_type_node, endlink));
10009
10010 tree int_ftype_void
36e8d515 10011 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
10012
10013 tree v2si_ftype_pv2si_int
3fdaa45a 10014 = build_function_type (opaque_V2SI_type_node,
6035d635 10015 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
10016 tree_cons (NULL_TREE, integer_type_node,
10017 endlink)));
10018
10019 tree v2si_ftype_puint_int
3fdaa45a 10020 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
10021 tree_cons (NULL_TREE, puint_type_node,
10022 tree_cons (NULL_TREE, integer_type_node,
10023 endlink)));
10024
10025 tree v2si_ftype_pushort_int
3fdaa45a 10026 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
10027 tree_cons (NULL_TREE, pushort_type_node,
10028 tree_cons (NULL_TREE, integer_type_node,
10029 endlink)));
10030
00332c9f
AH
10031 tree v2si_ftype_signed_char
10032 = build_function_type (opaque_V2SI_type_node,
10033 tree_cons (NULL_TREE, signed_char_type_node,
10034 endlink));
10035
a3170dc6
AH
10036 /* The initialization of the simple binary and unary builtins is
10037 done in rs6000_common_init_builtins, but we have to enable the
10038 mask bits here manually because we have run out of `target_flags'
10039 bits. We really need to redesign this mask business. */
10040
10041 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
10042 ARRAY_SIZE (bdesc_2arg),
10043 SPE_BUILTIN_EVADDW,
10044 SPE_BUILTIN_EVXOR);
10045 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
10046 ARRAY_SIZE (bdesc_1arg),
10047 SPE_BUILTIN_EVABS,
10048 SPE_BUILTIN_EVSUBFUSIAAW);
10049 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
10050 ARRAY_SIZE (bdesc_spe_predicates),
10051 SPE_BUILTIN_EVCMPEQ,
10052 SPE_BUILTIN_EVFSTSTLT);
10053 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
10054 ARRAY_SIZE (bdesc_spe_evsel),
10055 SPE_BUILTIN_EVSEL_CMPGTS,
10056 SPE_BUILTIN_EVSEL_FSTSTEQ);
10057
36252949 10058 (*lang_hooks.decls.pushdecl)
71856685
AH
10059 (build_decl (BUILTINS_LOCATION, TYPE_DECL,
10060 get_identifier ("__ev64_opaque__"),
36252949
AH
10061 opaque_V2SI_type_node));
10062
a3170dc6 10063 /* Initialize irregular SPE builtins. */
f676971a 10064
a3170dc6
AH
10065 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
10066 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
10067 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
10068 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
10069 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
10070 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
10071 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
10072 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
10073 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
10074 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
10075 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
10076 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
10077 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
10078 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
10079 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
10080 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
10081 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
10082 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
10083
10084 /* Loads. */
10085 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
10086 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
10087 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
10088 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
10089 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
10090 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
10091 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
10092 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
10093 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
10094 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
10095 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
10096 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
10097 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
10098 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
10099 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
10100 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
10101 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
10102 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
10103 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
10104 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
10105 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
10106 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
10107
10108 /* Predicates. */
10109 d = (struct builtin_description *) bdesc_spe_predicates;
10110 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
10111 {
10112 tree type;
10113
10114 switch (insn_data[d->icode].operand[1].mode)
10115 {
10116 case V2SImode:
10117 type = int_ftype_int_v2si_v2si;
10118 break;
10119 case V2SFmode:
10120 type = int_ftype_int_v2sf_v2sf;
10121 break;
10122 default:
37409796 10123 gcc_unreachable ();
10124 }
10125
10126 def_builtin (d->mask, d->name, type, d->code);
10127 }
10128
10129 /* Evsel predicates. */
10130 d = (struct builtin_description *) bdesc_spe_evsel;
10131 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
10132 {
10133 tree type;
10134
10135 switch (insn_data[d->icode].operand[1].mode)
10136 {
10137 case V2SImode:
10138 type = v2si_ftype_4_v2si;
10139 break;
10140 case V2SFmode:
10141 type = v2sf_ftype_4_v2sf;
10142 break;
10143 default:
37409796 10144 gcc_unreachable ();
10145 }
10146
10147 def_builtin (d->mask, d->name, type, d->code);
10148 }
10149}
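/* Editor's note: the sketch below is illustrative only and is not the
   GCC implementation.  It shows one way the enable_mask_for_builtins
   calls at the top of spe_init_builtins could force-enable a range of
   descriptors once the `target_flags' bits ran out; the structure tag,
   field names and loop body here are assumptions patterned on the
   def_builtin (d->mask, d->name, ...) usage visible in this file.  */

struct bdesc_sketch
{
  unsigned int mask;   /* target mask later consulted by def_builtin */
  int code;            /* builtin function code */
};

static void
enable_mask_for_builtins_sketch (struct bdesc_sketch *desc, int count,
                                 int code_first, int code_last,
                                 unsigned int flags)
{
  int i;

  /* OR FLAGS into every descriptor whose code lies in the given range,
     so that the table-driven def_builtin loops treat those entries as
     enabled for the current target.  */
  for (i = 0; i < count; i++)
    if (desc[i].code >= code_first && desc[i].code <= code_last)
      desc[i].mask |= flags;
}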
10150
10151static void
10152paired_init_builtins (void)
10153{
23a651fc 10154 const struct builtin_description *d;
10155 size_t i;
10156 tree endlink = void_list_node;
10157
10158 tree int_ftype_int_v2sf_v2sf
10159 = build_function_type
10160 (integer_type_node,
10161 tree_cons (NULL_TREE, integer_type_node,
10162 tree_cons (NULL_TREE, V2SF_type_node,
10163 tree_cons (NULL_TREE, V2SF_type_node,
10164 endlink))));
10165 tree pcfloat_type_node =
10166 build_pointer_type (build_qualified_type
10167 (float_type_node, TYPE_QUAL_CONST));
10168
10169 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
10170 long_integer_type_node,
10171 pcfloat_type_node,
10172 NULL_TREE);
10173 tree void_ftype_v2sf_long_pcfloat =
10174 build_function_type_list (void_type_node,
10175 V2SF_type_node,
10176 long_integer_type_node,
10177 pcfloat_type_node,
10178 NULL_TREE);
10179
10180
10181 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
10182 PAIRED_BUILTIN_LX);
10183
10184
10185 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
10186 PAIRED_BUILTIN_STX);
10187
10188 /* Predicates. */
23a651fc 10189 d = bdesc_paired_preds;
10190 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
10191 {
10192 tree type;
10193
10194 switch (insn_data[d->icode].operand[1].mode)
10195 {
10196 case V2SFmode:
10197 type = int_ftype_int_v2sf_v2sf;
10198 break;
10199 default:
10200 gcc_unreachable ();
10201 }
10202
10203 def_builtin (d->mask, d->name, type, d->code);
10204 }
10205}
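/* Editor's note: usage sketch, not part of rs6000.c.  The two paired
   builtins registered just above have the shapes v2sf (long, const
   float *) and void (v2sf, long, const float *); the code below only
   restates those prototypes.  The vector typedef, the need for a
   paired-single target, and the exact load/store semantics are
   assumptions made for the example.  */

typedef float v2sf_sketch __attribute__ ((vector_size (8)));

static void
copy_float_pair (float *dst, const float *src)
{
  /* Load two floats from SRC + 0, then store them at DST + 0,
     following the (offset, base) argument order registered above.  */
  v2sf_sketch tmp = __builtin_paired_lx (0, src);
  __builtin_paired_stx (tmp, 0, dst);
}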
10206
a3170dc6 10207static void
863d938c 10208altivec_init_builtins (void)
a3170dc6 10209{
10210 const struct builtin_description *d;
10211 const struct builtin_description_predicates *dp;
a3170dc6 10212 size_t i;
10213 tree ftype;
10214
10215 tree pfloat_type_node = build_pointer_type (float_type_node);
10216 tree pint_type_node = build_pointer_type (integer_type_node);
10217 tree pshort_type_node = build_pointer_type (short_integer_type_node);
10218 tree pchar_type_node = build_pointer_type (char_type_node);
10219
10220 tree pvoid_type_node = build_pointer_type (void_type_node);
10221
10222 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
10223 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
10224 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
10225 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
10226
10227 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
10228
10229 tree int_ftype_opaque
10230 = build_function_type_list (integer_type_node,
10231 opaque_V4SI_type_node, NULL_TREE);
10232 tree opaque_ftype_opaque
10233 = build_function_type (integer_type_node,
10234 NULL_TREE);
10235 tree opaque_ftype_opaque_int
10236 = build_function_type_list (opaque_V4SI_type_node,
10237 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
10238 tree opaque_ftype_opaque_opaque_int
10239 = build_function_type_list (opaque_V4SI_type_node,
10240 opaque_V4SI_type_node, opaque_V4SI_type_node,
10241 integer_type_node, NULL_TREE);
10242 tree int_ftype_int_opaque_opaque
10243 = build_function_type_list (integer_type_node,
10244 integer_type_node, opaque_V4SI_type_node,
10245 opaque_V4SI_type_node, NULL_TREE);
10246 tree int_ftype_int_v4si_v4si
10247 = build_function_type_list (integer_type_node,
10248 integer_type_node, V4SI_type_node,
10249 V4SI_type_node, NULL_TREE);
10250 tree v4sf_ftype_pcfloat
10251 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 10252 tree void_ftype_pfloat_v4sf
b4de2f7d 10253 = build_function_type_list (void_type_node,
a3170dc6 10254 pfloat_type_node, V4SF_type_node, NULL_TREE);
10255 tree v4si_ftype_pcint
10256 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
10257 tree void_ftype_pint_v4si
10258 = build_function_type_list (void_type_node,
10259 pint_type_node, V4SI_type_node, NULL_TREE);
10260 tree v8hi_ftype_pcshort
10261 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 10262 tree void_ftype_pshort_v8hi
10263 = build_function_type_list (void_type_node,
10264 pshort_type_node, V8HI_type_node, NULL_TREE);
10265 tree v16qi_ftype_pcchar
10266 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 10267 tree void_ftype_pchar_v16qi
10268 = build_function_type_list (void_type_node,
10269 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 10270 tree void_ftype_v4si
b4de2f7d 10271 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
10272 tree v8hi_ftype_void
10273 = build_function_type (V8HI_type_node, void_list_node);
10274 tree void_ftype_void
10275 = build_function_type (void_type_node, void_list_node);
10276 tree void_ftype_int
10277 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 10278
10279 tree opaque_ftype_long_pcvoid
10280 = build_function_type_list (opaque_V4SI_type_node,
10281 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 10282 tree v16qi_ftype_long_pcvoid
a3170dc6 10283 = build_function_type_list (V16QI_type_node,
10284 long_integer_type_node, pcvoid_type_node, NULL_TREE);
10285 tree v8hi_ftype_long_pcvoid
a3170dc6 10286 = build_function_type_list (V8HI_type_node,
10287 long_integer_type_node, pcvoid_type_node, NULL_TREE);
10288 tree v4si_ftype_long_pcvoid
a3170dc6 10289 = build_function_type_list (V4SI_type_node,
b4a62fa0 10290 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 10291
10292 tree void_ftype_opaque_long_pvoid
10293 = build_function_type_list (void_type_node,
10294 opaque_V4SI_type_node, long_integer_type_node,
10295 pvoid_type_node, NULL_TREE);
b4a62fa0 10296 tree void_ftype_v4si_long_pvoid
b4de2f7d 10297 = build_function_type_list (void_type_node,
b4a62fa0 10298 V4SI_type_node, long_integer_type_node,
b4de2f7d 10299 pvoid_type_node, NULL_TREE);
b4a62fa0 10300 tree void_ftype_v16qi_long_pvoid
b4de2f7d 10301 = build_function_type_list (void_type_node,
b4a62fa0 10302 V16QI_type_node, long_integer_type_node,
b4de2f7d 10303 pvoid_type_node, NULL_TREE);
b4a62fa0 10304 tree void_ftype_v8hi_long_pvoid
b4de2f7d 10305 = build_function_type_list (void_type_node,
b4a62fa0 10306 V8HI_type_node, long_integer_type_node,
b4de2f7d 10307 pvoid_type_node, NULL_TREE);
10308 tree int_ftype_int_v8hi_v8hi
10309 = build_function_type_list (integer_type_node,
10310 integer_type_node, V8HI_type_node,
10311 V8HI_type_node, NULL_TREE);
10312 tree int_ftype_int_v16qi_v16qi
10313 = build_function_type_list (integer_type_node,
10314 integer_type_node, V16QI_type_node,
10315 V16QI_type_node, NULL_TREE);
10316 tree int_ftype_int_v4sf_v4sf
10317 = build_function_type_list (integer_type_node,
10318 integer_type_node, V4SF_type_node,
10319 V4SF_type_node, NULL_TREE);
10320 tree v4si_ftype_v4si
10321 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
10322 tree v8hi_ftype_v8hi
10323 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
10324 tree v16qi_ftype_v16qi
10325 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
10326 tree v4sf_ftype_v4sf
10327 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 10328 tree void_ftype_pcvoid_int_int
a3170dc6 10329 = build_function_type_list (void_type_node,
0dbc3651 10330 pcvoid_type_node, integer_type_node,
8bb418a3 10331 integer_type_node, NULL_TREE);
8bb418a3 10332
10333 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
10334 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
10335 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
10336 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
10337 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
10338 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
10339 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
10340 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
10341 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
10342 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
10343 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
10344 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
10345 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
10346 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
10347 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
10348 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
10349 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
10350 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
10351 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 10352 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
10353 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
10354 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
10355 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
10356 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
10357 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
10358 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
10359 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
10360 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
10361 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
10362 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
10363 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
10364 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
10365 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
10366 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
10367 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
10368 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
10369 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
10370 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
10371 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
10372 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
10373 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
10374 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
10375 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
10376 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
10377 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
10378 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
10379
10380 if (rs6000_cpu == PROCESSOR_CELL)
10381 {
10382 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
10383 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
10384 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
10385 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
10386
10387 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
10388 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
10389 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
10390 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
10391
10392 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
10393 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10394 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10395 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10396
10397 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10398 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10399 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10400 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10401 }
58646b77 10402 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
10403 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10404 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
10405
10406 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10407 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
10408 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10409 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
10410 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10411 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10412 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10413 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10414 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10415 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10416 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10417 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 10418
a3170dc6 10419 /* Add the DST variants. */
586de218 10420 d = bdesc_dst;
a3170dc6 10421 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 10422 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
10423
10424 /* Initialize the predicates. */
586de218 10425 dp = bdesc_altivec_preds;
10426 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10427 {
10428 enum machine_mode mode1;
10429 tree type;
10430 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10431 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10432
10433 if (is_overloaded)
10434 mode1 = VOIDmode;
10435 else
10436 mode1 = insn_data[dp->icode].operand[1].mode;
10437
10438 switch (mode1)
10439 {
10440 case VOIDmode:
10441 type = int_ftype_int_opaque_opaque;
10442 break;
10443 case V4SImode:
10444 type = int_ftype_int_v4si_v4si;
10445 break;
10446 case V8HImode:
10447 type = int_ftype_int_v8hi_v8hi;
10448 break;
10449 case V16QImode:
10450 type = int_ftype_int_v16qi_v16qi;
10451 break;
10452 case V4SFmode:
10453 type = int_ftype_int_v4sf_v4sf;
10454 break;
10455 default:
37409796 10456 gcc_unreachable ();
a3170dc6 10457 }
f676971a 10458
10459 def_builtin (dp->mask, dp->name, type, dp->code);
10460 }
10461
10462 /* Initialize the abs* operators. */
586de218 10463 d = bdesc_abs;
10464 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10465 {
10466 enum machine_mode mode0;
10467 tree type;
10468
10469 mode0 = insn_data[d->icode].operand[0].mode;
10470
10471 switch (mode0)
10472 {
10473 case V4SImode:
10474 type = v4si_ftype_v4si;
10475 break;
10476 case V8HImode:
10477 type = v8hi_ftype_v8hi;
10478 break;
10479 case V16QImode:
10480 type = v16qi_ftype_v16qi;
10481 break;
10482 case V4SFmode:
10483 type = v4sf_ftype_v4sf;
10484 break;
10485 default:
37409796 10486 gcc_unreachable ();
a3170dc6 10487 }
f676971a 10488
10489 def_builtin (d->mask, d->name, type, d->code);
10490 }
7ccf35ed 10491
10492 if (TARGET_ALTIVEC)
10493 {
10494 tree decl;
10495
10496 /* Initialize target builtin that implements
10497 targetm.vectorize.builtin_mask_for_load. */
10498
10499 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10500 v16qi_ftype_long_pcvoid,
10501 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
10502 BUILT_IN_MD, NULL, NULL_TREE);
10503 TREE_READONLY (decl) = 1;
10504 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10505 altivec_builtin_mask_for_load = decl;
13c62176 10506 }
10507
10508 /* Access to the vec_init patterns. */
10509 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10510 integer_type_node, integer_type_node,
10511 integer_type_node, NULL_TREE);
10512 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10513 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10514
10515 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10516 short_integer_type_node,
10517 short_integer_type_node,
10518 short_integer_type_node,
10519 short_integer_type_node,
10520 short_integer_type_node,
10521 short_integer_type_node,
10522 short_integer_type_node, NULL_TREE);
10523 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10524 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10525
10526 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10527 char_type_node, char_type_node,
10528 char_type_node, char_type_node,
10529 char_type_node, char_type_node,
10530 char_type_node, char_type_node,
10531 char_type_node, char_type_node,
10532 char_type_node, char_type_node,
10533 char_type_node, char_type_node,
10534 char_type_node, NULL_TREE);
10535 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10536 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10537
10538 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10539 float_type_node, float_type_node,
10540 float_type_node, NULL_TREE);
10541 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10542 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10543
10544 /* Access to the vec_set patterns. */
10545 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10546 intSI_type_node,
10547 integer_type_node, NULL_TREE);
10548 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10549 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10550
10551 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10552 intHI_type_node,
10553 integer_type_node, NULL_TREE);
10554 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10555 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10556
10557 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
10558 intQI_type_node,
10559 integer_type_node, NULL_TREE);
10560 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10561 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10562
10563 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10564 float_type_node,
10565 integer_type_node, NULL_TREE);
10566 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10567 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10568
10569 /* Access to the vec_extract patterns. */
10570 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10571 integer_type_node, NULL_TREE);
10572 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10573 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10574
10575 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10576 integer_type_node, NULL_TREE);
10577 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10578 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10579
10580 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10581 integer_type_node, NULL_TREE);
10582 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10583 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10584
10585 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10586 integer_type_node, NULL_TREE);
10587 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10588 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
10589}
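/* Editor's note: usage sketch, not part of rs6000.c.  It only restates
   the prototypes registered above for the vec_init and vec_extract
   access builtins: four ints in, one V4SI out, and vector plus element
   number in, int out.  The generic vector typedef and the meaning of
   the trailing index argument are assumptions for the example; user
   code normally reaches these patterns through vector initializers and
   vec_insert/vec_extract rather than by calling the builtins directly. */

typedef int v4si_sketch __attribute__ ((vector_size (16)));

static v4si_sketch
make_v4si (int a, int b, int c, int d)
{
  /* Matches the V4SI (int, int, int, int) prototype registered for
     __builtin_vec_init_v4si.  */
  return __builtin_vec_init_v4si (a, b, c, d);
}

static int
third_element (v4si_sketch v)
{
  /* Matches the int (V4SI, int) prototype registered for
     __builtin_vec_ext_v4si; treating the 2 as the element number is
     an assumption here.  */
  return __builtin_vec_ext_v4si (v, 2);
}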
10590
10591static void
863d938c 10592rs6000_common_init_builtins (void)
a3170dc6 10593{
586de218 10594 const struct builtin_description *d;
10595 size_t i;
10596
10597 tree v2sf_ftype_v2sf_v2sf_v2sf
10598 = build_function_type_list (V2SF_type_node,
10599 V2SF_type_node, V2SF_type_node,
10600 V2SF_type_node, NULL_TREE);
10601
10602 tree v4sf_ftype_v4sf_v4sf_v16qi
10603 = build_function_type_list (V4SF_type_node,
10604 V4SF_type_node, V4SF_type_node,
10605 V16QI_type_node, NULL_TREE);
10606 tree v4si_ftype_v4si_v4si_v16qi
10607 = build_function_type_list (V4SI_type_node,
10608 V4SI_type_node, V4SI_type_node,
10609 V16QI_type_node, NULL_TREE);
10610 tree v8hi_ftype_v8hi_v8hi_v16qi
10611 = build_function_type_list (V8HI_type_node,
10612 V8HI_type_node, V8HI_type_node,
10613 V16QI_type_node, NULL_TREE);
10614 tree v16qi_ftype_v16qi_v16qi_v16qi
10615 = build_function_type_list (V16QI_type_node,
10616 V16QI_type_node, V16QI_type_node,
10617 V16QI_type_node, NULL_TREE);
10618 tree v4si_ftype_int
10619 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10620 tree v8hi_ftype_int
10621 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10622 tree v16qi_ftype_int
10623 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
10624 tree v8hi_ftype_v16qi
10625 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10626 tree v4sf_ftype_v4sf
10627 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10628
10629 tree v2si_ftype_v2si_v2si
10630 = build_function_type_list (opaque_V2SI_type_node,
10631 opaque_V2SI_type_node,
10632 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10633
96038623 10634 tree v2sf_ftype_v2sf_v2sf_spe
10635 = build_function_type_list (opaque_V2SF_type_node,
10636 opaque_V2SF_type_node,
10637 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10638
10639 tree v2sf_ftype_v2sf_v2sf
10640 = build_function_type_list (V2SF_type_node,
10641 V2SF_type_node,
10642 V2SF_type_node, NULL_TREE);
10643
10644
a3170dc6 10645 tree v2si_ftype_int_int
2abe3e28 10646 = build_function_type_list (opaque_V2SI_type_node,
10647 integer_type_node, integer_type_node,
10648 NULL_TREE);
10649
10650 tree opaque_ftype_opaque
10651 = build_function_type_list (opaque_V4SI_type_node,
10652 opaque_V4SI_type_node, NULL_TREE);
10653
a3170dc6 10654 tree v2si_ftype_v2si
10655 = build_function_type_list (opaque_V2SI_type_node,
10656 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10657
96038623 10658 tree v2sf_ftype_v2sf_spe
10659 = build_function_type_list (opaque_V2SF_type_node,
10660 opaque_V2SF_type_node, NULL_TREE);
f676971a 10661
10662 tree v2sf_ftype_v2sf
10663 = build_function_type_list (V2SF_type_node,
10664 V2SF_type_node, NULL_TREE);
10665
a3170dc6 10666 tree v2sf_ftype_v2si
10667 = build_function_type_list (opaque_V2SF_type_node,
10668 opaque_V2SI_type_node, NULL_TREE);
10669
10670 tree v2si_ftype_v2sf
10671 = build_function_type_list (opaque_V2SI_type_node,
10672 opaque_V2SF_type_node, NULL_TREE);
10673
10674 tree v2si_ftype_v2si_char
10675 = build_function_type_list (opaque_V2SI_type_node,
10676 opaque_V2SI_type_node,
10677 char_type_node, NULL_TREE);
10678
10679 tree v2si_ftype_int_char
2abe3e28 10680 = build_function_type_list (opaque_V2SI_type_node,
10681 integer_type_node, char_type_node, NULL_TREE);
10682
10683 tree v2si_ftype_char
10684 = build_function_type_list (opaque_V2SI_type_node,
10685 char_type_node, NULL_TREE);
10686
10687 tree int_ftype_int_int
10688 = build_function_type_list (integer_type_node,
10689 integer_type_node, integer_type_node,
10690 NULL_TREE);
95385cbb 10691
10692 tree opaque_ftype_opaque_opaque
10693 = build_function_type_list (opaque_V4SI_type_node,
10694 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10695 tree v4si_ftype_v4si_v4si
10696 = build_function_type_list (V4SI_type_node,
10697 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10698 tree v4sf_ftype_v4si_int
b4de2f7d 10699 = build_function_type_list (V4SF_type_node,
10700 V4SI_type_node, integer_type_node, NULL_TREE);
10701 tree v4si_ftype_v4sf_int
b4de2f7d 10702 = build_function_type_list (V4SI_type_node,
10703 V4SF_type_node, integer_type_node, NULL_TREE);
10704 tree v4si_ftype_v4si_int
b4de2f7d 10705 = build_function_type_list (V4SI_type_node,
10706 V4SI_type_node, integer_type_node, NULL_TREE);
10707 tree v8hi_ftype_v8hi_int
b4de2f7d 10708 = build_function_type_list (V8HI_type_node,
10709 V8HI_type_node, integer_type_node, NULL_TREE);
10710 tree v16qi_ftype_v16qi_int
b4de2f7d 10711 = build_function_type_list (V16QI_type_node,
10712 V16QI_type_node, integer_type_node, NULL_TREE);
10713 tree v16qi_ftype_v16qi_v16qi_int
10714 = build_function_type_list (V16QI_type_node,
10715 V16QI_type_node, V16QI_type_node,
10716 integer_type_node, NULL_TREE);
10717 tree v8hi_ftype_v8hi_v8hi_int
10718 = build_function_type_list (V8HI_type_node,
10719 V8HI_type_node, V8HI_type_node,
10720 integer_type_node, NULL_TREE);
10721 tree v4si_ftype_v4si_v4si_int
10722 = build_function_type_list (V4SI_type_node,
10723 V4SI_type_node, V4SI_type_node,
10724 integer_type_node, NULL_TREE);
10725 tree v4sf_ftype_v4sf_v4sf_int
10726 = build_function_type_list (V4SF_type_node,
10727 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10728 integer_type_node, NULL_TREE);
0ac081f6 10729 tree v4sf_ftype_v4sf_v4sf
10730 = build_function_type_list (V4SF_type_node,
10731 V4SF_type_node, V4SF_type_node, NULL_TREE);
10732 tree opaque_ftype_opaque_opaque_opaque
10733 = build_function_type_list (opaque_V4SI_type_node,
10734 opaque_V4SI_type_node, opaque_V4SI_type_node,
10735 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10736 tree v4sf_ftype_v4sf_v4sf_v4si
10737 = build_function_type_list (V4SF_type_node,
10738 V4SF_type_node, V4SF_type_node,
10739 V4SI_type_node, NULL_TREE);
2212663f 10740 tree v4sf_ftype_v4sf_v4sf_v4sf
10741 = build_function_type_list (V4SF_type_node,
10742 V4SF_type_node, V4SF_type_node,
10743 V4SF_type_node, NULL_TREE);
f676971a 10744 tree v4si_ftype_v4si_v4si_v4si
10745 = build_function_type_list (V4SI_type_node,
10746 V4SI_type_node, V4SI_type_node,
10747 V4SI_type_node, NULL_TREE);
0ac081f6 10748 tree v8hi_ftype_v8hi_v8hi
10749 = build_function_type_list (V8HI_type_node,
10750 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10751 tree v8hi_ftype_v8hi_v8hi_v8hi
10752 = build_function_type_list (V8HI_type_node,
10753 V8HI_type_node, V8HI_type_node,
10754 V8HI_type_node, NULL_TREE);
c4ad648e 10755 tree v4si_ftype_v8hi_v8hi_v4si
10756 = build_function_type_list (V4SI_type_node,
10757 V8HI_type_node, V8HI_type_node,
10758 V4SI_type_node, NULL_TREE);
c4ad648e 10759 tree v4si_ftype_v16qi_v16qi_v4si
10760 = build_function_type_list (V4SI_type_node,
10761 V16QI_type_node, V16QI_type_node,
10762 V4SI_type_node, NULL_TREE);
0ac081f6 10763 tree v16qi_ftype_v16qi_v16qi
10764 = build_function_type_list (V16QI_type_node,
10765 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10766 tree v4si_ftype_v4sf_v4sf
10767 = build_function_type_list (V4SI_type_node,
10768 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10769 tree v8hi_ftype_v16qi_v16qi
10770 = build_function_type_list (V8HI_type_node,
10771 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10772 tree v4si_ftype_v8hi_v8hi
10773 = build_function_type_list (V4SI_type_node,
10774 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10775 tree v8hi_ftype_v4si_v4si
10776 = build_function_type_list (V8HI_type_node,
10777 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10778 tree v16qi_ftype_v8hi_v8hi
10779 = build_function_type_list (V16QI_type_node,
10780 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10781 tree v4si_ftype_v16qi_v4si
10782 = build_function_type_list (V4SI_type_node,
10783 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10784 tree v4si_ftype_v16qi_v16qi
10785 = build_function_type_list (V4SI_type_node,
10786 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10787 tree v4si_ftype_v8hi_v4si
10788 = build_function_type_list (V4SI_type_node,
10789 V8HI_type_node, V4SI_type_node, NULL_TREE);
10790 tree v4si_ftype_v8hi
10791 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10792 tree int_ftype_v4si_v4si
10793 = build_function_type_list (integer_type_node,
10794 V4SI_type_node, V4SI_type_node, NULL_TREE);
10795 tree int_ftype_v4sf_v4sf
10796 = build_function_type_list (integer_type_node,
10797 V4SF_type_node, V4SF_type_node, NULL_TREE);
10798 tree int_ftype_v16qi_v16qi
10799 = build_function_type_list (integer_type_node,
10800 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10801 tree int_ftype_v8hi_v8hi
10802 = build_function_type_list (integer_type_node,
10803 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10804
6f317ef3 10805 /* Add the simple ternary operators. */
586de218 10806 d = bdesc_3arg;
ca7558fc 10807 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10808 {
10809 enum machine_mode mode0, mode1, mode2, mode3;
10810 tree type;
10811 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10812 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10813
10814 if (is_overloaded)
10815 {
10816 mode0 = VOIDmode;
10817 mode1 = VOIDmode;
10818 mode2 = VOIDmode;
10819 mode3 = VOIDmode;
10820 }
10821 else
10822 {
10823 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10824 continue;
f676971a 10825
10826 mode0 = insn_data[d->icode].operand[0].mode;
10827 mode1 = insn_data[d->icode].operand[1].mode;
10828 mode2 = insn_data[d->icode].operand[2].mode;
10829 mode3 = insn_data[d->icode].operand[3].mode;
10830 }
bb8df8a6 10831
10832 /* When all four are of the same mode. */
10833 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10834 {
10835 switch (mode0)
10836 {
10837 case VOIDmode:
10838 type = opaque_ftype_opaque_opaque_opaque;
10839 break;
10840 case V4SImode:
10841 type = v4si_ftype_v4si_v4si_v4si;
10842 break;
10843 case V4SFmode:
10844 type = v4sf_ftype_v4sf_v4sf_v4sf;
10845 break;
10846 case V8HImode:
10847 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10848 break;
10849 case V16QImode:
10850 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10851 break;
10852 case V2SFmode:
10853 type = v2sf_ftype_v2sf_v2sf_v2sf;
10854 break;
2212663f 10855 default:
37409796 10856 gcc_unreachable ();
10857 }
10858 }
10859 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10860 {
10861 switch (mode0)
10862 {
10863 case V4SImode:
10864 type = v4si_ftype_v4si_v4si_v16qi;
10865 break;
10866 case V4SFmode:
10867 type = v4sf_ftype_v4sf_v4sf_v16qi;
10868 break;
10869 case V8HImode:
10870 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10871 break;
10872 case V16QImode:
10873 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10874 break;
2212663f 10875 default:
37409796 10876 gcc_unreachable ();
10877 }
10878 }
f676971a 10879 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10880 && mode3 == V4SImode)
24408032 10881 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10882 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10883 && mode3 == V4SImode)
24408032 10884 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10885 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10886 && mode3 == V4SImode)
10887 type = v4sf_ftype_v4sf_v4sf_v4si;
10888
a7b376ee 10889 /* vchar, vchar, vchar, 4-bit literal. */
10890 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10891 && mode3 == QImode)
b9e4e5d1 10892 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10893
a7b376ee 10894 /* vshort, vshort, vshort, 4-bit literal. */
10895 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10896 && mode3 == QImode)
b9e4e5d1 10897 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10898
a7b376ee 10899 /* vint, vint, vint, 4-bit literal. */
10900 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10901 && mode3 == QImode)
b9e4e5d1 10902 type = v4si_ftype_v4si_v4si_int;
24408032 10903
a7b376ee 10904 /* vfloat, vfloat, vfloat, 4-bit literal. */
10905 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10906 && mode3 == QImode)
b9e4e5d1 10907 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10908
2212663f 10909 else
37409796 10910 gcc_unreachable ();
10911
10912 def_builtin (d->mask, d->name, type, d->code);
10913 }
10914
0ac081f6 10915 /* Add the simple binary operators. */
00b960c7 10916 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10917 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10918 {
10919 enum machine_mode mode0, mode1, mode2;
10920 tree type;
10921 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10922 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10923
10924 if (is_overloaded)
10925 {
10926 mode0 = VOIDmode;
10927 mode1 = VOIDmode;
10928 mode2 = VOIDmode;
10929 }
10930 else
bb8df8a6 10931 {
10932 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10933 continue;
f676971a 10934
10935 mode0 = insn_data[d->icode].operand[0].mode;
10936 mode1 = insn_data[d->icode].operand[1].mode;
10937 mode2 = insn_data[d->icode].operand[2].mode;
10938 }
10939
10940 /* When all three operands are of the same mode. */
10941 if (mode0 == mode1 && mode1 == mode2)
10942 {
10943 switch (mode0)
10944 {
10945 case VOIDmode:
10946 type = opaque_ftype_opaque_opaque;
10947 break;
10948 case V4SFmode:
10949 type = v4sf_ftype_v4sf_v4sf;
10950 break;
10951 case V4SImode:
10952 type = v4si_ftype_v4si_v4si;
10953 break;
10954 case V16QImode:
10955 type = v16qi_ftype_v16qi_v16qi;
10956 break;
10957 case V8HImode:
10958 type = v8hi_ftype_v8hi_v8hi;
10959 break;
10960 case V2SImode:
10961 type = v2si_ftype_v2si_v2si;
10962 break;
10963 case V2SFmode:
10964 if (TARGET_PAIRED_FLOAT)
10965 type = v2sf_ftype_v2sf_v2sf;
10966 else
10967 type = v2sf_ftype_v2sf_v2sf_spe;
10968 break;
10969 case SImode:
10970 type = int_ftype_int_int;
10971 break;
0ac081f6 10972 default:
37409796 10973 gcc_unreachable ();
10974 }
10975 }
10976
10977 /* A few other combos we really don't want to do manually. */
10978
10979 /* vint, vfloat, vfloat. */
10980 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10981 type = v4si_ftype_v4sf_v4sf;
10982
10983 /* vshort, vchar, vchar. */
10984 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10985 type = v8hi_ftype_v16qi_v16qi;
10986
10987 /* vint, vshort, vshort. */
10988 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10989 type = v4si_ftype_v8hi_v8hi;
10990
10991 /* vshort, vint, vint. */
10992 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10993 type = v8hi_ftype_v4si_v4si;
10994
10995 /* vchar, vshort, vshort. */
10996 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10997 type = v16qi_ftype_v8hi_v8hi;
10998
10999 /* vint, vchar, vint. */
11000 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
11001 type = v4si_ftype_v16qi_v4si;
11002
11003 /* vint, vchar, vchar. */
11004 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
11005 type = v4si_ftype_v16qi_v16qi;
11006
11007 /* vint, vshort, vint. */
11008 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
11009 type = v4si_ftype_v8hi_v4si;
f676971a 11010
a7b376ee 11011 /* vint, vint, 5-bit literal. */
2212663f 11012 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 11013 type = v4si_ftype_v4si_int;
f676971a 11014
a7b376ee 11015 /* vshort, vshort, 5-bit literal. */
2212663f 11016 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 11017 type = v8hi_ftype_v8hi_int;
f676971a 11018
a7b376ee 11019 /* vchar, vchar, 5-bit literal. */
2212663f 11020 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 11021 type = v16qi_ftype_v16qi_int;
0ac081f6 11022
a7b376ee 11023 /* vfloat, vint, 5-bit literal. */
617e0e1d 11024 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 11025 type = v4sf_ftype_v4si_int;
f676971a 11026
a7b376ee 11027 /* vint, vfloat, 5-bit literal. */
617e0e1d 11028 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 11029 type = v4si_ftype_v4sf_int;
617e0e1d 11030
11031 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
11032 type = v2si_ftype_int_int;
11033
11034 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
11035 type = v2si_ftype_v2si_char;
11036
11037 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
11038 type = v2si_ftype_int_char;
11039
37409796 11040 else
0ac081f6 11041 {
11042 /* int, x, x. */
11043 gcc_assert (mode0 == SImode);
11044 switch (mode1)
11045 {
11046 case V4SImode:
11047 type = int_ftype_v4si_v4si;
11048 break;
11049 case V4SFmode:
11050 type = int_ftype_v4sf_v4sf;
11051 break;
11052 case V16QImode:
11053 type = int_ftype_v16qi_v16qi;
11054 break;
11055 case V8HImode:
11056 type = int_ftype_v8hi_v8hi;
11057 break;
11058 default:
37409796 11059 gcc_unreachable ();
11060 }
11061 }
11062
11063 def_builtin (d->mask, d->name, type, d->code);
11064 }
24408032 11065
11066 /* Add the simple unary operators. */
11067 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 11068 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
11069 {
11070 enum machine_mode mode0, mode1;
11071 tree type;
11072 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
11073 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
11074
11075 if (is_overloaded)
11076 {
11077 mode0 = VOIDmode;
11078 mode1 = VOIDmode;
11079 }
11080 else
11081 {
11082 if (d->name == 0 || d->icode == CODE_FOR_nothing)
11083 continue;
bb8df8a6 11084
11085 mode0 = insn_data[d->icode].operand[0].mode;
11086 mode1 = insn_data[d->icode].operand[1].mode;
11087 }
11088
11089 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 11090 type = v4si_ftype_int;
2212663f 11091 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 11092 type = v8hi_ftype_int;
2212663f 11093 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 11094 type = v16qi_ftype_int;
11095 else if (mode0 == VOIDmode && mode1 == VOIDmode)
11096 type = opaque_ftype_opaque;
11097 else if (mode0 == V4SFmode && mode1 == V4SFmode)
11098 type = v4sf_ftype_v4sf;
11099 else if (mode0 == V8HImode && mode1 == V16QImode)
11100 type = v8hi_ftype_v16qi;
11101 else if (mode0 == V4SImode && mode1 == V8HImode)
11102 type = v4si_ftype_v8hi;
11103 else if (mode0 == V2SImode && mode1 == V2SImode)
11104 type = v2si_ftype_v2si;
11105 else if (mode0 == V2SFmode && mode1 == V2SFmode)
11106 {
11107 if (TARGET_PAIRED_FLOAT)
11108 type = v2sf_ftype_v2sf;
11109 else
11110 type = v2sf_ftype_v2sf_spe;
11111 }
11112 else if (mode0 == V2SFmode && mode1 == V2SImode)
11113 type = v2sf_ftype_v2si;
11114 else if (mode0 == V2SImode && mode1 == V2SFmode)
11115 type = v2si_ftype_v2sf;
11116 else if (mode0 == V2SImode && mode1 == QImode)
11117 type = v2si_ftype_char;
2212663f 11118 else
37409796 11119 gcc_unreachable ();
2212663f 11120
11121 def_builtin (d->mask, d->name, type, d->code);
11122 }
11123}
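/* Editor's note: standalone sketch, not GCC code.  All three loops in
   rs6000_common_init_builtins above follow the same shape: walk a
   descriptor table, skip incomplete entries, read the operand modes
   from insn_data (or force VOIDmode for overloaded codes), map the
   mode combination to a prototype, and register the builtin.  The
   strings and the tiny table below are placeholders standing in for
   the tree types and insn_data lookups used in the real code.  */

#include <string.h>

struct mode_to_type_sketch
{
  const char *mode0, *mode1;    /* result and operand mode names */
  const char *prototype;        /* prototype that the pair selects */
};

/* A few of the (result, operand) pairs handled by the unary loop above. */
static const struct mode_to_type_sketch unary_map_sketch[] = {
  { "V4SF", "V4SF",  "v4sf_ftype_v4sf"     },
  { "V8HI", "V16QI", "v8hi_ftype_v16qi"    },
  { "V4SI", "V8HI",  "v4si_ftype_v8hi"     },
  { "VOID", "VOID",  "opaque_ftype_opaque" },
};

/* Return the prototype selected for a mode pair, or NULL when the pair
   is not handled (the real loop calls gcc_unreachable instead).  */
static const char *
pick_unary_prototype_sketch (const char *mode0, const char *mode1)
{
  size_t i;

  for (i = 0; i < sizeof unary_map_sketch / sizeof unary_map_sketch[0]; i++)
    if (!strcmp (unary_map_sketch[i].mode0, mode0)
        && !strcmp (unary_map_sketch[i].mode1, mode1))
      return unary_map_sketch[i].prototype;
  return NULL;
}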
11124
11125static void
11126rs6000_init_libfuncs (void)
11127{
11128 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
11129 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 11130 {
11131 /* AIX library routines for float->int conversion. */
11132 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
11133 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
11134 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
11135 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
11136 }
c15c90bb 11137
602ea4d3 11138 if (!TARGET_IEEEQUAD)
98c41d98 11139 /* AIX/Darwin/64-bit Linux quad floating point routines. */
11140 if (!TARGET_XL_COMPAT)
11141 {
11142 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
11143 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
11144 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
11145 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 11146
17caeff2 11147 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
11148 {
11149 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
11150 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
11151 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
11152 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
11153 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
11154 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
11155 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
11156
11157 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
11158 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
11159 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
11160 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
11161 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
11162 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
11163 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
11164 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
11165 }
11166
11167 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
11168 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
11169 }
11170 else
11171 {
11172 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
11173 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
11174 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
11175 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
11176 }
c9034561 11177 else
c15c90bb 11178 {
c9034561 11179 /* 32-bit SVR4 quad floating point routines. */
11180
11181 set_optab_libfunc (add_optab, TFmode, "_q_add");
11182 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
11183 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
11184 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
11185 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
11186 if (TARGET_PPC_GPOPT || TARGET_POWER2)
11187 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
11188
11189 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
11190 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
11191 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
11192 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
11193 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
11194 set_optab_libfunc (le_optab, TFmode, "_q_fle");
11195
11196 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
11197 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
11198 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
11199 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
11200 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
11201 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
11202 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 11203 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
11204 }
11205}
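/* Editor's note: illustrative user-level example, not part of rs6000.c.
   With the mappings installed above (the !TARGET_XL_COMPAT branch), a
   TFmode addition that is not open-coded is emitted as a call to
   __gcc_qadd; the XL-compatible branch uses _xlqadd and the 32-bit
   SVR4 branch uses _q_add.  Whether a given configuration actually
   maps long double to TFmode depends on the target and command-line
   options, so treat this only as a sketch of the mapping.  */

long double
add_long_double (long double x, long double y)
{
  /* On an affected configuration this addition becomes a libcall to
     one of the quad-float routines named above.  */
  return x + y;
}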
11206
11207\f
11208/* Expand a block clear operation, and return 1 if successful. Return 0
11209 if we should let the compiler generate normal code.
11210
11211 operands[0] is the destination
11212 operands[1] is the length
57e84f18 11213 operands[3] is the alignment */
11214
11215int
11216expand_block_clear (rtx operands[])
11217{
11218 rtx orig_dest = operands[0];
11219 rtx bytes_rtx = operands[1];
57e84f18 11220 rtx align_rtx = operands[3];
11221 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
11222 HOST_WIDE_INT align;
11223 HOST_WIDE_INT bytes;
11224 int offset;
11225 int clear_bytes;
5514620a 11226 int clear_step;
11227
11228 /* If this is not a fixed size clear, just call memset */
11229 if (! constp)
11230 return 0;
11231
11232 /* This must be a fixed size alignment */
11233 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
11234 align = INTVAL (align_rtx) * BITS_PER_UNIT;
11235
11236 /* Anything to clear? */
11237 bytes = INTVAL (bytes_rtx);
11238 if (bytes <= 0)
11239 return 1;
11240
11241 /* Use the builtin memset after a point, to avoid huge code bloat.
11242 When optimize_size, avoid any significant code bloat; calling
11243 memset is about 4 instructions, so allow for one instruction to
11244 load zero and three to do clearing. */
11245 if (TARGET_ALTIVEC && align >= 128)
11246 clear_step = 16;
11247 else if (TARGET_POWERPC64 && align >= 32)
11248 clear_step = 8;
11249 else if (TARGET_SPE && align >= 64)
11250 clear_step = 8;
11251 else
11252 clear_step = 4;
fba73eb1 11253
11254 if (optimize_size && bytes > 3 * clear_step)
11255 return 0;
11256 if (! optimize_size && bytes > 8 * clear_step)
11257 return 0;
11258
11259 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
11260 {
11261 enum machine_mode mode = BLKmode;
11262 rtx dest;
f676971a 11263
11264 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
11265 {
11266 clear_bytes = 16;
11267 mode = V4SImode;
11268 }
11269 else if (bytes >= 8 && TARGET_SPE && align >= 64)
11270 {
11271 clear_bytes = 8;
11272 mode = V2SImode;
11273 }
5514620a 11274 else if (bytes >= 8 && TARGET_POWERPC64
11275 /* 64-bit loads and stores require word-aligned
11276 displacements. */
11277 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
11278 {
11279 clear_bytes = 8;
11280 mode = DImode;
fba73eb1 11281 }
5514620a 11282 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
11283 { /* move 4 bytes */
11284 clear_bytes = 4;
11285 mode = SImode;
fba73eb1 11286 }
ec53fc93 11287 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
11288 { /* move 2 bytes */
11289 clear_bytes = 2;
11290 mode = HImode;
11291 }
11292 else /* move 1 byte at a time */
11293 {
11294 clear_bytes = 1;
11295 mode = QImode;
fba73eb1 11296 }
f676971a 11297
fba73eb1 11298 dest = adjust_address (orig_dest, mode, offset);
f676971a 11299
5514620a 11300 emit_move_insn (dest, CONST0_RTX (mode));
11301 }
11302
11303 return 1;
11304}
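/* Editor's note: standalone sketch, not GCC code.  It mirrors the
   chunk-size selection in expand_block_clear above using plain ints:
   given a byte count, the alignment in bits, and a few feature flags,
   it reports how many stores would be emitted.  The feature flags are
   parameters here; in the real code they come from the target macros
   used above.  For example, 22 bytes at 32-bit alignment with no
   vector unit and no 64-bit stores yields 4+4+4+4+4+2, i.e. 6 stores. */

static int
count_clear_stores (int bytes, int align_bits,
                    int have_altivec, int have_spe, int have_powerpc64,
                    int strict_alignment)
{
  int stores = 0;

  while (bytes > 0)
    {
      int chunk;

      if (bytes >= 16 && have_altivec && align_bits >= 128)
        chunk = 16;             /* one V4SI store */
      else if (bytes >= 8 && have_spe && align_bits >= 64)
        chunk = 8;              /* one V2SI store */
      else if (bytes >= 8 && have_powerpc64
               && (align_bits >= 64
                   || (!strict_alignment && align_bits >= 32)))
        chunk = 8;              /* one DImode store */
      else if (bytes >= 4 && (align_bits >= 32 || !strict_alignment))
        chunk = 4;              /* one SImode store */
      else if (bytes >= 2 && (align_bits >= 16 || !strict_alignment))
        chunk = 2;              /* one HImode store */
      else
        chunk = 1;              /* byte store */

      bytes -= chunk;
      stores++;
    }

  return stores;
}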
11305
35aff10b 11306\f
11307/* Expand a block move operation, and return 1 if successful. Return 0
11308 if we should let the compiler generate normal code.
11309
11310 operands[0] is the destination
11311 operands[1] is the source
11312 operands[2] is the length
11313 operands[3] is the alignment */
11314
11315#define MAX_MOVE_REG 4
11316
7e69e155 11317int
a2369ed3 11318expand_block_move (rtx operands[])
7e69e155 11319{
11320 rtx orig_dest = operands[0];
11321 rtx orig_src = operands[1];
7e69e155 11322 rtx bytes_rtx = operands[2];
7e69e155 11323 rtx align_rtx = operands[3];
3933e0e1 11324 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 11325 int align;
11326 int bytes;
11327 int offset;
7e69e155 11328 int move_bytes;
11329 rtx stores[MAX_MOVE_REG];
11330 int num_reg = 0;
7e69e155 11331
3933e0e1 11332 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 11333 if (! constp)
11334 return 0;
11335
11336 /* This must be a fixed size alignment */
11337 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 11338 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 11339
7e69e155 11340 /* Anything to move? */
11341 bytes = INTVAL (bytes_rtx);
11342 if (bytes <= 0)
11343 return 1;
11344
ea9982a8 11345 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 11346 reg_parm_stack_space. */
ea9982a8 11347 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
11348 return 0;
11349
cabfd258 11350 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 11351 {
cabfd258 11352 union {
70128ad9 11353 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 11354 rtx (*mov) (rtx, rtx);
11355 } gen_func;
11356 enum machine_mode mode = BLKmode;
11357 rtx src, dest;
f676971a 11358
11359 /* Altivec first, since it will be faster than a string move
11360 when it applies, and usually not significantly larger. */
11361 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
11362 {
11363 move_bytes = 16;
11364 mode = V4SImode;
11365 gen_func.mov = gen_movv4si;
11366 }
11367 else if (TARGET_SPE && bytes >= 8 && align >= 64)
11368 {
11369 move_bytes = 8;
11370 mode = V2SImode;
11371 gen_func.mov = gen_movv2si;
11372 }
5514620a 11373 else if (TARGET_STRING
11374 && bytes > 24 /* move up to 32 bytes at a time */
11375 && ! fixed_regs[5]
11376 && ! fixed_regs[6]
11377 && ! fixed_regs[7]
11378 && ! fixed_regs[8]
11379 && ! fixed_regs[9]
11380 && ! fixed_regs[10]
11381 && ! fixed_regs[11]
11382 && ! fixed_regs[12])
7e69e155 11383 {
cabfd258 11384 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 11385 gen_func.movmemsi = gen_movmemsi_8reg;
11386 }
11387 else if (TARGET_STRING
11388 && bytes > 16 /* move up to 24 bytes at a time */
11389 && ! fixed_regs[5]
11390 && ! fixed_regs[6]
11391 && ! fixed_regs[7]
11392 && ! fixed_regs[8]
11393 && ! fixed_regs[9]
11394 && ! fixed_regs[10])
11395 {
11396 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 11397 gen_func.movmemsi = gen_movmemsi_6reg;
11398 }
11399 else if (TARGET_STRING
11400 && bytes > 8 /* move up to 16 bytes at a time */
11401 && ! fixed_regs[5]
11402 && ! fixed_regs[6]
11403 && ! fixed_regs[7]
11404 && ! fixed_regs[8])
11405 {
11406 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 11407 gen_func.movmemsi = gen_movmemsi_4reg;
11408 }
11409 else if (bytes >= 8 && TARGET_POWERPC64
11410 /* 64-bit loads and stores require word-aligned
11411 displacements. */
fba73eb1 11412 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
11413 {
11414 move_bytes = 8;
11415 mode = DImode;
11416 gen_func.mov = gen_movdi;
11417 }
11418 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11419 { /* move up to 8 bytes at a time */
11420 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 11421 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 11422 }
cd7d9ca4 11423 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
11424 { /* move 4 bytes */
11425 move_bytes = 4;
11426 mode = SImode;
11427 gen_func.mov = gen_movsi;
11428 }
ec53fc93 11429 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
11430 { /* move 2 bytes */
11431 move_bytes = 2;
11432 mode = HImode;
11433 gen_func.mov = gen_movhi;
11434 }
11435 else if (TARGET_STRING && bytes > 1)
11436 { /* move up to 4 bytes at a time */
11437 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11438 gen_func.movmemsi = gen_movmemsi_1reg;
11439 }
11440 else /* move 1 byte at a time */
11441 {
11442 move_bytes = 1;
11443 mode = QImode;
11444 gen_func.mov = gen_movqi;
11445 }
f676971a 11446
cabfd258
GK
11447 src = adjust_address (orig_src, mode, offset);
11448 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11449
11450 if (mode != BLKmode)
cabfd258
GK
11451 {
11452 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11453
cabfd258
GK
11454 emit_insn ((*gen_func.mov) (tmp_reg, src));
11455 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11456 }
3933e0e1 11457
cabfd258
GK
11458 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11459 {
11460 int i;
11461 for (i = 0; i < num_reg; i++)
11462 emit_insn (stores[i]);
11463 num_reg = 0;
11464 }
35aff10b 11465
cabfd258 11466 if (mode == BLKmode)
7e69e155 11467 {
70128ad9 11468 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11469 patterns require zero offset. */
11470 if (!REG_P (XEXP (src, 0)))
b6c9286a 11471 {
cabfd258
GK
11472 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11473 src = replace_equiv_address (src, src_reg);
b6c9286a 11474 }
cabfd258 11475 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11476
cabfd258 11477 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11478 {
cabfd258
GK
11479 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11480 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11481 }
cabfd258 11482 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11483
70128ad9 11484 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11485 GEN_INT (move_bytes & 31),
11486 align_rtx));
7e69e155 11487 }
7e69e155
MM
11488 }
11489
11490 return 1;
11491}
11492
d62294f5 11493\f
9caa3eb2
DE
11494/* Return a string to perform a load_multiple operation.
11495 operands[0] is the vector.
11496 operands[1] is the source address.
11497 operands[2] is the first destination register. */
11498
11499const char *
a2369ed3 11500rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11501{
11502 /* We have to handle the case where the pseudo used to contain the address
11503 is assigned to one of the output registers. */
11504 int i, j;
11505 int words = XVECLEN (operands[0], 0);
11506 rtx xop[10];
11507
11508 if (XVECLEN (operands[0], 0) == 1)
11509 return "{l|lwz} %2,0(%1)";
11510
11511 for (i = 0; i < words; i++)
11512 if (refers_to_regno_p (REGNO (operands[2]) + i,
11513 REGNO (operands[2]) + i + 1, operands[1], 0))
11514 {
11515 if (i == words-1)
11516 {
11517 xop[0] = GEN_INT (4 * (words-1));
11518 xop[1] = operands[1];
11519 xop[2] = operands[2];
11520 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11521 return "";
11522 }
11523 else if (i == 0)
11524 {
11525 xop[0] = GEN_INT (4 * (words-1));
11526 xop[1] = operands[1];
11527 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11528 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11529 return "";
11530 }
11531 else
11532 {
11533 for (j = 0; j < words; j++)
11534 if (j != i)
11535 {
11536 xop[0] = GEN_INT (j * 4);
11537 xop[1] = operands[1];
11538 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11539 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11540 }
11541 xop[0] = GEN_INT (i * 4);
11542 xop[1] = operands[1];
11543 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11544 return "";
11545 }
11546 }
11547
11548 return "{lsi|lswi} %2,%1,%N0";
11549}
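/* Illustrative note (not part of the original file): the special cases
   above only matter when the address register is also one of the
   destination registers.  For a two-word load whose address lives in the
   register that receives the last word, the i == words-1 branch emits

       {lsi|lswi} rDEST,rADDR,4
       {l|lwz}    rADDR,4(rADDR)

   so the word that clobbers the address register is loaded last.
   rDEST/rADDR are placeholder register names for this example.  */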
11550
9878760c 11551\f
a4f6c312
SS
11552/* A validation routine: say whether CODE, a condition code, and MODE
11553 match. The other alternatives either don't make sense or should
11554 never be generated. */
39a10a29 11555
48d72335 11556void
a2369ed3 11557validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11558{
37409796
NS
11559 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11560 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11561 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11562
11563 /* These don't make sense. */
37409796
NS
11564 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11565 || mode != CCUNSmode);
39a10a29 11566
37409796
NS
11567 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11568 || mode == CCUNSmode);
39a10a29 11569
37409796
NS
11570 gcc_assert (mode == CCFPmode
11571 || (code != ORDERED && code != UNORDERED
11572 && code != UNEQ && code != LTGT
11573 && code != UNGT && code != UNLT
11574 && code != UNGE && code != UNLE));
f676971a
EC
11575
11576 /* These should never be generated except for
bc9ec0e0 11577 flag_finite_math_only. */
37409796
NS
11578 gcc_assert (mode != CCFPmode
11579 || flag_finite_math_only
11580 || (code != LE && code != GE
11581 && code != UNEQ && code != LTGT
11582 && code != UNGT && code != UNLT));
39a10a29
GK
11583
11584 /* These are invalid; the information is not there. */
37409796 11585 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11586}
11587
9878760c
RK
11588\f
11589/* Return 1 if ANDOP is a mask that has no bits on that are not in the
11590 mask required to convert the result of a rotate insn into a shift
b1765bde 11591 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
11592
11593int
a2369ed3 11594includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11595{
e2c953b6
DE
11596 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11597
11598 shift_mask <<= INTVAL (shiftop);
9878760c 11599
b1765bde 11600 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11601}
11602
11603/* Similar, but for right shift. */
11604
11605int
a2369ed3 11606includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11607{
a7653a2c 11608 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11609
11610 shift_mask >>= INTVAL (shiftop);
11611
b1765bde 11612 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11613}
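/* Illustrative sketch (not part of the original file, never called): the
   two predicates above both reduce to "the AND mask may only keep bits
   that can survive the shift".  The helper below restates that test on
   plain 32-bit unsigned values; the name and placement are editorial.  */

static int
rs6000_doc_mask_ok_for_shift (unsigned int mask, int shift, int left_p)
{
  /* Bits that a shift by SHIFT can still leave set.  */
  unsigned int survivors = left_p ? (~0u << shift) : (~0u >> shift);

  /* The mask must not select any other bit.  For example, mask
     0xffffff00 with a left shift of 8 passes, while 0xffffff01 fails
     because bit 0 cannot come out of that shift.  */
  return (mask & ~survivors) == 0;
}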
11614
c5059423
AM
11615/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11616 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11617 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
11618
11619int
a2369ed3 11620includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11621{
c5059423
AM
11622 if (GET_CODE (andop) == CONST_INT)
11623 {
02071907 11624 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11625
c5059423 11626 c = INTVAL (andop);
02071907 11627 if (c == 0 || c == ~0)
c5059423 11628 return 0;
e2c953b6 11629
02071907 11630 shift_mask = ~0;
c5059423
AM
11631 shift_mask <<= INTVAL (shiftop);
11632
b6d08ca1 11633 /* Find the least significant one bit. */
c5059423
AM
11634 lsb = c & -c;
11635
11636 /* It must coincide with the LSB of the shift mask. */
11637 if (-lsb != shift_mask)
11638 return 0;
e2c953b6 11639
c5059423
AM
11640 /* Invert to look for the next transition (if any). */
11641 c = ~c;
11642
11643 /* Remove the low group of ones (originally low group of zeros). */
11644 c &= -lsb;
11645
11646 /* Again find the lsb, and check we have all 1's above. */
11647 lsb = c & -c;
11648 return c == -lsb;
11649 }
11650 else if (GET_CODE (andop) == CONST_DOUBLE
11651 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11652 {
02071907
AM
11653 HOST_WIDE_INT low, high, lsb;
11654 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11655
11656 low = CONST_DOUBLE_LOW (andop);
11657 if (HOST_BITS_PER_WIDE_INT < 64)
11658 high = CONST_DOUBLE_HIGH (andop);
11659
11660 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11661 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11662 return 0;
11663
11664 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11665 {
02071907 11666 shift_mask_high = ~0;
c5059423
AM
11667 if (INTVAL (shiftop) > 32)
11668 shift_mask_high <<= INTVAL (shiftop) - 32;
11669
11670 lsb = high & -high;
11671
11672 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11673 return 0;
11674
11675 high = ~high;
11676 high &= -lsb;
11677
11678 lsb = high & -high;
11679 return high == -lsb;
11680 }
11681
02071907 11682 shift_mask_low = ~0;
c5059423
AM
11683 shift_mask_low <<= INTVAL (shiftop);
11684
11685 lsb = low & -low;
11686
11687 if (-lsb != shift_mask_low)
11688 return 0;
11689
11690 if (HOST_BITS_PER_WIDE_INT < 64)
11691 high = ~high;
11692 low = ~low;
11693 low &= -lsb;
11694
11695 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11696 {
11697 lsb = high & -high;
11698 return high == -lsb;
11699 }
11700
11701 lsb = low & -low;
11702 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11703 }
11704 else
11705 return 0;
11706}
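/* Illustrative sketch (not part of the original file, never called): for
   the CONST_INT case above the accepted masks look like 0...0 1...1 0...0
   with exactly SHIFTOP low-order zeros.  The same test on a plain 64-bit
   value, spelled out with the lsb = c & -c trick used above:  */

static int
rs6000_doc_rldic_lshift_mask_p (unsigned long long c, int shift)
{
  unsigned long long shift_mask = ~0ULL << shift;
  unsigned long long lsb;

  if (c == 0 || c == ~0ULL)
    return 0;

  /* The lowest set bit of the mask must sit exactly at bit SHIFT.  */
  lsb = c & -c;
  if (-lsb != shift_mask)
    return 0;

  /* Above that bit the mask must be one contiguous run of ones: invert,
     drop the (originally zero) low bits, and check that the remainder is
     again of the form -lsb.  E.g. 0x0ff0 passes for SHIFT == 4, while
     0x0f70 does not.  */
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}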
e2c953b6 11707
c5059423
AM
11708/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11709 to perform a left shift. It must have SHIFTOP or more least
c1207243 11710 significant 0's, with the remainder of the word 1's. */
e2c953b6 11711
c5059423 11712int
a2369ed3 11713includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11714{
e2c953b6 11715 if (GET_CODE (andop) == CONST_INT)
c5059423 11716 {
02071907 11717 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11718
02071907 11719 shift_mask = ~0;
c5059423
AM
11720 shift_mask <<= INTVAL (shiftop);
11721 c = INTVAL (andop);
11722
c1207243 11723 /* Find the least significant one bit. */
c5059423
AM
11724 lsb = c & -c;
11725
11726 /* It must be covered by the shift mask.
a4f6c312 11727 This test also rejects c == 0. */
c5059423
AM
11728 if ((lsb & shift_mask) == 0)
11729 return 0;
11730
11731 /* Check we have all 1's above the transition, and reject all 1's. */
11732 return c == -lsb && lsb != 1;
11733 }
11734 else if (GET_CODE (andop) == CONST_DOUBLE
11735 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11736 {
02071907 11737 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11738
11739 low = CONST_DOUBLE_LOW (andop);
11740
11741 if (HOST_BITS_PER_WIDE_INT < 64)
11742 {
02071907 11743 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11744
11745 high = CONST_DOUBLE_HIGH (andop);
11746
11747 if (low == 0)
11748 {
02071907 11749 shift_mask_high = ~0;
c5059423
AM
11750 if (INTVAL (shiftop) > 32)
11751 shift_mask_high <<= INTVAL (shiftop) - 32;
11752
11753 lsb = high & -high;
11754
11755 if ((lsb & shift_mask_high) == 0)
11756 return 0;
11757
11758 return high == -lsb;
11759 }
11760 if (high != ~0)
11761 return 0;
11762 }
11763
02071907 11764 shift_mask_low = ~0;
c5059423
AM
11765 shift_mask_low <<= INTVAL (shiftop);
11766
11767 lsb = low & -low;
11768
11769 if ((lsb & shift_mask_low) == 0)
11770 return 0;
11771
11772 return low == -lsb && lsb != 1;
11773 }
e2c953b6 11774 else
c5059423 11775 return 0;
9878760c 11776}
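/* Illustrative note (not part of the original file): the rldicr variant
   accepts masks with SHIFTOP or more low-order zeros and ones all the way
   up to the top bit.  For SHIFTOP == 4, 0xffffffffffffff00 is accepted
   (eight low zeros, ones to the top), while 0x00ffffffffffff00 is
   rejected because the ones do not reach the most significant bit.  */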
35068b43 11777
11ac38b2
DE
11778/* Return 1 if the operands will generate valid arguments for an rlwimi
11779instruction for insert with right shift in 64-bit mode.  The mask may
11780not start on the first bit or stop on the last bit because the wrap-around
11781effects of the instruction do not correspond to the semantics of the RTL insn. */
11782
11783int
11784insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11785{
429ec7dc
DE
11786 if (INTVAL (startop) > 32
11787 && INTVAL (startop) < 64
11788 && INTVAL (sizeop) > 1
11789 && INTVAL (sizeop) + INTVAL (startop) < 64
11790 && INTVAL (shiftop) > 0
11791 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11792 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11793 return 1;
11794
11795 return 0;
11796}
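/* Illustrative note (not part of the original file): for example a
   16-bit field inserted at bit 40 with a right shift of 8 satisfies every
   test above (40 > 32, 16 + 40 < 64, 16 + 8 < 32, 64 - 8 >= 16), whereas
   a zero shift count or a field whose mask would run to the word boundary
   is rejected because the rlwimi rotate would wrap around.  */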
11797
35068b43 11798/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 11799 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11800
11801int
a2369ed3 11802registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11803{
11804 /* We might have been passed a SUBREG. */
f676971a 11805 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11806 return 0;
f676971a 11807
90f81f99
AP
11808 /* We might have been passed non-floating-point registers. */
11809 if (!FP_REGNO_P (REGNO (reg1))
11810 || !FP_REGNO_P (REGNO (reg2)))
11811 return 0;
35068b43
RK
11812
11813 return (REGNO (reg1) == REGNO (reg2) - 1);
11814}
11815
a4f6c312
SS
11816/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11817 addr1 and addr2 must be in consecutive memory locations
11818 (addr2 == addr1 + 8). */
35068b43
RK
11819
11820int
90f81f99 11821mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11822{
90f81f99 11823 rtx addr1, addr2;
bb8df8a6
EC
11824 unsigned int reg1, reg2;
11825 int offset1, offset2;
35068b43 11826
90f81f99
AP
11827 /* The mems cannot be volatile. */
11828 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11829 return 0;
f676971a 11830
90f81f99
AP
11831 addr1 = XEXP (mem1, 0);
11832 addr2 = XEXP (mem2, 0);
11833
35068b43
RK
11834 /* Extract an offset (if used) from the first addr. */
11835 if (GET_CODE (addr1) == PLUS)
11836 {
11837 /* If not a REG, return zero. */
11838 if (GET_CODE (XEXP (addr1, 0)) != REG)
11839 return 0;
11840 else
11841 {
c4ad648e 11842 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11843 /* The offset must be constant! */
11844 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11845 return 0;
11846 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11847 }
11848 }
11849 else if (GET_CODE (addr1) != REG)
11850 return 0;
11851 else
11852 {
11853 reg1 = REGNO (addr1);
11854 /* This was a simple (mem (reg)) expression. Offset is 0. */
11855 offset1 = 0;
11856 }
11857
bb8df8a6
EC
11858 /* And now for the second addr. */
11859 if (GET_CODE (addr2) == PLUS)
11860 {
11861 /* If not a REG, return zero. */
11862 if (GET_CODE (XEXP (addr2, 0)) != REG)
11863 return 0;
11864 else
11865 {
11866 reg2 = REGNO (XEXP (addr2, 0));
11867 /* The offset must be constant. */
11868 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11869 return 0;
11870 offset2 = INTVAL (XEXP (addr2, 1));
11871 }
11872 }
11873 else if (GET_CODE (addr2) != REG)
35068b43 11874 return 0;
bb8df8a6
EC
11875 else
11876 {
11877 reg2 = REGNO (addr2);
11878 /* This was a simple (mem (reg)) expression. Offset is 0. */
11879 offset2 = 0;
11880 }
35068b43 11881
bb8df8a6
EC
11882 /* Both of these must have the same base register. */
11883 if (reg1 != reg2)
35068b43
RK
11884 return 0;
11885
11886 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11887 if (offset2 != offset1 + 8)
35068b43
RK
11888 return 0;
11889
11890 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11891 instructions. */
11892 return 1;
11893}
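/* Illustrative note (not part of the original file): the peephole pairs
   up two adjacent doubleword accesses off the same base register, e.g.
   16(r9) followed by 24(r9), or (r9) followed by 8(r9).  Pairs such as
   16(r9)/20(r9) (offset delta not 8) or 16(r9)/24(r10) (different base
   registers) are rejected.  */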
9878760c 11894\f
e41b2a33
PB
11895
11896rtx
11897rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11898{
11899 static bool eliminated = false;
11900 if (mode != SDmode)
11901 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11902 else
11903 {
11904 rtx mem = cfun->machine->sdmode_stack_slot;
11905 gcc_assert (mem != NULL_RTX);
11906
11907 if (!eliminated)
11908 {
11909 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11910 cfun->machine->sdmode_stack_slot = mem;
11911 eliminated = true;
11912 }
11913 return mem;
11914 }
11915}
11916
11917static tree
11918rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11919{
11920 /* Don't walk into types. */
11921 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11922 {
11923 *walk_subtrees = 0;
11924 return NULL_TREE;
11925 }
11926
11927 switch (TREE_CODE (*tp))
11928 {
11929 case VAR_DECL:
11930 case PARM_DECL:
11931 case FIELD_DECL:
11932 case RESULT_DECL:
48f5b722 11933 case SSA_NAME:
e41b2a33 11934 case REAL_CST:
fdf4f148 11935 case INDIRECT_REF:
a0f39282
JJ
11936 case ALIGN_INDIRECT_REF:
11937 case MISALIGNED_INDIRECT_REF:
fdf4f148 11938 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11939 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11940 return *tp;
11941 break;
11942 default:
11943 break;
11944 }
11945
11946 return NULL_TREE;
11947}
11948
11949
11950/* Allocate a 64-bit stack slot to be used for copying SDmode
11951 values through if this function has any SDmode references. */
11952
11953static void
11954rs6000_alloc_sdmode_stack_slot (void)
11955{
11956 tree t;
11957 basic_block bb;
726a989a 11958 gimple_stmt_iterator gsi;
e41b2a33
PB
11959
11960 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11961
11962 FOR_EACH_BB (bb)
726a989a 11963 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11964 {
726a989a 11965 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11966 if (ret)
11967 {
11968 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11969 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11970 SDmode, 0);
11971 return;
11972 }
11973 }
11974
11975 /* Check for any SDmode parameters of the function. */
11976 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11977 {
11978 if (TREE_TYPE (t) == error_mark_node)
11979 continue;
11980
11981 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11982 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11983 {
11984 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11985 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11986 SDmode, 0);
11987 return;
11988 }
11989 }
11990}
11991
11992static void
11993rs6000_instantiate_decls (void)
11994{
11995 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11996 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11997}
11998
9878760c 11999/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 12000 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
12001 NO_REGS is returned. */
12002
12003enum reg_class
0a2aaacc 12004rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
12005 enum machine_mode mode ATTRIBUTE_UNUSED,
12006 rtx in)
9878760c 12007{
5accd822 12008 int regno;
9878760c 12009
ab82a49f
AP
12010 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
12011#if TARGET_MACHO
c4ad648e 12012 && MACHOPIC_INDIRECT
ab82a49f 12013#endif
c4ad648e 12014 ))
46fad5b7
DJ
12015 {
12016 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
12017 other than BASE_REGS for TARGET_ELF. So indicate that a
12018 register from BASE_REGS is needed as an intermediate
12019 register.
f676971a 12020
46fad5b7
DJ
12021 On Darwin, pic addresses require a load from memory, which
12022 needs a base register. */
0a2aaacc 12023 if (rclass != BASE_REGS
c4ad648e
AM
12024 && (GET_CODE (in) == SYMBOL_REF
12025 || GET_CODE (in) == HIGH
12026 || GET_CODE (in) == LABEL_REF
12027 || GET_CODE (in) == CONST))
12028 return BASE_REGS;
46fad5b7 12029 }
e7b7998a 12030
5accd822
DE
12031 if (GET_CODE (in) == REG)
12032 {
12033 regno = REGNO (in);
12034 if (regno >= FIRST_PSEUDO_REGISTER)
12035 {
12036 regno = true_regnum (in);
12037 if (regno >= FIRST_PSEUDO_REGISTER)
12038 regno = -1;
12039 }
12040 }
12041 else if (GET_CODE (in) == SUBREG)
12042 {
12043 regno = true_regnum (in);
12044 if (regno >= FIRST_PSEUDO_REGISTER)
12045 regno = -1;
12046 }
12047 else
12048 regno = -1;
12049
9878760c
RK
12050 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
12051 into anything. */
0a2aaacc 12052 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
12053 || (regno >= 0 && INT_REGNO_P (regno)))
12054 return NO_REGS;
12055
12056 /* Constants, memory, and FP registers can go into FP registers. */
12057 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 12058 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 12059 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 12060
0ac081f6
AH
12061 /* Memory and AltiVec registers can go into AltiVec registers. */
12062 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 12063 && rclass == ALTIVEC_REGS)
0ac081f6
AH
12064 return NO_REGS;
12065
9878760c 12066 /* We can copy among the CR registers. */
0a2aaacc 12067 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
12068 && regno >= 0 && CR_REGNO_P (regno))
12069 return NO_REGS;
12070
12071 /* Otherwise, we need GENERAL_REGS. */
12072 return GENERAL_REGS;
12073}
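/* Illustrative note (not part of the original file): e.g. on ELF a
   SYMBOL_REF headed anywhere but BASE_REGS is routed through a base
   register first; a constant or memory operand headed for FLOAT_REGS
   needs no scratch (NO_REGS) unless the mode is SDmode, where a GPR is
   required; and a copy between CR fields likewise needs no scratch.  */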
12074\f
12075/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 12076 know this is a valid comparison.
9878760c
RK
12077
12078 SCC_P is 1 if this is for an scc. That means that %D will have been
12079 used instead of %C, so the bits will be in different places.
12080
b4ac57ab 12081 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
12082
12083int
a2369ed3 12084ccr_bit (rtx op, int scc_p)
9878760c
RK
12085{
12086 enum rtx_code code = GET_CODE (op);
12087 enum machine_mode cc_mode;
12088 int cc_regnum;
12089 int base_bit;
9ebbca7d 12090 rtx reg;
9878760c 12091
ec8e098d 12092 if (!COMPARISON_P (op))
9878760c
RK
12093 return -1;
12094
9ebbca7d
GK
12095 reg = XEXP (op, 0);
12096
37409796 12097 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
12098
12099 cc_mode = GET_MODE (reg);
12100 cc_regnum = REGNO (reg);
12101 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 12102
39a10a29 12103 validate_condition_mode (code, cc_mode);
c5defebb 12104
b7053a3f
GK
12105 /* When generating a sCOND operation, only positive conditions are
12106 allowed. */
37409796
NS
12107 gcc_assert (!scc_p
12108 || code == EQ || code == GT || code == LT || code == UNORDERED
12109 || code == GTU || code == LTU);
f676971a 12110
9878760c
RK
12111 switch (code)
12112 {
12113 case NE:
12114 return scc_p ? base_bit + 3 : base_bit + 2;
12115 case EQ:
12116 return base_bit + 2;
1c882ea4 12117 case GT: case GTU: case UNLE:
9878760c 12118 return base_bit + 1;
1c882ea4 12119 case LT: case LTU: case UNGE:
9878760c 12120 return base_bit;
1c882ea4
GK
12121 case ORDERED: case UNORDERED:
12122 return base_bit + 3;
9878760c
RK
12123
12124 case GE: case GEU:
39a10a29 12125 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
12126 unordered position. So test that bit. For integer, this is ! LT
12127 unless this is an scc insn. */
39a10a29 12128 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
12129
12130 case LE: case LEU:
39a10a29 12131 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 12132
9878760c 12133 default:
37409796 12134 gcc_unreachable ();
9878760c
RK
12135 }
12136}
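/* Illustrative note (not part of the original file): each CR field
   occupies four consecutive bits in LT/GT/EQ/SO order, so for a
   comparison held in field CR0_REGNO + 7 the base bit is 28 and, e.g.,
   EQ tests bit 30, GT bit 29 and LT bit 28, while NE in an scc sequence
   reports the SO position, bit 31.  */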
1ff7789b 12137\f
8d30c4ee 12138/* Return the GOT register. */
1ff7789b 12139
9390387d 12140rtx
a2369ed3 12141rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 12142{
a4f6c312
SS
12143 /* The second flow pass currently (June 1999) can't update
12144 regs_ever_live without disturbing other parts of the compiler, so
12145 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
12146 if (!can_create_pseudo_p ()
12147 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 12148 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 12149
e3b5732b 12150 crtl->uses_pic_offset_table = 1;
3cb999d8 12151
1ff7789b
MM
12152 return pic_offset_table_rtx;
12153}
a7df97e6 12154\f
e2500fed
GK
12155/* Function to init struct machine_function.
12156 This will be called, via a pointer variable,
12157 from push_function_context. */
a7df97e6 12158
e2500fed 12159static struct machine_function *
863d938c 12160rs6000_init_machine_status (void)
a7df97e6 12161{
5ead67f6 12162 return GGC_CNEW (machine_function);
a7df97e6 12163}
9878760c 12164\f
0ba1b2ff
AM
12165/* These macros test for integers and extract the low-order bits. */
12166#define INT_P(X) \
12167((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
12168 && GET_MODE (X) == VOIDmode)
12169
12170#define INT_LOWPART(X) \
12171 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
12172
12173int
a2369ed3 12174extract_MB (rtx op)
0ba1b2ff
AM
12175{
12176 int i;
12177 unsigned long val = INT_LOWPART (op);
12178
12179 /* If the high bit is zero, the value is the first 1 bit we find
12180 from the left. */
12181 if ((val & 0x80000000) == 0)
12182 {
37409796 12183 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
12184
12185 i = 1;
12186 while (((val <<= 1) & 0x80000000) == 0)
12187 ++i;
12188 return i;
12189 }
12190
12191 /* If the high bit is set and the low bit is not, or the mask is all
12192 1's, the value is zero. */
12193 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
12194 return 0;
12195
12196 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
12197 from the right. */
12198 i = 31;
12199 while (((val >>= 1) & 1) != 0)
12200 --i;
12201
12202 return i;
12203}
12204
12205int
a2369ed3 12206extract_ME (rtx op)
0ba1b2ff
AM
12207{
12208 int i;
12209 unsigned long val = INT_LOWPART (op);
12210
12211 /* If the low bit is zero, the value is the first 1 bit we find from
12212 the right. */
12213 if ((val & 1) == 0)
12214 {
37409796 12215 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
12216
12217 i = 30;
12218 while (((val >>= 1) & 1) == 0)
12219 --i;
12220
12221 return i;
12222 }
12223
12224 /* If the low bit is set and the high bit is not, or the mask is all
12225 1's, the value is 31. */
12226 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
12227 return 31;
12228
12229 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
12230 from the left. */
12231 i = 0;
12232 while (((val <<= 1) & 0x80000000) != 0)
12233 ++i;
12234
12235 return i;
12236}
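/* Illustrative note (not part of the original file): MB and ME use the
   IBM bit numbering in which bit 0 is the most significant bit of the
   32-bit word.  For the contiguous mask 0x00ffff00 the routines above
   give MB = 8 and ME = 23; for the wrap-around mask 0xff0000ff they give
   MB = 24 and ME = 7.  */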
12237
c4501e62
JJ
12238/* Locate some local-dynamic symbol still in use by this function
12239 so that we can print its name in some tls_ld pattern. */
12240
12241static const char *
863d938c 12242rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
12243{
12244 rtx insn;
12245
12246 if (cfun->machine->some_ld_name)
12247 return cfun->machine->some_ld_name;
12248
12249 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
12250 if (INSN_P (insn)
12251 && for_each_rtx (&PATTERN (insn),
12252 rs6000_get_some_local_dynamic_name_1, 0))
12253 return cfun->machine->some_ld_name;
12254
37409796 12255 gcc_unreachable ();
c4501e62
JJ
12256}
12257
12258/* Helper function for rs6000_get_some_local_dynamic_name. */
12259
12260static int
a2369ed3 12261rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
12262{
12263 rtx x = *px;
12264
12265 if (GET_CODE (x) == SYMBOL_REF)
12266 {
12267 const char *str = XSTR (x, 0);
12268 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
12269 {
12270 cfun->machine->some_ld_name = str;
12271 return 1;
12272 }
12273 }
12274
12275 return 0;
12276}
12277
85b776df
AM
12278/* Write out a function code label. */
12279
12280void
12281rs6000_output_function_entry (FILE *file, const char *fname)
12282{
12283 if (fname[0] != '.')
12284 {
12285 switch (DEFAULT_ABI)
12286 {
12287 default:
37409796 12288 gcc_unreachable ();
85b776df
AM
12289
12290 case ABI_AIX:
12291 if (DOT_SYMBOLS)
12292 putc ('.', file);
12293 else
12294 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
12295 break;
12296
12297 case ABI_V4:
12298 case ABI_DARWIN:
12299 break;
12300 }
12301 }
12302 if (TARGET_AIX)
12303 RS6000_OUTPUT_BASENAME (file, fname);
12304 else
12305 assemble_name (file, fname);
12306}
12307
9878760c
RK
12308/* Print an operand. Recognize special options, documented below. */
12309
38c1f2d7 12310#if TARGET_ELF
d9407988 12311#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 12312#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
12313#else
12314#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 12315#define SMALL_DATA_REG 0
ba5e43aa
MM
12316#endif
12317
9878760c 12318void
a2369ed3 12319print_operand (FILE *file, rtx x, int code)
9878760c
RK
12320{
12321 int i;
a260abc9 12322 HOST_WIDE_INT val;
0ba1b2ff 12323 unsigned HOST_WIDE_INT uval;
9878760c
RK
12324
12325 switch (code)
12326 {
a8b3aeda 12327 case '.':
a85d226b
RK
12328 /* Write out an instruction after the call which may be replaced
12329 with glue code by the loader. This depends on the AIX version. */
12330 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
12331 return;
12332
81eace42
GK
12333 /* %a is output_address. */
12334
9854d9ed
RK
12335 case 'A':
12336 /* If X is a constant integer whose low-order 5 bits are zero,
12337 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 12338 in the AIX assembler where "sri" with a zero shift count
20e26713 12339 writes a trash instruction. */
9854d9ed 12340 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 12341 putc ('l', file);
9854d9ed 12342 else
76229ac8 12343 putc ('r', file);
9854d9ed
RK
12344 return;
12345
12346 case 'b':
e2c953b6
DE
12347 /* If constant, low-order 16 bits of constant, unsigned.
12348 Otherwise, write normally. */
12349 if (INT_P (x))
12350 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
12351 else
12352 print_operand (file, x, 0);
cad12a8d
RK
12353 return;
12354
a260abc9
DE
12355 case 'B':
12356 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
12357 for 64-bit mask direction. */
9390387d 12358 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 12359 return;
a260abc9 12360
81eace42
GK
12361 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
12362 output_operand. */
12363
423c1189
AH
12364 case 'c':
12365 /* X is a CR register. Print the number of the GT bit of the CR. */
12366 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12367 output_operand_lossage ("invalid %%E value");
12368 else
12369 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
12370 return;
12371
12372 case 'D':
cef6b86c 12373 /* Like 'J' but get to the GT bit only. */
37409796 12374 gcc_assert (GET_CODE (x) == REG);
423c1189 12375
cef6b86c
EB
12376 /* Bit 1 is GT bit. */
12377 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 12378
cef6b86c
EB
12379 /* Add one for shift count in rlinm for scc. */
12380 fprintf (file, "%d", i + 1);
423c1189
AH
12381 return;
12382
9854d9ed 12383 case 'E':
39a10a29 12384 /* X is a CR register. Print the number of the EQ bit of the CR. */
9854d9ed
RK
12385 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12386 output_operand_lossage ("invalid %%E value");
78fbdbf7 12387 else
39a10a29 12388 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 12389 return;
9854d9ed
RK
12390
12391 case 'f':
12392 /* X is a CR register. Print the shift count needed to move it
12393 to the high-order four bits. */
12394 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12395 output_operand_lossage ("invalid %%f value");
12396 else
9ebbca7d 12397 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12398 return;
12399
12400 case 'F':
12401 /* Similar, but print the count for the rotate in the opposite
12402 direction. */
12403 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12404 output_operand_lossage ("invalid %%F value");
12405 else
9ebbca7d 12406 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12407 return;
12408
12409 case 'G':
12410 /* X is a constant integer. If it is negative, print "m",
43aa4e05 12411 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
12412 if (GET_CODE (x) != CONST_INT)
12413 output_operand_lossage ("invalid %%G value");
12414 else if (INTVAL (x) >= 0)
76229ac8 12415 putc ('z', file);
9854d9ed 12416 else
76229ac8 12417 putc ('m', file);
9854d9ed 12418 return;
e2c953b6 12419
9878760c 12420 case 'h':
a4f6c312
SS
12421 /* If constant, output low-order five bits. Otherwise, write
12422 normally. */
9878760c 12423 if (INT_P (x))
5f59ecb7 12424 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12425 else
12426 print_operand (file, x, 0);
12427 return;
12428
64305719 12429 case 'H':
a4f6c312
SS
12430 /* If constant, output low-order six bits. Otherwise, write
12431 normally. */
64305719 12432 if (INT_P (x))
5f59ecb7 12433 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12434 else
12435 print_operand (file, x, 0);
12436 return;
12437
9854d9ed
RK
12438 case 'I':
12439 /* Print `i' if this is a constant, else nothing. */
9878760c 12440 if (INT_P (x))
76229ac8 12441 putc ('i', file);
9878760c
RK
12442 return;
12443
9854d9ed
RK
12444 case 'j':
12445 /* Write the bit number in CCR for jump. */
12446 i = ccr_bit (x, 0);
12447 if (i == -1)
12448 output_operand_lossage ("invalid %%j code");
9878760c 12449 else
9854d9ed 12450 fprintf (file, "%d", i);
9878760c
RK
12451 return;
12452
9854d9ed
RK
12453 case 'J':
12454 /* Similar, but add one for shift count in rlinm for scc and pass
12455 scc flag to `ccr_bit'. */
12456 i = ccr_bit (x, 1);
12457 if (i == -1)
12458 output_operand_lossage ("invalid %%J code");
12459 else
a0466a68
RK
12460 /* If we want bit 31, write a shift count of zero, not 32. */
12461 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12462 return;
12463
9854d9ed
RK
12464 case 'k':
12465 /* X must be a constant. Write the 1's complement of the
12466 constant. */
9878760c 12467 if (! INT_P (x))
9854d9ed 12468 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12469 else
12470 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12471 return;
12472
81eace42 12473 case 'K':
9ebbca7d
GK
12474 /* X must be a symbolic constant on ELF. Write an
12475 expression suitable for an 'addi' that adds in the low 16
12476 bits of the MEM. */
12477 if (GET_CODE (x) != CONST)
12478 {
12479 print_operand_address (file, x);
12480 fputs ("@l", file);
12481 }
12482 else
12483 {
12484 if (GET_CODE (XEXP (x, 0)) != PLUS
12485 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12486 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12487 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12488 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12489 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12490 fputs ("@l", file);
ed8d2920
MM
12491 /* For GNU as, there must be a non-alphanumeric character
12492 between 'l' and the number. The '-' is added by
12493 print_operand() already. */
12494 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12495 fputs ("+", file);
9ebbca7d
GK
12496 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12497 }
81eace42
GK
12498 return;
12499
12500 /* %l is output_asm_label. */
9ebbca7d 12501
9854d9ed
RK
12502 case 'L':
12503 /* Write second word of DImode or DFmode reference. Works on register
12504 or non-indexed memory only. */
12505 if (GET_CODE (x) == REG)
fb5c67a7 12506 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12507 else if (GET_CODE (x) == MEM)
12508 {
12509 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12510 we have already done it, we can just use an offset of word. */
9854d9ed
RK
12511 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12512 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12513 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12514 UNITS_PER_WORD));
6fb5fa3c
DB
12515 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12516 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12517 UNITS_PER_WORD));
9854d9ed 12518 else
d7624dc0
RK
12519 output_address (XEXP (adjust_address_nv (x, SImode,
12520 UNITS_PER_WORD),
12521 0));
ed8908e7 12522
ba5e43aa 12523 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12524 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12525 reg_names[SMALL_DATA_REG]);
9854d9ed 12526 }
9878760c 12527 return;
f676971a 12528
9878760c
RK
12529 case 'm':
12530 /* MB value for a mask operand. */
b1765bde 12531 if (! mask_operand (x, SImode))
9878760c
RK
12532 output_operand_lossage ("invalid %%m value");
12533
0ba1b2ff 12534 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12535 return;
12536
12537 case 'M':
12538 /* ME value for a mask operand. */
b1765bde 12539 if (! mask_operand (x, SImode))
a260abc9 12540 output_operand_lossage ("invalid %%M value");
9878760c 12541
0ba1b2ff 12542 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12543 return;
12544
81eace42
GK
12545 /* %n outputs the negative of its operand. */
12546
9878760c
RK
12547 case 'N':
12548 /* Write the number of elements in the vector times 4. */
12549 if (GET_CODE (x) != PARALLEL)
12550 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12551 else
12552 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12553 return;
12554
12555 case 'O':
12556 /* Similar, but subtract 1 first. */
12557 if (GET_CODE (x) != PARALLEL)
1427100a 12558 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12559 else
12560 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12561 return;
12562
9854d9ed
RK
12563 case 'p':
12564 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12565 if (! INT_P (x)
2bfcf297 12566 || INT_LOWPART (x) < 0
9854d9ed
RK
12567 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12568 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12569 else
12570 fprintf (file, "%d", i);
9854d9ed
RK
12571 return;
12572
9878760c
RK
12573 case 'P':
12574 /* The operand must be an indirect memory reference. The result
8bb418a3 12575 is the register name. */
9878760c
RK
12576 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12577 || REGNO (XEXP (x, 0)) >= 32)
12578 output_operand_lossage ("invalid %%P value");
e2c953b6 12579 else
fb5c67a7 12580 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12581 return;
12582
dfbdccdb
GK
12583 case 'q':
12584 /* This outputs the logical code corresponding to a boolean
12585 expression. The expression may have one or both operands
39a10a29 12586 negated (if one, only the first one). For condition register
c4ad648e
AM
12587 logical operations, it will also treat the negated
12588 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12589 {
63bc1d05 12590 const char *const *t = 0;
dfbdccdb
GK
12591 const char *s;
12592 enum rtx_code code = GET_CODE (x);
12593 static const char * const tbl[3][3] = {
12594 { "and", "andc", "nor" },
12595 { "or", "orc", "nand" },
12596 { "xor", "eqv", "xor" } };
12597
12598 if (code == AND)
12599 t = tbl[0];
12600 else if (code == IOR)
12601 t = tbl[1];
12602 else if (code == XOR)
12603 t = tbl[2];
12604 else
12605 output_operand_lossage ("invalid %%q value");
12606
12607 if (GET_CODE (XEXP (x, 0)) != NOT)
12608 s = t[0];
12609 else
12610 {
12611 if (GET_CODE (XEXP (x, 1)) == NOT)
12612 s = t[2];
12613 else
12614 s = t[1];
12615 }
f676971a 12616
dfbdccdb
GK
12617 fputs (s, file);
12618 }
12619 return;
12620
2c4a9cff
DE
12621 case 'Q':
12622 if (TARGET_MFCRF)
3b6ce0af 12623 fputc (',', file);
5efb1046 12624 /* FALLTHRU */
2c4a9cff
DE
12625 else
12626 return;
12627
9854d9ed
RK
12628 case 'R':
12629 /* X is a CR register. Print the mask for `mtcrf'. */
12630 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12631 output_operand_lossage ("invalid %%R value");
12632 else
9ebbca7d 12633 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12634 return;
9854d9ed
RK
12635
12636 case 's':
12637 /* Low 5 bits of 32 - value */
12638 if (! INT_P (x))
12639 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12640 else
12641 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12642 return;
9854d9ed 12643
a260abc9 12644 case 'S':
0ba1b2ff 12645 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12646 CONST_INT 32-bit mask is considered sign-extended so any
12647 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12648 if (! mask64_operand (x, DImode))
a260abc9
DE
12649 output_operand_lossage ("invalid %%S value");
12650
0ba1b2ff 12651 uval = INT_LOWPART (x);
a260abc9 12652
0ba1b2ff 12653 if (uval & 1) /* Clear Left */
a260abc9 12654 {
f099d360
GK
12655#if HOST_BITS_PER_WIDE_INT > 64
12656 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12657#endif
0ba1b2ff 12658 i = 64;
a260abc9 12659 }
0ba1b2ff 12660 else /* Clear Right */
a260abc9 12661 {
0ba1b2ff 12662 uval = ~uval;
f099d360
GK
12663#if HOST_BITS_PER_WIDE_INT > 64
12664 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12665#endif
0ba1b2ff 12666 i = 63;
a260abc9 12667 }
0ba1b2ff
AM
12668 while (uval != 0)
12669 --i, uval >>= 1;
37409796 12670 gcc_assert (i >= 0);
0ba1b2ff
AM
12671 fprintf (file, "%d", i);
12672 return;
a260abc9 12673
a3170dc6
AH
12674 case 't':
12675 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12676 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12677
12678 /* Bit 3 is OV bit. */
12679 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12680
12681 /* If we want bit 31, write a shift count of zero, not 32. */
12682 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12683 return;
12684
cccf3bdc
DE
12685 case 'T':
12686 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12687 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12688 && REGNO (x) != CTR_REGNO))
cccf3bdc 12689 output_operand_lossage ("invalid %%T value");
1de43f85 12690 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12691 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12692 else
12693 fputs ("ctr", file);
12694 return;
12695
9854d9ed 12696 case 'u':
802a0058 12697 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12698 if (! INT_P (x))
12699 output_operand_lossage ("invalid %%u value");
e2c953b6 12700 else
f676971a 12701 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12702 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12703 return;
12704
802a0058
MM
12705 case 'v':
12706 /* High-order 16 bits of constant for use in signed operand. */
12707 if (! INT_P (x))
12708 output_operand_lossage ("invalid %%v value");
e2c953b6 12709 else
134c32f6
DE
12710 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12711 (INT_LOWPART (x) >> 16) & 0xffff);
12712 return;
802a0058 12713
9854d9ed
RK
12714 case 'U':
12715 /* Print `u' if this has an auto-increment or auto-decrement. */
12716 if (GET_CODE (x) == MEM
12717 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12718 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12719 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12720 putc ('u', file);
9854d9ed 12721 return;
9878760c 12722
e0cd0770
JC
12723 case 'V':
12724 /* Print the trap code for this operand. */
12725 switch (GET_CODE (x))
12726 {
12727 case EQ:
12728 fputs ("eq", file); /* 4 */
12729 break;
12730 case NE:
12731 fputs ("ne", file); /* 24 */
12732 break;
12733 case LT:
12734 fputs ("lt", file); /* 16 */
12735 break;
12736 case LE:
12737 fputs ("le", file); /* 20 */
12738 break;
12739 case GT:
12740 fputs ("gt", file); /* 8 */
12741 break;
12742 case GE:
12743 fputs ("ge", file); /* 12 */
12744 break;
12745 case LTU:
12746 fputs ("llt", file); /* 2 */
12747 break;
12748 case LEU:
12749 fputs ("lle", file); /* 6 */
12750 break;
12751 case GTU:
12752 fputs ("lgt", file); /* 1 */
12753 break;
12754 case GEU:
12755 fputs ("lge", file); /* 5 */
12756 break;
12757 default:
37409796 12758 gcc_unreachable ();
e0cd0770
JC
12759 }
12760 break;
12761
9854d9ed
RK
12762 case 'w':
12763 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12764 normally. */
12765 if (INT_P (x))
f676971a 12766 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12767 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12768 else
12769 print_operand (file, x, 0);
9878760c
RK
12770 return;
12771
9854d9ed 12772 case 'W':
e2c953b6 12773 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12774 val = (GET_CODE (x) == CONST_INT
12775 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12776
12777 if (val < 0)
12778 i = -1;
9854d9ed 12779 else
e2c953b6
DE
12780 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12781 if ((val <<= 1) < 0)
12782 break;
12783
12784#if HOST_BITS_PER_WIDE_INT == 32
12785 if (GET_CODE (x) == CONST_INT && i >= 0)
12786 i += 32; /* zero-extend high-part was all 0's */
12787 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12788 {
12789 val = CONST_DOUBLE_LOW (x);
12790
37409796
NS
12791 gcc_assert (val);
12792 if (val < 0)
e2c953b6
DE
12793 --i;
12794 else
12795 for ( ; i < 64; i++)
12796 if ((val <<= 1) < 0)
12797 break;
12798 }
12799#endif
12800
12801 fprintf (file, "%d", i + 1);
9854d9ed 12802 return;
9878760c 12803
9854d9ed
RK
12804 case 'X':
12805 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12806 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12807 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12808 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12809 putc ('x', file);
9854d9ed 12810 return;
9878760c 12811
9854d9ed
RK
12812 case 'Y':
12813 /* Like 'L', for third word of TImode */
12814 if (GET_CODE (x) == REG)
fb5c67a7 12815 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12816 else if (GET_CODE (x) == MEM)
9878760c 12817 {
9854d9ed
RK
12818 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12819 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12820 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12821 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12822 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12823 else
d7624dc0 12824 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12825 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12826 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12827 reg_names[SMALL_DATA_REG]);
9878760c
RK
12828 }
12829 return;
f676971a 12830
9878760c 12831 case 'z':
b4ac57ab
RS
12832 /* X is a SYMBOL_REF. Write out the name preceded by a
12833 period and without any trailing data in brackets. Used for function
4d30c363
MM
12834 names. If we are configured for System V (or the embedded ABI) on
12835 the PowerPC, do not emit the period, since those systems do not use
12836 TOCs and the like. */
37409796 12837 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12838
c4ad648e
AM
12839 /* Mark the decl as referenced so that cgraph will output the
12840 function. */
9bf6462a 12841 if (SYMBOL_REF_DECL (x))
c4ad648e 12842 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12843
85b776df 12844 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12845 if (TARGET_MACHO)
12846 {
12847 const char *name = XSTR (x, 0);
a031e781 12848#if TARGET_MACHO
3b48085e 12849 if (MACHOPIC_INDIRECT
11abc112
MM
12850 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12851 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12852#endif
12853 assemble_name (file, name);
12854 }
85b776df 12855 else if (!DOT_SYMBOLS)
9739c90c 12856 assemble_name (file, XSTR (x, 0));
85b776df
AM
12857 else
12858 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12859 return;
12860
9854d9ed
RK
12861 case 'Z':
12862 /* Like 'L', for last word of TImode. */
12863 if (GET_CODE (x) == REG)
fb5c67a7 12864 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12865 else if (GET_CODE (x) == MEM)
12866 {
12867 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12868 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12869 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12870 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12871 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12872 else
d7624dc0 12873 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12874 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12875 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12876 reg_names[SMALL_DATA_REG]);
9854d9ed 12877 }
5c23c401 12878 return;
0ac081f6 12879
a3170dc6 12880 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12881 case 'y':
12882 {
12883 rtx tmp;
12884
37409796 12885 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12886
12887 tmp = XEXP (x, 0);
12888
90d3ff1c 12889 /* Ugly hack because %y is overloaded. */
8ef65e3d 12890 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12891 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12892 || GET_MODE (x) == TFmode
12893 || GET_MODE (x) == TImode))
a3170dc6
AH
12894 {
12895 /* Handle [reg]. */
12896 if (GET_CODE (tmp) == REG)
12897 {
12898 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12899 break;
12900 }
12901 /* Handle [reg+UIMM]. */
12902 else if (GET_CODE (tmp) == PLUS
12903 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12904 {
12905 int x;
12906
37409796 12907 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12908
12909 x = INTVAL (XEXP (tmp, 1));
12910 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12911 break;
12912 }
12913
12914 /* Fall through. Must be [reg+reg]. */
12915 }
850e8d3d
DN
12916 if (TARGET_ALTIVEC
12917 && GET_CODE (tmp) == AND
12918 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12919 && INTVAL (XEXP (tmp, 1)) == -16)
12920 tmp = XEXP (tmp, 0);
0ac081f6 12921 if (GET_CODE (tmp) == REG)
c62f2db5 12922 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12923 else
0ac081f6 12924 {
cb8cc791
AP
12925 if (GET_CODE (tmp) != PLUS
12926 || !REG_P (XEXP (tmp, 0))
12927 || !REG_P (XEXP (tmp, 1)))
12928 {
12929 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12930 break;
12931 }
bb8df8a6 12932
0ac081f6
AH
12933 if (REGNO (XEXP (tmp, 0)) == 0)
12934 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12935 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12936 else
12937 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12938 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12939 }
0ac081f6
AH
12940 break;
12941 }
f676971a 12942
9878760c
RK
12943 case 0:
12944 if (GET_CODE (x) == REG)
12945 fprintf (file, "%s", reg_names[REGNO (x)]);
12946 else if (GET_CODE (x) == MEM)
12947 {
12948 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12949 know the width from the mode. */
12950 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12951 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12952 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12953 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12954 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12955 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12956 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12957 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12958 else
a54d04b7 12959 output_address (XEXP (x, 0));
9878760c
RK
12960 }
12961 else
a54d04b7 12962 output_addr_const (file, x);
a85d226b 12963 return;
9878760c 12964
c4501e62
JJ
12965 case '&':
12966 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12967 return;
12968
9878760c
RK
12969 default:
12970 output_operand_lossage ("invalid %%xn code");
12971 }
12972}
12973\f
12974/* Print the address of an operand. */
12975
12976void
a2369ed3 12977print_operand_address (FILE *file, rtx x)
9878760c
RK
12978{
12979 if (GET_CODE (x) == REG)
4697a36c 12980 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12981 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12982 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12983 {
12984 output_addr_const (file, x);
ba5e43aa 12985 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12986 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12987 reg_names[SMALL_DATA_REG]);
37409796
NS
12988 else
12989 gcc_assert (!TARGET_TOC);
9878760c
RK
12990 }
12991 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12992 {
9024f4b8 12993 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12994 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12995 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12996 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12997 else
4697a36c
MM
12998 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12999 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
13000 }
13001 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
13002 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
13003 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
13004#if TARGET_ELF
13005 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 13006 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
13007 {
13008 output_addr_const (file, XEXP (x, 1));
13009 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
13010 }
c859cda6
DJ
13011#endif
13012#if TARGET_MACHO
13013 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 13014 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
13015 {
13016 fprintf (file, "lo16(");
13017 output_addr_const (file, XEXP (x, 1));
13018 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
13019 }
3cb999d8 13020#endif
4d588c14 13021 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 13022 {
2e4316da 13023 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
13024 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
13025 }
9878760c 13026 else
37409796 13027 gcc_unreachable ();
9878760c
RK
13028}
13029\f
2e4316da
RS
13030/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
13031
13032bool
13033rs6000_output_addr_const_extra (FILE *file, rtx x)
13034{
13035 if (GET_CODE (x) == UNSPEC)
13036 switch (XINT (x, 1))
13037 {
13038 case UNSPEC_TOCREL:
13039 x = XVECEXP (x, 0, 0);
13040 gcc_assert (GET_CODE (x) == SYMBOL_REF);
13041 output_addr_const (file, x);
13042 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
13043 {
13044 putc ('-', file);
13045 assemble_name (file, toc_label_name);
13046 }
13047 else if (TARGET_ELF)
13048 fputs ("@toc", file);
13049 return true;
08a6a74b
RS
13050
13051#if TARGET_MACHO
13052 case UNSPEC_MACHOPIC_OFFSET:
13053 output_addr_const (file, XVECEXP (x, 0, 0));
13054 putc ('-', file);
13055 machopic_output_function_base_name (file);
13056 return true;
13057#endif
2e4316da
RS
13058 }
13059 return false;
13060}
13061\f
88cad84b 13062/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
13063 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
13064 is defined. It also needs to handle DI-mode objects on 64-bit
13065 targets. */
13066
13067static bool
a2369ed3 13068rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 13069{
f4f4921e 13070#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 13071 /* Special handling for SI values. */
84dcde01 13072 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 13073 {
301d03af 13074 static int recurse = 0;
f676971a 13075
301d03af
RS
13076 /* For -mrelocatable, we mark all addresses that need to be fixed up
13077 in the .fixup section. */
13078 if (TARGET_RELOCATABLE
d6b5193b
RS
13079 && in_section != toc_section
13080 && in_section != text_section
4325ca90 13081 && !unlikely_text_section_p (in_section)
301d03af
RS
13082 && !recurse
13083 && GET_CODE (x) != CONST_INT
13084 && GET_CODE (x) != CONST_DOUBLE
13085 && CONSTANT_P (x))
13086 {
13087 char buf[256];
13088
13089 recurse = 1;
13090 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
13091 fixuplabelno++;
13092 ASM_OUTPUT_LABEL (asm_out_file, buf);
13093 fprintf (asm_out_file, "\t.long\t(");
13094 output_addr_const (asm_out_file, x);
13095 fprintf (asm_out_file, ")@fixup\n");
13096 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
13097 ASM_OUTPUT_ALIGN (asm_out_file, 2);
13098 fprintf (asm_out_file, "\t.long\t");
13099 assemble_name (asm_out_file, buf);
13100 fprintf (asm_out_file, "\n\t.previous\n");
13101 recurse = 0;
13102 return true;
13103 }
13104 /* Remove initial .'s to turn a -mcall-aixdesc function
13105 address into the address of the descriptor, not the function
13106 itself. */
13107 else if (GET_CODE (x) == SYMBOL_REF
13108 && XSTR (x, 0)[0] == '.'
13109 && DEFAULT_ABI == ABI_AIX)
13110 {
13111 const char *name = XSTR (x, 0);
13112 while (*name == '.')
13113 name++;
13114
13115 fprintf (asm_out_file, "\t.long\t%s\n", name);
13116 return true;
13117 }
13118 }
f4f4921e 13119#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
13120 return default_assemble_integer (x, size, aligned_p);
13121}
93638d7a
AM
13122
13123#ifdef HAVE_GAS_HIDDEN
13124/* Emit an assembler directive to set symbol visibility for DECL to
13125 VISIBILITY_TYPE. */
13126
5add3202 13127static void
a2369ed3 13128rs6000_assemble_visibility (tree decl, int vis)
93638d7a 13129{
93638d7a
AM
13130 /* Functions need to have their entry point symbol visibility set as
13131 well as their descriptor symbol visibility. */
85b776df
AM
13132 if (DEFAULT_ABI == ABI_AIX
13133 && DOT_SYMBOLS
13134 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 13135 {
25fdb4dc 13136 static const char * const visibility_types[] = {
c4ad648e 13137 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
13138 };
13139
13140 const char *name, *type;
93638d7a
AM
13141
13142 name = ((* targetm.strip_name_encoding)
13143 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 13144 type = visibility_types[vis];
93638d7a 13145
25fdb4dc
RH
13146 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
13147 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 13148 }
25fdb4dc
RH
13149 else
13150 default_assemble_visibility (decl, vis);
93638d7a
AM
13151}
13152#endif
301d03af 13153\f
39a10a29 13154enum rtx_code
a2369ed3 13155rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
13156{
13157 /* Reversal of FP compares takes care -- an ordered compare
13158 becomes an unordered compare and vice versa. */
f676971a 13159 if (mode == CCFPmode
bc9ec0e0
GK
13160 && (!flag_finite_math_only
13161 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
13162 || code == UNEQ || code == LTGT))
bab6226b 13163 return reverse_condition_maybe_unordered (code);
39a10a29 13164 else
bab6226b 13165 return reverse_condition (code);
39a10a29
GK
13166}
13167
39a10a29
GK
13168/* Generate a compare for the comparison CMP, whose operands have mode MODE.
13169 Return a brand-new rtx that represents the result of the compare. */
a4f6c312 13170
39a10a29 13171static rtx
f90b7a5a 13172rs6000_generate_compare (rtx cmp, enum machine_mode mode)
39a10a29
GK
13173{
13174 enum machine_mode comp_mode;
13175 rtx compare_result;
f90b7a5a
PB
13176 enum rtx_code code = GET_CODE (cmp);
13177 rtx op0 = XEXP (cmp, 0);
13178 rtx op1 = XEXP (cmp, 1);
39a10a29 13179
f90b7a5a 13180 if (FLOAT_MODE_P (mode))
39a10a29
GK
13181 comp_mode = CCFPmode;
13182 else if (code == GTU || code == LTU
c4ad648e 13183 || code == GEU || code == LEU)
39a10a29 13184 comp_mode = CCUNSmode;
60934f9c 13185 else if ((code == EQ || code == NE)
f90b7a5a
PB
13186 && GET_CODE (op0) == SUBREG
13187 && GET_CODE (op1) == SUBREG
13188 && SUBREG_PROMOTED_UNSIGNED_P (op0)
13189 && SUBREG_PROMOTED_UNSIGNED_P (op1))
60934f9c
NS
13190 /* These are unsigned values; perhaps there will be a later
13191 ordering compare that can be shared with this one.
13192 Unfortunately we cannot detect the signedness of the operands
13193 for non-subregs. */
13194 comp_mode = CCUNSmode;
39a10a29
GK
13195 else
13196 comp_mode = CCmode;
13197
13198 /* First, the compare. */
13199 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 13200
cef6b86c 13201 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 13202 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
f90b7a5a 13203 && FLOAT_MODE_P (mode))
a3170dc6 13204 {
64022b5d 13205 rtx cmp, or_result, compare_result2;
f90b7a5a 13206 enum machine_mode op_mode = GET_MODE (op0);
4d4cbc0e
AH
13207
13208 if (op_mode == VOIDmode)
f90b7a5a 13209 op_mode = GET_MODE (op1);
a3170dc6 13210
cef6b86c
EB
13211 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
13212 This explains the following mess. */
423c1189 13213
a3170dc6
AH
13214 switch (code)
13215 {
423c1189 13216 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
13217 switch (op_mode)
13218 {
13219 case SFmode:
1cdc0d8f 13220 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13221 ? gen_tstsfeq_gpr (compare_result, op0, op1)
13222 : gen_cmpsfeq_gpr (compare_result, op0, op1);
37409796
NS
13223 break;
13224
13225 case DFmode:
1cdc0d8f 13226 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13227 ? gen_tstdfeq_gpr (compare_result, op0, op1)
13228 : gen_cmpdfeq_gpr (compare_result, op0, op1);
37409796
NS
13229 break;
13230
17caeff2 13231 case TFmode:
1cdc0d8f 13232 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13233 ? gen_tsttfeq_gpr (compare_result, op0, op1)
13234 : gen_cmptfeq_gpr (compare_result, op0, op1);
17caeff2
JM
13235 break;
13236
37409796
NS
13237 default:
13238 gcc_unreachable ();
13239 }
a3170dc6 13240 break;
bb8df8a6 13241
423c1189 13242 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
13243 switch (op_mode)
13244 {
13245 case SFmode:
1cdc0d8f 13246 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13247 ? gen_tstsfgt_gpr (compare_result, op0, op1)
13248 : gen_cmpsfgt_gpr (compare_result, op0, op1);
37409796 13249 break;
bb8df8a6 13250
37409796 13251 case DFmode:
1cdc0d8f 13252 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13253 ? gen_tstdfgt_gpr (compare_result, op0, op1)
13254 : gen_cmpdfgt_gpr (compare_result, op0, op1);
37409796
NS
13255 break;
13256
17caeff2 13257 case TFmode:
1cdc0d8f 13258 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13259 ? gen_tsttfgt_gpr (compare_result, op0, op1)
13260 : gen_cmptfgt_gpr (compare_result, op0, op1);
17caeff2
JM
13261 break;
13262
37409796
NS
13263 default:
13264 gcc_unreachable ();
13265 }
a3170dc6 13266 break;
bb8df8a6 13267
423c1189 13268 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
13269 switch (op_mode)
13270 {
13271 case SFmode:
1cdc0d8f 13272 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13273 ? gen_tstsflt_gpr (compare_result, op0, op1)
13274 : gen_cmpsflt_gpr (compare_result, op0, op1);
37409796 13275 break;
bb8df8a6 13276
37409796 13277 case DFmode:
1cdc0d8f 13278 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13279 ? gen_tstdflt_gpr (compare_result, op0, op1)
13280 : gen_cmpdflt_gpr (compare_result, op0, op1);
37409796
NS
13281 break;
13282
17caeff2 13283 case TFmode:
1cdc0d8f 13284 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13285 ? gen_tsttflt_gpr (compare_result, op0, op1)
13286 : gen_cmptflt_gpr (compare_result, op0, op1);
17caeff2
JM
13287 break;
13288
37409796
NS
13289 default:
13290 gcc_unreachable ();
13291 }
a3170dc6 13292 break;
4d4cbc0e 13293 default:
37409796 13294 gcc_unreachable ();
a3170dc6
AH
13295 }
13296
13297 /* Synthesize LE and GE from LT/GT || EQ. */
13298 if (code == LE || code == GE || code == LEU || code == GEU)
13299 {
a3170dc6
AH
13300 emit_insn (cmp);
13301
13302 switch (code)
13303 {
13304 case LE: code = LT; break;
13305 case GE: code = GT; break;
13306 case LEU: code = LT; break;
13307 case GEU: code = GT; break;
37409796 13308 default: gcc_unreachable ();
a3170dc6
AH
13309 }
13310
a3170dc6
AH
13311 compare_result2 = gen_reg_rtx (CCFPmode);
13312
13313 /* Do the EQ. */
37409796
NS
13314 switch (op_mode)
13315 {
13316 case SFmode:
1cdc0d8f 13317 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13318 ? gen_tstsfeq_gpr (compare_result2, op0, op1)
13319 : gen_cmpsfeq_gpr (compare_result2, op0, op1);
37409796
NS
13320 break;
13321
13322 case DFmode:
1cdc0d8f 13323 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13324 ? gen_tstdfeq_gpr (compare_result2, op0, op1)
13325 : gen_cmpdfeq_gpr (compare_result2, op0, op1);
37409796
NS
13326 break;
13327
17caeff2 13328 case TFmode:
1cdc0d8f 13329 cmp = (flag_finite_math_only && !flag_trapping_math)
f90b7a5a
PB
13330 ? gen_tsttfeq_gpr (compare_result2, op0, op1)
13331 : gen_cmptfeq_gpr (compare_result2, op0, op1);
17caeff2
JM
13332 break;
13333
37409796
NS
13334 default:
13335 gcc_unreachable ();
13336 }
a3170dc6
AH
13337 emit_insn (cmp);
13338
a3170dc6 13339 /* OR them together. */
64022b5d
AH
13340 or_result = gen_reg_rtx (CCFPmode);
13341 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
13342 compare_result2);
a3170dc6
AH
13343 compare_result = or_result;
13344 code = EQ;
13345 }
13346 else
13347 {
a3170dc6 13348 if (code == NE || code == LTGT)
a3170dc6 13349 code = NE;
423c1189
AH
13350 else
13351 code = EQ;
a3170dc6
AH
13352 }
13353
13354 emit_insn (cmp);
13355 }
13356 else
de17c25f
DE
13357 {
13358 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
13359 CLOBBERs to match cmptf_internal2 pattern. */
13360 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
f90b7a5a 13361 && GET_MODE (op0) == TFmode
602ea4d3 13362 && !TARGET_IEEEQUAD
de17c25f
DE
13363 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
13364 emit_insn (gen_rtx_PARALLEL (VOIDmode,
13365 gen_rtvec (9,
13366 gen_rtx_SET (VOIDmode,
13367 compare_result,
f90b7a5a 13368 gen_rtx_COMPARE (comp_mode, op0, op1)),
de17c25f
DE
13369 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13370 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13371 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13372 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13373 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13374 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13375 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13376 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
f90b7a5a
PB
13377 else if (GET_CODE (op1) == UNSPEC
13378 && XINT (op1, 1) == UNSPEC_SP_TEST)
3aebbe5f 13379 {
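 /* Stack-protector check: defer to the stack_protect_test patterns,
 which compare the live guard value against its saved copy and leave
 the result in a CCEQ register.  */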
c24151ce 13380 rtx op1b = XVECEXP (op1, 0, 0);
3aebbe5f
JJ
13381 comp_mode = CCEQmode;
13382 compare_result = gen_reg_rtx (CCEQmode);
13383 if (TARGET_64BIT)
c24151ce 13384 emit_insn (gen_stack_protect_testdi (compare_result, op0, op1b));
3aebbe5f 13385 else
c24151ce 13386 emit_insn (gen_stack_protect_testsi (compare_result, op0, op1b));
3aebbe5f 13387 }
de17c25f
DE
13388 else
13389 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
f90b7a5a 13390 gen_rtx_COMPARE (comp_mode, op0, op1)));
de17c25f 13391 }
f676971a 13392
ca5adc63 13393 /* Some kinds of FP comparisons need an OR operation;
e7108df9 13394 under flag_finite_math_only we don't bother. */
f90b7a5a 13395 if (FLOAT_MODE_P (mode)
e7108df9 13396 && !flag_finite_math_only
8ef65e3d 13397 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13398 && (code == LE || code == GE
13399 || code == UNEQ || code == LTGT
13400 || code == UNGT || code == UNLT))
13401 {
13402 enum rtx_code or1, or2;
13403 rtx or1_rtx, or2_rtx, compare2_rtx;
13404 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13405
39a10a29
GK
13406 switch (code)
13407 {
13408 case LE: or1 = LT; or2 = EQ; break;
13409 case GE: or1 = GT; or2 = EQ; break;
13410 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13411 case LTGT: or1 = LT; or2 = GT; break;
13412 case UNGT: or1 = UNORDERED; or2 = GT; break;
13413 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13414 default: gcc_unreachable ();
39a10a29
GK
13415 }
13416 validate_condition_mode (or1, comp_mode);
13417 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13418 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13419 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13420 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13421 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13422 const_true_rtx);
13423 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13424
13425 compare_result = or_result;
13426 code = EQ;
13427 }
13428
13429 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13430
1c563bed 13431 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13432}
13433
13434
13435/* Emit the RTL for an sCOND pattern. */
13436
13437void
f90b7a5a 13438rs6000_emit_sCOND (enum machine_mode mode, rtx operands[])
39a10a29
GK
13439{
13440 rtx condition_rtx;
13441 enum machine_mode op_mode;
b7053a3f 13442 enum rtx_code cond_code;
f90b7a5a 13443 rtx result = operands[0];
39a10a29 13444
f90b7a5a 13445 condition_rtx = rs6000_generate_compare (operands[1], mode);
b7053a3f
GK
13446 cond_code = GET_CODE (condition_rtx);
13447
f90b7a5a 13448 if (FLOAT_MODE_P (mode)
423c1189
AH
13449 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13450 {
13451 rtx t;
13452
13453 PUT_MODE (condition_rtx, SImode);
13454 t = XEXP (condition_rtx, 0);
13455
37409796 13456 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13457
13458 if (cond_code == NE)
64022b5d 13459 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13460
64022b5d 13461 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13462 return;
13463 }
13464
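 /* These codes correspond to a CR bit being clear rather than set, so
 materialize the reversed comparison in a fresh CCEQ register and then
 test that register for EQ; this inverts the sense of the original
 comparison without needing a direct pattern for it.  */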
b7053a3f
GK
13465 if (cond_code == NE
13466 || cond_code == GE || cond_code == LE
13467 || cond_code == GEU || cond_code == LEU
13468 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13469 {
13470 rtx not_result = gen_reg_rtx (CCEQmode);
13471 rtx not_op, rev_cond_rtx;
13472 enum machine_mode cc_mode;
f676971a 13473
b7053a3f
GK
13474 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13475
1c563bed 13476 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13477 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13478 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13479 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13480 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13481 }
39a10a29 13482
f90b7a5a 13483 op_mode = GET_MODE (XEXP (operands[1], 0));
39a10a29 13484 if (op_mode == VOIDmode)
f90b7a5a 13485 op_mode = GET_MODE (XEXP (operands[1], 1));
39a10a29 13486
f90b7a5a 13487 if (TARGET_POWERPC64 && (op_mode == DImode || FLOAT_MODE_P (mode)))
39a10a29
GK
13488 {
13489 PUT_MODE (condition_rtx, DImode);
13490 convert_move (result, condition_rtx, 0);
13491 }
13492 else
13493 {
13494 PUT_MODE (condition_rtx, SImode);
13495 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13496 }
13497}
13498
39a10a29
GK
13499/* Emit a conditional branch.  OPERANDS[0] is the comparison, whose
 operands are compared in MODE, and OPERANDS[3] is the label to branch to. */
13500
13501void
f90b7a5a 13502rs6000_emit_cbranch (enum machine_mode mode, rtx operands[])
39a10a29
GK
13503{
13504 rtx condition_rtx, loc_ref;
13505
f90b7a5a
PB
13506 condition_rtx = rs6000_generate_compare (operands[0], mode);
13507 loc_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
39a10a29
GK
13508 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13509 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13510 loc_ref, pc_rtx)));
13511}
13512
12a4e8c5
GK
13513/* Return the string to output a conditional branch to LABEL, which is
13514 the operand number of the label, or -1 if the branch is really a
f676971a 13515 conditional return.
12a4e8c5
GK
13516
13517 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13518 condition code register and its mode specifies what kind of
13519 comparison we made.
13520
a0ab749a 13521 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13522
13523 INSN is the insn. */
13524
13525char *
a2369ed3 13526output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13527{
13528 static char string[64];
13529 enum rtx_code code = GET_CODE (op);
13530 rtx cc_reg = XEXP (op, 0);
13531 enum machine_mode mode = GET_MODE (cc_reg);
13532 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13533 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13534 int really_reversed = reversed ^ need_longbranch;
13535 char *s = string;
13536 const char *ccode;
13537 const char *pred;
13538 rtx note;
13539
39a10a29
GK
13540 validate_condition_mode (code, mode);
13541
13542 /* Work out which way this really branches. We could use
13543 reverse_condition_maybe_unordered here always but this
13544 makes the resulting assembler clearer. */
12a4e8c5 13545 if (really_reversed)
de40e1df
DJ
13546 {
13547 /* Reversal of FP compares requires care -- an ordered compare
13548 becomes an unordered compare and vice versa. */
13549 if (mode == CCFPmode)
13550 code = reverse_condition_maybe_unordered (code);
13551 else
13552 code = reverse_condition (code);
13553 }
12a4e8c5 13554
8ef65e3d 13555 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13556 {
13557 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13558 to the GT bit. */
37409796
NS
13559 switch (code)
13560 {
13561 case EQ:
13562 /* Opposite of GT. */
13563 code = GT;
13564 break;
13565
13566 case NE:
13567 code = UNLE;
13568 break;
13569
13570 default:
13571 gcc_unreachable ();
13572 }
a3170dc6
AH
13573 }
13574
39a10a29 13575 switch (code)
12a4e8c5
GK
13576 {
13577 /* Not all of these are actually distinct opcodes, but
13578 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13579 case NE: case LTGT:
13580 ccode = "ne"; break;
13581 case EQ: case UNEQ:
13582 ccode = "eq"; break;
f676971a 13583 case GE: case GEU:
50a0b056 13584 ccode = "ge"; break;
f676971a 13585 case GT: case GTU: case UNGT:
50a0b056 13586 ccode = "gt"; break;
f676971a 13587 case LE: case LEU:
50a0b056 13588 ccode = "le"; break;
f676971a 13589 case LT: case LTU: case UNLT:
50a0b056 13590 ccode = "lt"; break;
12a4e8c5
GK
13591 case UNORDERED: ccode = "un"; break;
13592 case ORDERED: ccode = "nu"; break;
13593 case UNGE: ccode = "nl"; break;
13594 case UNLE: ccode = "ng"; break;
13595 default:
37409796 13596 gcc_unreachable ();
12a4e8c5 13597 }
f676971a
EC
13598
13599 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13600 The old mnemonics don't have a way to specify this information. */
f4857b9b 13601 pred = "";
12a4e8c5
GK
13602 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13603 if (note != NULL_RTX)
13604 {
13605 /* PROB is the difference from 50%. */
13606 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13607
13608 /* Only hint for highly probable/improbable branches on newer
13609 cpus as static prediction overrides processor dynamic
13610 prediction. For older cpus we may as well always hint, but
13611 assume not taken for branches that are very close to 50% as a
13612 mispredicted taken branch is more expensive than a
f676971a 13613 mispredicted not-taken branch. */
ec507f2d 13614 if (rs6000_always_hint
2c9e13f3
JH
13615 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13616 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13617 {
13618 if (abs (prob) > REG_BR_PROB_BASE / 20
13619 && ((prob > 0) ^ need_longbranch))
c4ad648e 13620 pred = "+";
f4857b9b
AM
13621 else
13622 pred = "-";
13623 }
12a4e8c5 13624 }
12a4e8c5
GK
13625
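 /* The "{old|new}" braces in the templates below select between the
 original POWER mnemonics and the PowerPC mnemonics.  A NULL LABEL
 means the branch is really a conditional return, so the
 branch-to-link-register form is used.  */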
13626 if (label == NULL)
94a54f47 13627 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13628 else
94a54f47 13629 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13630
37c67319 13631 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13632 Assume they'd only be the first character.... */
37c67319
GK
13633 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13634 *s++ = '%';
94a54f47 13635 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13636
13637 if (label != NULL)
13638 {
13639 /* If the branch distance was too far, we may have to use an
13640 unconditional branch to go the distance. */
13641 if (need_longbranch)
44518ddd 13642 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13643 else
13644 s += sprintf (s, ",%s", label);
13645 }
13646
13647 return string;
13648}
50a0b056 13649
64022b5d 13650/* Return the string to flip the GT bit on a CR. */
423c1189 13651char *
64022b5d 13652output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13653{
13654 static char string[64];
13655 int a, b;
13656
37409796
NS
13657 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13658 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13659
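 /* Each CR field is four bits wide, and the GT bit is bit 1 within
 its field, hence the "+ 1" below.  */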
64022b5d
AH
13660 /* GT bit. */
13661 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13662 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13663
13664 sprintf (string, "crnot %d,%d", a, b);
13665 return string;
13666}
13667
21213b4c
DP
13668/* Return the insn index of the vector compare instruction for the given CODE,
13669 DEST_MODE and OP_MODE. Return INSN_NOT_AVAILABLE if no suitable insn
13670 exists. */
13671
13672static int
94ff898d 13673get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13674 enum machine_mode dest_mode,
13675 enum machine_mode op_mode)
13676{
13677 if (!TARGET_ALTIVEC)
13678 return INSN_NOT_AVAILABLE;
13679
13680 switch (code)
13681 {
13682 case EQ:
13683 if (dest_mode == V16QImode && op_mode == V16QImode)
13684 return UNSPEC_VCMPEQUB;
13685 if (dest_mode == V8HImode && op_mode == V8HImode)
13686 return UNSPEC_VCMPEQUH;
13687 if (dest_mode == V4SImode && op_mode == V4SImode)
13688 return UNSPEC_VCMPEQUW;
13689 if (dest_mode == V4SImode && op_mode == V4SFmode)
13690 return UNSPEC_VCMPEQFP;
13691 break;
13692 case GE:
13693 if (dest_mode == V4SImode && op_mode == V4SFmode)
13694 return UNSPEC_VCMPGEFP;
 /* There is no single AltiVec compare for integer GE; break here so
 rs6000_emit_vector_compare synthesizes it as GT OR EQ instead of
 falling through to the GT cases. */
 break;
13695 case GT:
13696 if (dest_mode == V16QImode && op_mode == V16QImode)
13697 return UNSPEC_VCMPGTSB;
13698 if (dest_mode == V8HImode && op_mode == V8HImode)
13699 return UNSPEC_VCMPGTSH;
13700 if (dest_mode == V4SImode && op_mode == V4SImode)
13701 return UNSPEC_VCMPGTSW;
13702 if (dest_mode == V4SImode && op_mode == V4SFmode)
13703 return UNSPEC_VCMPGTFP;
13704 break;
13705 case GTU:
13706 if (dest_mode == V16QImode && op_mode == V16QImode)
13707 return UNSPEC_VCMPGTUB;
13708 if (dest_mode == V8HImode && op_mode == V8HImode)
13709 return UNSPEC_VCMPGTUH;
13710 if (dest_mode == V4SImode && op_mode == V4SImode)
13711 return UNSPEC_VCMPGTUW;
13712 break;
13713 default:
13714 break;
13715 }
13716 return INSN_NOT_AVAILABLE;
13717}
13718
13719/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13720 DMODE is the expected destination mode. This is a recursive function. */
13721
13722static rtx
13723rs6000_emit_vector_compare (enum rtx_code rcode,
13724 rtx op0, rtx op1,
13725 enum machine_mode dmode)
13726{
13727 int vec_cmp_insn;
13728 rtx mask;
13729 enum machine_mode dest_mode;
13730 enum machine_mode op_mode = GET_MODE (op1);
13731
37409796
NS
13732 gcc_assert (TARGET_ALTIVEC);
13733 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13734
13735 /* Floating point vector compare instructions use a V4SImode destination.
13736 Move the result to the appropriate mode later. */
13737 if (dmode == V4SFmode)
13738 dest_mode = V4SImode;
13739 else
13740 dest_mode = dmode;
13741
13742 mask = gen_reg_rtx (dest_mode);
13743 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13744
13745 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13746 {
13747 bool swap_operands = false;
13748 bool try_again = false;
13749 switch (rcode)
13750 {
13751 case LT:
13752 rcode = GT;
13753 swap_operands = true;
13754 try_again = true;
13755 break;
13756 case LTU:
13757 rcode = GTU;
13758 swap_operands = true;
13759 try_again = true;
13760 break;
13761 case NE:
370df7db
JC
13762 case UNLE:
13763 case UNLT:
13764 case UNGE:
13765 case UNGT:
13766 /* Invert condition and try again.
13767 e.g., A != B becomes ~(A==B). */
21213b4c 13768 {
370df7db 13769 enum rtx_code rev_code;
21213b4c 13770 enum insn_code nor_code;
d1123cde 13771 rtx eq_rtx;
370df7db
JC
13772
13773 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13774 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13775 dest_mode);
94ff898d 13776
166cdb08 13777 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13778 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13779 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13780
13781 if (dmode != dest_mode)
13782 {
13783 rtx temp = gen_reg_rtx (dest_mode);
13784 convert_move (temp, mask, 0);
13785 return temp;
13786 }
13787 return mask;
13788 }
13789 break;
13790 case GE:
13791 case GEU:
13792 case LE:
13793 case LEU:
13794 /* Try GT/GTU/LT/LTU OR EQ */
13795 {
13796 rtx c_rtx, eq_rtx;
13797 enum insn_code ior_code;
13798 enum rtx_code new_code;
13799
37409796
NS
13800 switch (rcode)
13801 {
13802 case GE:
13803 new_code = GT;
13804 break;
13805
13806 case GEU:
13807 new_code = GTU;
13808 break;
13809
13810 case LE:
13811 new_code = LT;
13812 break;
13813
13814 case LEU:
13815 new_code = LTU;
13816 break;
13817
13818 default:
13819 gcc_unreachable ();
13820 }
21213b4c
DP
13821
13822 c_rtx = rs6000_emit_vector_compare (new_code,
13823 op0, op1, dest_mode);
13824 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13825 dest_mode);
13826
166cdb08 13827 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13828 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13829 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13830 if (dmode != dest_mode)
13831 {
13832 rtx temp = gen_reg_rtx (dest_mode);
13833 convert_move (temp, mask, 0);
13834 return temp;
13835 }
13836 return mask;
13837 }
13838 break;
13839 default:
37409796 13840 gcc_unreachable ();
21213b4c
DP
13841 }
13842
13843 if (try_again)
13844 {
13845 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13846 /* You only get two chances. */
13847 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13848 }
13849
13850 if (swap_operands)
13851 {
13852 rtx tmp;
13853 tmp = op0;
13854 op0 = op1;
13855 op1 = tmp;
13856 }
13857 }
13858
915167f5
GK
13859 emit_insn (gen_rtx_SET (VOIDmode, mask,
13860 gen_rtx_UNSPEC (dest_mode,
13861 gen_rtvec (2, op0, op1),
13862 vec_cmp_insn)));
21213b4c
DP
13863 if (dmode != dest_mode)
13864 {
13865 rtx temp = gen_reg_rtx (dest_mode);
13866 convert_move (temp, mask, 0);
13867 return temp;
13868 }
13869 return mask;
13870}
13871
13872/* Return the vector select instruction for MODE. Return INSN_NOT_AVAILABLE
13873 if no valid insn exists for the given mode. */
13874
13875static int
13876get_vsel_insn (enum machine_mode mode)
13877{
13878 switch (mode)
13879 {
13880 case V4SImode:
13881 return UNSPEC_VSEL4SI;
13882 break;
13883 case V4SFmode:
13884 return UNSPEC_VSEL4SF;
13885 break;
13886 case V8HImode:
13887 return UNSPEC_VSEL8HI;
13888 break;
13889 case V16QImode:
13890 return UNSPEC_VSEL16QI;
13891 break;
13892 default:
13893 return INSN_NOT_AVAILABLE;
13894 break;
13895 }
13896 return INSN_NOT_AVAILABLE;
13897}
13898
13899/* Emit vector select insn where DEST is destination using
13900 operands OP1, OP2 and MASK. */
13901
13902static void
13903rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13904{
13905 rtx t, temp;
13906 enum machine_mode dest_mode = GET_MODE (dest);
13907 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13908
13909 temp = gen_reg_rtx (dest_mode);
94ff898d 13910
bb8df8a6 13911 /* For each vector element, select op1 when the mask is 1, otherwise
19f1ebc7 13912 select op2. */
915167f5
GK
13913 t = gen_rtx_SET (VOIDmode, temp,
13914 gen_rtx_UNSPEC (dest_mode,
13915 gen_rtvec (3, op2, op1, mask),
13916 vsel_insn_index));
21213b4c
DP
13917 emit_insn (t);
13918 emit_move_insn (dest, temp);
13919 return;
13920}
13921
94ff898d 13922/* Emit vector conditional expression.
21213b4c
DP
13923 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13924 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
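/* Illustrative sketch (an assumption about the emitted code, not text
 from the original sources): for a V4SF conditional such as
 (a > b ? x : y) the expansion built here amounts to roughly

     vcmpgtfp  mask, a, b        # per-element compare builds the mask
     vsel      dest, y, x, mask  # pick X where the mask is all ones

 with the vcmpequ, vcmpgts and vcmpgtu families used for the integer
 modes.  */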
13925
13926int
13927rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13928 rtx cond, rtx cc_op0, rtx cc_op1)
13929{
13930 enum machine_mode dest_mode = GET_MODE (dest);
13931 enum rtx_code rcode = GET_CODE (cond);
13932 rtx mask;
13933
13934 if (!TARGET_ALTIVEC)
13935 return 0;
13936
13937 /* Get the vector mask for the given relational operations. */
13938 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13939
13940 rs6000_emit_vector_select (dest, op1, op2, mask);
13941
13942 return 1;
13943}
13944
50a0b056
GK
13945/* Emit a conditional move: move TRUE_COND to DEST if the comparison OP
13946 evaluates to nonzero/true, and FALSE_COND if it
13947 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13948
50a0b056 13949int
a2369ed3 13950rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13951{
13952 enum rtx_code code = GET_CODE (op);
f90b7a5a
PB
13953 rtx op0 = XEXP (op, 0);
13954 rtx op1 = XEXP (op, 1);
50a0b056 13955 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13956 enum machine_mode compare_mode = GET_MODE (op0);
13957 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13958 rtx temp;
add2402e 13959 bool is_against_zero;
50a0b056 13960
a3c9585f 13961 /* These modes should always match. */
a3170dc6
AH
13962 if (GET_MODE (op1) != compare_mode
13963 /* In the isel case however, we can use a compare immediate, so
13964 op1 may be a small constant. */
13965 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13966 return 0;
178c3eff 13967 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13968 return 0;
178c3eff 13969 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13970 return 0;
13971
50a0b056 13972 /* First, work out if the hardware can do this at all, or
a3c9585f 13973 if it's too slow.... */
f90b7a5a 13974 if (!FLOAT_MODE_P (compare_mode))
a3170dc6
AH
13975 {
13976 if (TARGET_ISEL)
13977 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13978 return 0;
13979 }
8ef65e3d 13980 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13981 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13982 return 0;
50a0b056 13983
add2402e 13984 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13985
add2402e
GK
13986 /* A floating-point subtract might overflow, underflow, or produce
13987 an inexact result, thus changing the floating-point flags, so it
13988 can't be generated if we care about that. It's safe if one side
13989 of the construct is zero, since then no subtract will be
13990 generated. */
ebb109ad 13991 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13992 && flag_trapping_math && ! is_against_zero)
13993 return 0;
13994
50a0b056
GK
13995 /* Eliminate half of the comparisons by switching operands, this
13996 makes the remaining code simpler. */
13997 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13998 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13999 {
14000 code = reverse_condition_maybe_unordered (code);
14001 temp = true_cond;
14002 true_cond = false_cond;
14003 false_cond = temp;
14004 }
14005
14006 /* UNEQ and LTGT take four instructions for a comparison with zero,
14007 so it'll probably be faster to use a branch here too. */
bc9ec0e0 14008 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 14009 return 0;
f676971a 14010
50a0b056
GK
14011 if (GET_CODE (op1) == CONST_DOUBLE)
14012 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 14013
b6d08ca1 14014 /* We're going to try to implement comparisons by performing
50a0b056
GK
14015 a subtract, then comparing against zero. Unfortunately,
14016 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 14017 know that the operand is finite and the comparison
50a0b056 14018 would treat EQ differently from UNORDERED, we can't do it. */
bc9ec0e0 14019 if (HONOR_INFINITIES (compare_mode)
50a0b056 14020 && code != GT && code != UNGE
045572c7 14021 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
14022 /* Constructs of the form (a OP b ? a : b) are safe. */
14023 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 14024 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
14025 && ! rtx_equal_p (op1, true_cond))))
14026 return 0;
add2402e 14027
50a0b056
GK
14028 /* At this point we know we can use fsel. */
14029
14030 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
14031 if (! is_against_zero)
14032 {
14033 temp = gen_reg_rtx (compare_mode);
14034 emit_insn (gen_rtx_SET (VOIDmode, temp,
14035 gen_rtx_MINUS (compare_mode, op0, op1)));
14036 op0 = temp;
14037 op1 = CONST0_RTX (compare_mode);
14038 }
50a0b056
GK
14039
14040 /* If we don't care about NaNs we can reduce some of the comparisons
14041 down to faster ones. */
bc9ec0e0 14042 if (! HONOR_NANS (compare_mode))
50a0b056
GK
14043 switch (code)
14044 {
14045 case GT:
14046 code = LE;
14047 temp = true_cond;
14048 true_cond = false_cond;
14049 false_cond = temp;
14050 break;
14051 case UNGE:
14052 code = GE;
14053 break;
14054 case UNEQ:
14055 code = EQ;
14056 break;
14057 default:
14058 break;
14059 }
14060
14061 /* Now, reduce everything down to a GE. */
14062 switch (code)
14063 {
14064 case GE:
14065 break;
14066
14067 case LE:
3148ad6d
DJ
14068 temp = gen_reg_rtx (compare_mode);
14069 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
14070 op0 = temp;
14071 break;
14072
14073 case ORDERED:
3148ad6d
DJ
14074 temp = gen_reg_rtx (compare_mode);
14075 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
14076 op0 = temp;
14077 break;
14078
14079 case EQ:
3148ad6d 14080 temp = gen_reg_rtx (compare_mode);
f676971a 14081 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
14082 gen_rtx_NEG (compare_mode,
14083 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
14084 op0 = temp;
14085 break;
14086
14087 case UNGE:
bc9ec0e0 14088 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 14089 temp = gen_reg_rtx (result_mode);
50a0b056 14090 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 14091 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
14092 gen_rtx_GE (VOIDmode,
14093 op0, op1),
14094 true_cond, false_cond)));
bc9ec0e0
GK
14095 false_cond = true_cond;
14096 true_cond = temp;
50a0b056 14097
3148ad6d
DJ
14098 temp = gen_reg_rtx (compare_mode);
14099 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
14100 op0 = temp;
14101 break;
14102
14103 case GT:
bc9ec0e0 14104 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 14105 temp = gen_reg_rtx (result_mode);
50a0b056 14106 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 14107 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
14108 gen_rtx_GE (VOIDmode,
14109 op0, op1),
14110 true_cond, false_cond)));
bc9ec0e0
GK
14111 true_cond = false_cond;
14112 false_cond = temp;
50a0b056 14113
3148ad6d
DJ
14114 temp = gen_reg_rtx (compare_mode);
14115 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
14116 op0 = temp;
14117 break;
14118
14119 default:
37409796 14120 gcc_unreachable ();
50a0b056
GK
14121 }
14122
14123 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 14124 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
14125 gen_rtx_GE (VOIDmode,
14126 op0, op1),
14127 true_cond, false_cond)));
14128 return 1;
14129}
14130
a3170dc6
AH
14131/* Same as above, but for ints (isel). */
14132
14133static int
a2369ed3 14134rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
14135{
14136 rtx condition_rtx, cr;
cacf1ca8 14137 enum machine_mode mode = GET_MODE (XEXP (op, 0));
a3170dc6 14138
cacf1ca8 14139 if (mode != SImode && (!TARGET_POWERPC64 || mode != DImode))
a3170dc6
AH
14140 return 0;
14141
14142 /* We still have to do the compare, because isel doesn't do a
14143 compare, it just looks at the CRx bits set by a previous compare
14144 instruction. */
f90b7a5a 14145 condition_rtx = rs6000_generate_compare (op, SImode);
a3170dc6
AH
14146 cr = XEXP (condition_rtx, 0);
14147
cacf1ca8
MM
14148 if (mode == SImode)
14149 {
14150 if (GET_MODE (cr) == CCmode)
14151 emit_insn (gen_isel_signed_si (dest, condition_rtx,
14152 true_cond, false_cond, cr));
14153 else
14154 emit_insn (gen_isel_unsigned_si (dest, condition_rtx,
14155 true_cond, false_cond, cr));
14156 }
a3170dc6 14157 else
cacf1ca8
MM
14158 {
14159 if (GET_MODE (cr) == CCmode)
14160 emit_insn (gen_isel_signed_di (dest, condition_rtx,
14161 true_cond, false_cond, cr));
14162 else
14163 emit_insn (gen_isel_unsigned_di (dest, condition_rtx,
14164 true_cond, false_cond, cr));
14165 }
a3170dc6
AH
14166
14167 return 1;
14168}
14169
14170const char *
a2369ed3 14171output_isel (rtx *operands)
a3170dc6
AH
14172{
14173 enum rtx_code code;
14174
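 /* isel picks its result from a CR bit that is set; GE/GEU/LE/LEU/NE
 test a bit that would be clear, so print those by reversing the
 condition and swapping the two source operands.  */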
14175 code = GET_CODE (operands[1]);
14176 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
14177 {
14178 PUT_CODE (operands[1], reverse_condition (code));
14179 return "isel %0,%3,%2,%j1";
14180 }
14181 else
14182 return "isel %0,%2,%3,%j1";
14183}
14184
50a0b056 14185void
a2369ed3 14186rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
14187{
14188 enum machine_mode mode = GET_MODE (op0);
5dc8d536 14189 enum rtx_code c;
50a0b056 14190 rtx target;
5dc8d536
AH
14191
14192 if (code == SMAX || code == SMIN)
14193 c = GE;
14194 else
14195 c = GEU;
14196
50a0b056 14197 if (code == SMAX || code == UMAX)
f676971a 14198 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
14199 op0, op1, mode, 0);
14200 else
f676971a 14201 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 14202 op1, op0, mode, 0);
37409796 14203 gcc_assert (target);
50a0b056
GK
14204 if (target != dest)
14205 emit_move_insn (dest, target);
14206}
46c07df8 14207
915167f5
GK
14208/* Emit instructions to perform a load-reserved/store-conditional operation.
14209 The operation performed is an atomic
14210 (set M (CODE:MODE M OP))
14211 If not NULL, BEFORE is atomically set to M before the operation, and
14212 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 14213 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
14214 Either OP or M may be wrapped in a NOT operation. */
14215
14216void
14217rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
14218 rtx m, rtx op, rtx before_param, rtx after_param,
14219 bool sync_p)
14220{
14221 enum machine_mode used_mode;
14222 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
14223 rtx used_m;
14224 rtvec vec;
14225 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14226 rtx shift = NULL_RTX;
bb8df8a6 14227
915167f5 14228 if (sync_p)
a3cb4b82 14229 emit_insn (gen_lwsync ());
bb8df8a6 14230
915167f5
GK
14231 used_m = m;
14232
14233 /* If this is smaller than SImode, we'll have to use SImode with
14234 adjustments. */
14235 if (mode == QImode || mode == HImode)
14236 {
14237 rtx newop, oldop;
14238
14239 if (MEM_ALIGN (used_m) >= 32)
14240 {
14241 int ishift = 0;
14242 if (BYTES_BIG_ENDIAN)
14243 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 14244
915167f5 14245 shift = GEN_INT (ishift);
c75c6d11 14246 used_m = change_address (used_m, SImode, 0);
915167f5
GK
14247 }
14248 else
14249 {
14250 rtx addrSI, aligned_addr;
a9c9d3fa 14251 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 14252
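 /* The sub-word lives somewhere inside an aligned SImode word.
 Compute how many bits the value must be shifted left to line up with
 its position in that word, then rewrite the memory reference to
 address the enclosing aligned word.  */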
c75c6d11
JJ
14253 addrSI = gen_lowpart_common (SImode,
14254 force_reg (Pmode, XEXP (used_m, 0)));
14255 addrSI = force_reg (SImode, addrSI);
915167f5
GK
14256 shift = gen_reg_rtx (SImode);
14257
14258 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
14259 GEN_INT (shift_mask)));
14260 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
14261
14262 aligned_addr = expand_binop (Pmode, and_optab,
14263 XEXP (used_m, 0),
14264 GEN_INT (-4), NULL_RTX,
14265 1, OPTAB_LIB_WIDEN);
14266 used_m = change_address (used_m, SImode, aligned_addr);
14267 set_mem_align (used_m, 32);
915167f5 14268 }
c75c6d11
JJ
14269 /* It's safe to keep the old alias set of USED_M, because
14270 the operation is atomic and only affects the original
14271 USED_M. */
f66f9865 14272 m = used_m;
915167f5
GK
14273
14274 if (GET_CODE (op) == NOT)
14275 {
14276 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
14277 oldop = gen_rtx_NOT (SImode, oldop);
14278 }
14279 else
14280 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 14281
915167f5
GK
14282 switch (code)
14283 {
14284 case IOR:
14285 case XOR:
14286 newop = expand_binop (SImode, and_optab,
14287 oldop, GEN_INT (imask), NULL_RTX,
14288 1, OPTAB_LIB_WIDEN);
14289 emit_insn (gen_ashlsi3 (newop, newop, shift));
14290 break;
14291
f66f9865
DE
14292 case NOT: /* NAND */
14293 newop = expand_binop (SImode, ior_optab,
14294 oldop, GEN_INT (~imask), NULL_RTX,
14295 1, OPTAB_LIB_WIDEN);
14296 emit_insn (gen_rotlsi3 (newop, newop, shift));
14297 break;
14298
915167f5
GK
14299 case AND:
14300 newop = expand_binop (SImode, ior_optab,
14301 oldop, GEN_INT (~imask), NULL_RTX,
14302 1, OPTAB_LIB_WIDEN);
a9c9d3fa 14303 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
14304 break;
14305
14306 case PLUS:
9f0076e5 14307 case MINUS:
915167f5
GK
14308 {
14309 rtx mask;
bb8df8a6 14310
915167f5
GK
14311 newop = expand_binop (SImode, and_optab,
14312 oldop, GEN_INT (imask), NULL_RTX,
14313 1, OPTAB_LIB_WIDEN);
14314 emit_insn (gen_ashlsi3 (newop, newop, shift));
14315
14316 mask = gen_reg_rtx (SImode);
14317 emit_move_insn (mask, GEN_INT (imask));
14318 emit_insn (gen_ashlsi3 (mask, mask, shift));
14319
9f0076e5
DE
14320 if (code == PLUS)
14321 newop = gen_rtx_PLUS (SImode, m, newop);
14322 else
14323 newop = gen_rtx_MINUS (SImode, m, newop);
14324 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
14325 newop = gen_rtx_IOR (SImode, newop,
14326 gen_rtx_AND (SImode,
14327 gen_rtx_NOT (SImode, mask),
14328 m));
14329 break;
14330 }
14331
14332 default:
14333 gcc_unreachable ();
14334 }
14335
14336 op = newop;
14337 used_mode = SImode;
14338 before = gen_reg_rtx (used_mode);
14339 after = gen_reg_rtx (used_mode);
14340 }
14341 else
14342 {
14343 used_mode = mode;
14344 before = before_param;
14345 after = after_param;
14346
14347 if (before == NULL_RTX)
14348 before = gen_reg_rtx (used_mode);
14349 if (after == NULL_RTX)
14350 after = gen_reg_rtx (used_mode);
14351 }
bb8df8a6 14352
f66f9865 14353 if ((code == PLUS || code == MINUS)
9f0076e5 14354 && used_mode != mode)
915167f5
GK
14355 the_op = op; /* Computed above. */
14356 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
14357 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
f66f9865
DE
14358 else if (code == NOT)
14359 the_op = gen_rtx_fmt_ee (IOR, used_mode,
14360 gen_rtx_NOT (used_mode, m),
14361 gen_rtx_NOT (used_mode, op));
915167f5
GK
14362 else
14363 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
14364
14365 set_after = gen_rtx_SET (VOIDmode, after, the_op);
14366 set_before = gen_rtx_SET (VOIDmode, before, used_m);
14367 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
14368 gen_rtx_UNSPEC (used_mode,
14369 gen_rtvec (1, the_op),
14370 UNSPEC_SYNC_OP));
915167f5
GK
14371 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
14372
9f0076e5 14373 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
14374 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
14375 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
14376 else
14377 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
14378 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
14379
14380 /* Shift and mask the return values properly. */
14381 if (used_mode != mode && before_param)
14382 {
14383 emit_insn (gen_lshrsi3 (before, before, shift));
14384 convert_move (before_param, before, 1);
14385 }
14386
14387 if (used_mode != mode && after_param)
14388 {
14389 emit_insn (gen_lshrsi3 (after, after, shift));
14390 convert_move (after_param, after, 1);
14391 }
14392
14393 /* The previous sequence will end with a branch that's dependent on
14394 the conditional store, so placing an isync will ensure that no
14395 other instructions (especially loads and stores)
14396 can start before the atomic operation completes. */
14397 if (sync_p)
14398 emit_insn (gen_isync ());
14399}
14400
b52110d4
DE
14401/* A subroutine of the atomic operation splitters. Jump to LABEL if
14402 COND is true. Mark the jump as unlikely to be taken. */
14403
14404static void
14405emit_unlikely_jump (rtx cond, rtx label)
14406{
14407 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
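 /* Just under 1% of REG_BR_PROB_BASE, i.e. a strongly not-taken hint.  */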
14408 rtx x;
14409
14410 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14411 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
bbbbb16a 14412 add_reg_note (x, REG_BR_PROB, very_unlikely);
b52110d4
DE
14413}
14414
14415/* A subroutine of the atomic operation splitters. Emit a load-locked
14416 instruction in MODE. */
14417
14418static void
14419emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14420{
14421 rtx (*fn) (rtx, rtx) = NULL;
14422 if (mode == SImode)
14423 fn = gen_load_locked_si;
14424 else if (mode == DImode)
14425 fn = gen_load_locked_di;
14426 emit_insn (fn (reg, mem));
14427}
14428
14429/* A subroutine of the atomic operation splitters. Emit a store-conditional
14430 instruction in MODE. */
14431
14432static void
14433emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14434{
14435 rtx (*fn) (rtx, rtx, rtx) = NULL;
14436 if (mode == SImode)
14437 fn = gen_store_conditional_si;
14438 else if (mode == DImode)
14439 fn = gen_store_conditional_di;
14440
9f0076e5 14441 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14442 if (PPC405_ERRATUM77)
14443 emit_insn (gen_memory_barrier ());
14444
14445 emit_insn (fn (res, mem, val));
14446}
14447
ea2c620c 14448/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14449 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14450 operand of the binary operator. BEFORE and AFTER are optional locations to
14451 return the value of MEM either before or after the operation. SCRATCH is
14452 a scratch register. */
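/* Rough sketch of the sequence this splitter produces for a word-sized
 operation (illustrative only, modulo operand details):

     lwsync
  1: lwarx   before,0,mem
     <op>    scratch,before,val
     stwcx.  scratch,0,mem
     bne-    1b
     isync
 */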
14453
14454void
14455rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14456 rtx before, rtx after, rtx scratch)
14457{
14458 enum machine_mode mode = GET_MODE (mem);
14459 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14460
a3cb4b82 14461 emit_insn (gen_lwsync ());
9f0076e5
DE
14462
14463 label = gen_label_rtx ();
14464 emit_label (label);
14465 label = gen_rtx_LABEL_REF (VOIDmode, label);
14466
14467 if (before == NULL_RTX)
14468 before = scratch;
14469 emit_load_locked (mode, before, mem);
14470
14471 if (code == NOT)
f66f9865
DE
14472 x = gen_rtx_IOR (mode,
14473 gen_rtx_NOT (mode, before),
14474 gen_rtx_NOT (mode, val));
9f0076e5
DE
14475 else if (code == AND)
14476 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14477 else
14478 x = gen_rtx_fmt_ee (code, mode, before, val);
14479
14480 if (after != NULL_RTX)
14481 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14482 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14483
14484 emit_store_conditional (mode, cond, mem, scratch);
14485
14486 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14487 emit_unlikely_jump (x, label);
14488
14489 emit_insn (gen_isync ());
14490}
14491
b52110d4
DE
14492/* Expand an atomic compare and swap operation. MEM is the memory on which
14493 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14494 value to be stored. SCRATCH is a scratch GPR. */
14495
14496void
14497rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14498 rtx scratch)
14499{
14500 enum machine_mode mode = GET_MODE (mem);
14501 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14502
a3cb4b82 14503 emit_insn (gen_lwsync ());
b52110d4
DE
14504
14505 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14506 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14507 emit_label (XEXP (label1, 0));
14508
14509 emit_load_locked (mode, retval, mem);
14510
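 /* If the loaded value does not match OLDVAL, branch to label2,
 leaving RETVAL holding the current memory contents and skipping the
 store.  */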
14511 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14512 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14513
14514 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14515 emit_unlikely_jump (x, label2);
14516
14517 emit_move_insn (scratch, newval);
14518 emit_store_conditional (mode, cond, mem, scratch);
14519
14520 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14521 emit_unlikely_jump (x, label1);
14522
14523 emit_insn (gen_isync ());
14524 emit_label (XEXP (label2, 0));
14525}
14526
14527/* Expand an atomic test and set operation. MEM is the memory on which
14528 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14529
14530void
14531rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14532{
14533 enum machine_mode mode = GET_MODE (mem);
14534 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14535
b52110d4
DE
14536 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14537 emit_label (XEXP (label, 0));
14538
14539 emit_load_locked (mode, retval, mem);
14540 emit_move_insn (scratch, val);
14541 emit_store_conditional (mode, cond, mem, scratch);
14542
14543 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14544 emit_unlikely_jump (x, label);
14545
14546 emit_insn (gen_isync ());
14547}
14548
9fc75b97
DE
14549void
14550rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14551{
14552 enum machine_mode mode = GET_MODE (mem);
14553 rtx addrSI, align, wdst, shift, mask;
14554 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14555 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14556
14557 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14558 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14559 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14560 shift = gen_reg_rtx (SImode);
14561 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14562 GEN_INT (shift_mask)));
14563 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14564
14565 /* Shift and mask old value into position within word. */
14566 oldval = convert_modes (SImode, mode, oldval, 1);
14567 oldval = expand_binop (SImode, and_optab,
14568 oldval, GEN_INT (imask), NULL_RTX,
14569 1, OPTAB_LIB_WIDEN);
14570 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14571
14572 /* Shift and mask new value into position within word. */
14573 newval = convert_modes (SImode, mode, newval, 1);
14574 newval = expand_binop (SImode, and_optab,
14575 newval, GEN_INT (imask), NULL_RTX,
14576 1, OPTAB_LIB_WIDEN);
14577 emit_insn (gen_ashlsi3 (newval, newval, shift));
14578
14579 /* Mask for insertion. */
14580 mask = gen_reg_rtx (SImode);
14581 emit_move_insn (mask, GEN_INT (imask));
14582 emit_insn (gen_ashlsi3 (mask, mask, shift));
14583
14584 /* Address of aligned word containing subword. */
14585 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14586 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14587 mem = change_address (mem, SImode, align);
14588 set_mem_align (mem, 32);
14589 MEM_VOLATILE_P (mem) = 1;
14590
14591 wdst = gen_reg_rtx (SImode);
14592 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14593 oldval, newval, mem));
14594
2725b75c
JJ
14595 /* Shift the result back. */
14596 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14597
9fc75b97
DE
14598 emit_move_insn (dst, gen_lowpart (mode, wdst));
14599}
14600
14601void
14602rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14603 rtx oldval, rtx newval, rtx mem,
14604 rtx scratch)
14605{
14606 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14607
a3cb4b82 14608 emit_insn (gen_lwsync ());
9fc75b97
DE
14609 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14610 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14611 emit_label (XEXP (label1, 0));
14612
14613 emit_load_locked (SImode, scratch, mem);
14614
14615 /* Mask subword within loaded value for comparison with oldval.
14616 Use UNSPEC_AND to avoid clobber.  */
14617 emit_insn (gen_rtx_SET (SImode, dest,
14618 gen_rtx_UNSPEC (SImode,
14619 gen_rtvec (2, scratch, mask),
14620 UNSPEC_AND)));
14621
14622 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14623 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14624
14625 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14626 emit_unlikely_jump (x, label2);
14627
14628 /* Clear subword within loaded value for insertion of new value. */
14629 emit_insn (gen_rtx_SET (SImode, scratch,
14630 gen_rtx_AND (SImode,
14631 gen_rtx_NOT (SImode, mask), scratch)));
14632 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14633 emit_store_conditional (SImode, cond, mem, scratch);
14634
14635 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14636 emit_unlikely_jump (x, label1);
14637
14638 emit_insn (gen_isync ());
14639 emit_label (XEXP (label2, 0));
14640}
14641
14642
b52110d4 14643 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14644 multi-register moves. It will emit at most one instruction for
14645 each register that is accessed; that is, it won't emit li/lis pairs
14646 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14647 register. */
46c07df8 14648
46c07df8 14649void
a9baceb1 14650rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14651{
a9baceb1
GK
14652 /* The register number of the first register being moved. */
14653 int reg;
14654 /* The mode that is to be moved. */
14655 enum machine_mode mode;
14656 /* The mode that the move is being done in, and its size. */
14657 enum machine_mode reg_mode;
14658 int reg_mode_size;
14659 /* The number of registers that will be moved. */
14660 int nregs;
14661
14662 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14663 mode = GET_MODE (dst);
c8b622ff 14664 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14665 if (FP_REGNO_P (reg))
696e45ba
ME
14666 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14667 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14668 else if (ALTIVEC_REGNO_P (reg))
14669 reg_mode = V16QImode;
4f011e1e
JM
14670 else if (TARGET_E500_DOUBLE && mode == TFmode)
14671 reg_mode = DFmode;
a9baceb1
GK
14672 else
14673 reg_mode = word_mode;
14674 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14675
37409796 14676 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14677
a9baceb1
GK
14678 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14679 {
14680 /* Move register range backwards, if we might have destructive
14681 overlap. */
14682 int i;
14683 for (i = nregs - 1; i >= 0; i--)
f676971a 14684 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14685 simplify_gen_subreg (reg_mode, dst, mode,
14686 i * reg_mode_size),
14687 simplify_gen_subreg (reg_mode, src, mode,
14688 i * reg_mode_size)));
14689 }
46c07df8
HP
14690 else
14691 {
a9baceb1
GK
14692 int i;
14693 int j = -1;
14694 bool used_update = false;
46c07df8 14695
c1e55850 14696 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14697 {
14698 rtx breg;
3a1f863f 14699
a9baceb1
GK
14700 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14701 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14702 {
14703 rtx delta_rtx;
a9baceb1 14704 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14705 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14706 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14707 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14708 emit_insn (TARGET_32BIT
14709 ? gen_addsi3 (breg, breg, delta_rtx)
14710 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14711 src = replace_equiv_address (src, breg);
3a1f863f 14712 }
d04b6e6e 14713 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14714 {
13e2e16e 14715 rtx basereg;
c1e55850
GK
14716 basereg = gen_rtx_REG (Pmode, reg);
14717 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14718 src = replace_equiv_address (src, basereg);
c1e55850 14719 }
3a1f863f 14720
0423421f
AM
14721 breg = XEXP (src, 0);
14722 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14723 breg = XEXP (breg, 0);
14724
14725 /* If the base register we are using to address memory is
14726 also a destination reg, then change that register last. */
14727 if (REG_P (breg)
14728 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14729 && REGNO (breg) < REGNO (dst) + nregs)
14730 j = REGNO (breg) - REGNO (dst);
c4ad648e 14731 }
46c07df8 14732
a9baceb1 14733 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14734 {
14735 rtx breg;
14736
a9baceb1
GK
14737 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14738 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14739 {
14740 rtx delta_rtx;
a9baceb1 14741 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14742 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14743 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14744 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14745
14746 /* We have to update the breg before doing the store.
14747 Use store with update, if available. */
14748
14749 if (TARGET_UPDATE)
14750 {
a9baceb1 14751 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14752 emit_insn (TARGET_32BIT
14753 ? (TARGET_POWERPC64
14754 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14755 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14756 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14757 used_update = true;
3a1f863f
DE
14758 }
14759 else
a9baceb1
GK
14760 emit_insn (TARGET_32BIT
14761 ? gen_addsi3 (breg, breg, delta_rtx)
14762 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14763 dst = replace_equiv_address (dst, breg);
3a1f863f 14764 }
37409796 14765 else
d04b6e6e 14766 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14767 }
14768
46c07df8 14769 for (i = 0; i < nregs; i++)
f676971a 14770 {
3a1f863f
DE
14771 /* Calculate index to next subword. */
14772 ++j;
f676971a 14773 if (j == nregs)
3a1f863f 14774 j = 0;
46c07df8 14775
112cdef5 14776 /* If the compiler already emitted the move of the first word by
a9baceb1 14777 store with update, there is no need to do anything. */
3a1f863f 14778 if (j == 0 && used_update)
a9baceb1 14779 continue;
f676971a 14780
a9baceb1
GK
14781 emit_insn (gen_rtx_SET (VOIDmode,
14782 simplify_gen_subreg (reg_mode, dst, mode,
14783 j * reg_mode_size),
14784 simplify_gen_subreg (reg_mode, src, mode,
14785 j * reg_mode_size)));
3a1f863f 14786 }
46c07df8
HP
14787 }
14788}
14789
12a4e8c5 14790\f
a4f6c312
SS
14791/* This page contains routines that are used to determine what the
14792 function prologue and epilogue code will do and write them out. */
9878760c 14793
a4f6c312
SS
14794/* Return the first fixed-point register that is required to be
14795 saved. 32 if none. */
9878760c
RK
14796
14797int
863d938c 14798first_reg_to_save (void)
9878760c
RK
14799{
14800 int first_reg;
14801
14802 /* Find lowest numbered live register. */
14803 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14804 if (df_regs_ever_live_p (first_reg)
a38d360d 14805 && (! call_used_regs[first_reg]
1db02437 14806 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14807 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14808 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14809 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14810 break;
14811
ee890fe2 14812#if TARGET_MACHO
93638d7a 14813 if (flag_pic
e3b5732b 14814 && crtl->uses_pic_offset_table
93638d7a 14815 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14816 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14817#endif
14818
9878760c
RK
14819 return first_reg;
14820}
14821
14822/* Similar, for FP regs. */
14823
14824int
863d938c 14825first_fp_reg_to_save (void)
9878760c
RK
14826{
14827 int first_reg;
14828
14829 /* Find lowest numbered live register. */
14830 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14831 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14832 break;
14833
14834 return first_reg;
14835}
00b960c7
AH
14836
14837/* Similar, for AltiVec regs. */
14838
14839static int
863d938c 14840first_altivec_reg_to_save (void)
00b960c7
AH
14841{
14842 int i;
14843
14844 /* Stack frame remains as is unless we are in AltiVec ABI. */
14845 if (! TARGET_ALTIVEC_ABI)
14846 return LAST_ALTIVEC_REGNO + 1;
14847
22fa69da 14848 /* On Darwin, the unwind routines are compiled without
982afe02 14849 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14850 altivec registers when necessary. */
e3b5732b 14851 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14852 && ! TARGET_ALTIVEC)
14853 return FIRST_ALTIVEC_REGNO + 20;
14854
00b960c7
AH
14855 /* Find lowest numbered live register. */
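 /* V20 (FIRST_ALTIVEC_REGNO + 20) is the first call-saved AltiVec
 register under the ABI, so the scan starts there. */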
14856 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14857 if (df_regs_ever_live_p (i))
00b960c7
AH
14858 break;
14859
14860 return i;
14861}
14862
14863/* Return a 32-bit mask of the AltiVec registers we need to set in
 14864 VRSAVE. Bit n of the return value is 1 if Vn is live; V0 corresponds
 14865 to the MSB of the 32-bit word. */
14866
14867static unsigned int
863d938c 14868compute_vrsave_mask (void)
00b960c7
AH
14869{
14870 unsigned int i, mask = 0;
14871
22fa69da 14872 /* On Darwin, the unwind routines are compiled without
982afe02 14873 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14874 call-saved altivec registers when necessary. */
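 /* With V0 in the MSB (see the comment above the function), V20..V31
 occupy the low 12 bits, so 0xFFF marks every call-saved vector reg. */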
e3b5732b 14875 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14876 && ! TARGET_ALTIVEC)
14877 mask |= 0xFFF;
14878
00b960c7
AH
14879 /* First, find out if we use _any_ altivec registers. */
14880 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14881 if (df_regs_ever_live_p (i))
00b960c7
AH
14882 mask |= ALTIVEC_REG_BIT (i);
14883
14884 if (mask == 0)
14885 return mask;
14886
00b960c7
AH
14887 /* Next, remove the argument registers from the set. These must
14888 be in the VRSAVE mask set by the caller, so we don't need to add
14889 them in again. More importantly, the mask we compute here is
14890 used to generate CLOBBERs in the set_vrsave insn, and we do not
14891 wish the argument registers to die. */
38173d38 14892 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14893 mask &= ~ALTIVEC_REG_BIT (i);
14894
14895 /* Similarly, remove the return value from the set. */
14896 {
14897 bool yes = false;
14898 diddle_return_value (is_altivec_return_reg, &yes);
14899 if (yes)
14900 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14901 }
14902
14903 return mask;
14904}
14905
d62294f5 14906/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14907 size of prologues/epilogues by calling our own save/restore-the-world
14908 routines. */
d62294f5
FJ
14909
14910static void
f57fe068
AM
14911compute_save_world_info (rs6000_stack_t *info_ptr)
14912{
14913 info_ptr->world_save_p = 1;
14914 info_ptr->world_save_p
14915 = (WORLD_SAVE_P (info_ptr)
14916 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14917 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14918 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14919 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14920 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14921 && info_ptr->cr_save_p);
f676971a 14922
d62294f5
FJ
14923 /* This will not work in conjunction with sibcalls. Make sure there
14924 are none. (This check is expensive, but seldom executed.) */
f57fe068 14925 if (WORLD_SAVE_P (info_ptr))
f676971a 14926 {
d62294f5
FJ
14927 rtx insn;
14928 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14929 if ( GET_CODE (insn) == CALL_INSN
14930 && SIBLING_CALL_P (insn))
14931 {
14932 info_ptr->world_save_p = 0;
14933 break;
14934 }
d62294f5 14935 }
f676971a 14936
f57fe068 14937 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14938 {
14939 /* Even if we're not touching VRsave, make sure there's room on the
14940 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14941 will attempt to save it. */
d62294f5
FJ
14942 info_ptr->vrsave_size = 4;
14943
298ac1dd
AP
14944 /* If we are going to save the world, we need to save the link register too. */
14945 info_ptr->lr_save_p = 1;
14946
d62294f5
FJ
14947 /* "Save" the VRsave register too if we're saving the world. */
14948 if (info_ptr->vrsave_mask == 0)
c4ad648e 14949 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14950
14951 /* Because the Darwin register save/restore routines only handle
c4ad648e 14952 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14953 check. */
37409796
NS
14954 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14955 && (info_ptr->first_altivec_reg_save
14956 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14957 }
f676971a 14958 return;
d62294f5
FJ
14959}
14960
14961
00b960c7 14962static void
a2369ed3 14963is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14964{
14965 bool *yes = (bool *) xyes;
14966 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14967 *yes = true;
14968}
14969
4697a36c
MM
14970\f
14971/* Calculate the stack information for the current function. This is
14972 complicated by having two separate calling sequences, the AIX calling
14973 sequence and the V.4 calling sequence.
14974
592696dd 14975 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14976 32-bit 64-bit
4697a36c 14977 SP----> +---------------------------------------+
a260abc9 14978 | back chain to caller | 0 0
4697a36c 14979 +---------------------------------------+
a260abc9 14980 | saved CR | 4 8 (8-11)
4697a36c 14981 +---------------------------------------+
a260abc9 14982 | saved LR | 8 16
4697a36c 14983 +---------------------------------------+
a260abc9 14984 | reserved for compilers | 12 24
4697a36c 14985 +---------------------------------------+
a260abc9 14986 | reserved for binders | 16 32
4697a36c 14987 +---------------------------------------+
a260abc9 14988 | saved TOC pointer | 20 40
4697a36c 14989 +---------------------------------------+
a260abc9 14990 | Parameter save area (P) | 24 48
4697a36c 14991 +---------------------------------------+
a260abc9 14992 | Alloca space (A) | 24+P etc.
802a0058 14993 +---------------------------------------+
a7df97e6 14994 | Local variable space (L) | 24+P+A
4697a36c 14995 +---------------------------------------+
a7df97e6 14996 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14997 +---------------------------------------+
00b960c7
AH
14998 | Save area for AltiVec registers (W) | 24+P+A+L+X
14999 +---------------------------------------+
15000 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
15001 +---------------------------------------+
15002 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 15003 +---------------------------------------+
00b960c7
AH
 15004 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
 15005 +---------------------------------------+
 15006 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
15007 +---------------------------------------+
15008 old SP->| back chain to caller's caller |
15009 +---------------------------------------+
15010
5376a30c
KR
15011 The required alignment for AIX configurations is two words (i.e., 8
15012 or 16 bytes).
15013
15014
4697a36c
MM
15015 V.4 stack frames look like:
15016
15017 SP----> +---------------------------------------+
15018 | back chain to caller | 0
15019 +---------------------------------------+
5eb387b8 15020 | caller's saved LR | 4
4697a36c
MM
15021 +---------------------------------------+
15022 | Parameter save area (P) | 8
15023 +---------------------------------------+
a7df97e6 15024 | Alloca space (A) | 8+P
f676971a 15025 +---------------------------------------+
a7df97e6 15026 | Varargs save area (V) | 8+P+A
f676971a 15027 +---------------------------------------+
a7df97e6 15028 | Local variable space (L) | 8+P+A+V
f676971a 15029 +---------------------------------------+
a7df97e6 15030 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 15031 +---------------------------------------+
00b960c7
AH
15032 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
15033 +---------------------------------------+
15034 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
15035 +---------------------------------------+
15036 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
15037 +---------------------------------------+
c4ad648e
AM
15038 | SPE: area for 64-bit GP registers |
15039 +---------------------------------------+
15040 | SPE alignment padding |
15041 +---------------------------------------+
00b960c7 15042 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 15043 +---------------------------------------+
00b960c7 15044 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 15045 +---------------------------------------+
00b960c7 15046 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
15047 +---------------------------------------+
15048 old SP->| back chain to caller's caller |
15049 +---------------------------------------+
b6c9286a 15050
5376a30c
KR
15051 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
15052 given. (But note below and in sysv4.h that we require only 8 and
 15053 may round up the size of our stack frame anyway. The historical
15054 reason is early versions of powerpc-linux which didn't properly
15055 align the stack at program startup. A happy side-effect is that
15056 -mno-eabi libraries can be used with -meabi programs.)
15057
50d440bc 15058 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
15059 the stack alignment requirements may differ. If -mno-eabi is not
15060 given, the required stack alignment is 8 bytes; if -mno-eabi is
15061 given, the required alignment is 16 bytes. (But see V.4 comment
15062 above.) */
4697a36c 15063
61b2fbe7
MM
15064#ifndef ABI_STACK_BOUNDARY
15065#define ABI_STACK_BOUNDARY STACK_BOUNDARY
15066#endif
15067
d1d0c603 15068static rs6000_stack_t *
863d938c 15069rs6000_stack_info (void)
4697a36c 15070{
022123e6 15071 static rs6000_stack_t info;
4697a36c 15072 rs6000_stack_t *info_ptr = &info;
327e5343 15073 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 15074 int ehrd_size;
64045029 15075 int save_align;
8070c91a 15076 int first_gp;
44688022 15077 HOST_WIDE_INT non_fixed_size;
4697a36c 15078
022123e6 15079 memset (&info, 0, sizeof (info));
4697a36c 15080
c19de7aa
AH
15081 if (TARGET_SPE)
15082 {
15083 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 15084 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
15085 cfun->machine->insn_chain_scanned_p
15086 = spe_func_has_64bit_regs_p () + 1;
15087 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
15088 }
15089
a4f6c312 15090 /* Select which calling sequence. */
178274da 15091 info_ptr->abi = DEFAULT_ABI;
9878760c 15092
a4f6c312 15093 /* Calculate which registers need to be saved & save area size. */
4697a36c 15094 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 15095 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
15096 even if it currently looks like we won't. Reload may need it to
15097 get at a constant; if so, it will have already created a constant
15098 pool entry for it. */
2bfcf297 15099 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
15100 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
15101 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 15102 && crtl->uses_const_pool
1db02437 15103 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 15104 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 15105 else
8070c91a
DJ
15106 first_gp = info_ptr->first_gp_reg_save;
15107
15108 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 15109
a3170dc6
AH
15110 /* For the SPE, we have an additional upper 32-bits on each GPR.
15111 Ideally we should save the entire 64-bits only when the upper
15112 half is used in SIMD instructions. Since we only record
15113 registers live (not the size they are used in), this proves
15114 difficult because we'd have to traverse the instruction chain at
15115 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 15116 so we opt to save all the GPRs in 64-bits whenever even one register
 15117 gets used in 64-bits. Otherwise, all the registers in the frame
 15118 get saved in 32-bits.
a3170dc6 15119
c19de7aa 15120 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 15121 traditional GP save area will be empty. */
c19de7aa 15122 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
15123 info_ptr->gp_size = 0;
15124
4697a36c
MM
15125 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
15126 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
15127
00b960c7
AH
15128 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
15129 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
15130 - info_ptr->first_altivec_reg_save);
15131
592696dd 15132 /* Does this function call anything? */
71f123ca
FS
15133 info_ptr->calls_p = (! current_function_is_leaf
15134 || cfun->machine->ra_needs_full_frame);
b6c9286a 15135
a4f6c312 15136 /* Determine if we need to save the link register. */
022123e6 15137 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 15138 && crtl->profile
022123e6 15139 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
15140#ifdef TARGET_RELOCATABLE
15141 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
15142#endif
15143 || (info_ptr->first_fp_reg_save != 64
15144 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 15145 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
15146 || info_ptr->calls_p
15147 || rs6000_ra_ever_killed ())
4697a36c
MM
15148 {
15149 info_ptr->lr_save_p = 1;
1de43f85 15150 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
15151 }
15152
9ebbca7d 15153 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
15154 if (df_regs_ever_live_p (CR2_REGNO)
15155 || df_regs_ever_live_p (CR3_REGNO)
15156 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
15157 {
15158 info_ptr->cr_save_p = 1;
178274da 15159 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
15160 info_ptr->cr_size = reg_size;
15161 }
15162
83720594
RH
15163 /* If the current function calls __builtin_eh_return, then we need
15164 to allocate stack space for registers that will hold data for
15165 the exception handler. */
e3b5732b 15166 if (crtl->calls_eh_return)
83720594
RH
15167 {
15168 unsigned int i;
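 /* Count how many EH data registers the target provides. */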
15169 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
15170 continue;
a3170dc6
AH
15171
15172 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
15173 ehrd_size = i * (TARGET_SPE_ABI
15174 && info_ptr->spe_64bit_regs_used != 0
15175 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
15176 }
15177 else
15178 ehrd_size = 0;
15179
592696dd 15180 /* Determine various sizes. */
4697a36c
MM
15181 info_ptr->reg_size = reg_size;
15182 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 15183 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 15184 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 15185 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
15186 if (FRAME_GROWS_DOWNWARD)
15187 info_ptr->vars_size
5b667039
JJ
15188 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
15189 + info_ptr->parm_size,
7d5175e1 15190 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
15191 - (info_ptr->fixed_size + info_ptr->vars_size
15192 + info_ptr->parm_size);
00b960c7 15193
c19de7aa 15194 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 15195 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
15196 else
15197 info_ptr->spe_gp_size = 0;
15198
4d774ff8
HP
15199 if (TARGET_ALTIVEC_ABI)
15200 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 15201 else
4d774ff8
HP
15202 info_ptr->vrsave_mask = 0;
15203
15204 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
15205 info_ptr->vrsave_size = 4;
15206 else
15207 info_ptr->vrsave_size = 0;
b6c9286a 15208
d62294f5
FJ
15209 compute_save_world_info (info_ptr);
15210
592696dd 15211 /* Calculate the offsets. */
178274da 15212 switch (DEFAULT_ABI)
4697a36c 15213 {
b6c9286a 15214 case ABI_NONE:
24d304eb 15215 default:
37409796 15216 gcc_unreachable ();
b6c9286a
MM
15217
15218 case ABI_AIX:
ee890fe2 15219 case ABI_DARWIN:
b6c9286a
MM
15220 info_ptr->fp_save_offset = - info_ptr->fp_size;
15221 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
15222
15223 if (TARGET_ALTIVEC_ABI)
15224 {
15225 info_ptr->vrsave_save_offset
15226 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
15227
982afe02 15228 /* Align stack so vector save area is on a quadword boundary.
9278121c 15229 The padding goes above the vectors. */
00b960c7
AH
15230 if (info_ptr->altivec_size != 0)
15231 info_ptr->altivec_padding_size
9278121c 15232 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
15233 else
15234 info_ptr->altivec_padding_size = 0;
15235
15236 info_ptr->altivec_save_offset
15237 = info_ptr->vrsave_save_offset
15238 - info_ptr->altivec_padding_size
15239 - info_ptr->altivec_size;
9278121c
GK
15240 gcc_assert (info_ptr->altivec_size == 0
15241 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
15242
15243 /* Adjust for AltiVec case. */
15244 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
15245 }
15246 else
15247 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
15248 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
15249 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
15250 break;
15251
15252 case ABI_V4:
b6c9286a
MM
15253 info_ptr->fp_save_offset = - info_ptr->fp_size;
15254 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 15255 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 15256
c19de7aa 15257 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
15258 {
15259 /* Align stack so SPE GPR save area is aligned on a
15260 double-word boundary. */
f78c3290 15261 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
15262 info_ptr->spe_padding_size
15263 = 8 - (-info_ptr->cr_save_offset % 8);
15264 else
15265 info_ptr->spe_padding_size = 0;
15266
15267 info_ptr->spe_gp_save_offset
15268 = info_ptr->cr_save_offset
15269 - info_ptr->spe_padding_size
15270 - info_ptr->spe_gp_size;
15271
15272 /* Adjust for SPE case. */
022123e6 15273 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 15274 }
a3170dc6 15275 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
15276 {
15277 info_ptr->vrsave_save_offset
15278 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
15279
15280 /* Align stack so vector save area is on a quadword boundary. */
15281 if (info_ptr->altivec_size != 0)
15282 info_ptr->altivec_padding_size
15283 = 16 - (-info_ptr->vrsave_save_offset % 16);
15284 else
15285 info_ptr->altivec_padding_size = 0;
15286
15287 info_ptr->altivec_save_offset
15288 = info_ptr->vrsave_save_offset
15289 - info_ptr->altivec_padding_size
15290 - info_ptr->altivec_size;
15291
15292 /* Adjust for AltiVec case. */
022123e6 15293 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
15294 }
15295 else
022123e6
AM
15296 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
15297 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
15298 info_ptr->lr_save_offset = reg_size;
15299 break;
4697a36c
MM
15300 }
15301
64045029 15302 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
15303 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
15304 + info_ptr->gp_size
15305 + info_ptr->altivec_size
15306 + info_ptr->altivec_padding_size
a3170dc6
AH
15307 + info_ptr->spe_gp_size
15308 + info_ptr->spe_padding_size
00b960c7
AH
15309 + ehrd_size
15310 + info_ptr->cr_size
022123e6 15311 + info_ptr->vrsave_size,
64045029 15312 save_align);
00b960c7 15313
44688022 15314 non_fixed_size = (info_ptr->vars_size
ff381587 15315 + info_ptr->parm_size
5b667039 15316 + info_ptr->save_size);
ff381587 15317
44688022
AM
15318 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
15319 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
15320
15321 /* Determine if we need to allocate any stack frame:
15322
a4f6c312
SS
15323 For AIX we need to push the stack if a frame pointer is needed
15324 (because the stack might be dynamically adjusted), if we are
15325 debugging, if we make calls, or if the sum of fp_save, gp_save,
 15326 and local variables is more than the space needed to save all
15327 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
15328 + 18*8 = 288 (GPR13 reserved).
ff381587 15329
a4f6c312
SS
15330 For V.4 we don't have the stack cushion that AIX uses, but assume
15331 that the debugger can handle stackless frames. */
ff381587
MM
15332
15333 if (info_ptr->calls_p)
15334 info_ptr->push_p = 1;
15335
178274da 15336 else if (DEFAULT_ABI == ABI_V4)
44688022 15337 info_ptr->push_p = non_fixed_size != 0;
ff381587 15338
178274da
AM
15339 else if (frame_pointer_needed)
15340 info_ptr->push_p = 1;
15341
15342 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
15343 info_ptr->push_p = 1;
15344
ff381587 15345 else
44688022 15346 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 15347
a4f6c312 15348 /* Zero offsets if we're not saving those registers. */
8dda1a21 15349 if (info_ptr->fp_size == 0)
4697a36c
MM
15350 info_ptr->fp_save_offset = 0;
15351
8dda1a21 15352 if (info_ptr->gp_size == 0)
4697a36c
MM
15353 info_ptr->gp_save_offset = 0;
15354
00b960c7
AH
15355 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
15356 info_ptr->altivec_save_offset = 0;
15357
15358 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
15359 info_ptr->vrsave_save_offset = 0;
15360
c19de7aa
AH
15361 if (! TARGET_SPE_ABI
15362 || info_ptr->spe_64bit_regs_used == 0
15363 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
15364 info_ptr->spe_gp_save_offset = 0;
15365
c81fc13e 15366 if (! info_ptr->lr_save_p)
4697a36c
MM
15367 info_ptr->lr_save_offset = 0;
15368
c81fc13e 15369 if (! info_ptr->cr_save_p)
4697a36c
MM
15370 info_ptr->cr_save_offset = 0;
15371
15372 return info_ptr;
15373}
15374
c19de7aa
AH
15375/* Return true if the current function uses any GPRs in 64-bit SIMD
15376 mode. */
15377
15378static bool
863d938c 15379spe_func_has_64bit_regs_p (void)
c19de7aa
AH
15380{
15381 rtx insns, insn;
15382
15383 /* Functions that save and restore all the call-saved registers will
15384 need to save/restore the registers in 64-bits. */
e3b5732b
JH
15385 if (crtl->calls_eh_return
15386 || cfun->calls_setjmp
15387 || crtl->has_nonlocal_goto)
c19de7aa
AH
15388 return true;
15389
15390 insns = get_insns ();
15391
15392 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
15393 {
15394 if (INSN_P (insn))
15395 {
15396 rtx i;
15397
b5a5beb9
AH
15398 /* FIXME: This should be implemented with attributes...
15399
15400 (set_attr "spe64" "true")....then,
15401 if (get_spe64(insn)) return true;
15402
15403 It's the only reliable way to do the stuff below. */
15404
c19de7aa 15405 i = PATTERN (insn);
f82f556d
AH
15406 if (GET_CODE (i) == SET)
15407 {
15408 enum machine_mode mode = GET_MODE (SET_SRC (i));
15409
15410 if (SPE_VECTOR_MODE (mode))
15411 return true;
4f011e1e 15412 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15413 return true;
15414 }
c19de7aa
AH
15415 }
15416 }
15417
15418 return false;
15419}
15420
d1d0c603 15421static void
a2369ed3 15422debug_stack_info (rs6000_stack_t *info)
9878760c 15423{
d330fd93 15424 const char *abi_string;
24d304eb 15425
c81fc13e 15426 if (! info)
4697a36c
MM
15427 info = rs6000_stack_info ();
15428
15429 fprintf (stderr, "\nStack information for function %s:\n",
15430 ((current_function_decl && DECL_NAME (current_function_decl))
15431 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15432 : "<unknown>"));
15433
24d304eb
RK
15434 switch (info->abi)
15435 {
b6c9286a
MM
15436 default: abi_string = "Unknown"; break;
15437 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15438 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15439 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15440 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15441 }
15442
15443 fprintf (stderr, "\tABI = %5s\n", abi_string);
15444
00b960c7
AH
15445 if (TARGET_ALTIVEC_ABI)
15446 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15447
a3170dc6
AH
15448 if (TARGET_SPE_ABI)
15449 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15450
4697a36c
MM
15451 if (info->first_gp_reg_save != 32)
15452 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15453
15454 if (info->first_fp_reg_save != 64)
15455 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15456
00b960c7
AH
15457 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15458 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15459 info->first_altivec_reg_save);
15460
4697a36c
MM
15461 if (info->lr_save_p)
15462 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15463
4697a36c
MM
15464 if (info->cr_save_p)
15465 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15466
00b960c7
AH
15467 if (info->vrsave_mask)
15468 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15469
4697a36c
MM
15470 if (info->push_p)
15471 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15472
15473 if (info->calls_p)
15474 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15475
4697a36c
MM
15476 if (info->gp_save_offset)
15477 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15478
15479 if (info->fp_save_offset)
15480 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15481
00b960c7
AH
15482 if (info->altivec_save_offset)
15483 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15484 info->altivec_save_offset);
15485
a3170dc6
AH
15486 if (info->spe_gp_save_offset)
15487 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15488 info->spe_gp_save_offset);
15489
00b960c7
AH
15490 if (info->vrsave_save_offset)
15491 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15492 info->vrsave_save_offset);
15493
4697a36c
MM
15494 if (info->lr_save_offset)
15495 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15496
15497 if (info->cr_save_offset)
15498 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15499
15500 if (info->varargs_save_offset)
15501 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15502
15503 if (info->total_size)
d1d0c603
JJ
15504 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15505 info->total_size);
4697a36c 15506
4697a36c 15507 if (info->vars_size)
d1d0c603
JJ
15508 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15509 info->vars_size);
4697a36c
MM
15510
15511 if (info->parm_size)
15512 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15513
15514 if (info->fixed_size)
15515 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15516
15517 if (info->gp_size)
15518 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15519
a3170dc6
AH
15520 if (info->spe_gp_size)
15521 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15522
4697a36c
MM
15523 if (info->fp_size)
15524 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15525
00b960c7
AH
15526 if (info->altivec_size)
15527 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15528
15529 if (info->vrsave_size)
15530 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15531
15532 if (info->altivec_padding_size)
15533 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15534 info->altivec_padding_size);
15535
a3170dc6
AH
15536 if (info->spe_padding_size)
15537 fprintf (stderr, "\tspe_padding_size = %5d\n",
15538 info->spe_padding_size);
15539
4697a36c
MM
15540 if (info->cr_size)
15541 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15542
15543 if (info->save_size)
15544 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15545
15546 if (info->reg_size != 4)
15547 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15548
15549 fprintf (stderr, "\n");
9878760c 15550}
71f123ca
FS
15551
15552rtx
a2369ed3 15553rs6000_return_addr (int count, rtx frame)
71f123ca 15554{
a4f6c312
SS
15555 /* Currently we don't optimize very well between prolog and body
 15556 code and for PIC code the code can actually be quite bad, so
15557 don't try to be too clever here. */
f1384257 15558 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15559 {
15560 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15561
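 /* Follow the back chain stored at FRAME, then load the LR slot at
 RETURN_ADDRESS_OFFSET in that frame. */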
15562 return
15563 gen_rtx_MEM
15564 (Pmode,
15565 memory_address
15566 (Pmode,
15567 plus_constant (copy_to_reg
15568 (gen_rtx_MEM (Pmode,
15569 memory_address (Pmode, frame))),
15570 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15571 }
15572
8c29550d 15573 cfun->machine->ra_need_lr = 1;
1de43f85 15574 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15575}
15576
5e1bf043
DJ
15577/* Say whether a function is a candidate for sibcall handling or not.
15578 We do not allow indirect calls to be optimized into sibling calls.
 15579 Also, we can't do it if there are any vector parameters; there's
 15580 nowhere to put the VRsave code that would make it work; note that functions with
15581 vector parameters are required to have a prototype, so the argument
15582 type info must be available here. (The tail recursion case can work
15583 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15584static bool
a2369ed3 15585rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15586{
15587 tree type;
4977bab6 15588 if (decl)
5e1bf043
DJ
15589 {
15590 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15591 {
4977bab6 15592 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15593 type; type = TREE_CHAIN (type))
15594 {
c15b529f 15595 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15596 return false;
5e1bf043 15597 }
c4ad648e 15598 }
5e1bf043 15599 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15600 || ((*targetm.binds_local_p) (decl)
15601 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15602 {
4977bab6 15603 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15604
15605 if (!lookup_attribute ("longcall", attr_list)
15606 || lookup_attribute ("shortcall", attr_list))
4977bab6 15607 return true;
2bcc50d0 15608 }
5e1bf043 15609 }
4977bab6 15610 return false;
5e1bf043
DJ
15611}
15612
e7e64a25
AS
 15613 /* NULL if INSN is valid within a low-overhead loop.
15614 Otherwise return why doloop cannot be applied.
9419649c
DE
15615 PowerPC uses the COUNT register for branch on table instructions. */
15616
e7e64a25 15617static const char *
3101faab 15618rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15619{
15620 if (CALL_P (insn))
e7e64a25 15621 return "Function call in the loop.";
9419649c
DE
15622
15623 if (JUMP_P (insn)
15624 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15625 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15626 return "Computed branch in the loop.";
9419649c 15627
e7e64a25 15628 return NULL;
9419649c
DE
15629}
15630
71f123ca 15631static int
863d938c 15632rs6000_ra_ever_killed (void)
71f123ca
FS
15633{
15634 rtx top;
5e1bf043
DJ
15635 rtx reg;
15636 rtx insn;
71f123ca 15637
3c072c6b 15638 if (cfun->is_thunk)
71f123ca 15639 return 0;
eb0424da 15640
36f7e964
AH
15641 /* regs_ever_live has LR marked as used if any sibcalls are present,
15642 but this should not force saving and restoring in the
15643 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15644 clobbers LR, so that is inappropriate. */
36f7e964 15645
5e1bf043
DJ
15646 /* Also, the prologue can generate a store into LR that
15647 doesn't really count, like this:
36f7e964 15648
5e1bf043
DJ
15649 move LR->R0
15650 bcl to set PIC register
15651 move LR->R31
15652 move R0->LR
36f7e964
AH
15653
15654 When we're called from the epilogue, we need to avoid counting
15655 this as a store. */
f676971a 15656
71f123ca
FS
15657 push_topmost_sequence ();
15658 top = get_insns ();
15659 pop_topmost_sequence ();
1de43f85 15660 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15661
5e1bf043
DJ
15662 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15663 {
15664 if (INSN_P (insn))
15665 {
022123e6
AM
15666 if (CALL_P (insn))
15667 {
15668 if (!SIBLING_CALL_P (insn))
15669 return 1;
15670 }
1de43f85 15671 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15672 return 1;
36f7e964
AH
15673 else if (set_of (reg, insn) != NULL_RTX
15674 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15675 return 1;
15676 }
15677 }
15678 return 0;
71f123ca 15679}
4697a36c 15680\f
9ebbca7d 15681/* Emit instructions needed to load the TOC register.
c7ca610e 15682 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15683 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15684
15685void
a2369ed3 15686rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15687{
6fb5fa3c 15688 rtx dest;
1db02437 15689 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15690
7f970b70 15691 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15692 {
7f970b70 15693 char buf[30];
e65a3857 15694 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15695
15696 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15697 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15698 if (flag_pic == 2)
15699 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15700 else
15701 got = rs6000_got_sym ();
15702 tmp1 = tmp2 = dest;
15703 if (!fromprolog)
15704 {
15705 tmp1 = gen_reg_rtx (Pmode);
15706 tmp2 = gen_reg_rtx (Pmode);
15707 }
6fb5fa3c
DB
15708 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15709 emit_move_insn (tmp1,
1de43f85 15710 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15711 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15712 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15713 }
15714 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15715 {
6fb5fa3c 15716 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15717 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15718 }
15719 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15720 {
15721 char buf[30];
20b71b17
AM
15722 rtx temp0 = (fromprolog
15723 ? gen_rtx_REG (Pmode, 0)
15724 : gen_reg_rtx (Pmode));
20b71b17 15725
20b71b17
AM
15726 if (fromprolog)
15727 {
ccbca5e4 15728 rtx symF, symL;
38c1f2d7 15729
20b71b17
AM
15730 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15731 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15732
20b71b17
AM
15733 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15734 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15735
6fb5fa3c
DB
15736 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15737 emit_move_insn (dest,
1de43f85 15738 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15739 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15740 }
15741 else
20b71b17
AM
15742 {
15743 rtx tocsym;
20b71b17
AM
15744
15745 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15746 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15747 emit_move_insn (dest,
1de43f85 15748 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15749 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15750 }
6fb5fa3c 15751 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15752 }
20b71b17
AM
15753 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15754 {
15755 /* This is for AIX code running in non-PIC ELF32. */
15756 char buf[30];
15757 rtx realsym;
15758 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15759 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15760
6fb5fa3c
DB
15761 emit_insn (gen_elf_high (dest, realsym));
15762 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15763 }
37409796 15764 else
9ebbca7d 15765 {
37409796 15766 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15767
9ebbca7d 15768 if (TARGET_32BIT)
6fb5fa3c 15769 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15770 else
6fb5fa3c 15771 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15772 }
15773}
15774
d1d0c603
JJ
15775/* Emit instructions to restore the link register after determining where
15776 its value has been stored. */
15777
15778void
15779rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15780{
15781 rs6000_stack_t *info = rs6000_stack_info ();
15782 rtx operands[2];
15783
15784 operands[0] = source;
15785 operands[1] = scratch;
15786
15787 if (info->lr_save_p)
15788 {
15789 rtx frame_rtx = stack_pointer_rtx;
15790 HOST_WIDE_INT sp_offset = 0;
15791 rtx tmp;
15792
15793 if (frame_pointer_needed
e3b5732b 15794 || cfun->calls_alloca
d1d0c603
JJ
15795 || info->total_size > 32767)
15796 {
0be76840 15797 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15798 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15799 frame_rtx = operands[1];
15800 }
15801 else if (info->push_p)
15802 sp_offset = info->total_size;
15803
15804 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15805 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15806 emit_move_insn (tmp, operands[0]);
15807 }
15808 else
1de43f85 15809 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15810}
15811
4862826d 15812static GTY(()) alias_set_type set = -1;
f103e34d 15813
4862826d 15814alias_set_type
863d938c 15815get_TOC_alias_set (void)
9ebbca7d 15816{
f103e34d
GK
15817 if (set == -1)
15818 set = new_alias_set ();
15819 return set;
f676971a 15820}
9ebbca7d 15821
c1207243 15822/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15823 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15824 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15825#if TARGET_ELF
3c9eb5f4 15826static int
f676971a 15827uses_TOC (void)
9ebbca7d 15828{
c4501e62 15829 rtx insn;
38c1f2d7 15830
c4501e62
JJ
15831 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15832 if (INSN_P (insn))
15833 {
15834 rtx pat = PATTERN (insn);
15835 int i;
9ebbca7d 15836
f676971a 15837 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15838 for (i = 0; i < XVECLEN (pat, 0); i++)
15839 {
15840 rtx sub = XVECEXP (pat, 0, i);
15841 if (GET_CODE (sub) == USE)
15842 {
15843 sub = XEXP (sub, 0);
15844 if (GET_CODE (sub) == UNSPEC
15845 && XINT (sub, 1) == UNSPEC_TOC)
15846 return 1;
15847 }
15848 }
15849 }
15850 return 0;
9ebbca7d 15851}
c954844a 15852#endif
38c1f2d7 15853
9ebbca7d 15854rtx
f676971a 15855create_TOC_reference (rtx symbol)
9ebbca7d 15856{
b3a13419 15857 if (!can_create_pseudo_p ())
6fb5fa3c 15858 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15859 return gen_rtx_PLUS (Pmode,
a8a05998 15860 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15861 gen_rtx_CONST (Pmode,
2e4316da 15862 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15863}
38c1f2d7 15864
fc4767bb
JJ
15865/* If _Unwind_* has been called from within the same module,
15866 toc register is not guaranteed to be saved to 40(1) on function
15867 entry. Save it there in that case. */
c7ca610e 15868
9ebbca7d 15869void
863d938c 15870rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15871{
15872 rtx mem;
15873 rtx stack_top = gen_reg_rtx (Pmode);
15874 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15875 rtx opcode = gen_reg_rtx (SImode);
15876 rtx tocompare = gen_reg_rtx (SImode);
15877 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15878
8308679f 15879 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15880 emit_move_insn (stack_top, mem);
15881
8308679f
DE
15882 mem = gen_frame_mem (Pmode,
15883 gen_rtx_PLUS (Pmode, stack_top,
15884 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15885 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15886 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
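 /* 0x80410014 is "lwz r2,20(r1)" and 0xE8410028 is "ld r2,40(r1)", the
 TOC reload emitted after a cross-module call. If the instruction at
 the return address is not that reload, the TOC was not saved for
 this call, so store it below. */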
15887 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15888 : 0xE8410028, SImode));
9ebbca7d 15889
fc4767bb 15890 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15891 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15892 no_toc_save_needed);
9ebbca7d 15893
8308679f
DE
15894 mem = gen_frame_mem (Pmode,
15895 gen_rtx_PLUS (Pmode, stack_top,
15896 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15897 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15898 emit_label (no_toc_save_needed);
9ebbca7d 15899}
38c1f2d7 15900\f
0be76840
DE
15901/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15902 and the change to the stack pointer. */
ba4828e0 15903
9ebbca7d 15904static void
863d938c 15905rs6000_emit_stack_tie (void)
9ebbca7d 15906{
0be76840
DE
15907 rtx mem = gen_frame_mem (BLKmode,
15908 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15909
9ebbca7d
GK
15910 emit_insn (gen_stack_tie (mem));
15911}
38c1f2d7 15912
9ebbca7d
GK
15913/* Emit the correct code for allocating stack space, as insns.
15914 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15915 If COPY_R11, make sure a copy of the old frame is left in r11,
15916 in preference to r12 if COPY_R12.
9ebbca7d
GK
15917 The generated code may use hard register 0 as a temporary. */
15918
15919static void
f78c3290 15920rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15921{
9ebbca7d
GK
15922 rtx insn;
15923 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15924 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1 15925 rtx todec = gen_int_mode (-size, Pmode);
d4bacef0 15926 rtx par, set, mem;
61168ff1
RS
15927
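 /* gen_int_mode truncates the constant to Pmode; if -SIZE did not
 survive the conversion, the frame is too large to address. */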
15928 if (INTVAL (todec) != -size)
15929 {
d4ee4d25 15930 warning (0, "stack frame too large");
61168ff1
RS
15931 emit_insn (gen_trap ());
15932 return;
15933 }
a157febd 15934
e3b5732b 15935 if (crtl->limit_stack)
a157febd
GK
15936 {
15937 if (REG_P (stack_limit_rtx)
f676971a 15938 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15939 && REGNO (stack_limit_rtx) <= 31)
15940 {
5b71a4e7 15941 emit_insn (TARGET_32BIT
9ebbca7d
GK
15942 ? gen_addsi3 (tmp_reg,
15943 stack_limit_rtx,
15944 GEN_INT (size))
15945 : gen_adddi3 (tmp_reg,
15946 stack_limit_rtx,
15947 GEN_INT (size)));
5b71a4e7 15948
9ebbca7d
GK
15949 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15950 const0_rtx));
a157febd
GK
15951 }
15952 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15953 && TARGET_32BIT
f607bc57 15954 && DEFAULT_ABI == ABI_V4)
a157febd 15955 {
9ebbca7d 15956 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15957 gen_rtx_PLUS (Pmode,
15958 stack_limit_rtx,
9ebbca7d 15959 GEN_INT (size)));
5b71a4e7 15960
9ebbca7d
GK
15961 emit_insn (gen_elf_high (tmp_reg, toload));
15962 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15963 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15964 const0_rtx));
a157febd
GK
15965 }
15966 else
d4ee4d25 15967 warning (0, "stack limit expression is not supported");
a157febd
GK
15968 }
15969
d4bacef0 15970 if (copy_r12 || copy_r11)
f78c3290
NF
15971 emit_move_insn (copy_r11
15972 ? gen_rtx_REG (Pmode, 11)
15973 : gen_rtx_REG (Pmode, 12),
15974 stack_reg);
9ebbca7d 15975
d4bacef0 15976 if (size > 32767)
38c1f2d7 15977 {
d4bacef0
BE
15978 /* Need a note here so that try_split doesn't get confused. */
15979 if (get_last_insn () == NULL_RTX)
15980 emit_note (NOTE_INSN_DELETED);
15981 insn = emit_move_insn (tmp_reg, todec);
15982 try_split (PATTERN (insn), insn, 0);
15983 todec = tmp_reg;
9ebbca7d 15984 }
d4bacef0
BE
15985
15986 insn = emit_insn (TARGET_32BIT
15987 ? gen_movsi_update_stack (stack_reg, stack_reg,
15988 todec, stack_reg)
15989 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15990 todec, stack_reg));
15991 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15992 it now and set the alias set/attributes. The above gen_*_update
15993 calls will generate a PARALLEL with the MEM set being the first
15994 operation. */
15995 par = PATTERN (insn);
15996 gcc_assert (GET_CODE (par) == PARALLEL);
15997 set = XVECEXP (par, 0, 0);
15998 gcc_assert (GET_CODE (set) == SET);
15999 mem = SET_DEST (set);
16000 gcc_assert (MEM_P (mem));
16001 MEM_NOTRAP_P (mem) = 1;
16002 set_mem_alias_set (mem, get_frame_alias_set ());
f676971a 16003
9ebbca7d 16004 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a
ILT
16005 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
16006 gen_rtx_SET (VOIDmode, stack_reg,
16007 gen_rtx_PLUS (Pmode, stack_reg,
16008 GEN_INT (-size))));
9ebbca7d
GK
16009}
16010
a4f6c312
SS
16011/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
16012 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
16013 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
16014 deduce these equivalences by itself so it wasn't necessary to hold
16015 its hand so much. */
9ebbca7d
GK
16016
16017static void
f676971a 16018rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 16019 rtx reg2, rtx rreg)
9ebbca7d
GK
16020{
16021 rtx real, temp;
16022
e56c4463
JL
16023 /* copy_rtx will not make unique copies of registers, so we need to
16024 ensure we don't have unwanted sharing here. */
16025 if (reg == reg2)
16026 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
16027
16028 if (reg == rreg)
16029 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
16030
9ebbca7d
GK
16031 real = copy_rtx (PATTERN (insn));
16032
89e7058f
AH
16033 if (reg2 != NULL_RTX)
16034 real = replace_rtx (real, reg2, rreg);
f676971a
EC
16035
16036 real = replace_rtx (real, reg,
9ebbca7d
GK
16037 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
16038 STACK_POINTER_REGNUM),
16039 GEN_INT (val)));
f676971a 16040
9ebbca7d
GK
16041 /* We expect that 'real' is either a SET or a PARALLEL containing
16042 SETs (and possibly other stuff). In a PARALLEL, all the SETs
16043 are important so they all have to be marked RTX_FRAME_RELATED_P. */
16044
16045 if (GET_CODE (real) == SET)
16046 {
16047 rtx set = real;
f676971a 16048
9ebbca7d
GK
16049 temp = simplify_rtx (SET_SRC (set));
16050 if (temp)
16051 SET_SRC (set) = temp;
16052 temp = simplify_rtx (SET_DEST (set));
16053 if (temp)
16054 SET_DEST (set) = temp;
16055 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 16056 {
9ebbca7d
GK
16057 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
16058 if (temp)
16059 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 16060 }
38c1f2d7 16061 }
37409796 16062 else
9ebbca7d
GK
16063 {
16064 int i;
37409796
NS
16065
16066 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
16067 for (i = 0; i < XVECLEN (real, 0); i++)
16068 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
16069 {
16070 rtx set = XVECEXP (real, 0, i);
f676971a 16071
9ebbca7d
GK
16072 temp = simplify_rtx (SET_SRC (set));
16073 if (temp)
16074 SET_SRC (set) = temp;
16075 temp = simplify_rtx (SET_DEST (set));
16076 if (temp)
16077 SET_DEST (set) = temp;
16078 if (GET_CODE (SET_DEST (set)) == MEM)
16079 {
16080 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
16081 if (temp)
16082 XEXP (SET_DEST (set), 0) = temp;
16083 }
16084 RTX_FRAME_RELATED_P (set) = 1;
16085 }
16086 }
c19de7aa 16087
9ebbca7d 16088 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a 16089 add_reg_note (insn, REG_FRAME_RELATED_EXPR, real);
38c1f2d7
MM
16090}
16091
00b960c7
AH
16092/* Returns an insn that has a vrsave set operation with the
16093 appropriate CLOBBERs. */
16094
16095static rtx
a2369ed3 16096generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
16097{
16098 int nclobs, i;
16099 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 16100 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 16101
a004eb82
AH
16102 clobs[0]
16103 = gen_rtx_SET (VOIDmode,
16104 vrsave,
16105 gen_rtx_UNSPEC_VOLATILE (SImode,
16106 gen_rtvec (2, reg, vrsave),
3aca4bff 16107 UNSPECV_SET_VRSAVE));
00b960c7
AH
16108
16109 nclobs = 1;
16110
9aa86737
AH
16111 /* We need to clobber the registers in the mask so the scheduler
16112 does not move sets to VRSAVE before sets of AltiVec registers.
16113
16114 However, if the function receives nonlocal gotos, reload will set
16115 all call saved registers live. We will end up with:
16116
16117 (set (reg 999) (mem))
16118 (parallel [ (set (reg vrsave) (unspec blah))
16119 (clobber (reg 999))])
16120
16121 The clobber will cause the store into reg 999 to be dead, and
16122 flow will attempt to delete an epilogue insn. In this case, we
16123 need an unspec use/set of the register. */
00b960c7
AH
16124
16125 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 16126 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
16127 {
16128 if (!epiloguep || call_used_regs [i])
16129 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
16130 gen_rtx_REG (V4SImode, i));
16131 else
16132 {
16133 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
16134
16135 clobs[nclobs++]
a004eb82
AH
16136 = gen_rtx_SET (VOIDmode,
16137 reg,
16138 gen_rtx_UNSPEC (V4SImode,
16139 gen_rtvec (1, reg), 27));
9aa86737
AH
16140 }
16141 }
00b960c7
AH
16142
16143 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
16144
16145 for (i = 0; i < nclobs; ++i)
16146 XVECEXP (insn, 0, i) = clobs[i];
16147
16148 return insn;
16149}
16150
89e7058f
AH
16151/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
16152 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
16153
16154static void
f676971a 16155emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 16156 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
16157{
16158 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
16159 rtx replacea, replaceb;
16160
16161 int_rtx = GEN_INT (offset);
16162
16163 /* Some cases that need register indexed addressing. */
16164 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 16165 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
16166 || (TARGET_SPE_ABI
16167 && SPE_VECTOR_MODE (mode)
16168 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
16169 {
 16170 /* Whoever calls us must make sure r11 is available in the
c4ad648e 16171 flow path of instructions in the prologue. */
89e7058f
AH
16172 offset_rtx = gen_rtx_REG (Pmode, 11);
16173 emit_move_insn (offset_rtx, int_rtx);
16174
16175 replacea = offset_rtx;
16176 replaceb = int_rtx;
16177 }
16178 else
16179 {
16180 offset_rtx = int_rtx;
16181 replacea = NULL_RTX;
16182 replaceb = NULL_RTX;
16183 }
16184
16185 reg = gen_rtx_REG (mode, regno);
16186 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 16187 mem = gen_frame_mem (mode, addr);
89e7058f
AH
16188
16189 insn = emit_move_insn (mem, reg);
16190
16191 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
16192}
16193
a3170dc6
AH
16194/* Emit an offset memory reference suitable for a frame store, while
16195 converting to a valid addressing mode. */
16196
16197static rtx
a2369ed3 16198gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
16199{
16200 rtx int_rtx, offset_rtx;
16201
16202 int_rtx = GEN_INT (offset);
16203
4d4cbc0e 16204 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 16205 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
16206 {
16207 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
16208 emit_move_insn (offset_rtx, int_rtx);
16209 }
16210 else
16211 offset_rtx = int_rtx;
16212
0be76840 16213 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
16214}
16215
6d0a8091
DJ
16216/* Look for user-defined global regs. We should not save and restore these,
16217 and cannot use stmw/lmw if there are any in its range. */
16218
16219static bool
f78c3290 16220no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
16221{
16222 int i;
e1ece9f1 16223 for (i = first; i < (gpr ? 32 : 64); i++)
f78c3290 16224 if (global_regs[i])
6d0a8091
DJ
16225 return false;
16226 return true;
16227}
16228
699c914a
MS
16229#ifndef TARGET_FIX_AND_CONTINUE
16230#define TARGET_FIX_AND_CONTINUE 0
16231#endif
16232
f78c3290
NF
16233/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
16234#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
16235#define LAST_SAVRES_REGISTER 31
16236#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
16237
16238static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
16239
16240/* Return the symbol for an out-of-line register save/restore routine.
16241 We are saving/restoring GPRs if GPR is true. */
16242
16243static rtx
16244rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
16245{
16246 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
16247 rtx sym;
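 /* SELECT packs the routine variant into three bits: bit 2 is set when
 saving (clear when restoring), bit 1 selects the 64-bit SPE GPR
 flavor, and bit 0 selects the "exit" variant. */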
16248 int select = ((savep ? 1 : 0) << 2
16249 | (gpr
16250 /* On the SPE, we never have any FPRs, but we do have
16251 32/64-bit versions of the routines. */
16252 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
16253 : 0) << 1
16254 | (exitp ? 1: 0));
16255
16256 /* Don't generate bogus routine names. */
16257 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
16258
16259 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
16260
16261 if (sym == NULL)
16262 {
16263 char name[30];
16264 const char *action;
16265 const char *regkind;
16266 const char *exit_suffix;
16267
16268 action = savep ? "save" : "rest";
16269
16270 /* SPE has slightly different names for its routines depending on
16271 whether we are saving 32-bit or 64-bit registers. */
16272 if (TARGET_SPE_ABI)
16273 {
16274 /* No floating point saves on the SPE. */
16275 gcc_assert (gpr);
16276
16277 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
16278 }
16279 else
16280 regkind = gpr ? "gpr" : "fpr";
16281
16282 exit_suffix = exitp ? "_x" : "";
16283
16284 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
16285
16286 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
16287 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
16288 }
16289
16290 return sym;
16291}
16292
16293/* Emit a sequence of insns, including a stack tie if needed, for
16294 resetting the stack pointer. If SAVRES is true, then don't reset the
16295 stack pointer, but move the base of the frame into r11 for use by
16296 out-of-line register restore routines. */
16297
ff35822b 16298static rtx
f78c3290
NF
16299rs6000_emit_stack_reset (rs6000_stack_t *info,
16300 rtx sp_reg_rtx, rtx frame_reg_rtx,
16301 int sp_offset, bool savres)
16302{
16303 /* This blockage is needed so that sched doesn't decide to move
16304 the sp change before the register restores. */
16305 if (frame_reg_rtx != sp_reg_rtx
16306 || (TARGET_SPE_ABI
16307 && info->spe_64bit_regs_used != 0
16308 && info->first_gp_reg_save != 32))
16309 rs6000_emit_stack_tie ();
16310
16311 if (frame_reg_rtx != sp_reg_rtx)
16312 {
f78c3290 16313 if (sp_offset != 0)
ff35822b
JJ
 16314 return emit_insn (gen_add3_insn (sp_reg_rtx, frame_reg_rtx,
16315 GEN_INT (sp_offset)));
f78c3290 16316 else if (!savres)
ff35822b 16317 return emit_move_insn (sp_reg_rtx, frame_reg_rtx);
f78c3290
NF
16318 }
16319 else if (sp_offset != 0)
16320 {
16321 /* If we are restoring registers out-of-line, we will be using the
16322 "exit" variants of the restore routines, which will reset the
16323 stack for us. But we do need to point r11 into the right place
16324 for those routines. */
16325 rtx dest_reg = (savres
16326 ? gen_rtx_REG (Pmode, 11)
16327 : sp_reg_rtx);
16328
ff35822b
JJ
16329 rtx insn = emit_insn (gen_add3_insn (dest_reg, sp_reg_rtx,
16330 GEN_INT (sp_offset)));
16331 if (!savres)
16332 return insn;
f78c3290 16333 }
ff35822b 16334 return NULL_RTX;
f78c3290
NF
16335}
16336
16337/* Construct a parallel rtx describing the effect of a call to an
16338 out-of-line register save/restore routine. */
16339
16340static rtx
16341rs6000_make_savres_rtx (rs6000_stack_t *info,
16342 rtx frame_reg_rtx, int save_area_offset,
16343 enum machine_mode reg_mode,
16344 bool savep, bool gpr, bool exitp)
16345{
16346 int i;
16347 int offset, start_reg, end_reg, n_regs;
16348 int reg_size = GET_MODE_SIZE (reg_mode);
16349 rtx sym;
16350 rtvec p;
16351
16352 offset = 0;
16353 start_reg = (gpr
16354 ? info->first_gp_reg_save
16355 : info->first_fp_reg_save);
16356 end_reg = gpr ? 32 : 64;
16357 n_regs = end_reg - start_reg;
16358 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
16359
16360 /* If we're saving registers, then we should never say we're exiting. */
16361 gcc_assert ((savep && !exitp) || !savep);
16362
16363 if (exitp)
16364 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
16365
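 /* The out-of-line routine is reached with a bl, so it clobbers LR
 (hard register 65). */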
16366 RTVEC_ELT (p, offset++)
16367 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
16368
16369 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
16370 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
16371 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
16372
16373 for (i = 0; i < end_reg - start_reg; i++)
16374 {
16375 rtx addr, reg, mem;
16376 reg = gen_rtx_REG (reg_mode, start_reg + i);
16377 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16378 GEN_INT (save_area_offset + reg_size*i));
16379 mem = gen_frame_mem (reg_mode, addr);
16380
16381 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
16382 savep ? mem : reg,
16383 savep ? reg : mem);
16384 }
16385
16386 return gen_rtx_PARALLEL (VOIDmode, p);
16387}
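/* Illustrative sketch (not part of rs6000.c): the shape of the PARALLEL
   built by rs6000_make_savres_rtx above.  The vector holds an optional
   RETURN, a clobber of LR (register 65), a USE of the routine symbol, a
   USE of r11, and then one SET per saved or restored register.  */
#include <stdio.h>

static void
sketch_savres_parallel_shape (int gpr, int first_reg, int exitp)
{
  int end_reg = gpr ? 32 : 64;
  int n_regs = end_reg - first_reg;
  int len = (exitp ? 4 : 3) + n_regs;

  printf ("PARALLEL of %d elements:%s CLOBBER(lr), USE(sym), USE(r11), "
	  "%d SETs for regs %d..%d\n",
	  len, exitp ? " RETURN," : "", n_regs, first_reg, end_reg - 1);
}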
16388
52ff33d0
NF
 16389/* Determine whether gp register REG is really used (saved because it is live, or needed as the PIC offset table register). */
16390
16391static bool
16392rs6000_reg_live_or_pic_offset_p (int reg)
16393{
6fb5fa3c 16394 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
16395 && (!call_used_regs[reg]
16396 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16397 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16398 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16399 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16400 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16401}
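/* Illustrative sketch (not part of rs6000.c): the predicate above as a
   plain boolean expression over precomputed flags, to make explicit the
   two ways a GPR can need saving.  All parameter names are invented for
   this sketch.  */
static int
sketch_reg_live_or_pic_offset_p (int live, int call_used, int is_pic_reg,
				 int minimal_toc, int pic_abi)
{
  /* Saved either because it is live and call-saved (or is the TOC/PIC
     register under minimal-TOC), or because the ABI needs the PIC offset
     table register set up.  */
  return (live && (!call_used || (is_pic_reg && minimal_toc)))
	 || (is_pic_reg && pic_abi);
}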
16402
f78c3290
NF
16403enum {
16404 SAVRES_MULTIPLE = 0x1,
16405 SAVRES_INLINE_FPRS = 0x2,
16406 SAVRES_INLINE_GPRS = 0x4
16407};
16408
 16409/* Determine the strategy for saving/restoring registers. */
16410
16411static int
16412rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16413 int using_static_chain_p, int sibcall)
16414{
16415 bool using_multiple_p;
16416 bool common;
16417 bool savres_fprs_inline;
16418 bool savres_gprs_inline;
16419 bool noclobber_global_gprs
16420 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16421
16422 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16423 && (!TARGET_SPE_ABI
16424 || info->spe_64bit_regs_used == 0)
16425 && info->first_gp_reg_save < 31
16426 && noclobber_global_gprs);
16427 /* Don't bother to try to save things out-of-line if r11 is occupied
16428 by the static chain. It would require too much fiddling and the
16429 static chain is rarely used anyway. */
16430 common = (using_static_chain_p
16431 || sibcall
16432 || crtl->calls_eh_return
16433 || !info->lr_save_p
16434 || cfun->machine->ra_need_lr
16435 || info->total_size > 32767);
16436 savres_fprs_inline = (common
16437 || info->first_fp_reg_save == 64
16438 || !no_global_regs_above (info->first_fp_reg_save,
16439 /*gpr=*/false)
16440 || FP_SAVE_INLINE (info->first_fp_reg_save));
16441 savres_gprs_inline = (common
16442 /* Saving CR interferes with the exit routines
16443 used on the SPE, so just punt here. */
16444 || (!savep
16445 && TARGET_SPE_ABI
16446 && info->spe_64bit_regs_used != 0
16447 && info->cr_save_p != 0)
16448 || info->first_gp_reg_save == 32
16449 || !noclobber_global_gprs
16450 || GP_SAVE_INLINE (info->first_gp_reg_save));
16451
16452 if (savep)
16453 /* If we are going to use store multiple, then don't even bother
16454 with the out-of-line routines, since the store-multiple instruction
16455 will always be smaller. */
16456 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16457 else
16458 {
16459 /* The situation is more complicated with load multiple. We'd
16460 prefer to use the out-of-line routines for restores, since the
16461 "exit" out-of-line routines can handle the restore of LR and
16462 the frame teardown. But we can only use the out-of-line
16463 routines if we know that we've used store multiple or
16464 out-of-line routines in the prologue, i.e. if we've saved all
16465 the registers from first_gp_reg_save. Otherwise, we risk
16466 loading garbage from the stack. Furthermore, we can only use
16467 the "exit" out-of-line gpr restore if we haven't saved any
16468 fprs. */
16469 bool saved_all = !savres_gprs_inline || using_multiple_p;
16470
16471 if (saved_all && info->first_fp_reg_save != 64)
16472 /* We can't use the exit routine; use load multiple if it's
16473 available. */
16474 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16475 }
16476
16477 return (using_multiple_p
16478 | (savres_fprs_inline << 1)
16479 | (savres_gprs_inline << 2));
16480}
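/* Illustrative sketch (not part of rs6000.c): unpacking the bit mask
   returned by rs6000_savres_strategy above, exactly as the prologue and
   epilogue emitters do with the SAVRES_* flags.  */
#include <stdio.h>

static void
sketch_decode_strategy (int strategy)
{
  int using_multiple = (strategy & 0x1) != 0;	/* SAVRES_MULTIPLE */
  int fprs_inline = (strategy & 0x2) != 0;	/* SAVRES_INLINE_FPRS */
  int gprs_inline = (strategy & 0x4) != 0;	/* SAVRES_INLINE_GPRS */

  printf ("multiple=%d fprs_inline=%d gprs_inline=%d\n",
	  using_multiple, fprs_inline, gprs_inline);
}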
16481
9ebbca7d
GK
16482/* Emit function prologue as insns. */
16483
9878760c 16484void
863d938c 16485rs6000_emit_prologue (void)
9878760c 16486{
4697a36c 16487 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16488 enum machine_mode reg_mode = Pmode;
327e5343 16489 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16490 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16491 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16492 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16493 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16494 rtx insn;
f78c3290 16495 int strategy;
9ebbca7d 16496 int saving_FPRs_inline;
f78c3290 16497 int saving_GPRs_inline;
9ebbca7d 16498 int using_store_multiple;
f78c3290
NF
16499 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16500 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
e1ece9f1 16501 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16502 HOST_WIDE_INT sp_offset = 0;
f676971a 16503
699c914a
MS
16504 if (TARGET_FIX_AND_CONTINUE)
16505 {
16506 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16507 address by modifying the first 5 instructions of the function
699c914a
MS
16508 to branch to the overriding function. This is necessary to
16509 permit function pointers that point to the old function to
16510 actually forward to the new function. */
16511 emit_insn (gen_nop ());
16512 emit_insn (gen_nop ());
de2ab0ca 16513 emit_insn (gen_nop ());
699c914a
MS
16514 emit_insn (gen_nop ());
16515 emit_insn (gen_nop ());
16516 }
16517
16518 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16519 {
16520 reg_mode = V2SImode;
16521 reg_size = 8;
16522 }
a3170dc6 16523
f78c3290
NF
16524 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16525 /*static_chain_p=*/using_static_chain_p,
16526 /*sibcall=*/0);
16527 using_store_multiple = strategy & SAVRES_MULTIPLE;
16528 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16529 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16530
16531 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16532 if (! WORLD_SAVE_P (info)
16533 && info->push_p
acd0b319 16534 && (DEFAULT_ABI == ABI_V4
e3b5732b 16535 || crtl->calls_eh_return))
9ebbca7d 16536 {
f78c3290
NF
16537 bool need_r11 = (TARGET_SPE
16538 ? (!saving_GPRs_inline
16539 && info->spe_64bit_regs_used == 0)
16540 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16541 if (info->total_size < 32767)
16542 sp_offset = info->total_size;
16543 else
f78c3290
NF
16544 frame_reg_rtx = (need_r11
16545 ? gen_rtx_REG (Pmode, 11)
16546 : frame_ptr_rtx);
f676971a 16547 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16548 (frame_reg_rtx != sp_reg_rtx
16549 && (info->cr_save_p
16550 || info->lr_save_p
16551 || info->first_fp_reg_save < 64
16552 || info->first_gp_reg_save < 32
f78c3290
NF
16553 )),
16554 need_r11);
9ebbca7d
GK
16555 if (frame_reg_rtx != sp_reg_rtx)
16556 rs6000_emit_stack_tie ();
16557 }
16558
d62294f5 16559 /* Handle world saves specially here. */
f57fe068 16560 if (WORLD_SAVE_P (info))
d62294f5
FJ
16561 {
16562 int i, j, sz;
16563 rtx treg;
16564 rtvec p;
22fa69da 16565 rtx reg0;
d62294f5
FJ
16566
16567 /* save_world expects lr in r0. */
22fa69da 16568 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16569 if (info->lr_save_p)
c4ad648e 16570 {
22fa69da 16571 insn = emit_move_insn (reg0,
1de43f85 16572 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16573 RTX_FRAME_RELATED_P (insn) = 1;
16574 }
d62294f5
FJ
16575
16576 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16577 assumptions about the offsets of various bits of the stack
992d08b1 16578 frame. */
37409796
NS
16579 gcc_assert (info->gp_save_offset == -220
16580 && info->fp_save_offset == -144
16581 && info->lr_save_offset == 8
16582 && info->cr_save_offset == 4
16583 && info->push_p
16584 && info->lr_save_p
e3b5732b 16585 && (!crtl->calls_eh_return
37409796
NS
16586 || info->ehrd_offset == -432)
16587 && info->vrsave_save_offset == -224
22fa69da 16588 && info->altivec_save_offset == -416);
d62294f5
FJ
16589
16590 treg = gen_rtx_REG (SImode, 11);
16591 emit_move_insn (treg, GEN_INT (-info->total_size));
16592
16593 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16594 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16595
16596 /* Preserve CR2 for save_world prologues */
22fa69da 16597 sz = 5;
d62294f5
FJ
16598 sz += 32 - info->first_gp_reg_save;
16599 sz += 64 - info->first_fp_reg_save;
16600 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16601 p = rtvec_alloc (sz);
16602 j = 0;
16603 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16604 gen_rtx_REG (SImode,
1de43f85 16605 LR_REGNO));
d62294f5 16606 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16607 gen_rtx_SYMBOL_REF (Pmode,
16608 "*save_world"));
d62294f5 16609 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16610 properly. */
16611 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16612 {
696e45ba
ME
16613 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16614 ? DFmode : SFmode),
16615 info->first_fp_reg_save + i);
c4ad648e
AM
16616 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16617 GEN_INT (info->fp_save_offset
16618 + sp_offset + 8 * i));
696e45ba
ME
16619 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16620 ? DFmode : SFmode), addr);
c4ad648e
AM
16621
16622 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16623 }
d62294f5 16624 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16625 {
16626 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16627 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16628 GEN_INT (info->altivec_save_offset
16629 + sp_offset + 16 * i));
0be76840 16630 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16631
16632 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16633 }
d62294f5 16634 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16635 {
16636 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16637 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16638 GEN_INT (info->gp_save_offset
16639 + sp_offset + reg_size * i));
0be76840 16640 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16641
16642 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16643 }
16644
16645 {
16646 /* CR register traditionally saved as CR2. */
16647 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16648 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16649 GEN_INT (info->cr_save_offset
16650 + sp_offset));
0be76840 16651 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16652
16653 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16654 }
22fa69da
GK
16655 /* Explain about use of R0. */
16656 if (info->lr_save_p)
16657 {
16658 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16659 GEN_INT (info->lr_save_offset
16660 + sp_offset));
16661 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16662
22fa69da
GK
16663 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16664 }
16665 /* Explain what happens to the stack pointer. */
16666 {
16667 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16668 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16669 }
d62294f5
FJ
16670
16671 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16672 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16673 treg, GEN_INT (-info->total_size));
16674 sp_offset = info->total_size;
d62294f5
FJ
16675 }
16676
9ebbca7d 16677 /* If we use the link register, get it into r0. */
f57fe068 16678 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16679 {
52ff33d0
NF
16680 rtx addr, reg, mem;
16681
f8a57be8 16682 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16683 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16684 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16685
16686 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16687 GEN_INT (info->lr_save_offset + sp_offset));
16688 reg = gen_rtx_REG (Pmode, 0);
16689 mem = gen_rtx_MEM (Pmode, addr);
16690 /* This should not be of rs6000_sr_alias_set, because of
16691 __builtin_return_address. */
16692
16693 insn = emit_move_insn (mem, reg);
16694 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16695 NULL_RTX, NULL_RTX);
f8a57be8 16696 }
9ebbca7d
GK
16697
16698 /* If we need to save CR, put it into r12. */
f57fe068 16699 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16700 {
f8a57be8 16701 rtx set;
f676971a 16702
9ebbca7d 16703 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16704 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16705 RTX_FRAME_RELATED_P (insn) = 1;
16706 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16707 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16708 But that's OK. All we have to do is specify that _one_ condition
16709 code register is saved in this stack slot. The thrower's epilogue
16710 will then restore all the call-saved registers.
16711 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16712 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16713 gen_rtx_REG (SImode, CR2_REGNO));
bbbbb16a 16714 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16715 }
16716
a4f6c312
SS
16717 /* Do any required saving of fpr's. If only one or two to save, do
 16718 it ourselves. Otherwise, call a function. */
f57fe068 16719 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16720 {
16721 int i;
16722 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16723 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16724 && ! call_used_regs[info->first_fp_reg_save+i]))
696e45ba
ME
16725 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16726 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16727 ? DFmode : SFmode,
89e7058f
AH
16728 info->first_fp_reg_save + i,
16729 info->fp_save_offset + sp_offset + 8 * i,
16730 info->total_size);
9ebbca7d 16731 }
f57fe068 16732 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16733 {
16734 rtx par;
16735
16736 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16737 info->fp_save_offset + sp_offset,
16738 DFmode,
16739 /*savep=*/true, /*gpr=*/false,
16740 /*exitp=*/false);
16741 insn = emit_insn (par);
16742 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16743 NULL_RTX, NULL_RTX);
16744 }
16745
16746 /* Save GPRs. This is done as a PARALLEL if we are using
16747 the store-multiple instructions. */
16748 if (!WORLD_SAVE_P (info)
16749 && TARGET_SPE_ABI
16750 && info->spe_64bit_regs_used != 0
16751 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16752 {
16753 int i;
f78c3290
NF
16754 rtx spe_save_area_ptr;
16755
16756 /* Determine whether we can address all of the registers that need
16757 to be saved with an offset from the stack pointer that fits in
16758 the small const field for SPE memory instructions. */
16759 int spe_regs_addressable_via_sp
16760 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16761 + (32 - info->first_gp_reg_save - 1) * reg_size)
16762 && saving_GPRs_inline);
16763 int spe_offset;
16764
16765 if (spe_regs_addressable_via_sp)
16766 {
16767 spe_save_area_ptr = frame_reg_rtx;
16768 spe_offset = info->spe_gp_save_offset + sp_offset;
16769 }
16770 else
16771 {
16772 /* Make r11 point to the start of the SPE save area. We need
16773 to be careful here if r11 is holding the static chain. If
16774 it is, then temporarily save it in r0. We would use r0 as
16775 our base register here, but using r0 as a base register in
16776 loads and stores means something different from what we
16777 would like. */
16778 int ool_adjust = (saving_GPRs_inline
16779 ? 0
16780 : (info->first_gp_reg_save
16781 - (FIRST_SAVRES_REGISTER+1))*8);
16782 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16783 + sp_offset - ool_adjust);
16784
16785 if (using_static_chain_p)
16786 {
16787 rtx r0 = gen_rtx_REG (Pmode, 0);
16788 gcc_assert (info->first_gp_reg_save > 11);
16789
16790 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16791 }
16792
16793 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16794 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16795 frame_reg_rtx,
16796 GEN_INT (offset)));
16797 /* We need to make sure the move to r11 gets noted for
16798 properly outputting unwind information. */
16799 if (!saving_GPRs_inline)
16800 rs6000_frame_related (insn, frame_reg_rtx, offset,
16801 NULL_RTX, NULL_RTX);
16802 spe_offset = 0;
16803 }
16804
16805 if (saving_GPRs_inline)
16806 {
16807 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16808 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16809 {
16810 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16811 rtx offset, addr, mem;
f676971a 16812
f78c3290
NF
16813 /* We're doing all this to ensure that the offset fits into
16814 the immediate offset of 'evstdd'. */
16815 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16816
16817 offset = GEN_INT (reg_size * i + spe_offset);
16818 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16819 mem = gen_rtx_MEM (V2SImode, addr);
16820
16821 insn = emit_move_insn (mem, reg);
16822
16823 rs6000_frame_related (insn, spe_save_area_ptr,
16824 info->spe_gp_save_offset
16825 + sp_offset + reg_size * i,
16826 offset, const0_rtx);
16827 }
16828 }
16829 else
9ebbca7d 16830 {
f78c3290 16831 rtx par;
9ebbca7d 16832
f78c3290
NF
16833 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16834 0, reg_mode,
16835 /*savep=*/true, /*gpr=*/true,
16836 /*exitp=*/false);
16837 insn = emit_insn (par);
16838 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16839 NULL_RTX, NULL_RTX);
9ebbca7d 16840 }
f78c3290
NF
16841
16842
16843 /* Move the static chain pointer back. */
16844 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16845 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16846 }
16847 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16848 {
16849 rtx par;
16850
16851 /* Need to adjust r11 if we saved any FPRs. */
16852 if (info->first_fp_reg_save != 64)
16853 {
16854 rtx r11 = gen_rtx_REG (reg_mode, 11);
16855 rtx offset = GEN_INT (info->total_size
16856 + (-8 * (64-info->first_fp_reg_save)));
16857 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16858 ? sp_reg_rtx : r11);
16859
16860 emit_insn (TARGET_32BIT
16861 ? gen_addsi3 (r11, ptr_reg, offset)
16862 : gen_adddi3 (r11, ptr_reg, offset));
16863 }
16864
16865 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16866 info->gp_save_offset + sp_offset,
16867 reg_mode,
16868 /*savep=*/true, /*gpr=*/true,
16869 /*exitp=*/false);
16870 insn = emit_insn (par);
f676971a 16871 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16872 NULL_RTX, NULL_RTX);
16873 }
f78c3290 16874 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16875 {
308c142a 16876 rtvec p;
9ebbca7d
GK
16877 int i;
16878 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16879 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16880 {
16881 rtx addr, reg, mem;
16882 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16883 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16884 GEN_INT (info->gp_save_offset
16885 + sp_offset
9ebbca7d 16886 + reg_size * i));
0be76840 16887 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16888
16889 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16890 }
16891 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16892 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16893 NULL_RTX, NULL_RTX);
b6c9286a 16894 }
f57fe068 16895 else if (!WORLD_SAVE_P (info))
b6c9286a 16896 {
9ebbca7d
GK
16897 int i;
16898 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16899 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16900 {
16901 rtx addr, reg, mem;
16902 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16903
52ff33d0
NF
16904 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16905 GEN_INT (info->gp_save_offset
16906 + sp_offset
16907 + reg_size * i));
16908 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16909
52ff33d0
NF
16910 insn = emit_move_insn (mem, reg);
16911 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16912 NULL_RTX, NULL_RTX);
16913 }
9ebbca7d
GK
16914 }
16915
83720594
RH
16916 /* ??? There's no need to emit actual instructions here, but it's the
16917 easiest way to get the frame unwind information emitted. */
e3b5732b 16918 if (crtl->calls_eh_return)
83720594 16919 {
78e1b90d
DE
16920 unsigned int i, regno;
16921
fc4767bb
JJ
16922 /* In AIX ABI we need to pretend we save r2 here. */
16923 if (TARGET_AIX)
16924 {
16925 rtx addr, reg, mem;
16926
16927 reg = gen_rtx_REG (reg_mode, 2);
16928 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16929 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16930 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16931
16932 insn = emit_move_insn (mem, reg);
f676971a 16933 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16934 NULL_RTX, NULL_RTX);
16935 PATTERN (insn) = gen_blockage ();
16936 }
16937
83720594
RH
16938 for (i = 0; ; ++i)
16939 {
83720594
RH
16940 regno = EH_RETURN_DATA_REGNO (i);
16941 if (regno == INVALID_REGNUM)
16942 break;
16943
89e7058f
AH
16944 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16945 info->ehrd_offset + sp_offset
16946 + reg_size * (int) i,
16947 info->total_size);
83720594
RH
16948 }
16949 }
16950
9ebbca7d 16951 /* Save CR if we use any that must be preserved. */
f57fe068 16952 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16953 {
16954 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16955 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16956 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16957 /* See the large comment above about why CR2_REGNO is used. */
16958 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16959
9ebbca7d
GK
16960 /* If r12 was used to hold the original sp, copy cr into r0 now
16961 that it's free. */
16962 if (REGNO (frame_reg_rtx) == 12)
16963 {
f8a57be8
GK
16964 rtx set;
16965
9ebbca7d 16966 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16967 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16968 RTX_FRAME_RELATED_P (insn) = 1;
16969 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
bbbbb16a 16970 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16971 }
16972 insn = emit_move_insn (mem, cr_save_rtx);
16973
f676971a 16974 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16975 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16976 }
16977
f676971a 16978 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16979 for which it was done previously. */
f57fe068 16980 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16981 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16982 {
bcb2d701 16983 if (info->total_size < 32767)
2b2c2fe5 16984 sp_offset = info->total_size;
bcb2d701
EC
16985 else
16986 frame_reg_rtx = frame_ptr_rtx;
16987 rs6000_emit_allocate_stack (info->total_size,
16988 (frame_reg_rtx != sp_reg_rtx
16989 && ((info->altivec_size != 0)
16990 || (info->vrsave_mask != 0)
f78c3290
NF
16991 )),
16992 FALSE);
bcb2d701
EC
16993 if (frame_reg_rtx != sp_reg_rtx)
16994 rs6000_emit_stack_tie ();
2b2c2fe5 16995 }
9ebbca7d
GK
16996
16997 /* Set frame pointer, if needed. */
16998 if (frame_pointer_needed)
16999 {
7d5175e1 17000 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
17001 sp_reg_rtx);
17002 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 17003 }
9878760c 17004
2b2c2fe5
EC
17005 /* Save AltiVec registers if needed. Save here because the red zone does
17006 not include AltiVec registers. */
17007 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
17008 {
17009 int i;
17010
 17011 /* There should be a non-inline version of this, for when we
17012 are saving lots of vector registers. */
17013 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17014 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17015 {
17016 rtx areg, savereg, mem;
17017 int offset;
17018
17019 offset = info->altivec_save_offset + sp_offset
17020 + 16 * (i - info->first_altivec_reg_save);
17021
17022 savereg = gen_rtx_REG (V4SImode, i);
17023
17024 areg = gen_rtx_REG (Pmode, 0);
17025 emit_move_insn (areg, GEN_INT (offset));
17026
17027 /* AltiVec addressing mode is [reg+reg]. */
17028 mem = gen_frame_mem (V4SImode,
17029 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
17030
17031 insn = emit_move_insn (mem, savereg);
17032
17033 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
17034 areg, GEN_INT (offset));
17035 }
17036 }
17037
17038 /* VRSAVE is a bit vector representing which AltiVec registers
17039 are used. The OS uses this to determine which vector
17040 registers to save on a context switch. We need to save
17041 VRSAVE on the stack frame, add whatever AltiVec registers we
17042 used in this function, and do the corresponding magic in the
17043 epilogue. */
17044
17045 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
17046 && info->vrsave_mask != 0)
17047 {
17048 rtx reg, mem, vrsave;
17049 int offset;
17050
17051 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
17052 as frame_reg_rtx and r11 as the static chain pointer for
17053 nested functions. */
17054 reg = gen_rtx_REG (SImode, 0);
17055 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
17056 if (TARGET_MACHO)
17057 emit_insn (gen_get_vrsave_internal (reg));
17058 else
17059 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
17060
17061 if (!WORLD_SAVE_P (info))
17062 {
17063 /* Save VRSAVE. */
17064 offset = info->vrsave_save_offset + sp_offset;
17065 mem = gen_frame_mem (SImode,
17066 gen_rtx_PLUS (Pmode, frame_reg_rtx,
17067 GEN_INT (offset)));
17068 insn = emit_move_insn (mem, reg);
17069 }
17070
17071 /* Include the registers in the mask. */
17072 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
17073
17074 insn = emit_insn (generate_set_vrsave (reg, info, 0));
17075 }
17076
1db02437 17077 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 17078 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
17079 || (DEFAULT_ABI == ABI_V4
17080 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 17081 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
17082 {
17083 /* If emit_load_toc_table will use the link register, we need to save
17084 it. We use R12 for this purpose because emit_load_toc_table
17085 can use register 0. This allows us to use a plain 'blr' to return
17086 from the procedure more often. */
17087 int save_LR_around_toc_setup = (TARGET_ELF
17088 && DEFAULT_ABI != ABI_AIX
17089 && flag_pic
17090 && ! info->lr_save_p
17091 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
17092 if (save_LR_around_toc_setup)
17093 {
1de43f85 17094 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 17095
c4ad648e 17096 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 17097 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 17098
c4ad648e 17099 rs6000_emit_load_toc_table (TRUE);
f8a57be8 17100
c4ad648e 17101 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
17102 RTX_FRAME_RELATED_P (insn) = 1;
17103 }
17104 else
17105 rs6000_emit_load_toc_table (TRUE);
17106 }
ee890fe2 17107
fcce224d 17108#if TARGET_MACHO
ee890fe2 17109 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 17110 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 17111 {
1de43f85 17112 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 17113 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 17114
6d0a8091
DJ
17115 /* Save and restore LR locally around this call (in R0). */
17116 if (!info->lr_save_p)
6fb5fa3c 17117 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 17118
6fb5fa3c 17119 emit_insn (gen_load_macho_picbase (src));
ee890fe2 17120
6fb5fa3c
DB
17121 emit_move_insn (gen_rtx_REG (Pmode,
17122 RS6000_PIC_OFFSET_TABLE_REGNUM),
17123 lr);
6d0a8091
DJ
17124
17125 if (!info->lr_save_p)
6fb5fa3c 17126 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 17127 }
fcce224d 17128#endif
9ebbca7d
GK
17129}
17130
9ebbca7d 17131/* Write function prologue. */
a4f6c312 17132
08c148a8 17133static void
f676971a 17134rs6000_output_function_prologue (FILE *file,
a2369ed3 17135 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
17136{
17137 rs6000_stack_t *info = rs6000_stack_info ();
17138
4697a36c
MM
17139 if (TARGET_DEBUG_STACK)
17140 debug_stack_info (info);
9878760c 17141
a4f6c312
SS
17142 /* Write .extern for any function we will call to save and restore
17143 fp values. */
17144 if (info->first_fp_reg_save < 64
17145 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 17146 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 17147 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 17148 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 17149
c764f757
RK
17150 /* Write .extern for AIX common mode routines, if needed. */
17151 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
17152 {
f6709c70
JW
17153 fputs ("\t.extern __mulh\n", file);
17154 fputs ("\t.extern __mull\n", file);
17155 fputs ("\t.extern __divss\n", file);
17156 fputs ("\t.extern __divus\n", file);
17157 fputs ("\t.extern __quoss\n", file);
17158 fputs ("\t.extern __quous\n", file);
c764f757
RK
17159 common_mode_defined = 1;
17160 }
9878760c 17161
9ebbca7d 17162 if (! HAVE_prologue)
979721f8 17163 {
9ebbca7d 17164 start_sequence ();
9dda4cc8 17165
a4f6c312
SS
17166 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
17167 the "toplevel" insn chain. */
2e040219 17168 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17169 rs6000_emit_prologue ();
2e040219 17170 emit_note (NOTE_INSN_DELETED);
178c3eff 17171
a3c9585f 17172 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17173 {
17174 rtx insn;
17175 unsigned addr = 0;
17176 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17177 {
17178 INSN_ADDRESSES_NEW (insn, addr);
17179 addr += 4;
17180 }
17181 }
9dda4cc8 17182
9ebbca7d 17183 if (TARGET_DEBUG_STACK)
a4f6c312 17184 debug_rtx_list (get_insns (), 100);
c9d691e9 17185 final (get_insns (), file, FALSE);
9ebbca7d 17186 end_sequence ();
979721f8
MM
17187 }
17188
9ebbca7d
GK
17189 rs6000_pic_labelno++;
17190}
f676971a 17191
1c9c5e43
AM
 17192/* Non-zero if VMX regs are restored before the frame pop, zero if
17193 we restore after the pop when possible. */
17194#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
17195
f78c3290
NF
17196/* Reload CR from REG. */
17197
17198static void
17199rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
17200{
17201 int count = 0;
17202 int i;
17203
17204 if (using_mfcr_multiple)
17205 {
17206 for (i = 0; i < 8; i++)
17207 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
17208 count++;
17209 gcc_assert (count);
17210 }
17211
17212 if (using_mfcr_multiple && count > 1)
17213 {
17214 rtvec p;
17215 int ndx;
17216
17217 p = rtvec_alloc (count);
17218
17219 ndx = 0;
17220 for (i = 0; i < 8; i++)
17221 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
17222 {
17223 rtvec r = rtvec_alloc (2);
17224 RTVEC_ELT (r, 0) = reg;
17225 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
17226 RTVEC_ELT (p, ndx) =
17227 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
17228 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
17229 ndx++;
17230 }
17231 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17232 gcc_assert (ndx == count);
17233 }
17234 else
17235 for (i = 0; i < 8; i++)
17236 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
17237 {
17238 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
17239 CR0_REGNO+i),
17240 reg));
17241 }
17242}
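/* Illustrative sketch (not part of rs6000.c): the mtcrf field mask used
   above.  CR field i (CR0..CR7) is selected by bit 1 << (7 - i), so CR0
   corresponds to 0x80 and CR7 to 0x01.  */
#include <stdio.h>

static void
sketch_cr_field_masks (void)
{
  int i;
  for (i = 0; i < 8; i++)
    printf ("CR%d -> mask 0x%02x\n", i, 1 << (7 - i));
}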
17243
ff35822b
JJ
17244/* Return true if OFFSET from stack pointer can be clobbered by signals.
 17245 V.4 doesn't have any stack cushion; the AIX ABIs have 220 or 288 bytes
 17246 below the stack pointer that are not clobbered by signals. */
9878760c 17247
ff35822b
JJ
17248static inline bool
17249offset_below_red_zone_p (HOST_WIDE_INT offset)
17250{
17251 return offset < (DEFAULT_ABI == ABI_V4
17252 ? 0
17253 : TARGET_32BIT ? -220 : -288);
17254}
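/* Illustrative sketch (not part of rs6000.c): the red-zone boundary tested
   by offset_below_red_zone_p above.  V.4 has no red zone, while the AIX
   ABIs protect 220 (32-bit) or 288 (64-bit) bytes below the stack pointer.  */
#include <stdio.h>

static void
sketch_red_zone (int v4_abi, int is_32bit, int offset)
{
  int boundary = v4_abi ? 0 : (is_32bit ? -220 : -288);
  printf ("offset %d %s be clobbered by signals\n", offset,
	  offset < boundary ? "can" : "cannot");
}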
17255
17256/* Emit function epilogue as insns. */
9878760c 17257
9ebbca7d 17258void
a2369ed3 17259rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
17260{
17261 rs6000_stack_t *info;
f78c3290 17262 int restoring_GPRs_inline;
9ebbca7d
GK
17263 int restoring_FPRs_inline;
17264 int using_load_multiple;
d296e02e 17265 int using_mtcr_multiple;
9ebbca7d 17266 int use_backchain_to_restore_sp;
f78c3290
NF
17267 int restore_lr;
17268 int strategy;
9ebbca7d
GK
17269 int sp_offset = 0;
17270 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
17271 rtx frame_reg_rtx = sp_reg_rtx;
ff35822b
JJ
17272 rtx cfa_restores = NULL_RTX;
17273 rtx insn;
0e67400a 17274 enum machine_mode reg_mode = Pmode;
327e5343 17275 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
17276 int i;
17277
c19de7aa
AH
17278 info = rs6000_stack_info ();
17279
17280 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
17281 {
17282 reg_mode = V2SImode;
17283 reg_size = 8;
17284 }
17285
f78c3290
NF
17286 strategy = rs6000_savres_strategy (info, /*savep=*/false,
17287 /*static_chain_p=*/0, sibcall);
17288 using_load_multiple = strategy & SAVRES_MULTIPLE;
17289 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
17290 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 17291 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
17292 || rs6000_cpu == PROCESSOR_PPC603
17293 || rs6000_cpu == PROCESSOR_PPC750
17294 || optimize_size);
1c9c5e43
AM
17295 /* Restore via the backchain when we have a large frame, since this
17296 is more efficient than an addis, addi pair. The second condition
17297 here will not trigger at the moment; We don't actually need a
17298 frame pointer for alloca, but the generic parts of the compiler
17299 give us one anyway. */
17300 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
17301 || info->total_size
17302 + (info->lr_save_p ? info->lr_save_offset : 0)
17303 > 32767
1c9c5e43
AM
17304 || (cfun->calls_alloca
17305 && !frame_pointer_needed));
f78c3290
NF
17306 restore_lr = (info->lr_save_p
17307 && restoring_GPRs_inline
17308 && restoring_FPRs_inline);
9ebbca7d 17309
f57fe068 17310 if (WORLD_SAVE_P (info))
d62294f5
FJ
17311 {
17312 int i, j;
17313 char rname[30];
17314 const char *alloc_rname;
17315 rtvec p;
17316
17317 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
 17318 stack slot (which is not likely to be our caller).
17319 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
17320 rest_world is similar, except any R10 parameter is ignored.
17321 The exception-handling stuff that was here in 2.95 is no
17322 longer necessary. */
d62294f5
FJ
17323
17324 p = rtvec_alloc (9
17325 + 1
f676971a 17326 + 32 - info->first_gp_reg_save
c4ad648e
AM
17327 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
17328 + 63 + 1 - info->first_fp_reg_save);
d62294f5 17329
e3b5732b 17330 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 17331 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
17332 alloc_rname = ggc_strdup (rname);
17333
17334 j = 0;
17335 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
17336 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 17337 gen_rtx_REG (Pmode,
1de43f85 17338 LR_REGNO));
d62294f5 17339 RTVEC_ELT (p, j++)
c4ad648e 17340 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 17341 /* The instruction pattern requires a clobber here;
c4ad648e 17342 it is shared with the restVEC helper. */
d62294f5 17343 RTVEC_ELT (p, j++)
c4ad648e 17344 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
17345
17346 {
c4ad648e
AM
17347 /* CR register traditionally saved as CR2. */
17348 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
17349 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17350 GEN_INT (info->cr_save_offset));
0be76840 17351 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
17352
17353 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
17354 }
17355
17356 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
17357 {
17358 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
17359 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17360 GEN_INT (info->gp_save_offset
17361 + reg_size * i));
0be76840 17362 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
17363
17364 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17365 }
d62294f5 17366 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
17367 {
17368 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
17369 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17370 GEN_INT (info->altivec_save_offset
17371 + 16 * i));
0be76840 17372 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
17373
17374 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17375 }
d62294f5 17376 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 17377 {
696e45ba
ME
17378 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17379 ? DFmode : SFmode),
17380 info->first_fp_reg_save + i);
c4ad648e
AM
17381 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17382 GEN_INT (info->fp_save_offset
17383 + 8 * i));
696e45ba
ME
17384 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17385 ? DFmode : SFmode), addr);
c4ad648e
AM
17386
17387 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17388 }
d62294f5 17389 RTVEC_ELT (p, j++)
c4ad648e 17390 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 17391 RTVEC_ELT (p, j++)
c4ad648e 17392 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 17393 RTVEC_ELT (p, j++)
c4ad648e 17394 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 17395 RTVEC_ELT (p, j++)
c4ad648e 17396 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 17397 RTVEC_ELT (p, j++)
c4ad648e 17398 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
17399 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17400
17401 return;
17402 }
17403
45b194f8
AM
17404 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17405 if (info->push_p)
2b2c2fe5 17406 sp_offset = info->total_size;
f676971a 17407
e6477eaa
AM
17408 /* Restore AltiVec registers if we must do so before adjusting the
17409 stack. */
17410 if (TARGET_ALTIVEC_ABI
17411 && info->altivec_size != 0
1c9c5e43
AM
17412 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17413 || (DEFAULT_ABI != ABI_V4
ff35822b 17414 && offset_below_red_zone_p (info->altivec_save_offset))))
9aa86737
AH
17415 {
17416 int i;
17417
e6477eaa
AM
17418 if (use_backchain_to_restore_sp)
17419 {
17420 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17421 emit_move_insn (frame_reg_rtx,
17422 gen_rtx_MEM (Pmode, sp_reg_rtx));
17423 sp_offset = 0;
17424 }
1c9c5e43
AM
17425 else if (frame_pointer_needed)
17426 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17427
9aa86737
AH
17428 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17429 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17430 {
ff35822b 17431 rtx addr, areg, mem, reg;
9aa86737
AH
17432
17433 areg = gen_rtx_REG (Pmode, 0);
17434 emit_move_insn
17435 (areg, GEN_INT (info->altivec_save_offset
17436 + sp_offset
17437 + 16 * (i - info->first_altivec_reg_save)));
17438
17439 /* AltiVec addressing mode is [reg+reg]. */
17440 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17441 mem = gen_frame_mem (V4SImode, addr);
9aa86737 17442
ff35822b
JJ
17443 reg = gen_rtx_REG (V4SImode, i);
17444 emit_move_insn (reg, mem);
17445 if (offset_below_red_zone_p (info->altivec_save_offset
17446 + (i - info->first_altivec_reg_save)
17447 * 16))
17448 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17449 cfa_restores);
9aa86737
AH
17450 }
17451 }
17452
e6477eaa
AM
17453 /* Restore VRSAVE if we must do so before adjusting the stack. */
17454 if (TARGET_ALTIVEC
17455 && TARGET_ALTIVEC_VRSAVE
17456 && info->vrsave_mask != 0
1c9c5e43
AM
17457 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17458 || (DEFAULT_ABI != ABI_V4
ff35822b 17459 && offset_below_red_zone_p (info->vrsave_save_offset))))
e6477eaa
AM
17460 {
17461 rtx addr, mem, reg;
17462
1c9c5e43 17463 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17464 {
1c9c5e43
AM
17465 if (use_backchain_to_restore_sp)
17466 {
17467 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17468 emit_move_insn (frame_reg_rtx,
17469 gen_rtx_MEM (Pmode, sp_reg_rtx));
17470 sp_offset = 0;
17471 }
17472 else if (frame_pointer_needed)
17473 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
17474 }
17475
17476 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17477 GEN_INT (info->vrsave_save_offset + sp_offset));
17478 mem = gen_frame_mem (SImode, addr);
17479 reg = gen_rtx_REG (SImode, 12);
17480 emit_move_insn (reg, mem);
17481
17482 emit_insn (generate_set_vrsave (reg, info, 1));
17483 }
17484
ff35822b 17485 insn = NULL_RTX;
1c9c5e43
AM
17486 /* If we have a large stack frame, restore the old stack pointer
17487 using the backchain. */
2b2c2fe5
EC
17488 if (use_backchain_to_restore_sp)
17489 {
1c9c5e43 17490 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
17491 {
17492 /* Under V.4, don't reset the stack pointer until after we're done
17493 loading the saved registers. */
17494 if (DEFAULT_ABI == ABI_V4)
17495 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17496
ff35822b
JJ
17497 insn = emit_move_insn (frame_reg_rtx,
17498 gen_rtx_MEM (Pmode, sp_reg_rtx));
e6477eaa
AM
17499 sp_offset = 0;
17500 }
1c9c5e43
AM
17501 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17502 && DEFAULT_ABI == ABI_V4)
17503 /* frame_reg_rtx has been set up by the altivec restore. */
17504 ;
17505 else
17506 {
ff35822b 17507 insn = emit_move_insn (sp_reg_rtx, frame_reg_rtx);
1c9c5e43
AM
17508 frame_reg_rtx = sp_reg_rtx;
17509 }
17510 }
17511 /* If we have a frame pointer, we can restore the old stack pointer
17512 from it. */
17513 else if (frame_pointer_needed)
17514 {
17515 frame_reg_rtx = sp_reg_rtx;
17516 if (DEFAULT_ABI == ABI_V4)
17517 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17518
ff35822b
JJ
17519 insn = emit_insn (gen_add3_insn (frame_reg_rtx, hard_frame_pointer_rtx,
17520 GEN_INT (info->total_size)));
1c9c5e43 17521 sp_offset = 0;
2b2c2fe5 17522 }
45b194f8
AM
17523 else if (info->push_p
17524 && DEFAULT_ABI != ABI_V4
e3b5732b 17525 && !crtl->calls_eh_return)
2b2c2fe5 17526 {
ff35822b
JJ
17527 insn = emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx,
17528 GEN_INT (info->total_size)));
45b194f8 17529 sp_offset = 0;
2b2c2fe5 17530 }
ff35822b
JJ
17531 if (insn && frame_reg_rtx == sp_reg_rtx)
17532 {
17533 if (cfa_restores)
17534 {
17535 REG_NOTES (insn) = cfa_restores;
17536 cfa_restores = NULL_RTX;
17537 }
17538 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17539 RTX_FRAME_RELATED_P (insn) = 1;
17540 }
2b2c2fe5 17541
e6477eaa 17542 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
17543 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17544 && TARGET_ALTIVEC_ABI
e6477eaa
AM
17545 && info->altivec_size != 0
17546 && (DEFAULT_ABI == ABI_V4
ff35822b 17547 || !offset_below_red_zone_p (info->altivec_save_offset)))
e6477eaa
AM
17548 {
17549 int i;
17550
17551 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17552 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17553 {
ff35822b 17554 rtx addr, areg, mem, reg;
e6477eaa
AM
17555
17556 areg = gen_rtx_REG (Pmode, 0);
17557 emit_move_insn
17558 (areg, GEN_INT (info->altivec_save_offset
17559 + sp_offset
17560 + 16 * (i - info->first_altivec_reg_save)));
17561
17562 /* AltiVec addressing mode is [reg+reg]. */
17563 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17564 mem = gen_frame_mem (V4SImode, addr);
17565
ff35822b
JJ
17566 reg = gen_rtx_REG (V4SImode, i);
17567 emit_move_insn (reg, mem);
17568 if (DEFAULT_ABI == ABI_V4)
17569 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17570 cfa_restores);
e6477eaa
AM
17571 }
17572 }
17573
17574 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
17575 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17576 && TARGET_ALTIVEC
e6477eaa
AM
17577 && TARGET_ALTIVEC_VRSAVE
17578 && info->vrsave_mask != 0
17579 && (DEFAULT_ABI == ABI_V4
ff35822b 17580 || !offset_below_red_zone_p (info->vrsave_save_offset)))
554c2941
AM
17581 {
17582 rtx addr, mem, reg;
17583
17584 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17585 GEN_INT (info->vrsave_save_offset + sp_offset));
17586 mem = gen_frame_mem (SImode, addr);
17587 reg = gen_rtx_REG (SImode, 12);
17588 emit_move_insn (reg, mem);
17589
17590 emit_insn (generate_set_vrsave (reg, info, 1));
17591 }
17592
f78c3290
NF
17593 /* Get the old lr if we saved it. If we are restoring registers
17594 out-of-line, then the out-of-line routines can do this for us. */
17595 if (restore_lr)
b6c9286a 17596 {
a3170dc6
AH
17597 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17598 info->lr_save_offset + sp_offset);
ba4828e0 17599
9ebbca7d 17600 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17601 }
f676971a 17602
9ebbca7d
GK
17603 /* Get the old cr if we saved it. */
17604 if (info->cr_save_p)
17605 {
17606 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17607 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17608 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17609
9ebbca7d
GK
17610 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17611 }
f676971a 17612
ff35822b
JJ
17613 /* Set LR here to try to overlap restores below. LR is always saved
17614 above incoming stack, so it never needs REG_CFA_RESTORE. */
f78c3290 17615 if (restore_lr)
1de43f85 17616 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17617 gen_rtx_REG (Pmode, 0));
f676971a 17618
83720594 17619 /* Load exception handler data registers, if needed. */
e3b5732b 17620 if (crtl->calls_eh_return)
83720594 17621 {
78e1b90d
DE
17622 unsigned int i, regno;
17623
fc4767bb
JJ
17624 if (TARGET_AIX)
17625 {
17626 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17627 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17628 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17629
17630 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17631 }
17632
83720594
RH
17633 for (i = 0; ; ++i)
17634 {
a3170dc6 17635 rtx mem;
83720594
RH
17636
17637 regno = EH_RETURN_DATA_REGNO (i);
17638 if (regno == INVALID_REGNUM)
17639 break;
17640
a3170dc6
AH
17641 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17642 info->ehrd_offset + sp_offset
17643 + reg_size * (int) i);
83720594
RH
17644
17645 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17646 }
17647 }
f676971a 17648
9ebbca7d
GK
17649 /* Restore GPRs. This is done as a PARALLEL if we are using
17650 the load-multiple instructions. */
f78c3290
NF
17651 if (TARGET_SPE_ABI
17652 && info->spe_64bit_regs_used != 0
17653 && info->first_gp_reg_save != 32)
52ff33d0 17654 {
52ff33d0
NF
17655 /* Determine whether we can address all of the registers that need
17656 to be saved with an offset from the stack pointer that fits in
17657 the small const field for SPE memory instructions. */
17658 int spe_regs_addressable_via_sp
f78c3290
NF
17659 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17660 + (32 - info->first_gp_reg_save - 1) * reg_size)
17661 && restoring_GPRs_inline);
52ff33d0
NF
17662 int spe_offset;
17663
17664 if (spe_regs_addressable_via_sp)
45b194f8 17665 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17666 else
17667 {
45b194f8 17668 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17669 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17670 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17671 There's no need to worry here because the static chain is passed
17672 anew to every function. */
f78c3290
NF
17673 int ool_adjust = (restoring_GPRs_inline
17674 ? 0
17675 : (info->first_gp_reg_save
17676 - (FIRST_SAVRES_REGISTER+1))*8);
17677
45b194f8
AM
17678 if (frame_reg_rtx == sp_reg_rtx)
17679 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17680 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17681 GEN_INT (info->spe_gp_save_offset
17682 + sp_offset
17683 - ool_adjust)));
45b194f8
AM
17684 /* Keep the invariant that frame_reg_rtx + sp_offset points
17685 at the top of the stack frame. */
17686 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17687
17688 spe_offset = 0;
17689 }
17690
f78c3290
NF
17691 if (restoring_GPRs_inline)
17692 {
17693 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17694 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17695 {
ff35822b 17696 rtx offset, addr, mem, reg;
52ff33d0 17697
f78c3290
NF
17698 /* We're doing all this to ensure that the immediate offset
17699 fits into the immediate field of 'evldd'. */
17700 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17701
f78c3290
NF
17702 offset = GEN_INT (spe_offset + reg_size * i);
17703 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17704 mem = gen_rtx_MEM (V2SImode, addr);
ff35822b 17705 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
52ff33d0 17706
ff35822b
JJ
17707 insn = emit_move_insn (reg, mem);
17708 if (DEFAULT_ABI == ABI_V4)
17709 {
17710 if (frame_pointer_needed
17711 && info->first_gp_reg_save + i
17712 == HARD_FRAME_POINTER_REGNUM)
17713 {
17714 add_reg_note (insn, REG_CFA_DEF_CFA,
17715 plus_constant (frame_reg_rtx,
17716 sp_offset));
17717 RTX_FRAME_RELATED_P (insn) = 1;
17718 }
17719
17720 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17721 cfa_restores);
17722 }
f78c3290
NF
17723 }
17724 }
17725 else
17726 {
17727 rtx par;
17728
17729 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17730 0, reg_mode,
17731 /*savep=*/false, /*gpr=*/true,
17732 /*exitp=*/true);
17733 emit_jump_insn (par);
f78c3290
NF
17734 /* We don't want anybody else emitting things after we jumped
17735 back. */
17736 return;
17737 }
52ff33d0 17738 }
f78c3290
NF
17739 else if (!restoring_GPRs_inline)
17740 {
17741 /* We are jumping to an out-of-line function. */
17742 bool can_use_exit = info->first_fp_reg_save == 64;
17743 rtx par;
17744
17745 /* Emit stack reset code if we need it. */
17746 if (can_use_exit)
17747 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17748 sp_offset, can_use_exit);
17749 else
17750 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17751 sp_reg_rtx,
17752 GEN_INT (sp_offset - info->fp_size)));
17753
17754 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17755 info->gp_save_offset, reg_mode,
17756 /*savep=*/false, /*gpr=*/true,
17757 /*exitp=*/can_use_exit);
17758
17759 if (can_use_exit)
17760 {
17761 if (info->cr_save_p)
ff35822b
JJ
17762 {
17763 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17764 using_mtcr_multiple);
17765 if (DEFAULT_ABI == ABI_V4)
17766 cfa_restores
17767 = alloc_reg_note (REG_CFA_RESTORE,
17768 gen_rtx_REG (SImode, CR2_REGNO),
17769 cfa_restores);
17770 }
f78c3290
NF
17771
17772 emit_jump_insn (par);
17773
17774 /* We don't want anybody else emitting things after we jumped
17775 back. */
17776 return;
17777 }
ff35822b
JJ
17778
17779 insn = emit_insn (par);
17780 if (DEFAULT_ABI == ABI_V4)
17781 {
17782 if (frame_pointer_needed)
17783 {
17784 add_reg_note (insn, REG_CFA_DEF_CFA,
17785 plus_constant (frame_reg_rtx, sp_offset));
17786 RTX_FRAME_RELATED_P (insn) = 1;
17787 }
17788
17789 for (i = info->first_gp_reg_save; i < 32; i++)
17790 cfa_restores
17791 = alloc_reg_note (REG_CFA_RESTORE,
17792 gen_rtx_REG (reg_mode, i), cfa_restores);
17793 }
f78c3290
NF
17794 }
17795 else if (using_load_multiple)
17796 {
17797 rtvec p;
17798 p = rtvec_alloc (32 - info->first_gp_reg_save);
17799 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17800 {
f676971a
EC
17801 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17802 GEN_INT (info->gp_save_offset
17803 + sp_offset
9ebbca7d 17804 + reg_size * i));
0be76840 17805 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17806 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
ba4828e0 17807
ff35822b
JJ
17808 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, reg, mem);
17809 if (DEFAULT_ABI == ABI_V4)
17810 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17811 cfa_restores);
17812 }
17813 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17814 if (DEFAULT_ABI == ABI_V4 && frame_pointer_needed)
17815 {
17816 add_reg_note (insn, REG_CFA_DEF_CFA,
17817 plus_constant (frame_reg_rtx, sp_offset));
17818 RTX_FRAME_RELATED_P (insn) = 1;
9ebbca7d 17819 }
f78c3290
NF
17820 }
17821 else
17822 {
17823 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17824 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17825 {
17826 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17827 GEN_INT (info->gp_save_offset
17828 + sp_offset
17829 + reg_size * i));
17830 rtx mem = gen_frame_mem (reg_mode, addr);
ff35822b 17831 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f78c3290 17832
ff35822b
JJ
17833 insn = emit_move_insn (reg, mem);
17834 if (DEFAULT_ABI == ABI_V4)
17835 {
17836 if (frame_pointer_needed
17837 && info->first_gp_reg_save + i
17838 == HARD_FRAME_POINTER_REGNUM)
17839 {
17840 add_reg_note (insn, REG_CFA_DEF_CFA,
17841 plus_constant (frame_reg_rtx, sp_offset));
17842 RTX_FRAME_RELATED_P (insn) = 1;
17843 }
17844
17845 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17846 cfa_restores);
17847 }
f78c3290
NF
17848 }
17849 }
9878760c 17850
9ebbca7d
GK
17851 /* Restore fpr's if we need to do it without calling a function. */
17852 if (restoring_FPRs_inline)
17853 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17854 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17855 && ! call_used_regs[info->first_fp_reg_save+i]))
17856 {
ff35822b 17857 rtx addr, mem, reg;
9ebbca7d 17858 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17859 GEN_INT (info->fp_save_offset
17860 + sp_offset
a4f6c312 17861 + 8 * i));
696e45ba
ME
17862 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17863 ? DFmode : SFmode), addr);
ff35822b
JJ
17864 reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17865 ? DFmode : SFmode),
17866 info->first_fp_reg_save + i);
9ebbca7d 17867
ff35822b
JJ
17868 emit_move_insn (reg, mem);
17869 if (DEFAULT_ABI == ABI_V4)
17870 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
17871 cfa_restores);
9ebbca7d 17872 }
8d30c4ee 17873
9ebbca7d
GK
17874 /* If we saved cr, restore it here. Just those that were used. */
17875 if (info->cr_save_p)
ff35822b
JJ
17876 {
17877 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
17878 if (DEFAULT_ABI == ABI_V4)
17879 cfa_restores
17880 = alloc_reg_note (REG_CFA_RESTORE, gen_rtx_REG (SImode, CR2_REGNO),
17881 cfa_restores);
17882 }
979721f8 17883
9ebbca7d 17884 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17885 have been done. */
ff35822b
JJ
17886 insn = rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17887 sp_offset, !restoring_FPRs_inline);
17888 if (insn)
17889 {
17890 if (cfa_restores)
17891 {
17892 REG_NOTES (insn) = cfa_restores;
17893 cfa_restores = NULL_RTX;
17894 }
17895 add_reg_note (insn, REG_CFA_DEF_CFA, sp_reg_rtx);
17896 RTX_FRAME_RELATED_P (insn) = 1;
17897 }
b6c9286a 17898
e3b5732b 17899 if (crtl->calls_eh_return)
83720594
RH
17900 {
17901 rtx sa = EH_RETURN_STACKADJ_RTX;
ff35822b 17902 emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx, sa));
83720594
RH
17903 }
17904
9ebbca7d
GK
17905 if (!sibcall)
17906 {
17907 rtvec p;
17908 if (! restoring_FPRs_inline)
f78c3290 17909 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17910 else
17911 p = rtvec_alloc (2);
b6c9286a 17912
e35b9579 17913 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17914 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17915 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17916 : gen_rtx_CLOBBER (VOIDmode,
17917 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17918
17919 /* If we have to restore more than two FP registers, branch to the
17920 restore function. It will return to our caller. */
17921 if (! restoring_FPRs_inline)
17922 {
17923 int i;
f78c3290
NF
17924 rtx sym;
17925
17926 sym = rs6000_savres_routine_sym (info,
17927 /*savep=*/false,
17928 /*gpr=*/false,
17929 /*exitp=*/true);
17930 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17931 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17932 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17933 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17934 {
17935 rtx addr, mem;
17936 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17937 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17938 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17939
f78c3290 17940 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17941 gen_rtx_SET (VOIDmode,
17942 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17943 mem);
b6c9286a
MM
17944 }
17945 }
f676971a 17946
9ebbca7d 17947 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17948 }
9878760c
RK
17949}
17950
17951/* Write function epilogue. */
17952
08c148a8 17953static void
f676971a 17954rs6000_output_function_epilogue (FILE *file,
a2369ed3 17955 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17956{
9ebbca7d 17957 if (! HAVE_epilogue)
9878760c 17958 {
9ebbca7d
GK
17959 rtx insn = get_last_insn ();
17960 /* If the last insn was a BARRIER, we don't have to write anything except
17961 the trace table. */
17962 if (GET_CODE (insn) == NOTE)
17963 insn = prev_nonnote_insn (insn);
17964 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17965 {
9ebbca7d
GK
17966 /* This is slightly ugly, but at least we don't have two
17967 copies of the epilogue-emitting code. */
17968 start_sequence ();
17969
17970 /* A NOTE_INSN_DELETED is supposed to be at the start
17971 and end of the "toplevel" insn chain. */
2e040219 17972 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17973 rs6000_emit_epilogue (FALSE);
2e040219 17974 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17975
a3c9585f 17976 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17977 {
17978 rtx insn;
17979 unsigned addr = 0;
17980 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17981 {
17982 INSN_ADDRESSES_NEW (insn, addr);
17983 addr += 4;
17984 }
17985 }
17986
9ebbca7d 17987 if (TARGET_DEBUG_STACK)
a4f6c312 17988 debug_rtx_list (get_insns (), 100);
c9d691e9 17989 final (get_insns (), file, FALSE);
9ebbca7d 17990 end_sequence ();
4697a36c 17991 }
9878760c 17992 }
b4ac57ab 17993
efdba735
SH
17994#if TARGET_MACHO
17995 macho_branch_islands ();
0e5da0be
GK
17996 /* Mach-O doesn't support labels at the end of objects, so if
17997 it looks like we might want one, insert a NOP. */
17998 {
17999 rtx insn = get_last_insn ();
18000 while (insn
18001 && NOTE_P (insn)
a38e7aa5 18002 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 18003 insn = PREV_INSN (insn);
f676971a
EC
18004 if (insn
18005 && (LABEL_P (insn)
0e5da0be 18006 || (NOTE_P (insn)
a38e7aa5 18007 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
18008 fputs ("\tnop\n", file);
18009 }
18010#endif
18011
9b30bae2 18012 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
18013 on its format.
18014
18015 We don't output a traceback table if -finhibit-size-directive was
18016 used. The documentation for -finhibit-size-directive reads
18017 ``don't output a @code{.size} assembler directive, or anything
18018 else that would cause trouble if the function is split in the
18019 middle, and the two halves are placed at locations far apart in
18020 memory.'' The traceback table has this property, since it
18021 includes the offset from the start of the function to the
4d30c363
MM
18022 traceback table itself.
18023
18024    System V.4 PowerPC targets (and the embedded ABI derived from it) use a
b6c9286a 18025    different traceback table.  */
57ac7be9 18026 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
3c072c6b 18027 && rs6000_traceback != traceback_none && !cfun->is_thunk)
9b30bae2 18028 {
69c75916 18029 const char *fname = NULL;
3ac88239 18030 const char *language_string = lang_hooks.name;
6041bf2f 18031 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 18032 int i;
57ac7be9 18033 int optional_tbtab;
8097c268 18034 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
18035
18036 if (rs6000_traceback == traceback_full)
18037 optional_tbtab = 1;
18038 else if (rs6000_traceback == traceback_part)
18039 optional_tbtab = 0;
18040 else
18041 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 18042
69c75916
AM
18043 if (optional_tbtab)
18044 {
18045 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
18046 while (*fname == '.') /* V.4 encodes . in the name */
18047 fname++;
18048
18049 /* Need label immediately before tbtab, so we can compute
18050 its offset from the function start. */
18051 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
18052 ASM_OUTPUT_LABEL (file, fname);
18053 }
314fc5a9
ILT
18054
18055 	  /* The .tbtab pseudo-op can only be used for the first eight
18056 	     expressions, since it can't handle the possibly variable
18057 	     length fields that follow.  However, if you omit the optional
18058 	     fields, the assembler outputs zeros for all optional fields
18059 	     anyway, giving each variable-length field its minimum length
18060 	     (as defined in sys/debug.h).  Thus we cannot use the .tbtab
18061 	     pseudo-op at all.  */
18062
18063 /* An all-zero word flags the start of the tbtab, for debuggers
18064 that have to find it by searching forward from the entry
18065 point or from the current pc. */
19d2d16f 18066 fputs ("\t.long 0\n", file);
314fc5a9
ILT
18067
18068 /* Tbtab format type. Use format type 0. */
19d2d16f 18069 fputs ("\t.byte 0,", file);
314fc5a9 18070
5fc921c1
DE
18071 /* Language type. Unfortunately, there does not seem to be any
18072 official way to discover the language being compiled, so we
18073 use language_string.
18074 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
18075 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
18076 a number, so for now use 9. */
5fc921c1 18077 if (! strcmp (language_string, "GNU C"))
314fc5a9 18078 i = 0;
6de9cd9a 18079 else if (! strcmp (language_string, "GNU F77")
7f62878c 18080 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 18081 i = 1;
8b83775b 18082 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 18083 i = 2;
5fc921c1
DE
18084 else if (! strcmp (language_string, "GNU Ada"))
18085 i = 3;
56438901
AM
18086 else if (! strcmp (language_string, "GNU C++")
18087 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 18088 i = 9;
9517ead8
AG
18089 else if (! strcmp (language_string, "GNU Java"))
18090 i = 13;
5fc921c1
DE
18091 else if (! strcmp (language_string, "GNU Objective-C"))
18092 i = 14;
314fc5a9 18093 else
37409796 18094 gcc_unreachable ();
314fc5a9
ILT
18095 fprintf (file, "%d,", i);
18096
18097 /* 8 single bit fields: global linkage (not set for C extern linkage,
18098 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
18099 from start of procedure stored in tbtab, internal function, function
18100 has controlled storage, function has no toc, function uses fp,
18101 function logs/aborts fp operations. */
18102 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
18103 fprintf (file, "%d,",
18104 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
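	  /* For illustration (derived from the expression above): with a full
	     traceback table (optional_tbtab == 1) and at least one FPR saved,
	     this prints 34 = 0x22, i.e. bit 5 ("offset from start of procedure
	     stored in tbtab") and bit 1 ("function uses fp") set.  */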
314fc5a9
ILT
18105
18106 /* 6 bitfields: function is interrupt handler, name present in
18107 proc table, function calls alloca, on condition directives
18108 (controls stack walks, 3 bits), saves condition reg, saves
18109 link reg. */
18110 /* The `function calls alloca' bit seems to be set whenever reg 31 is
18111 set up as a frame pointer, even when there is no alloca call. */
18112 fprintf (file, "%d,",
6041bf2f
DE
18113 ((optional_tbtab << 6)
18114 | ((optional_tbtab & frame_pointer_needed) << 5)
18115 | (info->cr_save_p << 1)
18116 | (info->lr_save_p)));
314fc5a9 18117
6041bf2f 18118 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
18119 (6 bits). */
18120 fprintf (file, "%d,",
4697a36c 18121 (info->push_p << 7) | (64 - info->first_fp_reg_save));
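	  /* For illustration (assuming the FPRs occupy hard register numbers
	     32-63, as the save/restore loops in this file do): a frame that
	     pushes the stack and saves f14-f31 (first_fp_reg_save == 46)
	     prints 146 = 0x80 | 18.  */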
314fc5a9
ILT
18122
18123 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
18124 fprintf (file, "%d,", (32 - first_reg_to_save ()));
18125
6041bf2f
DE
18126 if (optional_tbtab)
18127 {
18128 /* Compute the parameter info from the function decl argument
18129 list. */
18130 tree decl;
18131 int next_parm_info_bit = 31;
314fc5a9 18132
6041bf2f
DE
18133 for (decl = DECL_ARGUMENTS (current_function_decl);
18134 decl; decl = TREE_CHAIN (decl))
18135 {
18136 rtx parameter = DECL_INCOMING_RTL (decl);
18137 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 18138
6041bf2f
DE
18139 if (GET_CODE (parameter) == REG)
18140 {
ebb109ad 18141 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
18142 {
18143 int bits;
18144
18145 float_parms++;
18146
37409796
NS
18147 switch (mode)
18148 {
18149 case SFmode:
e41b2a33 18150 case SDmode:
37409796
NS
18151 bits = 0x2;
18152 break;
18153
18154 case DFmode:
7393f7f8 18155 case DDmode:
37409796 18156 case TFmode:
7393f7f8 18157 case TDmode:
37409796
NS
18158 bits = 0x3;
18159 break;
18160
18161 default:
18162 gcc_unreachable ();
18163 }
6041bf2f
DE
18164
18165 /* If only one bit will fit, don't or in this entry. */
18166 if (next_parm_info_bit > 0)
18167 parm_info |= (bits << (next_parm_info_bit - 1));
18168 next_parm_info_bit -= 2;
18169 }
18170 else
18171 {
18172 fixed_parms += ((GET_MODE_SIZE (mode)
18173 + (UNITS_PER_WORD - 1))
18174 / UNITS_PER_WORD);
18175 next_parm_info_bit -= 1;
18176 }
18177 }
18178 }
18179 }
314fc5a9
ILT
18180
18181 /* Number of fixed point parameters. */
18182 /* This is actually the number of words of fixed point parameters; thus
18183 an 8 byte struct counts as 2; and thus the maximum value is 8. */
18184 fprintf (file, "%d,", fixed_parms);
18185
18186 /* 2 bitfields: number of floating point parameters (7 bits), parameters
18187 all on stack. */
18188 /* This is actually the number of fp registers that hold parameters;
18189 and thus the maximum value is 13. */
18190 /* Set parameters on stack bit if parameters are not in their original
18191 registers, regardless of whether they are on the stack? Xlc
18192 seems to set the bit when not optimizing. */
18193 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
18194
6041bf2f
DE
18195 if (! optional_tbtab)
18196 return;
18197
314fc5a9
ILT
18198 /* Optional fields follow. Some are variable length. */
18199
18200 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
18201 11 double float. */
18202 /* There is an entry for each parameter in a register, in the order that
18203 they occur in the parameter list. Any intervening arguments on the
18204 stack are ignored. If the list overflows a long (max possible length
18205 34 bits) then completely leave off all elements that don't fit. */
18206 /* Only emit this long if there was at least one parameter. */
18207 if (fixed_parms || float_parms)
18208 fprintf (file, "\t.long %d\n", parm_info);
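	  /* As an illustration (assuming both arguments reach registers): for
	     a function taking (double, int), the double contributes 0b11 at
	     bits 31-30 and the int leaves bit 29 clear, so the top bits of
	     parm_info are 0xC0000000.  */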
18209
18210 /* Offset from start of code to tb table. */
19d2d16f 18211 fputs ("\t.long ", file);
314fc5a9 18212 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
18213 if (TARGET_AIX)
18214 RS6000_OUTPUT_BASENAME (file, fname);
18215 else
18216 assemble_name (file, fname);
18217 putc ('-', file);
18218 rs6000_output_function_entry (file, fname);
19d2d16f 18219 putc ('\n', file);
314fc5a9
ILT
18220
18221 /* Interrupt handler mask. */
18222 /* Omit this long, since we never set the interrupt handler bit
18223 above. */
18224
18225 /* Number of CTL (controlled storage) anchors. */
18226 /* Omit this long, since the has_ctl bit is never set above. */
18227
18228 /* Displacement into stack of each CTL anchor. */
18229 /* Omit this list of longs, because there are no CTL anchors. */
18230
18231 /* Length of function name. */
69c75916
AM
18232 if (*fname == '*')
18233 ++fname;
296b8152 18234 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
18235
18236 /* Function name. */
18237 assemble_string (fname, strlen (fname));
18238
18239 /* Register for alloca automatic storage; this is always reg 31.
18240 Only emit this if the alloca bit was set above. */
18241 if (frame_pointer_needed)
19d2d16f 18242 fputs ("\t.byte 31\n", file);
b1765bde
DE
18243
18244 fputs ("\t.align 2\n", file);
9b30bae2 18245 }
9878760c 18246}
17167fd8 18247\f
a4f6c312
SS
18248/* A C compound statement that outputs the assembler code for a thunk
18249 function, used to implement C++ virtual function calls with
18250 multiple inheritance. The thunk acts as a wrapper around a virtual
18251 function, adjusting the implicit object parameter before handing
18252 control off to the real function.
18253
18254 First, emit code to add the integer DELTA to the location that
18255 contains the incoming first argument. Assume that this argument
18256 contains a pointer, and is the one used to pass the `this' pointer
18257 in C++. This is the incoming argument *before* the function
18258 prologue, e.g. `%o0' on a sparc. The addition must preserve the
18259 values of all other incoming arguments.
17167fd8
MM
18260
18261 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
18262 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
18263 not touch the return address. Hence returning from FUNCTION will
18264 return to whoever called the current `thunk'.
17167fd8 18265
a4f6c312
SS
18266 The effect must be as if FUNCTION had been called directly with the
18267 adjusted first argument. This macro is responsible for emitting
18268 all of the code for a thunk function; output_function_prologue()
18269 and output_function_epilogue() are not invoked.
17167fd8 18270
a4f6c312
SS
18271 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
18272 been extracted from it.) It might possibly be useful on some
18273 targets, but probably not.
17167fd8 18274
a4f6c312
SS
18275 If you do not define this macro, the target-independent code in the
18276 C++ frontend will generate a less efficient heavyweight thunk that
18277 calls FUNCTION instead of jumping to it. The generic approach does
18278 not support varargs. */
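/* As a rough, hypothetical illustration (the exact output depends on the
   ABI and options, and is not taken from a real build): on a 32-bit target
   with DELTA == 8 and no VCALL_OFFSET, the code emitted below amounts to

	addi 3,3,8
	b function

   i.e. bump the incoming `this' pointer in r3 and tail-branch to
   FUNCTION.  */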
17167fd8 18279
3961e8fe 18280static void
f676971a
EC
18281rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
18282 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 18283 tree function)
17167fd8 18284{
0a2aaacc 18285 rtx this_rtx, insn, funexp;
17167fd8 18286
5b71a4e7 18287 reload_completed = 1;
fe3ad572 18288 epilogue_completed = 1;
56a7189a 18289
5b71a4e7 18290 /* Mark the end of the (empty) prologue. */
2e040219 18291 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 18292
5b71a4e7
DE
18293 /* Find the "this" pointer. If the function returns a structure,
18294 the structure return pointer is in r3. */
61f71b34 18295 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 18296 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 18297 else
0a2aaacc 18298 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 18299
5b71a4e7
DE
18300 /* Apply the constant offset, if required. */
18301 if (delta)
18302 {
18303 rtx delta_rtx = GEN_INT (delta);
18304 emit_insn (TARGET_32BIT
0a2aaacc
KG
18305 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
18306 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
18307 }
18308
5b71a4e7
DE
18309 /* Apply the offset from the vtable, if required. */
18310 if (vcall_offset)
17167fd8 18311 {
5b71a4e7
DE
18312 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
18313 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 18314
0a2aaacc 18315 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
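      /* The test below checks whether VCALL_OFFSET fits in a signed 16-bit
	 displacement: vcall_offset + 0x8000 < 0x10000 exactly when
	 vcall_offset is in [-0x8000, 0x7fff].  If it does not fit, the offset
	 is first added into TMP and the vtable slot is loaded through TMP;
	 otherwise it is folded into the load's displacement.  */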
eeff9307
JJ
18316 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
18317 {
18318 emit_insn (TARGET_32BIT
18319 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
18320 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
18321 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
18322 }
18323 else
18324 {
18325 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
18326
18327 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
18328 }
5b71a4e7 18329 emit_insn (TARGET_32BIT
0a2aaacc
KG
18330 ? gen_addsi3 (this_rtx, this_rtx, tmp)
18331 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
18332 }
18333
5b71a4e7
DE
18334 /* Generate a tail call to the target function. */
18335 if (!TREE_USED (function))
18336 {
18337 assemble_external (function);
18338 TREE_USED (function) = 1;
18339 }
18340 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 18341 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
18342
18343#if TARGET_MACHO
ab82a49f 18344 if (MACHOPIC_INDIRECT)
5b71a4e7 18345 funexp = machopic_indirect_call_target (funexp);
ee890fe2 18346#endif
5b71a4e7
DE
18347
18348 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 18349 generate sibcall RTL explicitly. */
5b71a4e7
DE
18350 insn = emit_call_insn (
18351 gen_rtx_PARALLEL (VOIDmode,
18352 gen_rtvec (4,
18353 gen_rtx_CALL (VOIDmode,
18354 funexp, const0_rtx),
18355 gen_rtx_USE (VOIDmode, const0_rtx),
18356 gen_rtx_USE (VOIDmode,
18357 gen_rtx_REG (SImode,
1de43f85 18358 LR_REGNO)),
5b71a4e7
DE
18359 gen_rtx_RETURN (VOIDmode))));
18360 SIBLING_CALL_P (insn) = 1;
18361 emit_barrier ();
18362
18363 /* Run just enough of rest_of_compilation to get the insns emitted.
18364 There's not really enough bulk here to make other passes such as
18365 instruction scheduling worth while. Note that use_thunk calls
18366 assemble_start_function and assemble_end_function. */
18367 insn = get_insns ();
55e092c4 18368 insn_locators_alloc ();
5b71a4e7
DE
18369 shorten_branches (insn);
18370 final_start_function (insn, file, 1);
c9d691e9 18371 final (insn, file, 1);
5b71a4e7 18372 final_end_function ();
d7087dd2 18373 free_after_compilation (cfun);
5b71a4e7
DE
18374
18375 reload_completed = 0;
fe3ad572 18376 epilogue_completed = 0;
9ebbca7d 18377}
9ebbca7d
GK
18378\f
18379/* A quick summary of the various types of 'constant-pool tables'
18380 under PowerPC:
18381
f676971a 18382 Target Flags Name One table per
9ebbca7d
GK
18383 AIX (none) AIX TOC object file
18384 AIX -mfull-toc AIX TOC object file
18385 AIX -mminimal-toc AIX minimal TOC translation unit
18386 SVR4/EABI (none) SVR4 SDATA object file
18387 SVR4/EABI -fpic SVR4 pic object file
18388 SVR4/EABI -fPIC SVR4 PIC translation unit
18389 SVR4/EABI -mrelocatable EABI TOC function
18390 SVR4/EABI -maix AIX TOC object file
f676971a 18391 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
18392 AIX minimal TOC translation unit
18393
18394 Name Reg. Set by entries contains:
18395 made by addrs? fp? sum?
18396
18397 AIX TOC 2 crt0 as Y option option
18398 AIX minimal TOC 30 prolog gcc Y Y option
18399 SVR4 SDATA 13 crt0 gcc N Y N
18400 SVR4 pic 30 prolog ld Y not yet N
18401 SVR4 PIC 30 prolog gcc Y option option
18402 EABI TOC 30 prolog gcc Y option option
18403
18404*/
18405
9ebbca7d
GK
18406/* Hash functions for the hash table. */
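/* A note on the scheme below (derived from the code, not stated in the
   original comments): rs6000_hash_constant recursively combines a constant's
   RTX code, machine mode and operands, mixing with the multipliers 613 and
   1231, while toc_hash_eq compares entries with rtx_equal_p; structurally
   identical constants of the same mode therefore share one TOC entry.  */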
18407
18408static unsigned
a2369ed3 18409rs6000_hash_constant (rtx k)
9ebbca7d 18410{
46b33600
RH
18411 enum rtx_code code = GET_CODE (k);
18412 enum machine_mode mode = GET_MODE (k);
18413 unsigned result = (code << 3) ^ mode;
18414 const char *format;
18415 int flen, fidx;
f676971a 18416
46b33600
RH
18417 format = GET_RTX_FORMAT (code);
18418 flen = strlen (format);
18419 fidx = 0;
9ebbca7d 18420
46b33600
RH
18421 switch (code)
18422 {
18423 case LABEL_REF:
18424 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
18425
18426 case CONST_DOUBLE:
18427 if (mode != VOIDmode)
18428 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
18429 flen = 2;
18430 break;
18431
18432 case CODE_LABEL:
18433 fidx = 3;
18434 break;
18435
18436 default:
18437 break;
18438 }
9ebbca7d
GK
18439
18440 for (; fidx < flen; fidx++)
18441 switch (format[fidx])
18442 {
18443 case 's':
18444 {
18445 unsigned i, len;
18446 const char *str = XSTR (k, fidx);
18447 len = strlen (str);
18448 result = result * 613 + len;
18449 for (i = 0; i < len; i++)
18450 result = result * 613 + (unsigned) str[i];
17167fd8
MM
18451 break;
18452 }
9ebbca7d
GK
18453 case 'u':
18454 case 'e':
18455 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
18456 break;
18457 case 'i':
18458 case 'n':
18459 result = result * 613 + (unsigned) XINT (k, fidx);
18460 break;
18461 case 'w':
18462 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
18463 result = result * 613 + (unsigned) XWINT (k, fidx);
18464 else
18465 {
18466 size_t i;
9390387d 18467 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
18468 result = result * 613 + (unsigned) (XWINT (k, fidx)
18469 >> CHAR_BIT * i);
18470 }
18471 break;
09501938
DE
18472 case '0':
18473 break;
9ebbca7d 18474 default:
37409796 18475 gcc_unreachable ();
9ebbca7d 18476 }
46b33600 18477
9ebbca7d
GK
18478 return result;
18479}
18480
18481static unsigned
a2369ed3 18482toc_hash_function (const void *hash_entry)
9ebbca7d 18483{
f676971a 18484 const struct toc_hash_struct *thc =
a9098fd0
GK
18485 (const struct toc_hash_struct *) hash_entry;
18486 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18487}
18488
18489/* Compare H1 and H2 for equivalence. */
18490
18491static int
a2369ed3 18492toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18493{
18494 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18495 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18496
a9098fd0
GK
18497 if (((const struct toc_hash_struct *) h1)->key_mode
18498 != ((const struct toc_hash_struct *) h2)->key_mode)
18499 return 0;
18500
5692c7bc 18501 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18502}
18503
28e510bd
MM
18504 /* These are the names given by the C++ front-end to vtables and
18505    vtable-like objects.  Ideally, this logic should not be here;
18506 instead, there should be some programmatic way of inquiring as
18507 to whether or not an object is a vtable. */
18508
18509#define VTABLE_NAME_P(NAME) \
9390387d 18510 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18511 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18512 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18513 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18514 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
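/* A note on the prefixes above (background knowledge, not stated in this
   file): "_vt." is the old pre-3.0 g++ vtable prefix, while "_ZTV", "_ZTT",
   "_ZTI" and "_ZTC" are the Itanium C++ ABI manglings for vtables, VTTs,
   typeinfo objects and construction vtables respectively.  */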
28e510bd 18515
ee06c6a5
DE
18516#ifdef NO_DOLLAR_IN_LABEL
18517/* Return a GGC-allocated character string translating dollar signs in
18518 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
18519
18520const char *
18521rs6000_xcoff_strip_dollar (const char *name)
18522{
18523 char *strip, *p;
18524 int len;
18525
18526 p = strchr (name, '$');
18527
18528 if (p == 0 || p == name)
18529 return name;
18530
18531 len = strlen (name);
18532 strip = (char *) alloca (len + 1);
18533 strcpy (strip, name);
18534 p = strchr (strip, '$');
18535 while (p)
18536 {
18537 *p = '_';
18538 p = strchr (p + 1, '$');
18539 }
18540
18541 return ggc_alloc_string (strip, len);
18542}
18543#endif
18544
28e510bd 18545void
a2369ed3 18546rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18547{
18548 /* Currently C++ toc references to vtables can be emitted before it
18549 is decided whether the vtable is public or private. If this is
18550 the case, then the linker will eventually complain that there is
f676971a 18551 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18552 we emit the TOC reference to reference the symbol and not the
18553 section. */
18554 const char *name = XSTR (x, 0);
54ee9799 18555
f676971a 18556 if (VTABLE_NAME_P (name))
54ee9799
DE
18557 {
18558 RS6000_OUTPUT_BASENAME (file, name);
18559 }
18560 else
18561 assemble_name (file, name);
28e510bd
MM
18562}
18563
a4f6c312
SS
18564/* Output a TOC entry. We derive the entry name from what is being
18565 written. */
9878760c
RK
18566
18567void
a2369ed3 18568output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18569{
18570 char buf[256];
3cce094d 18571 const char *name = buf;
9878760c 18572 rtx base = x;
16fdeb48 18573 HOST_WIDE_INT offset = 0;
9878760c 18574
37409796 18575 gcc_assert (!TARGET_NO_TOC);
4697a36c 18576
9ebbca7d
GK
18577 /* When the linker won't eliminate them, don't output duplicate
18578 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18579 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18580 CODE_LABELs. */
18581 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18582 {
18583 struct toc_hash_struct *h;
18584 void * * found;
f676971a 18585
17211ab5 18586 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18587 time because GGC is not initialized at that point. */
17211ab5 18588 if (toc_hash_table == NULL)
f676971a 18589 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18590 toc_hash_eq, NULL);
18591
5ead67f6 18592 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18593 h->key = x;
a9098fd0 18594 h->key_mode = mode;
9ebbca7d 18595 h->labelno = labelno;
f676971a 18596
bbbbb16a 18597 found = htab_find_slot (toc_hash_table, h, INSERT);
9ebbca7d
GK
18598 if (*found == NULL)
18599 *found = h;
f676971a 18600 else /* This is indeed a duplicate.
9ebbca7d
GK
18601 Set this label equal to that label. */
18602 {
18603 fputs ("\t.set ", file);
18604 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18605 fprintf (file, "%d,", labelno);
18606 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18607 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18608 found)->labelno));
18609 return;
18610 }
18611 }
18612
18613 /* If we're going to put a double constant in the TOC, make sure it's
18614 aligned properly when strict alignment is on. */
ff1720ed
RK
18615 if (GET_CODE (x) == CONST_DOUBLE
18616 && STRICT_ALIGNMENT
a9098fd0 18617 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18618 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18619 ASM_OUTPUT_ALIGN (file, 3);
18620 }
18621
4977bab6 18622 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18623
37c37a57
RK
18624 /* Handle FP constants specially. Note that if we have a minimal
18625 TOC, things we put here aren't actually in the TOC, so we can allow
18626 FP constants. */
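  /* For illustration of the DFmode branch below (a sketch traced from the
     format strings, not output captured from a real build): on a 64-bit
     target without -mminimal-toc, the double constant 1.0
     (image 0x3ff0000000000000) would be emitted as
	.tc FD_3ff00000_0[TC],0x3ff0000000000000  */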
00b79d54
BE
18627 if (GET_CODE (x) == CONST_DOUBLE &&
18628 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18629 {
18630 REAL_VALUE_TYPE rv;
18631 long k[4];
18632
18633 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18634 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18635 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18636 else
18637 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18638
18639 if (TARGET_64BIT)
18640 {
18641 if (TARGET_MINIMAL_TOC)
18642 fputs (DOUBLE_INT_ASM_OP, file);
18643 else
18644 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18645 k[0] & 0xffffffff, k[1] & 0xffffffff,
18646 k[2] & 0xffffffff, k[3] & 0xffffffff);
18647 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18648 k[0] & 0xffffffff, k[1] & 0xffffffff,
18649 k[2] & 0xffffffff, k[3] & 0xffffffff);
18650 return;
18651 }
18652 else
18653 {
18654 if (TARGET_MINIMAL_TOC)
18655 fputs ("\t.long ", file);
18656 else
18657 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18658 k[0] & 0xffffffff, k[1] & 0xffffffff,
18659 k[2] & 0xffffffff, k[3] & 0xffffffff);
18660 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18661 k[0] & 0xffffffff, k[1] & 0xffffffff,
18662 k[2] & 0xffffffff, k[3] & 0xffffffff);
18663 return;
18664 }
18665 }
00b79d54
BE
18666 else if (GET_CODE (x) == CONST_DOUBLE &&
18667 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18668 {
042259f2
DE
18669 REAL_VALUE_TYPE rv;
18670 long k[2];
0adc764e 18671
042259f2 18672 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18673
18674 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18675 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18676 else
18677 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18678
13ded975
DE
18679 if (TARGET_64BIT)
18680 {
18681 if (TARGET_MINIMAL_TOC)
2bfcf297 18682 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18683 else
2f0552b6
AM
18684 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18685 k[0] & 0xffffffff, k[1] & 0xffffffff);
18686 fprintf (file, "0x%lx%08lx\n",
18687 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18688 return;
18689 }
1875cc88 18690 else
13ded975
DE
18691 {
18692 if (TARGET_MINIMAL_TOC)
2bfcf297 18693 fputs ("\t.long ", file);
13ded975 18694 else
2f0552b6
AM
18695 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18696 k[0] & 0xffffffff, k[1] & 0xffffffff);
18697 fprintf (file, "0x%lx,0x%lx\n",
18698 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18699 return;
18700 }
9878760c 18701 }
00b79d54
BE
18702 else if (GET_CODE (x) == CONST_DOUBLE &&
18703 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18704 {
042259f2
DE
18705 REAL_VALUE_TYPE rv;
18706 long l;
9878760c 18707
042259f2 18708 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18709 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18710 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18711 else
18712 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18713
31bfaa0b
DE
18714 if (TARGET_64BIT)
18715 {
18716 if (TARGET_MINIMAL_TOC)
2bfcf297 18717 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18718 else
2f0552b6
AM
18719 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18720 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18721 return;
18722 }
042259f2 18723 else
31bfaa0b
DE
18724 {
18725 if (TARGET_MINIMAL_TOC)
2bfcf297 18726 fputs ("\t.long ", file);
31bfaa0b 18727 else
2f0552b6
AM
18728 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18729 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18730 return;
18731 }
042259f2 18732 }
f176e826 18733 else if (GET_MODE (x) == VOIDmode
a9098fd0 18734 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18735 {
e2c953b6 18736 unsigned HOST_WIDE_INT low;
042259f2
DE
18737 HOST_WIDE_INT high;
18738
18739 if (GET_CODE (x) == CONST_DOUBLE)
18740 {
18741 low = CONST_DOUBLE_LOW (x);
18742 high = CONST_DOUBLE_HIGH (x);
18743 }
18744 else
18745#if HOST_BITS_PER_WIDE_INT == 32
18746 {
18747 low = INTVAL (x);
0858c623 18748 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18749 }
18750#else
18751 {
c4ad648e
AM
18752 low = INTVAL (x) & 0xffffffff;
18753 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18754 }
18755#endif
9878760c 18756
a9098fd0
GK
18757 /* TOC entries are always Pmode-sized, but since this
18758 is a bigendian machine then if we're putting smaller
18759 integer constants in the TOC we have to pad them.
18760 (This is still a win over putting the constants in
18761 a separate constant pool, because then we'd have
02a4ec28
FS
18762 to have both a TOC entry _and_ the actual constant.)
18763
18764 For a 32-bit target, CONST_INT values are loaded and shifted
18765 entirely within `low' and can be stored in one TOC entry. */
18766
37409796
NS
18767 /* It would be easy to make this work, but it doesn't now. */
18768 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18769
18770 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18771 {
18772#if HOST_BITS_PER_WIDE_INT == 32
18773 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18774 POINTER_SIZE, &low, &high, 0);
18775#else
18776 low |= high << 32;
18777 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18778 high = (HOST_WIDE_INT) low >> 32;
18779 low &= 0xffffffff;
18780#endif
18781 }
a9098fd0 18782
13ded975
DE
18783 if (TARGET_64BIT)
18784 {
18785 if (TARGET_MINIMAL_TOC)
2bfcf297 18786 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18787 else
2f0552b6
AM
18788 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18789 (long) high & 0xffffffff, (long) low & 0xffffffff);
18790 fprintf (file, "0x%lx%08lx\n",
18791 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18792 return;
18793 }
1875cc88 18794 else
13ded975 18795 {
02a4ec28
FS
18796 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18797 {
18798 if (TARGET_MINIMAL_TOC)
2bfcf297 18799 fputs ("\t.long ", file);
02a4ec28 18800 else
2bfcf297 18801 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18802 (long) high & 0xffffffff, (long) low & 0xffffffff);
18803 fprintf (file, "0x%lx,0x%lx\n",
18804 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18805 }
13ded975 18806 else
02a4ec28
FS
18807 {
18808 if (TARGET_MINIMAL_TOC)
2bfcf297 18809 fputs ("\t.long ", file);
02a4ec28 18810 else
2f0552b6
AM
18811 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18812 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18813 }
13ded975
DE
18814 return;
18815 }
9878760c
RK
18816 }
18817
18818 if (GET_CODE (x) == CONST)
18819 {
37409796 18820 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18821
9878760c
RK
18822 base = XEXP (XEXP (x, 0), 0);
18823 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18824 }
f676971a 18825
37409796
NS
18826 switch (GET_CODE (base))
18827 {
18828 case SYMBOL_REF:
18829 name = XSTR (base, 0);
18830 break;
18831
18832 case LABEL_REF:
18833 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18834 CODE_LABEL_NUMBER (XEXP (base, 0)));
18835 break;
18836
18837 case CODE_LABEL:
18838 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18839 break;
18840
18841 default:
18842 gcc_unreachable ();
18843 }
9878760c 18844
1875cc88 18845 if (TARGET_MINIMAL_TOC)
2bfcf297 18846 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18847 else
18848 {
5773a50f
DE
18849 fputs ("\t.tc ", file);
18850 RS6000_OUTPUT_BASENAME (file, name);
9878760c 18851
1875cc88 18852 if (offset < 0)
16fdeb48 18853 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18854 else if (offset)
16fdeb48 18855 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18856
19d2d16f 18857 fputs ("[TC],", file);
1875cc88 18858 }
581bc4de
MM
18859
18860 /* Currently C++ toc references to vtables can be emitted before it
18861 is decided whether the vtable is public or private. If this is
18862 the case, then the linker will eventually complain that there is
18863 a TOC reference to an unknown section. Thus, for vtables only,
18864 we emit the TOC reference to reference the symbol and not the
18865 section. */
28e510bd 18866 if (VTABLE_NAME_P (name))
581bc4de 18867 {
54ee9799 18868 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18869 if (offset < 0)
16fdeb48 18870 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18871 else if (offset > 0)
16fdeb48 18872 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18873 }
18874 else
18875 output_addr_const (file, x);
19d2d16f 18876 putc ('\n', file);
9878760c
RK
18877}
18878\f
18879/* Output an assembler pseudo-op to write an ASCII string of N characters
18880 starting at P to FILE.
18881
18882 On the RS/6000, we have to do this using the .byte operation and
18883 write out special characters outside the quoted string.
18884 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18885 so we must artificially break them up early. */
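/* For example (a sketch traced from the state machine below, not verified
   against a real build): output_ascii (file, "Hi\n", 3) produces roughly

	.byte "Hi"
	.byte 10

   followed by a final newline to end the last line.  */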
9878760c
RK
18886
18887void
a2369ed3 18888output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18889{
18890 char c;
18891 int i, count_string;
d330fd93
KG
18892 const char *for_string = "\t.byte \"";
18893 const char *for_decimal = "\t.byte ";
18894 const char *to_close = NULL;
9878760c
RK
18895
18896 count_string = 0;
18897 for (i = 0; i < n; i++)
18898 {
18899 c = *p++;
18900 if (c >= ' ' && c < 0177)
18901 {
18902 if (for_string)
18903 fputs (for_string, file);
18904 putc (c, file);
18905
18906 /* Write two quotes to get one. */
18907 if (c == '"')
18908 {
18909 putc (c, file);
18910 ++count_string;
18911 }
18912
18913 for_string = NULL;
18914 for_decimal = "\"\n\t.byte ";
18915 to_close = "\"\n";
18916 ++count_string;
18917
18918 if (count_string >= 512)
18919 {
18920 fputs (to_close, file);
18921
18922 for_string = "\t.byte \"";
18923 for_decimal = "\t.byte ";
18924 to_close = NULL;
18925 count_string = 0;
18926 }
18927 }
18928 else
18929 {
18930 if (for_decimal)
18931 fputs (for_decimal, file);
18932 fprintf (file, "%d", c);
18933
18934 for_string = "\n\t.byte \"";
18935 for_decimal = ", ";
18936 to_close = "\n";
18937 count_string = 0;
18938 }
18939 }
18940
18941 /* Now close the string if we have written one. Then end the line. */
18942 if (to_close)
9ebbca7d 18943 fputs (to_close, file);
9878760c
RK
18944}
18945\f
18946/* Generate a unique section name for FILENAME for a section type
18947 represented by SECTION_DESC. Output goes into BUF.
18948
18949 SECTION_DESC can be any string, as long as it is different for each
18950 possible section type.
18951
18952 We name the section in the same manner as xlc. The name begins with an
18953 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18954 names) with the last period replaced by the string SECTION_DESC. If
18955 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18956 the name. */
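/* For example (a hypothetical input, not taken from this file): FILENAME
   "src/foo.c" with SECTION_DESC ".bss_" yields "_foo.bss_"; note that the
   code below drops everything after the last period, not just the period
   itself.  */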
9878760c
RK
18957
18958void
f676971a 18959rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18960 const char *section_desc)
9878760c 18961{
9ebbca7d 18962 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18963 char *p;
18964 int len;
9878760c
RK
18965
18966 after_last_slash = filename;
18967 for (q = filename; *q; q++)
11e5fe42
RK
18968 {
18969 if (*q == '/')
18970 after_last_slash = q + 1;
18971 else if (*q == '.')
18972 last_period = q;
18973 }
9878760c 18974
11e5fe42 18975 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18976 *buf = (char *) xmalloc (len);
9878760c
RK
18977
18978 p = *buf;
18979 *p++ = '_';
18980
18981 for (q = after_last_slash; *q; q++)
18982 {
11e5fe42 18983 if (q == last_period)
c4ad648e 18984 {
9878760c
RK
18985 strcpy (p, section_desc);
18986 p += strlen (section_desc);
e3981aab 18987 break;
c4ad648e 18988 }
9878760c 18989
e9a780ec 18990 else if (ISALNUM (*q))
c4ad648e 18991 *p++ = *q;
9878760c
RK
18992 }
18993
11e5fe42 18994 if (last_period == 0)
9878760c
RK
18995 strcpy (p, section_desc);
18996 else
18997 *p = '\0';
18998}
e165f3f0 18999\f
a4f6c312 19000/* Emit profile function. */
411707f4 19001
411707f4 19002void
a2369ed3 19003output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 19004{
858081ad
AH
19005 /* Non-standard profiling for kernels, which just saves LR then calls
19006 _mcount without worrying about arg saves. The idea is to change
19007 the function prologue as little as possible as it isn't easy to
19008 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
19009 if (TARGET_PROFILE_KERNEL)
19010 return;
19011
8480e480
CC
19012 if (DEFAULT_ABI == ABI_AIX)
19013 {
9739c90c
JJ
19014#ifndef NO_PROFILE_COUNTERS
19015# define NO_PROFILE_COUNTERS 0
19016#endif
f676971a 19017 if (NO_PROFILE_COUNTERS)
bbbbb16a
ILT
19018 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
19019 LCT_NORMAL, VOIDmode, 0);
9739c90c
JJ
19020 else
19021 {
19022 char buf[30];
19023 const char *label_name;
19024 rtx fun;
411707f4 19025
9739c90c
JJ
19026 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
19027 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
19028 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 19029
bbbbb16a
ILT
19030 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
19031 LCT_NORMAL, VOIDmode, 1, fun, Pmode);
9739c90c 19032 }
8480e480 19033 }
ee890fe2
SS
19034 else if (DEFAULT_ABI == ABI_DARWIN)
19035 {
d5fa86ba 19036 const char *mcount_name = RS6000_MCOUNT;
1de43f85 19037 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
19038
19039 /* Be conservative and always set this, at least for now. */
e3b5732b 19040 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
19041
19042#if TARGET_MACHO
19043 /* For PIC code, set up a stub and collect the caller's address
19044 from r0, which is where the prologue puts it. */
11abc112 19045 if (MACHOPIC_INDIRECT
e3b5732b 19046 && crtl->uses_pic_offset_table)
11abc112 19047 caller_addr_regno = 0;
ee890fe2
SS
19048#endif
19049 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
bbbbb16a 19050 LCT_NORMAL, VOIDmode, 1,
ee890fe2
SS
19051 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
19052 }
411707f4
CC
19053}
19054
a4f6c312 19055/* Write function profiler code. */
e165f3f0
RK
19056
19057void
a2369ed3 19058output_function_profiler (FILE *file, int labelno)
e165f3f0 19059{
3daf36a4 19060 char buf[100];
e165f3f0 19061
38c1f2d7 19062 switch (DEFAULT_ABI)
3daf36a4 19063 {
38c1f2d7 19064 default:
37409796 19065 gcc_unreachable ();
38c1f2d7
MM
19066
19067 case ABI_V4:
09eeeacb
AM
19068 if (!TARGET_32BIT)
19069 {
d4ee4d25 19070 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
19071 return;
19072 }
ffcfcb5f 19073 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 19074 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
19075 if (NO_PROFILE_COUNTERS)
19076 {
19077 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
19078 reg_names[0], reg_names[1]);
19079 }
19080 else if (TARGET_SECURE_PLT && flag_pic)
19081 {
19082 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
19083 reg_names[0], reg_names[1]);
19084 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
19085 asm_fprintf (file, "\t{cau|addis} %s,%s,",
19086 reg_names[12], reg_names[12]);
19087 assemble_name (file, buf);
19088 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
19089 assemble_name (file, buf);
19090 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
19091 }
19092 else if (flag_pic == 1)
38c1f2d7 19093 {
dfdfa60f 19094 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
19095 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
19096 reg_names[0], reg_names[1]);
17167fd8 19097 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 19098 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 19099 assemble_name (file, buf);
17167fd8 19100 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 19101 }
9ebbca7d 19102 else if (flag_pic > 1)
38c1f2d7 19103 {
71625f3d
AM
19104 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
19105 reg_names[0], reg_names[1]);
9ebbca7d 19106 /* Now, we need to get the address of the label. */
71625f3d 19107 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 19108 assemble_name (file, buf);
9ebbca7d
GK
19109 fputs ("-.\n1:", file);
19110 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 19111 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
19112 reg_names[0], reg_names[11]);
19113 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
19114 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 19115 }
38c1f2d7
MM
19116 else
19117 {
17167fd8 19118 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 19119 assemble_name (file, buf);
dfdfa60f 19120 fputs ("@ha\n", file);
71625f3d
AM
19121 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
19122 reg_names[0], reg_names[1]);
a260abc9 19123 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 19124 assemble_name (file, buf);
17167fd8 19125 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
19126 }
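      /* As a rough sketch (assuming numeric register names, the default
	 mnemonics and the usual SVR4 _mcount symbol; not verified output),
	 the non-PIC case above, together with the earlier mflr and the bl
	 emitted below, amounts to
		mflr 0
		lis 12,LPn@ha
		stw 0,4(1)
		la 0,LPn@l(12)
		bl _mcount
	 where LPn is the internal label generated for LABELNO.  */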
19127
50d440bc 19128 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
19129 fprintf (file, "\tbl %s%s\n",
19130 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
19131 break;
19132
19133 case ABI_AIX:
ee890fe2 19134 case ABI_DARWIN:
ffcfcb5f
AM
19135 if (!TARGET_PROFILE_KERNEL)
19136 {
a3c9585f 19137 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
19138 }
19139 else
19140 {
37409796 19141 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
19142
19143 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
19144 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
19145
6de9cd9a 19146 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
19147 {
19148 asm_fprintf (file, "\tstd %s,24(%s)\n",
19149 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
19150 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
19151 asm_fprintf (file, "\tld %s,24(%s)\n",
19152 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
19153 }
19154 else
19155 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
19156 }
38c1f2d7
MM
19157 break;
19158 }
e165f3f0 19159}
a251ffd0 19160
b54cf83a 19161\f
44cd321e
PS
19162
19163/* The following variable value is the last issued insn. */
19164
19165static rtx last_scheduled_insn;
19166
19167 /* The following variable helps to balance issuing of load and
19168    store instructions.  */
19169
19170static int load_store_pendulum;
19171
b54cf83a
DE
19172/* Power4 load update and store update instructions are cracked into a
19173 load or store and an integer insn which are executed in the same cycle.
19174 Branches have their own dispatch slot which does not count against the
19175 GCC issue rate, but it changes the program flow so there are no other
19176 instructions to issue in this cycle. */
19177
19178static int
f676971a
EC
19179rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
19180 int verbose ATTRIBUTE_UNUSED,
a2369ed3 19181 rtx insn, int more)
b54cf83a 19182{
44cd321e 19183 last_scheduled_insn = insn;
b54cf83a
DE
19184 if (GET_CODE (PATTERN (insn)) == USE
19185 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
19186 {
19187 cached_can_issue_more = more;
19188 return cached_can_issue_more;
19189 }
19190
19191 if (insn_terminates_group_p (insn, current_group))
19192 {
19193 cached_can_issue_more = 0;
19194 return cached_can_issue_more;
19195 }
b54cf83a 19196
d296e02e
AP
19197   /* If the insn has no reservation (recog fails), leave the issue count unchanged.  */
19198 if (recog_memoized (insn) < 0)
19199 return more;
19200
ec507f2d 19201 if (rs6000_sched_groups)
b54cf83a 19202 {
cbe26ab8 19203 if (is_microcoded_insn (insn))
44cd321e 19204 cached_can_issue_more = 0;
cbe26ab8 19205 else if (is_cracked_insn (insn))
44cd321e
PS
19206 cached_can_issue_more = more > 2 ? more - 2 : 0;
19207 else
19208 cached_can_issue_more = more - 1;
19209
19210 return cached_can_issue_more;
b54cf83a 19211 }
165b263e 19212
d296e02e
AP
19213 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
19214 return 0;
19215
44cd321e
PS
19216 cached_can_issue_more = more - 1;
19217 return cached_can_issue_more;
b54cf83a
DE
19218}
19219
a251ffd0
TG
19220/* Adjust the cost of a scheduling dependency. Return the new cost of
19221 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
19222
c237e94a 19223static int
0a4f0294 19224rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 19225{
44cd321e 19226 enum attr_type attr_type;
a251ffd0 19227
44cd321e 19228 if (! recog_memoized (insn))
a251ffd0
TG
19229 return 0;
19230
44cd321e 19231 switch (REG_NOTE_KIND (link))
a251ffd0 19232 {
44cd321e
PS
19233 case REG_DEP_TRUE:
19234 {
19235 /* Data dependency; DEP_INSN writes a register that INSN reads
19236 some cycles later. */
19237
19238 /* Separate a load from a narrower, dependent store. */
19239 if (rs6000_sched_groups
19240 && GET_CODE (PATTERN (insn)) == SET
19241 && GET_CODE (PATTERN (dep_insn)) == SET
19242 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
19243 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
19244 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
19245 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
19246 return cost + 14;
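	/* The +14 above is presumably meant to cover the stall when a wider
	   load consumes the result of a narrower store and store forwarding
	   cannot satisfy it; this rationale is an interpretation, the code
	   itself only encodes the size comparison.  */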
19247
19248 attr_type = get_attr_type (insn);
19249
19250 switch (attr_type)
19251 {
19252 case TYPE_JMPREG:
19253 /* Tell the first scheduling pass about the latency between
19254 a mtctr and bctr (and mtlr and br/blr). The first
19255 scheduling pass will not know about this latency since
19256 the mtctr instruction, which has the latency associated
19257 to it, will be generated by reload. */
19258 return TARGET_POWER ? 5 : 4;
19259 case TYPE_BRANCH:
19260 /* Leave some extra cycles between a compare and its
19261 dependent branch, to inhibit expensive mispredicts. */
19262 if ((rs6000_cpu_attr == CPU_PPC603
19263 || rs6000_cpu_attr == CPU_PPC604
19264 || rs6000_cpu_attr == CPU_PPC604E
19265 || rs6000_cpu_attr == CPU_PPC620
19266 || rs6000_cpu_attr == CPU_PPC630
19267 || rs6000_cpu_attr == CPU_PPC750
19268 || rs6000_cpu_attr == CPU_PPC7400
19269 || rs6000_cpu_attr == CPU_PPC7450
19270 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
19271 || rs6000_cpu_attr == CPU_POWER5
19272 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
19273 && recog_memoized (dep_insn)
19274 && (INSN_CODE (dep_insn) >= 0))
982afe02 19275
44cd321e
PS
19276 switch (get_attr_type (dep_insn))
19277 {
19278 case TYPE_CMP:
19279 case TYPE_COMPARE:
19280 case TYPE_DELAYED_COMPARE:
19281 case TYPE_IMUL_COMPARE:
19282 case TYPE_LMUL_COMPARE:
19283 case TYPE_FPCOMPARE:
19284 case TYPE_CR_LOGICAL:
19285 case TYPE_DELAYED_CR:
19286 return cost + 2;
19287 default:
19288 break;
19289 }
19290 break;
19291
19292 case TYPE_STORE:
19293 case TYPE_STORE_U:
19294 case TYPE_STORE_UX:
19295 case TYPE_FPSTORE:
19296 case TYPE_FPSTORE_U:
19297 case TYPE_FPSTORE_UX:
19298 if ((rs6000_cpu == PROCESSOR_POWER6)
19299 && recog_memoized (dep_insn)
19300 && (INSN_CODE (dep_insn) >= 0))
19301 {
19302
19303 if (GET_CODE (PATTERN (insn)) != SET)
19304 /* If this happens, we have to extend this to schedule
19305 optimally. Return default for now. */
19306 return cost;
19307
19308 /* Adjust the cost for the case where the value written
19309 by a fixed point operation is used as the address
19310 gen value on a store. */
19311 switch (get_attr_type (dep_insn))
19312 {
19313 case TYPE_LOAD:
19314 case TYPE_LOAD_U:
19315 case TYPE_LOAD_UX:
19316 case TYPE_CNTLZ:
19317 {
19318 if (! store_data_bypass_p (dep_insn, insn))
19319 return 4;
19320 break;
19321 }
19322 case TYPE_LOAD_EXT:
19323 case TYPE_LOAD_EXT_U:
19324 case TYPE_LOAD_EXT_UX:
19325 case TYPE_VAR_SHIFT_ROTATE:
19326 case TYPE_VAR_DELAYED_COMPARE:
19327 {
19328 if (! store_data_bypass_p (dep_insn, insn))
19329 return 6;
19330 break;
19331 }
19332 case TYPE_INTEGER:
19333 case TYPE_COMPARE:
19334 case TYPE_FAST_COMPARE:
19335 case TYPE_EXTS:
19336 case TYPE_SHIFT:
19337 case TYPE_INSERT_WORD:
19338 case TYPE_INSERT_DWORD:
19339 case TYPE_FPLOAD_U:
19340 case TYPE_FPLOAD_UX:
19341 case TYPE_STORE_U:
19342 case TYPE_STORE_UX:
19343 case TYPE_FPSTORE_U:
19344 case TYPE_FPSTORE_UX:
19345 {
19346 if (! store_data_bypass_p (dep_insn, insn))
19347 return 3;
19348 break;
19349 }
19350 case TYPE_IMUL:
19351 case TYPE_IMUL2:
19352 case TYPE_IMUL3:
19353 case TYPE_LMUL:
19354 case TYPE_IMUL_COMPARE:
19355 case TYPE_LMUL_COMPARE:
19356 {
19357 if (! store_data_bypass_p (dep_insn, insn))
19358 return 17;
19359 break;
19360 }
19361 case TYPE_IDIV:
19362 {
19363 if (! store_data_bypass_p (dep_insn, insn))
19364 return 45;
19365 break;
19366 }
19367 case TYPE_LDIV:
19368 {
19369 if (! store_data_bypass_p (dep_insn, insn))
19370 return 57;
19371 break;
19372 }
19373 default:
19374 break;
19375 }
19376 }
19377 break;
19378
19379 case TYPE_LOAD:
19380 case TYPE_LOAD_U:
19381 case TYPE_LOAD_UX:
19382 case TYPE_LOAD_EXT:
19383 case TYPE_LOAD_EXT_U:
19384 case TYPE_LOAD_EXT_UX:
19385 if ((rs6000_cpu == PROCESSOR_POWER6)
19386 && recog_memoized (dep_insn)
19387 && (INSN_CODE (dep_insn) >= 0))
19388 {
19389
19390 /* Adjust the cost for the case where the value written
19391 by a fixed point instruction is used within the address
19392 		 gen portion of a subsequent load(u)(x).  */
19393 switch (get_attr_type (dep_insn))
19394 {
19395 case TYPE_LOAD:
19396 case TYPE_LOAD_U:
19397 case TYPE_LOAD_UX:
19398 case TYPE_CNTLZ:
19399 {
19400 if (set_to_load_agen (dep_insn, insn))
19401 return 4;
19402 break;
19403 }
19404 case TYPE_LOAD_EXT:
19405 case TYPE_LOAD_EXT_U:
19406 case TYPE_LOAD_EXT_UX:
19407 case TYPE_VAR_SHIFT_ROTATE:
19408 case TYPE_VAR_DELAYED_COMPARE:
19409 {
19410 if (set_to_load_agen (dep_insn, insn))
19411 return 6;
19412 break;
19413 }
19414 case TYPE_INTEGER:
19415 case TYPE_COMPARE:
19416 case TYPE_FAST_COMPARE:
19417 case TYPE_EXTS:
19418 case TYPE_SHIFT:
19419 case TYPE_INSERT_WORD:
19420 case TYPE_INSERT_DWORD:
19421 case TYPE_FPLOAD_U:
19422 case TYPE_FPLOAD_UX:
19423 case TYPE_STORE_U:
19424 case TYPE_STORE_UX:
19425 case TYPE_FPSTORE_U:
19426 case TYPE_FPSTORE_UX:
19427 {
19428 if (set_to_load_agen (dep_insn, insn))
19429 return 3;
19430 break;
19431 }
19432 case TYPE_IMUL:
19433 case TYPE_IMUL2:
19434 case TYPE_IMUL3:
19435 case TYPE_LMUL:
19436 case TYPE_IMUL_COMPARE:
19437 case TYPE_LMUL_COMPARE:
19438 {
19439 if (set_to_load_agen (dep_insn, insn))
19440 return 17;
19441 break;
19442 }
19443 case TYPE_IDIV:
19444 {
19445 if (set_to_load_agen (dep_insn, insn))
19446 return 45;
19447 break;
19448 }
19449 case TYPE_LDIV:
19450 {
19451 if (set_to_load_agen (dep_insn, insn))
19452 return 57;
19453 break;
19454 }
19455 default:
19456 break;
19457 }
19458 }
19459 break;
19460
19461 case TYPE_FPLOAD:
19462 if ((rs6000_cpu == PROCESSOR_POWER6)
19463 && recog_memoized (dep_insn)
19464 && (INSN_CODE (dep_insn) >= 0)
19465 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
19466 return 2;
19467
19468 default:
19469 break;
19470 }
c9dbf840 19471
a251ffd0 19472 /* Fall out to return default cost. */
44cd321e
PS
19473 }
19474 break;
19475
19476 case REG_DEP_OUTPUT:
19477 /* Output dependency; DEP_INSN writes a register that INSN writes some
19478 cycles later. */
19479 if ((rs6000_cpu == PROCESSOR_POWER6)
19480 && recog_memoized (dep_insn)
19481 && (INSN_CODE (dep_insn) >= 0))
19482 {
19483 attr_type = get_attr_type (insn);
19484
19485 switch (attr_type)
19486 {
19487 case TYPE_FP:
19488 if (get_attr_type (dep_insn) == TYPE_FP)
19489 return 1;
19490 break;
19491 case TYPE_FPLOAD:
19492 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19493 return 2;
19494 break;
19495 default:
19496 break;
19497 }
19498 }
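	  /* Note that control falls through to the REG_DEP_ANTI case here,
	     so unhandled output dependencies also end up with cost 0; there
	     is no explicit fall-through marker, so whether this is intended
	     is an assumption.  */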
19499 case REG_DEP_ANTI:
19500 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19501 cycles later. */
19502 return 0;
19503
19504 default:
19505 gcc_unreachable ();
a251ffd0
TG
19506 }
19507
19508 return cost;
19509}
b6c9286a 19510
cbe26ab8 19511 /* The function returns true if INSN is microcoded.
839a4992 19512    Return false otherwise.  */
cbe26ab8
DN
19513
19514static bool
19515is_microcoded_insn (rtx insn)
19516{
19517 if (!insn || !INSN_P (insn)
19518 || GET_CODE (PATTERN (insn)) == USE
19519 || GET_CODE (PATTERN (insn)) == CLOBBER)
19520 return false;
19521
d296e02e
AP
19522 if (rs6000_cpu_attr == CPU_CELL)
19523 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19524
ec507f2d 19525 if (rs6000_sched_groups)
cbe26ab8
DN
19526 {
19527 enum attr_type type = get_attr_type (insn);
19528 if (type == TYPE_LOAD_EXT_U
19529 || type == TYPE_LOAD_EXT_UX
19530 || type == TYPE_LOAD_UX
19531 || type == TYPE_STORE_UX
19532 || type == TYPE_MFCR)
c4ad648e 19533 return true;
cbe26ab8
DN
19534 }
19535
19536 return false;
19537}
19538
cbe26ab8
DN
19539/* The function returns true if INSN is cracked into 2 instructions
19540 by the processor (and therefore occupies 2 issue slots). */
19541
19542static bool
19543is_cracked_insn (rtx insn)
19544{
19545 if (!insn || !INSN_P (insn)
19546 || GET_CODE (PATTERN (insn)) == USE
19547 || GET_CODE (PATTERN (insn)) == CLOBBER)
19548 return false;
19549
ec507f2d 19550 if (rs6000_sched_groups)
cbe26ab8
DN
19551 {
19552 enum attr_type type = get_attr_type (insn);
19553 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19554 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19555 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19556 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19557 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19558 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19559 || type == TYPE_IDIV || type == TYPE_LDIV
19560 || type == TYPE_INSERT_WORD)
19561 return true;
cbe26ab8
DN
19562 }
19563
19564 return false;
19565}
19566
19567/* The function returns true if INSN can be issued only from
a3c9585f 19568 the branch slot. */
cbe26ab8
DN
19569
19570static bool
19571is_branch_slot_insn (rtx insn)
19572{
19573 if (!insn || !INSN_P (insn)
19574 || GET_CODE (PATTERN (insn)) == USE
19575 || GET_CODE (PATTERN (insn)) == CLOBBER)
19576 return false;
19577
ec507f2d 19578 if (rs6000_sched_groups)
cbe26ab8
DN
19579 {
19580 enum attr_type type = get_attr_type (insn);
19581 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19582 return true;
cbe26ab8
DN
19583 return false;
19584 }
19585
19586 return false;
19587}
79ae11c4 19588
44cd321e
PS
19589 /* The function returns true if OUT_INSN sets a value that is
19590    used in the address generation computation of IN_INSN.  */
19591static bool
19592set_to_load_agen (rtx out_insn, rtx in_insn)
19593{
19594 rtx out_set, in_set;
19595
19596 /* For performance reasons, only handle the simple case where
19597 both loads are a single_set. */
19598 out_set = single_set (out_insn);
19599 if (out_set)
19600 {
19601 in_set = single_set (in_insn);
19602 if (in_set)
19603 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19604 }
19605
19606 return false;
19607}
19608
19609 /* Return true if the target storage location of INSN1 is adjacent
19610    to the target storage location of INSN2, i.e. the two stores
19611    access adjacent memory locations.  */
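/* For illustration (a hypothetical pair of stores, each with MEM_SIZE 4):
   stores to 4(r9) and 8(r9) share the base register and val1 - val0 == 4
   == MEM_SIZE of the first store, so they are reported as adjacent.  */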
19612
19613static bool
19614adjacent_mem_locations (rtx insn1, rtx insn2)
19615{
19616
e3a0e200
PB
19617 rtx a = get_store_dest (PATTERN (insn1));
19618 rtx b = get_store_dest (PATTERN (insn2));
19619
44cd321e
PS
19620 if ((GET_CODE (XEXP (a, 0)) == REG
19621 || (GET_CODE (XEXP (a, 0)) == PLUS
19622 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19623 && (GET_CODE (XEXP (b, 0)) == REG
19624 || (GET_CODE (XEXP (b, 0)) == PLUS
19625 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19626 {
f98e8938 19627 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19628 rtx reg0, reg1;
44cd321e
PS
19629
19630 if (GET_CODE (XEXP (a, 0)) == PLUS)
19631 {
19632 reg0 = XEXP (XEXP (a, 0), 0);
19633 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19634 }
19635 else
19636 reg0 = XEXP (a, 0);
19637
19638 if (GET_CODE (XEXP (b, 0)) == PLUS)
19639 {
19640 reg1 = XEXP (XEXP (b, 0), 0);
19641 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19642 }
19643 else
19644 reg1 = XEXP (b, 0);
19645
19646 val_diff = val1 - val0;
19647
19648 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19649 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19650 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19651 }
19652
19653 return false;
19654}
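/* Illustrative sketch, not part of GCC: the adjacency test above reduces to
   "same base register, and the offset difference equals the size of one of
   the two accesses".  The hypothetical helper below models that check with
   plain integers (base register number, byte offset, access size), e.g. two
   8-byte stores at r9+0 and r9+8 are adjacent, r9+0 and r9+16 are not.  */
static int
adjacent_mem_model (unsigned int base0, long off0, long size0,
                    unsigned int base1, long off1, long size1)
{
  long diff = off1 - off0;
  return base0 == base1 && (diff == size0 || diff == -size1);
}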
19655
a4f6c312 19656/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
19657 priority INSN_PRIORITY (INSN). Increase the priority to execute the
19658 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
19659 define this macro if you do not need to adjust the scheduling
19660 priorities of insns. */
bef84347 19661
c237e94a 19662static int
a2369ed3 19663rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19664{
a4f6c312
SS
19665 /* On machines (like the 750) which have asymmetric integer units,
19666 where one integer unit can do multiply and divides and the other
19667 can't, reduce the priority of multiply/divide so it is scheduled
19668 before other integer operations. */
bef84347
VM
19669
19670#if 0
2c3c49de 19671 if (! INSN_P (insn))
bef84347
VM
19672 return priority;
19673
19674 if (GET_CODE (PATTERN (insn)) == USE)
19675 return priority;
19676
19677 switch (rs6000_cpu_attr) {
19678 case CPU_PPC750:
19679 switch (get_attr_type (insn))
19680 {
19681 default:
19682 break;
19683
19684 case TYPE_IMUL:
19685 case TYPE_IDIV:
3cb999d8
DE
19686 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19687 priority, priority);
bef84347
VM
19688 if (priority >= 0 && priority < 0x01000000)
19689 priority >>= 3;
19690 break;
19691 }
19692 }
19693#endif
19694
44cd321e 19695 if (insn_must_be_first_in_group (insn)
79ae11c4 19696 && reload_completed
f676971a 19697 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19698 && rs6000_sched_restricted_insns_priority)
19699 {
19700
c4ad648e
AM
19701 /* Prioritize insns that can be dispatched only in the first
19702 dispatch slot. */
79ae11c4 19703 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19704 /* Attach highest priority to insn. This means that in
19705 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19706 precede 'priority' (critical path) considerations. */
f676971a 19707 return current_sched_info->sched_max_insns_priority;
79ae11c4 19708 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19709 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19710 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19711 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19712 return (priority + 1);
19713 }
79ae11c4 19714
44cd321e
PS
19715 if (rs6000_cpu == PROCESSOR_POWER6
19716 && ((load_store_pendulum == -2 && is_load_insn (insn))
19717 || (load_store_pendulum == 2 && is_store_insn (insn))))
19718 /* Attach highest priority to insn if the scheduler has just issued two
19719 stores and this instruction is a load, or two loads and this instruction
19720 is a store. Power6 wants loads and stores scheduled alternately
19721 when possible */
19722 return current_sched_info->sched_max_insns_priority;
19723
bef84347
VM
19724 return priority;
19725}
19726
d296e02e
AP
19727/* Return true if the instruction is nonpipelined on the Cell. */
19728static bool
19729is_nonpipeline_insn (rtx insn)
19730{
19731 enum attr_type type;
19732 if (!insn || !INSN_P (insn)
19733 || GET_CODE (PATTERN (insn)) == USE
19734 || GET_CODE (PATTERN (insn)) == CLOBBER)
19735 return false;
19736
19737 type = get_attr_type (insn);
19738 if (type == TYPE_IMUL
19739 || type == TYPE_IMUL2
19740 || type == TYPE_IMUL3
19741 || type == TYPE_LMUL
19742 || type == TYPE_IDIV
19743 || type == TYPE_LDIV
19744 || type == TYPE_SDIV
19745 || type == TYPE_DDIV
19746 || type == TYPE_SSQRT
19747 || type == TYPE_DSQRT
19748 || type == TYPE_MFCR
19749 || type == TYPE_MFCRF
19750 || type == TYPE_MFJMPR)
19751 {
19752 return true;
19753 }
19754 return false;
19755}
19756
19757
a4f6c312
SS
19758/* Return how many instructions the machine can issue per cycle. */
19759
c237e94a 19760static int
863d938c 19761rs6000_issue_rate (void)
b6c9286a 19762{
3317bab1
DE
19763 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19764 if (!reload_completed)
19765 return 1;
19766
b6c9286a 19767 switch (rs6000_cpu_attr) {
3cb999d8
DE
19768 case CPU_RIOS1: /* ? */
19769 case CPU_RS64A:
19770 case CPU_PPC601: /* ? */
ed947a96 19771 case CPU_PPC7450:
3cb999d8 19772 return 3;
b54cf83a 19773 case CPU_PPC440:
b6c9286a 19774 case CPU_PPC603:
bef84347 19775 case CPU_PPC750:
ed947a96 19776 case CPU_PPC7400:
be12c2b0 19777 case CPU_PPC8540:
d296e02e 19778 case CPU_CELL:
fa41c305
EW
19779 case CPU_PPCE300C2:
19780 case CPU_PPCE300C3:
edae5fe3 19781 case CPU_PPCE500MC:
f676971a 19782 return 2;
3cb999d8 19783 case CPU_RIOS2:
b6c9286a 19784 case CPU_PPC604:
19684119 19785 case CPU_PPC604E:
b6c9286a 19786 case CPU_PPC620:
3cb999d8 19787 case CPU_PPC630:
b6c9286a 19788 return 4;
cbe26ab8 19789 case CPU_POWER4:
ec507f2d 19790 case CPU_POWER5:
44cd321e 19791 case CPU_POWER6:
cbe26ab8 19792 return 5;
b6c9286a
MM
19793 default:
19794 return 1;
19795 }
19796}
19797
be12c2b0
VM
19798/* Return how many instructions to look ahead for better insn
19799 scheduling. */
19800
19801static int
863d938c 19802rs6000_use_sched_lookahead (void)
be12c2b0
VM
19803{
19804 if (rs6000_cpu_attr == CPU_PPC8540)
19805 return 4;
d296e02e
AP
19806 if (rs6000_cpu_attr == CPU_CELL)
19807 return (reload_completed ? 8 : 0);
be12c2b0
VM
19808 return 0;
19809}
19810
d296e02e
AP
19811/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19812static int
19813rs6000_use_sched_lookahead_guard (rtx insn)
19814{
19815 if (rs6000_cpu_attr != CPU_CELL)
19816 return 1;
19817
19818 if (insn == NULL_RTX || !INSN_P (insn))
19819 abort ();
982afe02 19820
d296e02e
AP
19821 if (!reload_completed
19822 || is_nonpipeline_insn (insn)
19823 || is_microcoded_insn (insn))
19824 return 0;
19825
19826 return 1;
19827}
19828
569fa502
DN
19829/* Determine if PAT refers to memory. */
19830
19831static bool
19832is_mem_ref (rtx pat)
19833{
19834 const char * fmt;
19835 int i, j;
19836 bool ret = false;
19837
1de59bbd
DE
19838 /* stack_tie does not produce any real memory traffic. */
19839 if (GET_CODE (pat) == UNSPEC
19840 && XINT (pat, 1) == UNSPEC_TIE)
19841 return false;
19842
569fa502
DN
19843 if (GET_CODE (pat) == MEM)
19844 return true;
19845
19846 /* Recursively process the pattern. */
19847 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19848
19849 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19850 {
19851 if (fmt[i] == 'e')
19852 ret |= is_mem_ref (XEXP (pat, i));
19853 else if (fmt[i] == 'E')
19854 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19855 ret |= is_mem_ref (XVECEXP (pat, i, j));
19856 }
19857
19858 return ret;
19859}
19860
19861/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19862
569fa502
DN
19863static bool
19864is_load_insn1 (rtx pat)
19865{
19866 if (!pat || pat == NULL_RTX)
19867 return false;
19868
19869 if (GET_CODE (pat) == SET)
19870 return is_mem_ref (SET_SRC (pat));
19871
19872 if (GET_CODE (pat) == PARALLEL)
19873 {
19874 int i;
19875
19876 for (i = 0; i < XVECLEN (pat, 0); i++)
19877 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19878 return true;
19879 }
19880
19881 return false;
19882}
19883
19884/* Determine if INSN loads from memory. */
19885
19886static bool
19887is_load_insn (rtx insn)
19888{
19889 if (!insn || !INSN_P (insn))
19890 return false;
19891
19892 if (GET_CODE (insn) == CALL_INSN)
19893 return false;
19894
19895 return is_load_insn1 (PATTERN (insn));
19896}
19897
19898/* Determine if PAT is a PATTERN of a store insn. */
19899
19900static bool
19901is_store_insn1 (rtx pat)
19902{
19903 if (!pat || pat == NULL_RTX)
19904 return false;
19905
19906 if (GET_CODE (pat) == SET)
19907 return is_mem_ref (SET_DEST (pat));
19908
19909 if (GET_CODE (pat) == PARALLEL)
19910 {
19911 int i;
19912
19913 for (i = 0; i < XVECLEN (pat, 0); i++)
19914 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19915 return true;
19916 }
19917
19918 return false;
19919}
19920
19921/* Determine if INSN stores to memory. */
19922
19923static bool
19924is_store_insn (rtx insn)
19925{
19926 if (!insn || !INSN_P (insn))
19927 return false;
19928
19929 return is_store_insn1 (PATTERN (insn));
19930}
19931
e3a0e200
PB
19932/* Return the dest of a store insn. */
19933
19934static rtx
19935get_store_dest (rtx pat)
19936{
19937 gcc_assert (is_store_insn1 (pat));
19938
19939 if (GET_CODE (pat) == SET)
19940 return SET_DEST (pat);
19941 else if (GET_CODE (pat) == PARALLEL)
19942 {
19943 int i;
19944
19945 for (i = 0; i < XVECLEN (pat, 0); i++)
19946 {
19947 rtx inner_pat = XVECEXP (pat, 0, i);
19948 if (GET_CODE (inner_pat) == SET
19949 && is_mem_ref (SET_DEST (inner_pat)))
19950 return inner_pat;
19951 }
19952 }
19953 /* We shouldn't get here, because we should have either a simple
19954 store insn or a store with update which are covered above. */
19955 gcc_unreachable();
19956}
19957
569fa502
DN
19958/* Returns whether the dependence between INSN and NEXT is considered
19959 costly by the given target. */
19960
19961static bool
b198261f 19962rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19963{
b198261f
MK
19964 rtx insn;
19965 rtx next;
19966
aabcd309 19967 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19968 allow all dependent insns in the same group.
569fa502
DN
19969 This is the most aggressive option. */
19970 if (rs6000_sched_costly_dep == no_dep_costly)
19971 return false;
19972
f676971a 19973 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19974 do not allow dependent instructions in the same group.
19975 This is the most conservative option. */
19976 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19977 return true;
569fa502 19978
b198261f
MK
19979 insn = DEP_PRO (dep);
19980 next = DEP_CON (dep);
19981
f676971a
EC
19982 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19983 && is_load_insn (next)
569fa502
DN
19984 && is_store_insn (insn))
19985 /* Prevent load after store in the same group. */
19986 return true;
19987
19988 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19989 && is_load_insn (next)
569fa502 19990 && is_store_insn (insn)
e2f6ff94 19991 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19992 /* Prevent load after store in the same group if it is a true
19993 dependence. */
569fa502 19994 return true;
f676971a
EC
19995
19996 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19997 and will not be scheduled in the same group. */
19998 if (rs6000_sched_costly_dep <= max_dep_latency
19999 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
20000 return true;
20001
20002 return false;
20003}
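/* Illustrative sketch, not part of GCC: when the option value is numeric it
   acts as a latency threshold, as in the final test above.  The hypothetical
   helper below restates that test; e.g. with a threshold of 3, a dependence
   with cost 4 and distance 1 gives 4 - 1 = 3 >= 3 and is classified costly,
   while the same dependence at distance 2 is not.  */
static int
costly_by_latency_model (int cost, int distance, int threshold)
{
  return (cost - distance) >= threshold;
}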
20004
f676971a 20005/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
20006 skipping any "non-active" insns - insns that will not actually occupy
20007 an issue slot. Return NULL_RTX if such an insn is not found. */
20008
20009static rtx
20010get_next_active_insn (rtx insn, rtx tail)
20011{
f489aff8 20012 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
20013 return NULL_RTX;
20014
f489aff8 20015 while (1)
cbe26ab8 20016 {
f489aff8
AM
20017 insn = NEXT_INSN (insn);
20018 if (insn == NULL_RTX || insn == tail)
20019 return NULL_RTX;
cbe26ab8 20020
f489aff8
AM
20021 if (CALL_P (insn)
20022 || JUMP_P (insn)
20023 || (NONJUMP_INSN_P (insn)
20024 && GET_CODE (PATTERN (insn)) != USE
20025 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 20026 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
20027 break;
20028 }
20029 return insn;
cbe26ab8
DN
20030}
20031
44cd321e
PS
20032/* We are about to begin issuing insns for this clock cycle. */
20033
20034static int
20035rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
20036 rtx *ready ATTRIBUTE_UNUSED,
20037 int *pn_ready ATTRIBUTE_UNUSED,
20038 int clock_var ATTRIBUTE_UNUSED)
20039{
d296e02e
AP
20040 int n_ready = *pn_ready;
20041
44cd321e
PS
20042 if (sched_verbose)
20043 fprintf (dump, "// rs6000_sched_reorder :\n");
20044
d296e02e
AP
20045 /* Reorder the ready list, if the second to last ready insn
20046 is a nonpipeline insn. */
20047 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
20048 {
20049 if (is_nonpipeline_insn (ready[n_ready - 1])
20050 && (recog_memoized (ready[n_ready - 2]) > 0))
20051 /* Simply swap first two insns. */
20052 {
20053 rtx tmp = ready[n_ready - 1];
20054 ready[n_ready - 1] = ready[n_ready - 2];
20055 ready[n_ready - 2] = tmp;
20056 }
20057 }
20058
44cd321e
PS
20059 if (rs6000_cpu == PROCESSOR_POWER6)
20060 load_store_pendulum = 0;
20061
20062 return rs6000_issue_rate ();
20063}
20064
20065/* Like rs6000_sched_reorder, but called after issuing each insn. */
20066
20067static int
20068rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
20069 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
20070{
20071 if (sched_verbose)
20072 fprintf (dump, "// rs6000_sched_reorder2 :\n");
20073
20074 /* For Power6, we need to handle some special cases to try and keep the
20075 store queue from overflowing and triggering expensive flushes.
20076
20077 This code monitors how load and store instructions are being issued
20078 and skews the ready list one way or the other to increase the likelihood
20079 that a desired instruction is issued at the proper time.
20080
20081 A couple of things are done. First, we maintain a "load_store_pendulum"
20082 to track the current state of load/store issue.
20083
20084 - If the pendulum is at zero, then no loads or stores have been
20085 issued in the current cycle so we do nothing.
20086
20087 - If the pendulum is 1, then a single load has been issued in this
20088 cycle and we attempt to locate another load in the ready list to
20089 issue with it.
20090
2f8e468b 20091 - If the pendulum is -2, then two stores have already been
44cd321e
PS
20092 issued in this cycle, so we increase the priority of the first load
20093 in the ready list to increase its likelihood of being chosen first
20094 in the next cycle.
20095
20096 - If the pendulum is -1, then a single store has been issued in this
20097 cycle and we attempt to locate another store in the ready list to
20098 issue with it, preferring a store to an adjacent memory location to
20099 facilitate store pairing in the store queue.
20100
20101 - If the pendulum is 2, then two loads have already been
20102 issued in this cycle, so we increase the priority of the first store
20103 in the ready list to increase its likelihood of being chosen first
20104 in the next cycle.
20105
20106 - If the pendulum < -2 or > 2, then do nothing.
20107
20108 Note: This code covers the most common scenarios. There exist non
20109 load/store instructions which make use of the LSU and which
20110 would need to be accounted for to strictly model the behavior
20111 of the machine. Those instructions are currently unaccounted
20112 for to help minimize compile time overhead of this code.
20113 */
20114 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
20115 {
20116 int pos;
20117 int i;
20118 rtx tmp;
20119
20120 if (is_store_insn (last_scheduled_insn))
20121 /* Issuing a store, swing the load_store_pendulum to the left */
20122 load_store_pendulum--;
20123 else if (is_load_insn (last_scheduled_insn))
20124 /* Issuing a load, swing the load_store_pendulum to the right */
20125 load_store_pendulum++;
20126 else
20127 return cached_can_issue_more;
20128
20129 /* If the pendulum is balanced, or there is only one instruction on
20130 the ready list, then all is well, so return. */
20131 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
20132 return cached_can_issue_more;
20133
20134 if (load_store_pendulum == 1)
20135 {
20136 /* A load has been issued in this cycle. Scan the ready list
20137 for another load to issue with it */
20138 pos = *pn_ready-1;
20139
20140 while (pos >= 0)
20141 {
20142 if (is_load_insn (ready[pos]))
20143 {
20144 /* Found a load. Move it to the head of the ready list,
20145 and adjust its priority so that it is more likely to
20146 stay there */
20147 tmp = ready[pos];
20148 for (i=pos; i<*pn_ready-1; i++)
20149 ready[i] = ready[i + 1];
20150 ready[*pn_ready-1] = tmp;
e855c69d
AB
20151
20152 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
20153 INSN_PRIORITY (tmp)++;
20154 break;
20155 }
20156 pos--;
20157 }
20158 }
20159 else if (load_store_pendulum == -2)
20160 {
20161 /* Two stores have been issued in this cycle. Increase the
20162 priority of the first load in the ready list to favor it for
20163 issuing in the next cycle. */
20164 pos = *pn_ready-1;
20165
20166 while (pos >= 0)
20167 {
20168 if (is_load_insn (ready[pos])
e855c69d
AB
20169 && !sel_sched_p ()
20170 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
20171 {
20172 INSN_PRIORITY (ready[pos])++;
20173
20174 /* Adjust the pendulum to account for the fact that a load
20175 was found and increased in priority. This is to prevent
20176 increasing the priority of multiple loads */
20177 load_store_pendulum--;
20178
20179 break;
20180 }
20181 pos--;
20182 }
20183 }
20184 else if (load_store_pendulum == -1)
20185 {
20186 /* A store has been issued in this cycle. Scan the ready list for
20187 another store to issue with it, preferring a store to an adjacent
20188 memory location */
20189 int first_store_pos = -1;
20190
20191 pos = *pn_ready-1;
20192
20193 while (pos >= 0)
20194 {
20195 if (is_store_insn (ready[pos]))
20196 {
20197 /* Maintain the index of the first store found on the
20198 list */
20199 if (first_store_pos == -1)
20200 first_store_pos = pos;
20201
20202 if (is_store_insn (last_scheduled_insn)
20203 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
20204 {
20205 /* Found an adjacent store. Move it to the head of the
20206 ready list, and adjust its priority so that it is
20207 more likely to stay there */
20208 tmp = ready[pos];
20209 for (i=pos; i<*pn_ready-1; i++)
20210 ready[i] = ready[i + 1];
20211 ready[*pn_ready-1] = tmp;
e855c69d
AB
20212
20213 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 20214 INSN_PRIORITY (tmp)++;
e855c69d 20215
44cd321e
PS
20216 first_store_pos = -1;
20217
20218 break;
20219 };
20220 }
20221 pos--;
20222 }
20223
20224 if (first_store_pos >= 0)
20225 {
20226 /* An adjacent store wasn't found, but a non-adjacent store was,
20227 so move the non-adjacent store to the front of the ready
20228 list, and adjust its priority so that it is more likely to
20229 stay there. */
20230 tmp = ready[first_store_pos];
20231 for (i=first_store_pos; i<*pn_ready-1; i++)
20232 ready[i] = ready[i + 1];
20233 ready[*pn_ready-1] = tmp;
e855c69d 20234 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
20235 INSN_PRIORITY (tmp)++;
20236 }
20237 }
20238 else if (load_store_pendulum == 2)
20239 {
20240 /* Two loads have been issued in this cycle. Increase the priority
20241 of the first store in the ready list to favor it for issuing in
20242 the next cycle. */
20243 pos = *pn_ready-1;
20244
20245 while (pos >= 0)
20246 {
20247 if (is_store_insn (ready[pos])
e855c69d
AB
20248 && !sel_sched_p ()
20249 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
20250 {
20251 INSN_PRIORITY (ready[pos])++;
20252
20253 /* Adjust the pendulum to account for the fact that a store
20254 was found and increased in priority. This is to prevent
20255 increasing the priority of multiple stores */
20256 load_store_pendulum++;
20257
20258 break;
20259 }
20260 pos--;
20261 }
20262 }
20263 }
20264
20265 return cached_can_issue_more;
20266}
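/* Illustrative sketch, not part of GCC: the load_store_pendulum used above is
   just a signed counter that issued loads push one way and issued stores push
   the other.  This hypothetical stand-alone program replays a sample issue
   sequence ('L' = load, 'S' = store) and prints the counter, making the
   -2/-1/+1/+2 cases handled in rs6000_sched_reorder2 easy to trace.  */
#include <stdio.h>

int
main (void)
{
  int pendulum = 0;
  const char *issued = "LLSSSL";
  const char *p;

  for (p = issued; *p; p++)
    {
      if (*p == 'L')
        pendulum++;		/* a load swings the pendulum one way */
      else
        pendulum--;		/* a store swings it the other way */
      printf ("issued %c -> pendulum = %d\n", *p, pendulum);
    }
  return 0;
}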
20267
839a4992 20268/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
20269 of group WHICH_GROUP.
20270
20271 If WHICH_GROUP == current_group, this function will return true if INSN
20272 causes the termination of the current group (i.e., the dispatch group to
20273 which INSN belongs). This means that INSN will be the last insn in the
20274 group it belongs to.
20275
20276 If WHICH_GROUP == previous_group, this function will return true if INSN
20277 causes the termination of the previous group (i.e., the dispatch group that
20278 precedes the group to which INSN belongs). This means that INSN will be
20279 the first insn in the group it belongs to.  */
20280
20281static bool
20282insn_terminates_group_p (rtx insn, enum group_termination which_group)
20283{
44cd321e 20284 bool first, last;
cbe26ab8
DN
20285
20286 if (! insn)
20287 return false;
569fa502 20288
44cd321e
PS
20289 first = insn_must_be_first_in_group (insn);
20290 last = insn_must_be_last_in_group (insn);
cbe26ab8 20291
44cd321e 20292 if (first && last)
cbe26ab8
DN
20293 return true;
20294
20295 if (which_group == current_group)
44cd321e 20296 return last;
cbe26ab8 20297 else if (which_group == previous_group)
44cd321e
PS
20298 return first;
20299
20300 return false;
20301}
20302
20303
20304static bool
20305insn_must_be_first_in_group (rtx insn)
20306{
20307 enum attr_type type;
20308
20309 if (!insn
20310 || insn == NULL_RTX
20311 || GET_CODE (insn) == NOTE
20312 || GET_CODE (PATTERN (insn)) == USE
20313 || GET_CODE (PATTERN (insn)) == CLOBBER)
20314 return false;
20315
20316 switch (rs6000_cpu)
cbe26ab8 20317 {
44cd321e
PS
20318 case PROCESSOR_POWER5:
20319 if (is_cracked_insn (insn))
20320 return true;
20321 case PROCESSOR_POWER4:
20322 if (is_microcoded_insn (insn))
20323 return true;
20324
20325 if (!rs6000_sched_groups)
20326 return false;
20327
20328 type = get_attr_type (insn);
20329
20330 switch (type)
20331 {
20332 case TYPE_MFCR:
20333 case TYPE_MFCRF:
20334 case TYPE_MTCR:
20335 case TYPE_DELAYED_CR:
20336 case TYPE_CR_LOGICAL:
20337 case TYPE_MTJMPR:
20338 case TYPE_MFJMPR:
20339 case TYPE_IDIV:
20340 case TYPE_LDIV:
20341 case TYPE_LOAD_L:
20342 case TYPE_STORE_C:
20343 case TYPE_ISYNC:
20344 case TYPE_SYNC:
20345 return true;
20346 default:
20347 break;
20348 }
20349 break;
20350 case PROCESSOR_POWER6:
20351 type = get_attr_type (insn);
20352
20353 switch (type)
20354 {
20355 case TYPE_INSERT_DWORD:
20356 case TYPE_EXTS:
20357 case TYPE_CNTLZ:
20358 case TYPE_SHIFT:
20359 case TYPE_VAR_SHIFT_ROTATE:
20360 case TYPE_TRAP:
20361 case TYPE_IMUL:
20362 case TYPE_IMUL2:
20363 case TYPE_IMUL3:
20364 case TYPE_LMUL:
20365 case TYPE_IDIV:
20366 case TYPE_INSERT_WORD:
20367 case TYPE_DELAYED_COMPARE:
20368 case TYPE_IMUL_COMPARE:
20369 case TYPE_LMUL_COMPARE:
20370 case TYPE_FPCOMPARE:
20371 case TYPE_MFCR:
20372 case TYPE_MTCR:
20373 case TYPE_MFJMPR:
20374 case TYPE_MTJMPR:
20375 case TYPE_ISYNC:
20376 case TYPE_SYNC:
20377 case TYPE_LOAD_L:
20378 case TYPE_STORE_C:
20379 case TYPE_LOAD_U:
20380 case TYPE_LOAD_UX:
20381 case TYPE_LOAD_EXT_UX:
20382 case TYPE_STORE_U:
20383 case TYPE_STORE_UX:
20384 case TYPE_FPLOAD_U:
20385 case TYPE_FPLOAD_UX:
20386 case TYPE_FPSTORE_U:
20387 case TYPE_FPSTORE_UX:
20388 return true;
20389 default:
20390 break;
20391 }
20392 break;
20393 default:
20394 break;
20395 }
20396
20397 return false;
20398}
20399
20400static bool
20401insn_must_be_last_in_group (rtx insn)
20402{
20403 enum attr_type type;
20404
20405 if (!insn
20406 || insn == NULL_RTX
20407 || GET_CODE (insn) == NOTE
20408 || GET_CODE (PATTERN (insn)) == USE
20409 || GET_CODE (PATTERN (insn)) == CLOBBER)
20410 return false;
20411
20412 switch (rs6000_cpu) {
20413 case PROCESSOR_POWER4:
20414 case PROCESSOR_POWER5:
20415 if (is_microcoded_insn (insn))
20416 return true;
20417
20418 if (is_branch_slot_insn (insn))
20419 return true;
20420
20421 break;
20422 case PROCESSOR_POWER6:
20423 type = get_attr_type (insn);
20424
20425 switch (type)
20426 {
20427 case TYPE_EXTS:
20428 case TYPE_CNTLZ:
20429 case TYPE_SHIFT:
20430 case TYPE_VAR_SHIFT_ROTATE:
20431 case TYPE_TRAP:
20432 case TYPE_IMUL:
20433 case TYPE_IMUL2:
20434 case TYPE_IMUL3:
20435 case TYPE_LMUL:
20436 case TYPE_IDIV:
20437 case TYPE_DELAYED_COMPARE:
20438 case TYPE_IMUL_COMPARE:
20439 case TYPE_LMUL_COMPARE:
20440 case TYPE_FPCOMPARE:
20441 case TYPE_MFCR:
20442 case TYPE_MTCR:
20443 case TYPE_MFJMPR:
20444 case TYPE_MTJMPR:
20445 case TYPE_ISYNC:
20446 case TYPE_SYNC:
20447 case TYPE_LOAD_L:
20448 case TYPE_STORE_C:
20449 return true;
20450 default:
20451 break;
cbe26ab8 20452 }
44cd321e
PS
20453 break;
20454 default:
20455 break;
20456 }
cbe26ab8
DN
20457
20458 return false;
20459}
20460
839a4992 20461/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
20462 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
20463
20464static bool
20465is_costly_group (rtx *group_insns, rtx next_insn)
20466{
20467 int i;
cbe26ab8
DN
20468 int issue_rate = rs6000_issue_rate ();
20469
20470 for (i = 0; i < issue_rate; i++)
20471 {
e2f6ff94
MK
20472 sd_iterator_def sd_it;
20473 dep_t dep;
cbe26ab8 20474 rtx insn = group_insns[i];
b198261f 20475
cbe26ab8 20476 if (!insn)
c4ad648e 20477 continue;
b198261f 20478
e2f6ff94 20479 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 20480 {
b198261f
MK
20481 rtx next = DEP_CON (dep);
20482
20483 if (next == next_insn
20484 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20485 return true;
c4ad648e 20486 }
cbe26ab8
DN
20487 }
20488
20489 return false;
20490}
20491
f676971a 20492/* Utility of the function redefine_groups.
cbe26ab8
DN
20493 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20494 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20495 to keep it "far" (in a separate group) from GROUP_INSNS, following
20496 one of the following schemes, depending on the value of the flag
20497 -minsert_sched_nops = X:
20498 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 20499 in order to force NEXT_INSN into a separate group.
f676971a
EC
20500 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20501 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
20502 insertion (has a group just ended, how many vacant issue slots remain in the
20503 last group, and how many dispatch groups were encountered so far). */
20504
f676971a 20505static int
c4ad648e
AM
20506force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20507 rtx next_insn, bool *group_end, int can_issue_more,
20508 int *group_count)
cbe26ab8
DN
20509{
20510 rtx nop;
20511 bool force;
20512 int issue_rate = rs6000_issue_rate ();
20513 bool end = *group_end;
20514 int i;
20515
20516 if (next_insn == NULL_RTX)
20517 return can_issue_more;
20518
20519 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20520 return can_issue_more;
20521
20522 force = is_costly_group (group_insns, next_insn);
20523 if (!force)
20524 return can_issue_more;
20525
20526 if (sched_verbose > 6)
20527 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 20528 *group_count ,can_issue_more);
cbe26ab8
DN
20529
20530 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20531 {
20532 if (*group_end)
c4ad648e 20533 can_issue_more = 0;
cbe26ab8
DN
20534
20535 /* Since only a branch can be issued in the last issue_slot, it is
20536 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20537 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20538 in this case the last nop will start a new group and the branch
20539 will be forced to the new group. */
cbe26ab8 20540 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20541 can_issue_more--;
cbe26ab8
DN
20542
20543 while (can_issue_more > 0)
c4ad648e 20544 {
9390387d 20545 nop = gen_nop ();
c4ad648e
AM
20546 emit_insn_before (nop, next_insn);
20547 can_issue_more--;
20548 }
cbe26ab8
DN
20549
20550 *group_end = true;
20551 return 0;
f676971a 20552 }
cbe26ab8
DN
20553
20554 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20555 {
20556 int n_nops = rs6000_sched_insert_nops;
20557
f676971a 20558 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20559 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20560 if (can_issue_more == 0)
c4ad648e 20561 can_issue_more = issue_rate;
cbe26ab8
DN
20562 can_issue_more--;
20563 if (can_issue_more == 0)
c4ad648e
AM
20564 {
20565 can_issue_more = issue_rate - 1;
20566 (*group_count)++;
20567 end = true;
20568 for (i = 0; i < issue_rate; i++)
20569 {
20570 group_insns[i] = 0;
20571 }
20572 }
cbe26ab8
DN
20573
20574 while (n_nops > 0)
c4ad648e
AM
20575 {
20576 nop = gen_nop ();
20577 emit_insn_before (nop, next_insn);
20578 if (can_issue_more == issue_rate - 1) /* new group begins */
20579 end = false;
20580 can_issue_more--;
20581 if (can_issue_more == 0)
20582 {
20583 can_issue_more = issue_rate - 1;
20584 (*group_count)++;
20585 end = true;
20586 for (i = 0; i < issue_rate; i++)
20587 {
20588 group_insns[i] = 0;
20589 }
20590 }
20591 n_nops--;
20592 }
cbe26ab8
DN
20593
20594 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20595 can_issue_more++;
cbe26ab8 20596
c4ad648e
AM
20597 /* Is next_insn going to start a new group? */
20598 *group_end
20599 = (end
cbe26ab8
DN
20600 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20601 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20602 || (can_issue_more < issue_rate &&
c4ad648e 20603 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20604 if (*group_end && end)
c4ad648e 20605 (*group_count)--;
cbe26ab8
DN
20606
20607 if (sched_verbose > 6)
c4ad648e
AM
20608 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20609 *group_count, can_issue_more);
f676971a
EC
20610 return can_issue_more;
20611 }
cbe26ab8
DN
20612
20613 return can_issue_more;
20614}
20615
20616/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20617 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
20618 form in practice. It tries to achieve this synchronization by forcing the
20619 estimated processor grouping on the compiler (as opposed to the function
20620 'pad_groups' which tries to force the scheduler's grouping on the processor).
20621
20622 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20623 examines the (estimated) dispatch groups that will be formed by the processor
20624 dispatcher. It marks these group boundaries to reflect the estimated
20625 processor grouping, overriding the grouping that the scheduler had marked.
20626 Depending on the value of the flag '-minsert-sched-nops' this function can
20627 force certain insns into separate groups or force a certain distance between
20628 them by inserting nops, for example, if there exists a "costly dependence"
20629 between the insns.
20630
20631 The function estimates the group boundaries that the processor will form as
0fa2e4df 20632 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
20633 each insn. A subsequent insn will start a new group if one of the following
20634 4 cases applies:
20635 - no more vacant issue slots remain in the current dispatch group.
20636 - only the last issue slot, which is the branch slot, is vacant, but the next
20637 insn is not a branch.
20638 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
20639 which means that a cracked insn (which occupies two issue slots) can't be
20640 issued in this group.
f676971a 20641 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
20642 start a new group. */
20643
20644static int
20645redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20646{
20647 rtx insn, next_insn;
20648 int issue_rate;
20649 int can_issue_more;
20650 int slot, i;
20651 bool group_end;
20652 int group_count = 0;
20653 rtx *group_insns;
20654
20655 /* Initialize. */
20656 issue_rate = rs6000_issue_rate ();
5ead67f6 20657 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20658 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20659 {
20660 group_insns[i] = 0;
20661 }
20662 can_issue_more = issue_rate;
20663 slot = 0;
20664 insn = get_next_active_insn (prev_head_insn, tail);
20665 group_end = false;
20666
20667 while (insn != NULL_RTX)
20668 {
20669 slot = (issue_rate - can_issue_more);
20670 group_insns[slot] = insn;
20671 can_issue_more =
c4ad648e 20672 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20673 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20674 can_issue_more = 0;
cbe26ab8
DN
20675
20676 next_insn = get_next_active_insn (insn, tail);
20677 if (next_insn == NULL_RTX)
c4ad648e 20678 return group_count + 1;
cbe26ab8 20679
c4ad648e
AM
20680 /* Is next_insn going to start a new group? */
20681 group_end
20682 = (can_issue_more == 0
20683 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20684 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20685 || (can_issue_more < issue_rate &&
20686 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20687
f676971a 20688 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20689 next_insn, &group_end, can_issue_more,
20690 &group_count);
cbe26ab8
DN
20691
20692 if (group_end)
c4ad648e
AM
20693 {
20694 group_count++;
20695 can_issue_more = 0;
20696 for (i = 0; i < issue_rate; i++)
20697 {
20698 group_insns[i] = 0;
20699 }
20700 }
cbe26ab8
DN
20701
20702 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20703 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20704 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20705 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20706
20707 insn = next_insn;
20708 if (can_issue_more == 0)
c4ad648e
AM
20709 can_issue_more = issue_rate;
20710 } /* while */
cbe26ab8
DN
20711
20712 return group_count;
20713}
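/* Illustrative sketch, not part of GCC: the group_end test used in
   redefine_groups (and in force_new_group) can be read as a pure predicate
   over the number of vacant issue slots and three properties of the next
   insn.  The hypothetical helper below restates it with plain ints.  */
static int
group_end_model (int vacant_slots, int issue_rate,
                 int next_is_branch, int next_is_cracked,
                 int next_must_be_first)
{
  return (vacant_slots == 0
          || (vacant_slots == 1 && !next_is_branch)
          || (vacant_slots <= 2 && next_is_cracked)
          || (vacant_slots < issue_rate && next_must_be_first));
}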
20714
20715/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20716 dispatch group boundaries that the scheduler had marked. Pad with nops
20717 any dispatch groups which have vacant issue slots, in order to force the
20718 scheduler's grouping on the processor dispatcher. The function
20719 returns the number of dispatch groups found. */
20720
20721static int
20722pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20723{
20724 rtx insn, next_insn;
20725 rtx nop;
20726 int issue_rate;
20727 int can_issue_more;
20728 int group_end;
20729 int group_count = 0;
20730
20731 /* Initialize issue_rate. */
20732 issue_rate = rs6000_issue_rate ();
20733 can_issue_more = issue_rate;
20734
20735 insn = get_next_active_insn (prev_head_insn, tail);
20736 next_insn = get_next_active_insn (insn, tail);
20737
20738 while (insn != NULL_RTX)
20739 {
20740 can_issue_more =
20741 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20742
20743 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20744
20745 if (next_insn == NULL_RTX)
c4ad648e 20746 break;
cbe26ab8
DN
20747
20748 if (group_end)
c4ad648e
AM
20749 {
20750 /* If the scheduler had marked group termination at this location
e855c69d 20751 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20752 force group termination, pad the group with nops to force group
20753 termination. */
20754 if (can_issue_more
20755 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20756 && !insn_terminates_group_p (insn, current_group)
20757 && !insn_terminates_group_p (next_insn, previous_group))
20758 {
9390387d 20759 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20760 can_issue_more--;
20761
20762 while (can_issue_more)
20763 {
20764 nop = gen_nop ();
20765 emit_insn_before (nop, next_insn);
20766 can_issue_more--;
20767 }
20768 }
20769
20770 can_issue_more = issue_rate;
20771 group_count++;
20772 }
cbe26ab8
DN
20773
20774 insn = next_insn;
20775 next_insn = get_next_active_insn (insn, tail);
20776 }
20777
20778 return group_count;
20779}
20780
44cd321e
PS
20781/* We're beginning a new block. Initialize data structures as necessary. */
20782
20783static void
20784rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20785 int sched_verbose ATTRIBUTE_UNUSED,
20786 int max_ready ATTRIBUTE_UNUSED)
982afe02 20787{
44cd321e
PS
20788 last_scheduled_insn = NULL_RTX;
20789 load_store_pendulum = 0;
20790}
20791
cbe26ab8
DN
20792/* The following function is called at the end of scheduling BB.
20793 After reload, it inserts nops at insn group bundling. */
20794
20795static void
38f391a5 20796rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20797{
20798 int n_groups;
20799
20800 if (sched_verbose)
20801 fprintf (dump, "=== Finishing schedule.\n");
20802
ec507f2d 20803 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20804 {
e855c69d
AB
20805 /* Do not run sched_finish hook when selective scheduling enabled. */
20806 if (sel_sched_p ())
20807 return;
20808
cbe26ab8 20809 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20810 return;
cbe26ab8
DN
20811
20812 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20813 n_groups = pad_groups (dump, sched_verbose,
20814 current_sched_info->prev_head,
20815 current_sched_info->next_tail);
cbe26ab8 20816 else
c4ad648e
AM
20817 n_groups = redefine_groups (dump, sched_verbose,
20818 current_sched_info->prev_head,
20819 current_sched_info->next_tail);
cbe26ab8
DN
20820
20821 if (sched_verbose >= 6)
20822 {
20823 fprintf (dump, "ngroups = %d\n", n_groups);
20824 print_rtl (dump, current_sched_info->prev_head);
20825 fprintf (dump, "Done finish_sched\n");
20826 }
20827 }
20828}
e855c69d
AB
20829
20830struct _rs6000_sched_context
20831{
20832 short cached_can_issue_more;
20833 rtx last_scheduled_insn;
20834 int load_store_pendulum;
20835};
20836
20837typedef struct _rs6000_sched_context rs6000_sched_context_def;
20838typedef rs6000_sched_context_def *rs6000_sched_context_t;
20839
20840/* Allocate store for new scheduling context. */
20841static void *
20842rs6000_alloc_sched_context (void)
20843{
20844 return xmalloc (sizeof (rs6000_sched_context_def));
20845}
20846
20847/* If CLEAN_P is true then initialize _SC with clean data;
20848   otherwise initialize it from the global context. */
20849static void
20850rs6000_init_sched_context (void *_sc, bool clean_p)
20851{
20852 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20853
20854 if (clean_p)
20855 {
20856 sc->cached_can_issue_more = 0;
20857 sc->last_scheduled_insn = NULL_RTX;
20858 sc->load_store_pendulum = 0;
20859 }
20860 else
20861 {
20862 sc->cached_can_issue_more = cached_can_issue_more;
20863 sc->last_scheduled_insn = last_scheduled_insn;
20864 sc->load_store_pendulum = load_store_pendulum;
20865 }
20866}
20867
20868/* Sets the global scheduling context to the one pointed to by _SC. */
20869static void
20870rs6000_set_sched_context (void *_sc)
20871{
20872 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20873
20874 gcc_assert (sc != NULL);
20875
20876 cached_can_issue_more = sc->cached_can_issue_more;
20877 last_scheduled_insn = sc->last_scheduled_insn;
20878 load_store_pendulum = sc->load_store_pendulum;
20879}
20880
20881/* Free _SC. */
20882static void
20883rs6000_free_sched_context (void *_sc)
20884{
20885 gcc_assert (_sc != NULL);
20886
20887 free (_sc);
20888}
20889
b6c9286a 20890\f
b6c9286a
MM
20891/* Length in units of the trampoline for entering a nested function. */
20892
20893int
863d938c 20894rs6000_trampoline_size (void)
b6c9286a
MM
20895{
20896 int ret = 0;
20897
20898 switch (DEFAULT_ABI)
20899 {
20900 default:
37409796 20901 gcc_unreachable ();
b6c9286a
MM
20902
20903 case ABI_AIX:
8f802bfb 20904 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20905 break;
20906
4dabc42d 20907 case ABI_DARWIN:
b6c9286a 20908 case ABI_V4:
03a7e1a5 20909 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20910 break;
b6c9286a
MM
20911 }
20912
20913 return ret;
20914}
20915
20916/* Emit RTL insns to initialize the variable parts of a trampoline.
20917 FNADDR is an RTX for the address of the function's pure code.
20918 CXT is an RTX for the static chain value for the function. */
20919
20920void
a2369ed3 20921rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20922{
8bd04c56 20923 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20924 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20925
20926 switch (DEFAULT_ABI)
20927 {
20928 default:
37409796 20929 gcc_unreachable ();
b6c9286a 20930
8bd04c56 20931/* Macros to shorten the code expansions below. */
9613eaff 20932#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20933#define MEM_PLUS(addr,offset) \
9613eaff 20934 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20935
b6c9286a
MM
20936 /* Under AIX, just build the 3 word function descriptor */
20937 case ABI_AIX:
8bd04c56 20938 {
9613eaff
SH
20939 rtx fn_reg = gen_reg_rtx (Pmode);
20940 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20941 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20942 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20943 emit_move_insn (MEM_DEREF (addr), fn_reg);
20944 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20945 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20946 }
b6c9286a
MM
20947 break;
20948
4dabc42d
TC
20949 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20950 case ABI_DARWIN:
b6c9286a 20951 case ABI_V4:
9613eaff 20952 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
bbbbb16a 20953 LCT_NORMAL, VOIDmode, 4,
9613eaff 20954 addr, Pmode,
eaf1bcf1 20955 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20956 fnaddr, Pmode,
20957 ctx_reg, Pmode);
b6c9286a 20958 break;
b6c9286a
MM
20959 }
20960
20961 return;
20962}
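/* Illustrative sketch, not part of GCC: under the AIX ABI the trampoline
   filled in above is a three-word function descriptor.  Shown as a plain
   struct for a 64-bit target (regsize == 8); the struct and field names
   are hypothetical.  */
struct aix_tramp_desc
{
  void *entry;		/* word 0: code address copied from FNADDR's descriptor */
  void *toc;		/* word 1: TOC pointer copied from FNADDR's descriptor */
  void *static_chain;	/* word 2: CXT, the static chain value */
};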
7509c759
MM
20963
20964\f
8bb418a3
ZL
20965/* Handle the "altivec" attribute. The attribute may have
20966 arguments as follows:
f676971a 20967
8bb418a3
ZL
20968 __attribute__((altivec(vector__)))
20969 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20970 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20971
20972 and may appear more than once (e.g., 'vector bool char') in a
20973 given declaration. */
20974
20975static tree
f90ac3f0
UP
20976rs6000_handle_altivec_attribute (tree *node,
20977 tree name ATTRIBUTE_UNUSED,
20978 tree args,
8bb418a3
ZL
20979 int flags ATTRIBUTE_UNUSED,
20980 bool *no_add_attrs)
20981{
20982 tree type = *node, result = NULL_TREE;
20983 enum machine_mode mode;
20984 int unsigned_p;
20985 char altivec_type
20986 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20987 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20988 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20989 : '?');
8bb418a3
ZL
20990
20991 while (POINTER_TYPE_P (type)
20992 || TREE_CODE (type) == FUNCTION_TYPE
20993 || TREE_CODE (type) == METHOD_TYPE
20994 || TREE_CODE (type) == ARRAY_TYPE)
20995 type = TREE_TYPE (type);
20996
20997 mode = TYPE_MODE (type);
20998
f90ac3f0
UP
20999 /* Check for invalid AltiVec type qualifiers. */
21000 if (type == long_unsigned_type_node || type == long_integer_type_node)
21001 {
21002 if (TARGET_64BIT)
21003 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
21004 else if (rs6000_warn_altivec_long)
d4ee4d25 21005 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
21006 }
21007 else if (type == long_long_unsigned_type_node
21008 || type == long_long_integer_type_node)
21009 error ("use of %<long long%> in AltiVec types is invalid");
21010 else if (type == double_type_node)
21011 error ("use of %<double%> in AltiVec types is invalid");
21012 else if (type == long_double_type_node)
21013 error ("use of %<long double%> in AltiVec types is invalid");
21014 else if (type == boolean_type_node)
21015 error ("use of boolean types in AltiVec types is invalid");
21016 else if (TREE_CODE (type) == COMPLEX_TYPE)
21017 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
21018 else if (DECIMAL_FLOAT_MODE_P (mode))
21019 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
21020
21021 switch (altivec_type)
21022 {
21023 case 'v':
8df83eae 21024 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
21025 switch (mode)
21026 {
c4ad648e
AM
21027 case SImode:
21028 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
21029 break;
21030 case HImode:
21031 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
21032 break;
21033 case QImode:
21034 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
21035 break;
21036 case SFmode: result = V4SF_type_node; break;
21037 /* If the user says 'vector int bool', we may be handed the 'bool'
21038 attribute _before_ the 'vector' attribute, and so select the
21039 proper type in the 'b' case below. */
21040 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
21041 result = type;
21042 default: break;
8bb418a3
ZL
21043 }
21044 break;
21045 case 'b':
21046 switch (mode)
21047 {
c4ad648e
AM
21048 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
21049 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
21050 case QImode: case V16QImode: result = bool_V16QI_type_node;
21051 default: break;
8bb418a3
ZL
21052 }
21053 break;
21054 case 'p':
21055 switch (mode)
21056 {
c4ad648e
AM
21057 case V8HImode: result = pixel_V8HI_type_node;
21058 default: break;
8bb418a3
ZL
21059 }
21060 default: break;
21061 }
21062
4f538d42
UW
21063 /* Propagate qualifiers attached to the element type
21064 onto the vector type. */
21065 if (result && result != type && TYPE_QUALS (type))
21066 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 21067
8bb418a3
ZL
21068 *no_add_attrs = true; /* No need to hang on to the attribute. */
21069
f90ac3f0 21070 if (result)
5dc11954 21071 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
21072
21073 return NULL_TREE;
21074}
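/* Illustrative sketch, not part of GCC, and only meaningful when compiling
   for PowerPC with AltiVec enabled: how the attribute forms documented above
   typically appear in user code.  The macro names here are made up;
   <altivec.h> provides its own spellings.  */
#define VEC   __attribute__ ((altivec (vector__)))
#define VBOOL __attribute__ ((altivec (bool__)))
#define VPIX  __attribute__ ((altivec (pixel__)))

VEC int v4si;			/* a 'vector int' */
VEC VBOOL unsigned int vb4si;	/* a 'vector bool int' */
VEC VPIX unsigned short vp8hi;	/* a 'vector pixel' */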
21075
f18eca82
ZL
21076/* AltiVec defines four built-in scalar types that serve as vector
21077 elements; we must teach the compiler how to mangle them. */
21078
21079static const char *
3101faab 21080rs6000_mangle_type (const_tree type)
f18eca82 21081{
608063c3
JB
21082 type = TYPE_MAIN_VARIANT (type);
21083
21084 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
21085 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
21086 return NULL;
21087
f18eca82
ZL
21088 if (type == bool_char_type_node) return "U6__boolc";
21089 if (type == bool_short_type_node) return "U6__bools";
21090 if (type == pixel_type_node) return "u7__pixel";
21091 if (type == bool_int_type_node) return "U6__booli";
21092
337bde91
DE
21093 /* Mangle IBM extended float long double as `g' (__float128) on
21094 powerpc*-linux where long-double-64 previously was the default. */
21095 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
21096 && TARGET_ELF
21097 && TARGET_LONG_DOUBLE_128
21098 && !TARGET_IEEEQUAD)
21099 return "g";
21100
f18eca82
ZL
21101 /* For all other types, use normal C++ mangling. */
21102 return NULL;
21103}
21104
a5c76ee6
ZW
21105/* Handle a "longcall" or "shortcall" attribute; arguments as in
21106 struct attribute_spec.handler. */
a4f6c312 21107
91d231cb 21108static tree
f676971a
EC
21109rs6000_handle_longcall_attribute (tree *node, tree name,
21110 tree args ATTRIBUTE_UNUSED,
21111 int flags ATTRIBUTE_UNUSED,
a2369ed3 21112 bool *no_add_attrs)
91d231cb
JM
21113{
21114 if (TREE_CODE (*node) != FUNCTION_TYPE
21115 && TREE_CODE (*node) != FIELD_DECL
21116 && TREE_CODE (*node) != TYPE_DECL)
21117 {
29d08eba
JM
21118 warning (OPT_Wattributes, "%qE attribute only applies to functions",
21119 name);
91d231cb
JM
21120 *no_add_attrs = true;
21121 }
6a4cee5f 21122
91d231cb 21123 return NULL_TREE;
7509c759
MM
21124}
21125
a5c76ee6
ZW
21126/* Set longcall attributes on all functions declared when
21127 rs6000_default_long_calls is true. */
21128static void
a2369ed3 21129rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
21130{
21131 if (rs6000_default_long_calls
21132 && (TREE_CODE (type) == FUNCTION_TYPE
21133 || TREE_CODE (type) == METHOD_TYPE))
21134 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
21135 NULL_TREE,
21136 TYPE_ATTRIBUTES (type));
16d6f994
EC
21137
21138#if TARGET_MACHO
21139 darwin_set_default_type_attributes (type);
21140#endif
a5c76ee6
ZW
21141}
21142
3cb999d8
DE
21143/* Return a reference suitable for calling a function with the
21144 longcall attribute. */
a4f6c312 21145
9390387d 21146rtx
a2369ed3 21147rs6000_longcall_ref (rtx call_ref)
6a4cee5f 21148{
d330fd93 21149 const char *call_name;
6a4cee5f
MM
21150 tree node;
21151
21152 if (GET_CODE (call_ref) != SYMBOL_REF)
21153 return call_ref;
21154
21155 /* System V adds '.' to the internal name, so skip them. */
21156 call_name = XSTR (call_ref, 0);
21157 if (*call_name == '.')
21158 {
21159 while (*call_name == '.')
21160 call_name++;
21161
21162 node = get_identifier (call_name);
39403d82 21163 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
21164 }
21165
21166 return force_reg (Pmode, call_ref);
21167}
7509c759 21168\f
77ccdfed
EC
21169#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
21170#define TARGET_USE_MS_BITFIELD_LAYOUT 0
21171#endif
21172
21173/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
21174 struct attribute_spec.handler. */
21175static tree
21176rs6000_handle_struct_attribute (tree *node, tree name,
21177 tree args ATTRIBUTE_UNUSED,
21178 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
21179{
21180 tree *type = NULL;
21181 if (DECL_P (*node))
21182 {
21183 if (TREE_CODE (*node) == TYPE_DECL)
21184 type = &TREE_TYPE (*node);
21185 }
21186 else
21187 type = node;
21188
21189 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
21190 || TREE_CODE (*type) == UNION_TYPE)))
21191 {
29d08eba 21192 warning (OPT_Wattributes, "%qE attribute ignored", name);
77ccdfed
EC
21193 *no_add_attrs = true;
21194 }
21195
21196 else if ((is_attribute_p ("ms_struct", name)
21197 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
21198 || ((is_attribute_p ("gcc_struct", name)
21199 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
21200 {
29d08eba
JM
21201 warning (OPT_Wattributes, "%qE incompatible attribute ignored",
21202 name);
77ccdfed
EC
21203 *no_add_attrs = true;
21204 }
21205
21206 return NULL_TREE;
21207}
21208
21209static bool
3101faab 21210rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
21211{
21212 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
21213 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
21214 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
21215}
21216\f
b64a1b53
RH
21217#ifdef USING_ELFOS_H
21218
d6b5193b 21219/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 21220
d6b5193b
RS
21221static void
21222rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21223{
21224 if (DEFAULT_ABI == ABI_AIX
21225 && TARGET_MINIMAL_TOC
21226 && !TARGET_RELOCATABLE)
21227 {
21228 if (!toc_initialized)
21229 {
21230 toc_initialized = 1;
21231 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
21232 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
21233 fprintf (asm_out_file, "\t.tc ");
21234 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
21235 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
21236 fprintf (asm_out_file, "\n");
21237
21238 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21239 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
21240 fprintf (asm_out_file, " = .+32768\n");
21241 }
21242 else
21243 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21244 }
21245 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
21246 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
21247 else
21248 {
21249 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21250 if (!toc_initialized)
21251 {
21252 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
21253 fprintf (asm_out_file, " = .+32768\n");
21254 toc_initialized = 1;
21255 }
21256 }
21257}
21258
21259/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 21260
b64a1b53 21261static void
d6b5193b
RS
21262rs6000_elf_asm_init_sections (void)
21263{
21264 toc_section
21265 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
21266
21267 sdata2_section
21268 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
21269 SDATA2_SECTION_ASM_OP);
21270}
21271
21272/* Implement TARGET_SELECT_RTX_SECTION. */
21273
21274static section *
f676971a 21275rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 21276 unsigned HOST_WIDE_INT align)
7509c759 21277{
a9098fd0 21278 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21279 return toc_section;
7509c759 21280 else
d6b5193b 21281 return default_elf_select_rtx_section (mode, x, align);
7509c759 21282}
d9407988 21283\f
d1908feb
JJ
21284/* For a SYMBOL_REF, set generic flags and then perform some
21285 target-specific processing.
21286
d1908feb
JJ
21287 When the AIX ABI is requested on a non-AIX system, replace the
21288 function name with the real name (with a leading .) rather than the
21289 function descriptor name. This saves a lot of overriding code to
21290 read the prefixes. */
d9407988 21291
fb49053f 21292static void
a2369ed3 21293rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 21294{
d1908feb 21295 default_encode_section_info (decl, rtl, first);
b2003250 21296
d1908feb
JJ
21297 if (first
21298 && TREE_CODE (decl) == FUNCTION_DECL
21299 && !TARGET_AIX
21300 && DEFAULT_ABI == ABI_AIX)
d9407988 21301 {
c6a2438a 21302 rtx sym_ref = XEXP (rtl, 0);
d1908feb 21303 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 21304 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
21305 str[0] = '.';
21306 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
21307 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 21308 }
d9407988
MM
21309}
21310
21d9bb3f 21311static inline bool
0a2aaacc 21312compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
21313{
21314 int len;
21315
0a2aaacc
KG
21316 len = strlen (templ);
21317 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
21318 && (section[len] == 0 || section[len] == '.'));
21319}
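/* Worked example for the helper above (an illustration, not part of GCC):
   compare_section_name (".sdata.rel.local", ".sdata") and
   compare_section_name (".sdata", ".sdata") both return true, while
   compare_section_name (".sdata2", ".sdata") returns false because the
   character after the matched prefix is '2', not '.' or NUL -- which is
   why the caller below checks ".sdata2" separately.  */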
21320
c1b7d95a 21321bool
3101faab 21322rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
21323{
21324 if (rs6000_sdata == SDATA_NONE)
21325 return false;
21326
7482ad25
AF
21327 /* We want to merge strings, so we never consider them small data. */
21328 if (TREE_CODE (decl) == STRING_CST)
21329 return false;
21330
21331 /* Functions are never in the small data area. */
21332 if (TREE_CODE (decl) == FUNCTION_DECL)
21333 return false;
21334
0e5dbd9b
DE
21335 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
21336 {
21337 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
21338 if (compare_section_name (section, ".sdata")
21339 || compare_section_name (section, ".sdata2")
21340 || compare_section_name (section, ".gnu.linkonce.s")
21341 || compare_section_name (section, ".sbss")
21342 || compare_section_name (section, ".sbss2")
21343 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
21344 || strcmp (section, ".PPC.EMB.sdata0") == 0
21345 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
21346 return true;
21347 }
21348 else
21349 {
21350 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
21351
21352 if (size > 0
307b599c 21353 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
21354 /* If it's not public, and we're not going to reference it there,
21355 there's no need to put it in the small data section. */
0e5dbd9b
DE
21356 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
21357 return true;
21358 }
21359
21360 return false;
21361}
21362
b91da81f 21363#endif /* USING_ELFOS_H */
aacd3885
RS
21364\f
21365/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 21366
aacd3885 21367static bool
3101faab 21368rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
21369{
21370 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
21371}
a6c2a102 21372\f
000034eb 21373/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
21374 ADDR can be effectively incremented by incrementing REG.
21375
21376 r0 is special and we must not select it as an address
21377 register by this routine since our caller will try to
21378 increment the returned register via an "la" instruction. */
000034eb 21379
9390387d 21380rtx
a2369ed3 21381find_addr_reg (rtx addr)
000034eb
DE
21382{
21383 while (GET_CODE (addr) == PLUS)
21384 {
02441cd6
JL
21385 if (GET_CODE (XEXP (addr, 0)) == REG
21386 && REGNO (XEXP (addr, 0)) != 0)
000034eb 21387 addr = XEXP (addr, 0);
02441cd6
JL
21388 else if (GET_CODE (XEXP (addr, 1)) == REG
21389 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
21390 addr = XEXP (addr, 1);
21391 else if (CONSTANT_P (XEXP (addr, 0)))
21392 addr = XEXP (addr, 1);
21393 else if (CONSTANT_P (XEXP (addr, 1)))
21394 addr = XEXP (addr, 0);
21395 else
37409796 21396 gcc_unreachable ();
000034eb 21397 }
37409796
NS
21398 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
21399 return addr;
000034eb
DE
21400}
21401
a6c2a102 21402void
a2369ed3 21403rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
21404{
21405 fatal_insn ("bad address", op);
21406}
c8023011 21407
ee890fe2
SS
21408#if TARGET_MACHO
21409
efdba735 21410static tree branch_island_list = 0;
ee890fe2 21411
efdba735
SH
21412/* Remember to generate a branch island for far calls to the given
21413 function. */
ee890fe2 21414
f676971a 21415static void
c4ad648e
AM
21416add_compiler_branch_island (tree label_name, tree function_name,
21417 int line_number)
ee890fe2 21418{
efdba735 21419 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 21420 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
21421 TREE_CHAIN (branch_island) = branch_island_list;
21422 branch_island_list = branch_island;
ee890fe2
SS
21423}
21424
efdba735
SH
21425#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
21426#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
21427#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
21428 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 21429
efdba735
SH
21430/* Generate far-jump branch islands for everything on the
21431 branch_island_list. Invoked immediately after the last instruction
21432 of the epilogue has been emitted; the branch-islands must be
21433 appended to, and contiguous with, the function body. Mach-O stubs
21434 are generated in machopic_output_stub(). */
ee890fe2 21435
efdba735
SH
21436static void
21437macho_branch_islands (void)
21438{
21439 char tmp_buf[512];
21440 tree branch_island;
21441
21442 for (branch_island = branch_island_list;
21443 branch_island;
21444 branch_island = TREE_CHAIN (branch_island))
21445 {
21446 const char *label =
21447 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
21448 const char *name =
11abc112 21449 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
21450 char name_buf[512];
21451 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
21452 if (name[0] == '*' || name[0] == '&')
21453 strcpy (name_buf, name+1);
21454 else
21455 {
21456 name_buf[0] = '_';
21457 strcpy (name_buf+1, name);
21458 }
21459 strcpy (tmp_buf, "\n");
21460 strcat (tmp_buf, label);
ee890fe2 21461#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21462 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21463 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21464#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
21465 if (flag_pic)
21466 {
21467 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21468 strcat (tmp_buf, label);
21469 strcat (tmp_buf, "_pic\n");
21470 strcat (tmp_buf, label);
21471 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 21472
efdba735
SH
21473 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21474 strcat (tmp_buf, name_buf);
21475 strcat (tmp_buf, " - ");
21476 strcat (tmp_buf, label);
21477 strcat (tmp_buf, "_pic)\n");
f676971a 21478
efdba735 21479 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 21480
efdba735
SH
21481 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21482 strcat (tmp_buf, name_buf);
21483 strcat (tmp_buf, " - ");
21484 strcat (tmp_buf, label);
21485 strcat (tmp_buf, "_pic)\n");
f676971a 21486
efdba735
SH
21487 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21488 }
21489 else
21490 {
21491 strcat (tmp_buf, ":\nlis r12,hi16(");
21492 strcat (tmp_buf, name_buf);
21493 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21494 strcat (tmp_buf, name_buf);
21495 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21496 }
21497 output_asm_insn (tmp_buf, 0);
ee890fe2 21498#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21499 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21500 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21501#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21502 }
ee890fe2 21503
efdba735 21504 branch_island_list = 0;
ee890fe2
SS
21505}
21506
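/* Editorial note, not part of rs6000.c: for a branch island recorded with a
   label "L42" targeting a function "foo" (both names hypothetical), the
   flag_pic branch of macho_branch_islands above builds tmp_buf into roughly
   the following sequence:

	L42:
		mflr r0
		bcl 20,31,L42_pic
	L42_pic:
		mflr r11
		addis r11,r11,ha16(_foo - L42_pic)
		mtlr r0
		addi r12,r11,lo16(_foo - L42_pic)
		mtctr r12
		bctr

   i.e. it materializes the offset from the island to _foo relative to the
   bcl return address in r11/r12, restores the original LR, and performs an
   indirect jump through CTR.  */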
21507/* NO_PREVIOUS_DEF checks whether the function name is already in the
 21508   linked list of branch islands.  */
21509
efdba735 21510static int
a2369ed3 21511no_previous_def (tree function_name)
ee890fe2 21512{
efdba735
SH
21513 tree branch_island;
21514 for (branch_island = branch_island_list;
21515 branch_island;
21516 branch_island = TREE_CHAIN (branch_island))
21517 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21518 return 0;
21519 return 1;
21520}
21521
21522/* GET_PREV_LABEL gets the label name from the previous definition of
21523 the function. */
21524
efdba735 21525static tree
a2369ed3 21526get_prev_label (tree function_name)
ee890fe2 21527{
efdba735
SH
21528 tree branch_island;
21529 for (branch_island = branch_island_list;
21530 branch_island;
21531 branch_island = TREE_CHAIN (branch_island))
21532 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21533 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21534 return 0;
21535}
21536
75b1b789
MS
21537#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21538#define DARWIN_LINKER_GENERATES_ISLANDS 0
21539#endif
21540
21541/* KEXTs still need branch islands. */
21542#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21543 || flag_mkernel || flag_apple_kext)
21544
ee890fe2 21545/* INSN is either a function call or a millicode call. It may have an
f676971a 21546 unconditional jump in its delay slot.
ee890fe2
SS
21547
21548 CALL_DEST is the routine we are calling. */
21549
21550char *
c4ad648e
AM
21551output_call (rtx insn, rtx *operands, int dest_operand_number,
21552 int cookie_operand_number)
ee890fe2
SS
21553{
21554 static char buf[256];
75b1b789
MS
21555 if (DARWIN_GENERATE_ISLANDS
21556 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21557 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21558 {
21559 tree labelname;
efdba735 21560 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21561
ee890fe2
SS
21562 if (no_previous_def (funname))
21563 {
ee890fe2
SS
21564 rtx label_rtx = gen_label_rtx ();
21565 char *label_buf, temp_buf[256];
21566 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21567 CODE_LABEL_NUMBER (label_rtx));
21568 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21569 labelname = get_identifier (label_buf);
a38e7aa5 21570 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21571 }
21572 else
21573 labelname = get_prev_label (funname);
21574
efdba735
SH
21575 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21576 instruction will reach 'foo', otherwise link as 'bl L42'".
21577 "L42" should be a 'branch island', that will do a far jump to
21578 'foo'. Branch islands are generated in
21579 macho_branch_islands(). */
ee890fe2 21580 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21581 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21582 }
21583 else
efdba735
SH
21584 sprintf (buf, "bl %%z%d", dest_operand_number);
21585 return buf;
ee890fe2
SS
21586}
21587
ee890fe2
SS
21588/* Generate PIC and indirect symbol stubs. */
21589
21590void
a2369ed3 21591machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21592{
21593 unsigned int length;
a4f6c312
SS
21594 char *symbol_name, *lazy_ptr_name;
21595 char *local_label_0;
ee890fe2
SS
21596 static int label = 0;
21597
df56a27f 21598 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21599 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21600
ee890fe2 21601
ee890fe2 21602 length = strlen (symb);
5ead67f6 21603 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21604 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21605
5ead67f6 21606 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21607 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21608
ee890fe2 21609 if (flag_pic == 2)
56c779bc 21610 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21611 else
56c779bc 21612 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21613
21614 if (flag_pic == 2)
21615 {
d974312d
DJ
21616 fprintf (file, "\t.align 5\n");
21617
21618 fprintf (file, "%s:\n", stub);
21619 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21620
876455fa 21621 label++;
5ead67f6 21622 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21623 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21624
ee890fe2
SS
21625 fprintf (file, "\tmflr r0\n");
21626 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21627 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21628 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21629 lazy_ptr_name, local_label_0);
21630 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21631 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21632 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21633 lazy_ptr_name, local_label_0);
21634 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21635 fprintf (file, "\tbctr\n");
21636 }
21637 else
d974312d
DJ
21638 {
21639 fprintf (file, "\t.align 4\n");
21640
21641 fprintf (file, "%s:\n", stub);
21642 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21643
21644 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21645 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21646 (TARGET_64BIT ? "ldu" : "lwzu"),
21647 lazy_ptr_name);
d974312d
DJ
21648 fprintf (file, "\tmtctr r12\n");
21649 fprintf (file, "\tbctr\n");
21650 }
f676971a 21651
56c779bc 21652 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21653 fprintf (file, "%s:\n", lazy_ptr_name);
21654 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21655 fprintf (file, "%sdyld_stub_binding_helper\n",
21656 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21657}
21658
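/* Editorial note, not part of rs6000.c: with flag_pic == 2 on 32-bit Darwin,
   the fprintf calls above produce a stub of roughly the following shape (the
   stub, symbol and lazy-pointer names are hypothetical placeholders):

	.align 5
   L_foo$stub:
	.indirect_symbol _foo
	mflr r0
	bcl 20,31,"L00000000001$spb"
   "L00000000001$spb":
	mflr r11
	addis r11,r11,ha16(L_foo$lazy_ptr-"L00000000001$spb")
	mtlr r0
	lwzu r12,lo16(L_foo$lazy_ptr-"L00000000001$spb")(r11)
	mtctr r12
	bctr

   followed, in the lazy symbol pointer section, by:

   L_foo$lazy_ptr:
	.indirect_symbol _foo
	.long	dyld_stub_binding_helper
   */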
21659/* Legitimize PIC addresses. If the address is already
21660 position-independent, we return ORIG. Newly generated
 21661   position-independent addresses go into a reg.  This is REG if
 21662   nonzero, otherwise we allocate register(s) as necessary.  */
21663
4fbbe694 21664#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21665
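/* Editorial sketch, not part of rs6000.c: SMALL_INT above is the usual
   unsigned wrap-around test for the signed 16-bit range; adding 0x8000 maps
   [-0x8000, 0x7fff] onto [0, 0xffff].  The helper below is a hypothetical
   stand-alone version operating on a plain integer instead of UINTVAL (X).  */

#include <assert.h>
#include <stdint.h>

static int
small_int (int64_t v)
{
  return ((uint64_t) v + 0x8000) < 0x10000;
}

int
main (void)
{
  assert (small_int (-0x8000) && small_int (0) && small_int (0x7fff));
  assert (!small_int (-0x8001) && !small_int (0x8000));
  return 0;
}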
21666rtx
f676971a 21667rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21668 rtx reg)
ee890fe2
SS
21669{
21670 rtx base, offset;
21671
21672 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21673 reg = gen_reg_rtx (Pmode);
21674
21675 if (GET_CODE (orig) == CONST)
21676 {
37409796
NS
21677 rtx reg_temp;
21678
ee890fe2
SS
21679 if (GET_CODE (XEXP (orig, 0)) == PLUS
21680 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21681 return orig;
21682
37409796 21683 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21684
37409796
NS
21685 /* Use a different reg for the intermediate value, as
21686 it will be marked UNCHANGING. */
b3a13419 21687 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21688 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21689 Pmode, reg_temp);
21690 offset =
21691 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21692 Pmode, reg);
bb8df8a6 21693
ee890fe2
SS
21694 if (GET_CODE (offset) == CONST_INT)
21695 {
21696 if (SMALL_INT (offset))
ed8908e7 21697 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21698 else if (! reload_in_progress && ! reload_completed)
21699 offset = force_reg (Pmode, offset);
21700 else
c859cda6
DJ
21701 {
21702 rtx mem = force_const_mem (Pmode, orig);
21703 return machopic_legitimize_pic_address (mem, Pmode, reg);
21704 }
ee890fe2 21705 }
f1c25d3b 21706 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21707 }
21708
21709 /* Fall back on generic machopic code. */
21710 return machopic_legitimize_pic_address (orig, mode, reg);
21711}
21712
c4e18b1c
GK
21713/* Output a .machine directive for the Darwin assembler, and call
21714 the generic start_file routine. */
21715
21716static void
21717rs6000_darwin_file_start (void)
21718{
94ff898d 21719 static const struct
c4e18b1c
GK
21720 {
21721 const char *arg;
21722 const char *name;
21723 int if_set;
21724 } mapping[] = {
55dbfb48 21725 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21726 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21727 { "power4", "ppc970", 0 },
21728 { "G5", "ppc970", 0 },
21729 { "7450", "ppc7450", 0 },
21730 { "7400", "ppc7400", MASK_ALTIVEC },
21731 { "G4", "ppc7400", 0 },
21732 { "750", "ppc750", 0 },
21733 { "740", "ppc750", 0 },
21734 { "G3", "ppc750", 0 },
21735 { "604e", "ppc604e", 0 },
21736 { "604", "ppc604", 0 },
21737 { "603e", "ppc603", 0 },
21738 { "603", "ppc603", 0 },
21739 { "601", "ppc601", 0 },
21740 { NULL, "ppc", 0 } };
21741 const char *cpu_id = "";
21742 size_t i;
94ff898d 21743
9390387d 21744 rs6000_file_start ();
192d0f89 21745 darwin_file_start ();
c4e18b1c
GK
21746
21747 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21748 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21749 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21750 && rs6000_select[i].string[0] != '\0')
21751 cpu_id = rs6000_select[i].string;
21752
21753 /* Look through the mapping array. Pick the first name that either
21754 matches the argument, has a bit set in IF_SET that is also set
21755 in the target flags, or has a NULL name. */
21756
21757 i = 0;
21758 while (mapping[i].arg != NULL
21759 && strcmp (mapping[i].arg, cpu_id) != 0
21760 && (mapping[i].if_set & target_flags) == 0)
21761 i++;
21762
21763 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21764}
21765
ee890fe2 21766#endif /* TARGET_MACHO */
7c262518
RH
21767
21768#if TARGET_ELF
9b580a0b
RH
21769static int
21770rs6000_elf_reloc_rw_mask (void)
7c262518 21771{
9b580a0b
RH
21772 if (flag_pic)
21773 return 3;
21774 else if (DEFAULT_ABI == ABI_AIX)
21775 return 2;
21776 else
21777 return 0;
7c262518 21778}
d9f6800d
RH
21779
21780/* Record an element in the table of global constructors. SYMBOL is
21781 a SYMBOL_REF of the function to be called; PRIORITY is a number
21782 between 0 and MAX_INIT_PRIORITY.
21783
21784 This differs from default_named_section_asm_out_constructor in
21785 that we have special handling for -mrelocatable. */
21786
21787static void
a2369ed3 21788rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21789{
21790 const char *section = ".ctors";
21791 char buf[16];
21792
21793 if (priority != DEFAULT_INIT_PRIORITY)
21794 {
21795 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21796 /* Invert the numbering so the linker puts us in the proper
21797 order; constructors are run from right to left, and the
21798 linker sorts in increasing order. */
21799 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21800 section = buf;
21801 }
21802
d6b5193b 21803 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21804 assemble_align (POINTER_SIZE);
d9f6800d
RH
21805
21806 if (TARGET_RELOCATABLE)
21807 {
21808 fputs ("\t.long (", asm_out_file);
21809 output_addr_const (asm_out_file, symbol);
21810 fputs (")@fixup\n", asm_out_file);
21811 }
21812 else
c8af3574 21813 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21814}
21815
21816static void
a2369ed3 21817rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21818{
21819 const char *section = ".dtors";
21820 char buf[16];
21821
21822 if (priority != DEFAULT_INIT_PRIORITY)
21823 {
21824 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21825 /* Invert the numbering so the linker puts us in the proper
21826 order; constructors are run from right to left, and the
21827 linker sorts in increasing order. */
21828 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21829 section = buf;
21830 }
21831
d6b5193b 21832 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21833 assemble_align (POINTER_SIZE);
d9f6800d
RH
21834
21835 if (TARGET_RELOCATABLE)
21836 {
21837 fputs ("\t.long (", asm_out_file);
21838 output_addr_const (asm_out_file, symbol);
21839 fputs (")@fixup\n", asm_out_file);
21840 }
21841 else
c8af3574 21842 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21843}
9739c90c
JJ
21844
21845void
a2369ed3 21846rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21847{
21848 if (TARGET_64BIT)
21849 {
21850 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21851 ASM_OUTPUT_LABEL (file, name);
21852 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21853 rs6000_output_function_entry (file, name);
21854 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21855 if (DOT_SYMBOLS)
9739c90c 21856 {
85b776df 21857 fputs ("\t.size\t", file);
9739c90c 21858 assemble_name (file, name);
85b776df
AM
21859 fputs (",24\n\t.type\t.", file);
21860 assemble_name (file, name);
21861 fputs (",@function\n", file);
21862 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21863 {
21864 fputs ("\t.globl\t.", file);
21865 assemble_name (file, name);
21866 putc ('\n', file);
21867 }
9739c90c 21868 }
85b776df
AM
21869 else
21870 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21871 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21872 rs6000_output_function_entry (file, name);
21873 fputs (":\n", file);
9739c90c
JJ
21874 return;
21875 }
21876
21877 if (TARGET_RELOCATABLE
7f970b70 21878 && !TARGET_SECURE_PLT
e3b5732b 21879 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21880 && uses_TOC ())
9739c90c
JJ
21881 {
21882 char buf[256];
21883
21884 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21885
21886 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21887 fprintf (file, "\t.long ");
21888 assemble_name (file, buf);
21889 putc ('-', file);
21890 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21891 assemble_name (file, buf);
21892 putc ('\n', file);
21893 }
21894
21895 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21896 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21897
21898 if (DEFAULT_ABI == ABI_AIX)
21899 {
21900 const char *desc_name, *orig_name;
21901
21902 orig_name = (*targetm.strip_name_encoding) (name);
21903 desc_name = orig_name;
21904 while (*desc_name == '.')
21905 desc_name++;
21906
21907 if (TREE_PUBLIC (decl))
21908 fprintf (file, "\t.globl %s\n", desc_name);
21909
21910 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21911 fprintf (file, "%s:\n", desc_name);
21912 fprintf (file, "\t.long %s\n", orig_name);
21913 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21914 if (DEFAULT_ABI == ABI_AIX)
21915 fputs ("\t.long 0\n", file);
21916 fprintf (file, "\t.previous\n");
21917 }
21918 ASM_OUTPUT_LABEL (file, name);
21919}
1334b570
AM
21920
21921static void
21922rs6000_elf_end_indicate_exec_stack (void)
21923{
21924 if (TARGET_32BIT)
21925 file_end_indicate_exec_stack ();
21926}
7c262518
RH
21927#endif
21928
cbaaba19 21929#if TARGET_XCOFF
0d5817b2
DE
21930static void
21931rs6000_xcoff_asm_output_anchor (rtx symbol)
21932{
21933 char buffer[100];
21934
21935 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21936 SYMBOL_REF_BLOCK_OFFSET (symbol));
21937 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21938}
21939
7c262518 21940static void
a2369ed3 21941rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21942{
21943 fputs (GLOBAL_ASM_OP, stream);
21944 RS6000_OUTPUT_BASENAME (stream, name);
21945 putc ('\n', stream);
21946}
21947
d6b5193b
RS
21948/* A get_unnamed_section callback, used for read-only sections.  DIRECTIVE
 21949   points to the section string variable.  */
21950
21951static void
21952rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21953{
890f9edf
OH
21954 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21955 *(const char *const *) directive,
21956 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21957}
21958
21959/* Likewise for read-write sections. */
21960
21961static void
21962rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21963{
890f9edf
OH
21964 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21965 *(const char *const *) directive,
21966 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21967}
21968
21969/* A get_unnamed_section callback, used for switching to toc_section. */
21970
21971static void
21972rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21973{
21974 if (TARGET_MINIMAL_TOC)
21975 {
21976 /* toc_section is always selected at least once from
21977 rs6000_xcoff_file_start, so this is guaranteed to
21978 always be defined once and only once in each file. */
21979 if (!toc_initialized)
21980 {
21981 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21982 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21983 toc_initialized = 1;
21984 }
21985 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21986 (TARGET_32BIT ? "" : ",3"));
21987 }
21988 else
21989 fputs ("\t.toc\n", asm_out_file);
21990}
21991
21992/* Implement TARGET_ASM_INIT_SECTIONS. */
21993
21994static void
21995rs6000_xcoff_asm_init_sections (void)
21996{
21997 read_only_data_section
21998 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21999 &xcoff_read_only_section_name);
22000
22001 private_data_section
22002 = get_unnamed_section (SECTION_WRITE,
22003 rs6000_xcoff_output_readwrite_section_asm_op,
22004 &xcoff_private_data_section_name);
22005
22006 read_only_private_data_section
22007 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
22008 &xcoff_private_data_section_name);
22009
22010 toc_section
22011 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
22012
22013 readonly_data_section = read_only_data_section;
22014 exception_section = data_section;
22015}
22016
9b580a0b
RH
22017static int
22018rs6000_xcoff_reloc_rw_mask (void)
22019{
22020 return 3;
22021}
22022
b275d088 22023static void
c18a5b6c
MM
22024rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
22025 tree decl ATTRIBUTE_UNUSED)
7c262518 22026{
0e5dbd9b
DE
22027 int smclass;
22028 static const char * const suffix[3] = { "PR", "RO", "RW" };
22029
22030 if (flags & SECTION_CODE)
22031 smclass = 0;
22032 else if (flags & SECTION_WRITE)
22033 smclass = 2;
22034 else
22035 smclass = 1;
22036
5b5198f7 22037 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 22038 (flags & SECTION_CODE) ? "." : "",
5b5198f7 22039 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 22040}
ae46c4e0 22041
d6b5193b 22042static section *
f676971a 22043rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 22044 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 22045{
9b580a0b 22046 if (decl_readonly_section (decl, reloc))
ae46c4e0 22047 {
0e5dbd9b 22048 if (TREE_PUBLIC (decl))
d6b5193b 22049 return read_only_data_section;
ae46c4e0 22050 else
d6b5193b 22051 return read_only_private_data_section;
ae46c4e0
RH
22052 }
22053 else
22054 {
0e5dbd9b 22055 if (TREE_PUBLIC (decl))
d6b5193b 22056 return data_section;
ae46c4e0 22057 else
d6b5193b 22058 return private_data_section;
ae46c4e0
RH
22059 }
22060}
22061
22062static void
a2369ed3 22063rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
22064{
22065 const char *name;
ae46c4e0 22066
5b5198f7
DE
22067 /* Use select_section for private and uninitialized data. */
22068 if (!TREE_PUBLIC (decl)
22069 || DECL_COMMON (decl)
0e5dbd9b
DE
22070 || DECL_INITIAL (decl) == NULL_TREE
22071 || DECL_INITIAL (decl) == error_mark_node
22072 || (flag_zero_initialized_in_bss
22073 && initializer_zerop (DECL_INITIAL (decl))))
22074 return;
22075
22076 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
22077 name = (*targetm.strip_name_encoding) (name);
22078 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 22079}
b64a1b53 22080
fb49053f
RH
22081/* Select section for constant in constant pool.
22082
22083 On RS/6000, all constants are in the private read-only data area.
22084 However, if this is being placed in the TOC it must be output as a
22085 toc entry. */
22086
d6b5193b 22087static section *
f676971a 22088rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 22089 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
22090{
22091 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 22092 return toc_section;
b64a1b53 22093 else
d6b5193b 22094 return read_only_private_data_section;
b64a1b53 22095}
772c5265
RH
22096
22097/* Remove any trailing [DS] or the like from the symbol name. */
22098
22099static const char *
a2369ed3 22100rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
22101{
22102 size_t len;
22103 if (*name == '*')
22104 name++;
22105 len = strlen (name);
22106 if (name[len - 1] == ']')
22107 return ggc_alloc_string (name, len - 4);
22108 else
22109 return name;
22110}
22111
5add3202
DE
22112/* Section attributes. AIX is always PIC. */
22113
22114static unsigned int
a2369ed3 22115rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 22116{
5b5198f7 22117 unsigned int align;
9b580a0b 22118 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
22119
22120 /* Align to at least UNIT size. */
22121 if (flags & SECTION_CODE)
22122 align = MIN_UNITS_PER_WORD;
22123 else
22124 /* Increase alignment of large objects if not already stricter. */
22125 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
22126 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
22127 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
22128
22129 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 22130}
a5fe455b 22131
1bc7c5b6
ZW
22132/* Output at beginning of assembler file.
22133
22134 Initialize the section names for the RS/6000 at this point.
22135
22136 Specify filename, including full path, to assembler.
22137
22138 We want to go into the TOC section so at least one .toc will be emitted.
22139 Also, in order to output proper .bs/.es pairs, we need at least one static
22140 [RW] section emitted.
22141
22142 Finally, declare mcount when profiling to make the assembler happy. */
22143
22144static void
863d938c 22145rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
22146{
22147 rs6000_gen_section_name (&xcoff_bss_section_name,
22148 main_input_filename, ".bss_");
22149 rs6000_gen_section_name (&xcoff_private_data_section_name,
22150 main_input_filename, ".rw_");
22151 rs6000_gen_section_name (&xcoff_read_only_section_name,
22152 main_input_filename, ".ro_");
22153
22154 fputs ("\t.file\t", asm_out_file);
22155 output_quoted_string (asm_out_file, main_input_filename);
22156 fputc ('\n', asm_out_file);
1bc7c5b6 22157 if (write_symbols != NO_DEBUG)
d6b5193b
RS
22158 switch_to_section (private_data_section);
22159 switch_to_section (text_section);
1bc7c5b6
ZW
22160 if (profile_flag)
22161 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
22162 rs6000_file_start ();
22163}
22164
a5fe455b
ZW
22165/* Output at end of assembler file.
22166 On the RS/6000, referencing data should automatically pull in text. */
22167
22168static void
863d938c 22169rs6000_xcoff_file_end (void)
a5fe455b 22170{
d6b5193b 22171 switch_to_section (text_section);
a5fe455b 22172 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 22173 switch_to_section (data_section);
a5fe455b
ZW
22174 fputs (TARGET_32BIT
22175 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
22176 asm_out_file);
22177}
f1384257 22178#endif /* TARGET_XCOFF */
0e5dbd9b 22179
3c50106f
RH
22180/* Compute a (partial) cost for rtx X. Return true if the complete
22181 cost has been computed, and false if subexpressions should be
22182 scanned. In either case, *TOTAL contains the cost result. */
22183
22184static bool
f40751dd
JH
22185rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
22186 bool speed)
3c50106f 22187{
f0517163
RS
22188 enum machine_mode mode = GET_MODE (x);
22189
3c50106f
RH
22190 switch (code)
22191 {
30a555d9 22192 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 22193 case CONST_INT:
066cd967
DE
22194 if (((outer_code == SET
22195 || outer_code == PLUS
22196 || outer_code == MINUS)
279bb624
DE
22197 && (satisfies_constraint_I (x)
22198 || satisfies_constraint_L (x)))
066cd967 22199 || (outer_code == AND
279bb624
DE
22200 && (satisfies_constraint_K (x)
22201 || (mode == SImode
22202 ? satisfies_constraint_L (x)
22203 : satisfies_constraint_J (x))
1990cd79
AM
22204 || mask_operand (x, mode)
22205 || (mode == DImode
22206 && mask64_operand (x, DImode))))
22e54023 22207 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
22208 && (satisfies_constraint_K (x)
22209 || (mode == SImode
22210 ? satisfies_constraint_L (x)
22211 : satisfies_constraint_J (x))))
066cd967
DE
22212 || outer_code == ASHIFT
22213 || outer_code == ASHIFTRT
22214 || outer_code == LSHIFTRT
22215 || outer_code == ROTATE
22216 || outer_code == ROTATERT
d5861a7a 22217 || outer_code == ZERO_EXTRACT
066cd967 22218 || (outer_code == MULT
279bb624 22219 && satisfies_constraint_I (x))
22e54023
DE
22220 || ((outer_code == DIV || outer_code == UDIV
22221 || outer_code == MOD || outer_code == UMOD)
22222 && exact_log2 (INTVAL (x)) >= 0)
066cd967 22223 || (outer_code == COMPARE
279bb624
DE
22224 && (satisfies_constraint_I (x)
22225 || satisfies_constraint_K (x)))
22e54023 22226 || (outer_code == EQ
279bb624
DE
22227 && (satisfies_constraint_I (x)
22228 || satisfies_constraint_K (x)
22229 || (mode == SImode
22230 ? satisfies_constraint_L (x)
22231 : satisfies_constraint_J (x))))
22e54023 22232 || (outer_code == GTU
279bb624 22233 && satisfies_constraint_I (x))
22e54023 22234 || (outer_code == LTU
279bb624 22235 && satisfies_constraint_P (x)))
066cd967
DE
22236 {
22237 *total = 0;
22238 return true;
22239 }
22240 else if ((outer_code == PLUS
4ae234b0 22241 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 22242 || (outer_code == MINUS
4ae234b0 22243 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
22244 || ((outer_code == SET
22245 || outer_code == IOR
22246 || outer_code == XOR)
22247 && (INTVAL (x)
22248 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
22249 {
22250 *total = COSTS_N_INSNS (1);
22251 return true;
22252 }
22253 /* FALLTHRU */
22254
22255 case CONST_DOUBLE:
f6fe3a22 22256 if (mode == DImode && code == CONST_DOUBLE)
066cd967 22257 {
f6fe3a22
DE
22258 if ((outer_code == IOR || outer_code == XOR)
22259 && CONST_DOUBLE_HIGH (x) == 0
22260 && (CONST_DOUBLE_LOW (x)
22261 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
22262 {
22263 *total = 0;
22264 return true;
22265 }
22266 else if ((outer_code == AND && and64_2_operand (x, DImode))
22267 || ((outer_code == SET
22268 || outer_code == IOR
22269 || outer_code == XOR)
22270 && CONST_DOUBLE_HIGH (x) == 0))
22271 {
22272 *total = COSTS_N_INSNS (1);
22273 return true;
22274 }
066cd967
DE
22275 }
22276 /* FALLTHRU */
22277
3c50106f 22278 case CONST:
066cd967 22279 case HIGH:
3c50106f 22280 case SYMBOL_REF:
066cd967
DE
22281 case MEM:
22282 /* When optimizing for size, MEM should be slightly more expensive
22283 than generating address, e.g., (plus (reg) (const)).
c112cf2b 22284 L1 cache latency is about two instructions. */
f40751dd 22285 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
22286 return true;
22287
30a555d9
DE
22288 case LABEL_REF:
22289 *total = 0;
22290 return true;
22291
3c50106f 22292 case PLUS:
f0517163 22293 if (mode == DFmode)
066cd967
DE
22294 {
22295 if (GET_CODE (XEXP (x, 0)) == MULT)
22296 {
22297 /* FNMA accounted in outer NEG. */
22298 if (outer_code == NEG)
22299 *total = rs6000_cost->dmul - rs6000_cost->fp;
22300 else
22301 *total = rs6000_cost->dmul;
22302 }
22303 else
22304 *total = rs6000_cost->fp;
22305 }
f0517163 22306 else if (mode == SFmode)
066cd967
DE
22307 {
22308 /* FNMA accounted in outer NEG. */
22309 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
22310 *total = 0;
22311 else
22312 *total = rs6000_cost->fp;
22313 }
f0517163 22314 else
066cd967
DE
22315 *total = COSTS_N_INSNS (1);
22316 return false;
3c50106f 22317
52190329 22318 case MINUS:
f0517163 22319 if (mode == DFmode)
066cd967 22320 {
762c919f
JM
22321 if (GET_CODE (XEXP (x, 0)) == MULT
22322 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
22323 {
22324 /* FNMA accounted in outer NEG. */
22325 if (outer_code == NEG)
762c919f 22326 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
22327 else
22328 *total = rs6000_cost->dmul;
22329 }
22330 else
22331 *total = rs6000_cost->fp;
22332 }
f0517163 22333 else if (mode == SFmode)
066cd967
DE
22334 {
22335 /* FNMA accounted in outer NEG. */
22336 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
22337 *total = 0;
22338 else
22339 *total = rs6000_cost->fp;
22340 }
f0517163 22341 else
c4ad648e 22342 *total = COSTS_N_INSNS (1);
066cd967 22343 return false;
3c50106f
RH
22344
22345 case MULT:
c9dbf840 22346 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 22347 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 22348 {
8b897cfa
RS
22349 if (INTVAL (XEXP (x, 1)) >= -256
22350 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 22351 *total = rs6000_cost->mulsi_const9;
8b897cfa 22352 else
06a67bdd 22353 *total = rs6000_cost->mulsi_const;
3c50106f 22354 }
066cd967
DE
22355 /* FMA accounted in outer PLUS/MINUS. */
22356 else if ((mode == DFmode || mode == SFmode)
22357 && (outer_code == PLUS || outer_code == MINUS))
22358 *total = 0;
f0517163 22359 else if (mode == DFmode)
06a67bdd 22360 *total = rs6000_cost->dmul;
f0517163 22361 else if (mode == SFmode)
06a67bdd 22362 *total = rs6000_cost->fp;
f0517163 22363 else if (mode == DImode)
06a67bdd 22364 *total = rs6000_cost->muldi;
8b897cfa 22365 else
06a67bdd 22366 *total = rs6000_cost->mulsi;
066cd967 22367 return false;
3c50106f
RH
22368
22369 case DIV:
22370 case MOD:
f0517163
RS
22371 if (FLOAT_MODE_P (mode))
22372 {
06a67bdd
RS
22373 *total = mode == DFmode ? rs6000_cost->ddiv
22374 : rs6000_cost->sdiv;
066cd967 22375 return false;
f0517163 22376 }
5efb1046 22377 /* FALLTHRU */
3c50106f
RH
22378
22379 case UDIV:
22380 case UMOD:
627b6fe2
DJ
22381 if (GET_CODE (XEXP (x, 1)) == CONST_INT
22382 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
22383 {
22384 if (code == DIV || code == MOD)
22385 /* Shift, addze */
22386 *total = COSTS_N_INSNS (2);
22387 else
22388 /* Shift */
22389 *total = COSTS_N_INSNS (1);
22390 }
c4ad648e 22391 else
627b6fe2
DJ
22392 {
22393 if (GET_MODE (XEXP (x, 1)) == DImode)
22394 *total = rs6000_cost->divdi;
22395 else
22396 *total = rs6000_cost->divsi;
22397 }
22398 /* Add in shift and subtract for MOD. */
22399 if (code == MOD || code == UMOD)
22400 *total += COSTS_N_INSNS (2);
066cd967 22401 return false;
3c50106f 22402
32f56aad 22403 case CTZ:
3c50106f
RH
22404 case FFS:
22405 *total = COSTS_N_INSNS (4);
066cd967 22406 return false;
3c50106f 22407
32f56aad
DE
22408 case POPCOUNT:
22409 *total = COSTS_N_INSNS (6);
22410 return false;
22411
06a67bdd 22412 case NOT:
066cd967
DE
22413 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
22414 {
22415 *total = 0;
22416 return false;
22417 }
22418 /* FALLTHRU */
22419
22420 case AND:
32f56aad 22421 case CLZ:
066cd967
DE
22422 case IOR:
22423 case XOR:
d5861a7a
DE
22424 case ZERO_EXTRACT:
22425 *total = COSTS_N_INSNS (1);
22426 return false;
22427
066cd967
DE
22428 case ASHIFT:
22429 case ASHIFTRT:
22430 case LSHIFTRT:
22431 case ROTATE:
22432 case ROTATERT:
d5861a7a 22433 /* Handle mul_highpart. */
066cd967
DE
22434 if (outer_code == TRUNCATE
22435 && GET_CODE (XEXP (x, 0)) == MULT)
22436 {
22437 if (mode == DImode)
22438 *total = rs6000_cost->muldi;
22439 else
22440 *total = rs6000_cost->mulsi;
22441 return true;
22442 }
d5861a7a
DE
22443 else if (outer_code == AND)
22444 *total = 0;
22445 else
22446 *total = COSTS_N_INSNS (1);
22447 return false;
22448
22449 case SIGN_EXTEND:
22450 case ZERO_EXTEND:
22451 if (GET_CODE (XEXP (x, 0)) == MEM)
22452 *total = 0;
22453 else
22454 *total = COSTS_N_INSNS (1);
066cd967 22455 return false;
06a67bdd 22456
066cd967
DE
22457 case COMPARE:
22458 case NEG:
22459 case ABS:
22460 if (!FLOAT_MODE_P (mode))
22461 {
22462 *total = COSTS_N_INSNS (1);
22463 return false;
22464 }
22465 /* FALLTHRU */
22466
22467 case FLOAT:
22468 case UNSIGNED_FLOAT:
22469 case FIX:
22470 case UNSIGNED_FIX:
06a67bdd
RS
22471 case FLOAT_TRUNCATE:
22472 *total = rs6000_cost->fp;
066cd967 22473 return false;
06a67bdd 22474
a2af5043
DJ
22475 case FLOAT_EXTEND:
22476 if (mode == DFmode)
22477 *total = 0;
22478 else
22479 *total = rs6000_cost->fp;
22480 return false;
22481
06a67bdd
RS
22482 case UNSPEC:
22483 switch (XINT (x, 1))
22484 {
22485 case UNSPEC_FRSP:
22486 *total = rs6000_cost->fp;
22487 return true;
22488
22489 default:
22490 break;
22491 }
22492 break;
22493
22494 case CALL:
22495 case IF_THEN_ELSE:
f40751dd 22496 if (!speed)
06a67bdd
RS
22497 {
22498 *total = COSTS_N_INSNS (1);
22499 return true;
22500 }
066cd967
DE
22501 else if (FLOAT_MODE_P (mode)
22502 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22503 {
22504 *total = rs6000_cost->fp;
22505 return false;
22506 }
06a67bdd
RS
22507 break;
22508
c0600ecd
DE
22509 case EQ:
22510 case GTU:
22511 case LTU:
22e54023
DE
22512 /* Carry bit requires mode == Pmode.
22513 NEG or PLUS already counted so only add one. */
22514 if (mode == Pmode
22515 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22516 {
22e54023
DE
22517 *total = COSTS_N_INSNS (1);
22518 return true;
22519 }
22520 if (outer_code == SET)
22521 {
22522 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22523 {
22e54023 22524 *total = COSTS_N_INSNS (2);
c0600ecd 22525 return true;
c0600ecd 22526 }
22e54023
DE
22527 else if (mode == Pmode)
22528 {
22529 *total = COSTS_N_INSNS (3);
22530 return false;
22531 }
22532 }
22533 /* FALLTHRU */
22534
22535 case GT:
22536 case LT:
22537 case UNORDERED:
22538 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22539 {
22540 *total = COSTS_N_INSNS (2);
22541 return true;
c0600ecd 22542 }
22e54023
DE
22543 /* CC COMPARE. */
22544 if (outer_code == COMPARE)
22545 {
22546 *total = 0;
22547 return true;
22548 }
22549 break;
c0600ecd 22550
3c50106f 22551 default:
06a67bdd 22552 break;
3c50106f 22553 }
06a67bdd
RS
22554
22555 return false;
3c50106f
RH
22556}
22557
34bb030a
DE
22558/* A C expression returning the cost of moving data from a register of class
22559 CLASS1 to one of CLASS2. */
22560
22561int
f676971a 22562rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22563 enum reg_class from, enum reg_class to)
34bb030a
DE
22564{
22565 /* Moves from/to GENERAL_REGS. */
22566 if (reg_classes_intersect_p (to, GENERAL_REGS)
22567 || reg_classes_intersect_p (from, GENERAL_REGS))
22568 {
22569 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22570 from = to;
22571
22572 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22573 return (rs6000_memory_move_cost (mode, from, 0)
22574 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22575
c4ad648e
AM
22576 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22577 shift. */
34bb030a
DE
22578 else if (from == CR_REGS)
22579 return 4;
22580
aafc759a
PH
22581 /* Power6 has slower LR/CTR moves so make them more expensive than
 22582	 memory in order to bias spills to memory.  */
22583 else if (rs6000_cpu == PROCESSOR_POWER6
22584 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22585 return 6 * hard_regno_nregs[0][mode];
22586
34bb030a 22587 else
c4ad648e 22588 /* A move will cost one instruction per GPR moved. */
c8b622ff 22589 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22590 }
22591
c4ad648e 22592 /* Moving between two similar registers is just one instruction. */
34bb030a 22593 else if (reg_classes_intersect_p (to, from))
7393f7f8 22594 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22595
c4ad648e 22596 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22597 else
f676971a 22598 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22599 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22600}
22601
22602/* A C expression returning the cost of moving data of MODE from a register to
22603 or from memory. */
22604
22605int
0a2aaacc 22606rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22607 int in ATTRIBUTE_UNUSED)
34bb030a 22608{
0a2aaacc 22609 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22610 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22611 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22612 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22613 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22614 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22615 else
0a2aaacc 22616 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22617}
22618
9c78b944
DE
22619/* Return the decl of a target-specific builtin that implements the
 22620   reciprocal of function FN, or NULL_TREE if not available.  */
22621
22622static tree
22623rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22624 bool sqrt ATTRIBUTE_UNUSED)
22625{
22626 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22627 && flag_finite_math_only && !flag_trapping_math
22628 && flag_unsafe_math_optimizations))
22629 return NULL_TREE;
22630
22631 if (md_fn)
22632 return NULL_TREE;
22633 else
22634 switch (fn)
22635 {
22636 case BUILT_IN_SQRTF:
22637 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22638
22639 default:
22640 return NULL_TREE;
22641 }
22642}
22643
ef765ea9
DE
22644/* Newton-Raphson approximation of single-precision floating point divide n/d.
22645 Assumes no trapping math and finite arguments. */
22646
22647void
9c78b944 22648rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22649{
22650 rtx x0, e0, e1, y1, u0, v0, one;
22651
22652 x0 = gen_reg_rtx (SFmode);
22653 e0 = gen_reg_rtx (SFmode);
22654 e1 = gen_reg_rtx (SFmode);
22655 y1 = gen_reg_rtx (SFmode);
22656 u0 = gen_reg_rtx (SFmode);
22657 v0 = gen_reg_rtx (SFmode);
22658 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22659
22660 /* x0 = 1./d estimate */
22661 emit_insn (gen_rtx_SET (VOIDmode, x0,
22662 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22663 UNSPEC_FRES)));
22664 /* e0 = 1. - d * x0 */
22665 emit_insn (gen_rtx_SET (VOIDmode, e0,
22666 gen_rtx_MINUS (SFmode, one,
22667 gen_rtx_MULT (SFmode, d, x0))));
22668 /* e1 = e0 + e0 * e0 */
22669 emit_insn (gen_rtx_SET (VOIDmode, e1,
22670 gen_rtx_PLUS (SFmode,
22671 gen_rtx_MULT (SFmode, e0, e0), e0)));
22672 /* y1 = x0 + e1 * x0 */
22673 emit_insn (gen_rtx_SET (VOIDmode, y1,
22674 gen_rtx_PLUS (SFmode,
22675 gen_rtx_MULT (SFmode, e1, x0), x0)));
22676 /* u0 = n * y1 */
22677 emit_insn (gen_rtx_SET (VOIDmode, u0,
22678 gen_rtx_MULT (SFmode, n, y1)));
22679 /* v0 = n - d * u0 */
22680 emit_insn (gen_rtx_SET (VOIDmode, v0,
22681 gen_rtx_MINUS (SFmode, n,
22682 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22683 /* dst = u0 + v0 * y1 */
22684 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22685 gen_rtx_PLUS (SFmode,
22686 gen_rtx_MULT (SFmode, v0, y1), u0)));
22687}
22688
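/* Editorial sketch, not part of rs6000.c: the refinement sequence emitted by
   rs6000_emit_swdivsf above, written in plain C so the convergence can be
   checked on the host.  The deliberately degraded seed stands in for the
   roughly 12-bit estimate the fres instruction (UNSPEC_FRES) would supply.  */

#include <stdio.h>

static float
swdiv_sf (float n, float d)
{
  float x0 = (1.0f / d) * (1.0f + 0x1p-13f);	/* ~12-bit 1/d estimate   */
  float e0 = 1.0f - d * x0;			/* e0 = 1 - d*x0          */
  float e1 = e0 + e0 * e0;			/* e1 = e0 + e0*e0        */
  float y  = x0 + e1 * x0;			/* y1 = x0 + e1*x0        */
  float u0 = n * y;				/* u0 = n*y1              */
  float v0 = n - d * u0;			/* v0 = n - d*u0          */
  return u0 + v0 * y;				/* dst = u0 + v0*y1       */
}

int
main (void)
{
  printf ("355/113 = %.9g (direct: %.9g)\n",
	  swdiv_sf (355.0f, 113.0f), 355.0f / 113.0f);
  return 0;
}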
22689/* Newton-Raphson approximation of double-precision floating point divide n/d.
22690 Assumes no trapping math and finite arguments. */
22691
22692void
9c78b944 22693rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22694{
22695 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22696
22697 x0 = gen_reg_rtx (DFmode);
22698 e0 = gen_reg_rtx (DFmode);
22699 e1 = gen_reg_rtx (DFmode);
22700 e2 = gen_reg_rtx (DFmode);
22701 y1 = gen_reg_rtx (DFmode);
22702 y2 = gen_reg_rtx (DFmode);
22703 y3 = gen_reg_rtx (DFmode);
22704 u0 = gen_reg_rtx (DFmode);
22705 v0 = gen_reg_rtx (DFmode);
22706 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22707
22708 /* x0 = 1./d estimate */
22709 emit_insn (gen_rtx_SET (VOIDmode, x0,
22710 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22711 UNSPEC_FRES)));
22712 /* e0 = 1. - d * x0 */
22713 emit_insn (gen_rtx_SET (VOIDmode, e0,
22714 gen_rtx_MINUS (DFmode, one,
 22715					 gen_rtx_MULT (DFmode, d, x0))));
22716 /* y1 = x0 + e0 * x0 */
22717 emit_insn (gen_rtx_SET (VOIDmode, y1,
22718 gen_rtx_PLUS (DFmode,
22719 gen_rtx_MULT (DFmode, e0, x0), x0)));
22720 /* e1 = e0 * e0 */
22721 emit_insn (gen_rtx_SET (VOIDmode, e1,
22722 gen_rtx_MULT (DFmode, e0, e0)));
22723 /* y2 = y1 + e1 * y1 */
22724 emit_insn (gen_rtx_SET (VOIDmode, y2,
22725 gen_rtx_PLUS (DFmode,
22726 gen_rtx_MULT (DFmode, e1, y1), y1)));
22727 /* e2 = e1 * e1 */
22728 emit_insn (gen_rtx_SET (VOIDmode, e2,
22729 gen_rtx_MULT (DFmode, e1, e1)));
22730 /* y3 = y2 + e2 * y2 */
22731 emit_insn (gen_rtx_SET (VOIDmode, y3,
22732 gen_rtx_PLUS (DFmode,
22733 gen_rtx_MULT (DFmode, e2, y2), y2)));
22734 /* u0 = n * y3 */
22735 emit_insn (gen_rtx_SET (VOIDmode, u0,
22736 gen_rtx_MULT (DFmode, n, y3)));
22737 /* v0 = n - d * u0 */
22738 emit_insn (gen_rtx_SET (VOIDmode, v0,
22739 gen_rtx_MINUS (DFmode, n,
22740 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22741 /* dst = u0 + v0 * y3 */
22742 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22743 gen_rtx_PLUS (DFmode,
22744 gen_rtx_MULT (DFmode, v0, y3), u0)));
22745}
22746
565ef4ba 22747
9c78b944
DE
22748/* Newton-Raphson approximation of single-precision floating point rsqrt.
22749 Assumes no trapping math and finite arguments. */
22750
22751void
22752rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22753{
22754 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22755 half, one, halfthree, c1, cond, label;
22756
22757 x0 = gen_reg_rtx (SFmode);
22758 x1 = gen_reg_rtx (SFmode);
22759 x2 = gen_reg_rtx (SFmode);
22760 y1 = gen_reg_rtx (SFmode);
22761 u0 = gen_reg_rtx (SFmode);
22762 u1 = gen_reg_rtx (SFmode);
22763 u2 = gen_reg_rtx (SFmode);
22764 v0 = gen_reg_rtx (SFmode);
22765 v1 = gen_reg_rtx (SFmode);
22766 v2 = gen_reg_rtx (SFmode);
22767 t0 = gen_reg_rtx (SFmode);
22768 halfthree = gen_reg_rtx (SFmode);
22769 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22770 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22771
22772 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
22773 emit_insn (gen_rtx_SET (VOIDmode, t0,
22774 gen_rtx_MULT (SFmode, src, src)));
22775
22776 emit_insn (gen_rtx_SET (VOIDmode, cond,
22777 gen_rtx_COMPARE (CCFPmode, t0, src)));
22778 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22779 emit_unlikely_jump (c1, label);
22780
22781 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22782 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22783
22784 /* halfthree = 1.5 = 1.0 + 0.5 */
22785 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22786 gen_rtx_PLUS (SFmode, one, half)));
22787
22788 /* x0 = rsqrt estimate */
22789 emit_insn (gen_rtx_SET (VOIDmode, x0,
22790 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22791 UNSPEC_RSQRT)));
22792
22793 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22794 emit_insn (gen_rtx_SET (VOIDmode, y1,
22795 gen_rtx_MINUS (SFmode,
22796 gen_rtx_MULT (SFmode, src, halfthree),
22797 src)));
22798
22799 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22800 emit_insn (gen_rtx_SET (VOIDmode, u0,
22801 gen_rtx_MULT (SFmode, x0, x0)));
22802 emit_insn (gen_rtx_SET (VOIDmode, v0,
22803 gen_rtx_MINUS (SFmode,
22804 halfthree,
22805 gen_rtx_MULT (SFmode, y1, u0))));
22806 emit_insn (gen_rtx_SET (VOIDmode, x1,
22807 gen_rtx_MULT (SFmode, x0, v0)));
22808
22809 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22810 emit_insn (gen_rtx_SET (VOIDmode, u1,
22811 gen_rtx_MULT (SFmode, x1, x1)));
22812 emit_insn (gen_rtx_SET (VOIDmode, v1,
22813 gen_rtx_MINUS (SFmode,
22814 halfthree,
22815 gen_rtx_MULT (SFmode, y1, u1))));
22816 emit_insn (gen_rtx_SET (VOIDmode, x2,
22817 gen_rtx_MULT (SFmode, x1, v1)));
22818
22819 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22820 emit_insn (gen_rtx_SET (VOIDmode, u2,
22821 gen_rtx_MULT (SFmode, x2, x2)));
22822 emit_insn (gen_rtx_SET (VOIDmode, v2,
22823 gen_rtx_MINUS (SFmode,
22824 halfthree,
22825 gen_rtx_MULT (SFmode, y1, u2))));
22826 emit_insn (gen_rtx_SET (VOIDmode, dst,
22827 gen_rtx_MULT (SFmode, x2, v2)));
22828
22829 emit_label (XEXP (label, 0));
22830}
22831
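/* Editorial sketch, not part of rs6000.c: the three Newton steps emitted by
   rs6000_emit_swrsqrtsf above, x' = x * (1.5 - (0.5*src) * x*x), in plain C.
   The degraded seed stands in for the frsqrte estimate (UNSPEC_RSQRT), and
   the early-out for 0.0, 1.0, NaN and Inf is omitted here.  */

#include <math.h>
#include <stdio.h>

static float
swrsqrt_sf (float src)
{
  float halfthree = 1.0f + 0.5f;			/* 1.5            */
  float y = src * halfthree - src;			/* y1 = 0.5 * src */
  float x = (1.0f / sqrtf (src)) * (1.0f + 0x1p-6f);	/* ~5-bit seed    */
  int i;

  for (i = 0; i < 3; i++)		/* matches the x1, x2, dst steps  */
    x = x * (halfthree - y * (x * x));
  return x;
}

int
main (void)
{
  printf ("rsqrt(2) = %.9g (libm: %.9g)\n",
	  swrsqrt_sf (2.0f), 1.0f / sqrtf (2.0f));
  return 0;
}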
565ef4ba
RS
22832/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22833 target, and SRC is the argument operand. */
22834
22835void
22836rs6000_emit_popcount (rtx dst, rtx src)
22837{
22838 enum machine_mode mode = GET_MODE (dst);
22839 rtx tmp1, tmp2;
22840
22841 tmp1 = gen_reg_rtx (mode);
22842
22843 if (mode == SImode)
22844 {
22845 emit_insn (gen_popcntbsi2 (tmp1, src));
22846 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22847 NULL_RTX, 0);
22848 tmp2 = force_reg (SImode, tmp2);
22849 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22850 }
22851 else
22852 {
22853 emit_insn (gen_popcntbdi2 (tmp1, src));
22854 tmp2 = expand_mult (DImode, tmp1,
22855 GEN_INT ((HOST_WIDE_INT)
22856 0x01010101 << 32 | 0x01010101),
22857 NULL_RTX, 0);
22858 tmp2 = force_reg (DImode, tmp2);
22859 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22860 }
22861}
22862
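/* Editorial sketch, not part of rs6000.c: the SImode path above in plain C.
   popcntbsi2 leaves a population count in every byte of the word; multiplying
   by 0x01010101 sums those four byte counts into the most significant byte,
   and the shift by 24 extracts the total.  The per-byte helper emulates what
   the popcntb instruction computes.  */

#include <stdint.h>
#include <stdio.h>

static uint32_t
per_byte_popcount (uint32_t x)		/* stand-in for popcntbsi2 */
{
  uint32_t result = 0;
  int byte;

  for (byte = 0; byte < 4; byte++)
    {
      uint32_t b = (x >> (8 * byte)) & 0xff;
      uint32_t count = 0;
      while (b)
	{
	  count += b & 1;
	  b >>= 1;
	}
      result |= count << (8 * byte);
    }
  return result;
}

static uint32_t
popcount_si (uint32_t src)
{
  uint32_t tmp1 = per_byte_popcount (src);
  uint32_t tmp2 = tmp1 * 0x01010101u;	/* sum the four byte counts      */
  return tmp2 >> 24;			/* total lands in the top byte   */
}

int
main (void)
{
  printf ("%u\n", popcount_si (0xF0F0F00Fu));	/* prints 16 */
  return 0;
}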
22863
22864/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22865 target, and SRC is the argument operand. */
22866
22867void
22868rs6000_emit_parity (rtx dst, rtx src)
22869{
22870 enum machine_mode mode = GET_MODE (dst);
22871 rtx tmp;
22872
22873 tmp = gen_reg_rtx (mode);
22874 if (mode == SImode)
22875 {
22876 /* Is mult+shift >= shift+xor+shift+xor? */
22877 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22878 {
22879 rtx tmp1, tmp2, tmp3, tmp4;
22880
22881 tmp1 = gen_reg_rtx (SImode);
22882 emit_insn (gen_popcntbsi2 (tmp1, src));
22883
22884 tmp2 = gen_reg_rtx (SImode);
22885 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22886 tmp3 = gen_reg_rtx (SImode);
22887 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22888
22889 tmp4 = gen_reg_rtx (SImode);
22890 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22891 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22892 }
22893 else
22894 rs6000_emit_popcount (tmp, src);
22895 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22896 }
22897 else
22898 {
22899 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22900 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22901 {
22902 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22903
22904 tmp1 = gen_reg_rtx (DImode);
22905 emit_insn (gen_popcntbdi2 (tmp1, src));
22906
22907 tmp2 = gen_reg_rtx (DImode);
22908 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22909 tmp3 = gen_reg_rtx (DImode);
22910 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22911
22912 tmp4 = gen_reg_rtx (DImode);
22913 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22914 tmp5 = gen_reg_rtx (DImode);
22915 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22916
22917 tmp6 = gen_reg_rtx (DImode);
22918 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22919 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22920 }
22921 else
22922 rs6000_emit_popcount (tmp, src);
22923 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22924 }
22925}
22926
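/* Editorial sketch, not part of rs6000.c: the shift/xor fallback used by the
   SImode branch of rs6000_emit_parity above.  Starting from the per-byte
   counts popcntb produces, xor-folding the half-words and then the bytes
   leaves the parity of the whole word in bit 0.  The SWAR helper below is a
   hypothetical stand-in for the popcntb result.  */

#include <stdint.h>
#include <stdio.h>

static uint32_t
per_byte_popcount (uint32_t x)
{
  x = x - ((x >> 1) & 0x55555555u);			/* 2-bit sums  */
  x = (x & 0x33333333u) + ((x >> 2) & 0x33333333u);	/* 4-bit sums  */
  return (x + (x >> 4)) & 0x0f0f0f0fu;			/* byte sums   */
}

static uint32_t
parity_si (uint32_t src)
{
  uint32_t t1 = per_byte_popcount (src);
  uint32_t t3 = t1 ^ (t1 >> 16);	/* fold the two half-words        */
  uint32_t t5 = t3 ^ (t3 >> 8);		/* fold the remaining two bytes   */
  return t5 & 1;			/* bit 0 is the parity            */
}

int
main (void)
{
  printf ("%u %u\n", parity_si (0x1u), parity_si (0x3u));	/* prints "1 0" */
  return 0;
}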
ded9bf77
AH
22927/* Return an RTX representing where to find the function value of a
22928 function returning MODE. */
22929static rtx
22930rs6000_complex_function_value (enum machine_mode mode)
22931{
22932 unsigned int regno;
22933 rtx r1, r2;
22934 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22935 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22936
18f63bfa
AH
22937 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22938 regno = FP_ARG_RETURN;
354ed18f
AH
22939 else
22940 {
18f63bfa 22941 regno = GP_ARG_RETURN;
ded9bf77 22942
18f63bfa
AH
22943 /* 32-bit is OK since it'll go in r3/r4. */
22944 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22945 return gen_rtx_REG (mode, regno);
22946 }
22947
18f63bfa
AH
22948 if (inner_bytes >= 8)
22949 return gen_rtx_REG (mode, regno);
22950
ded9bf77
AH
22951 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22952 const0_rtx);
22953 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22954 GEN_INT (inner_bytes));
ded9bf77
AH
22955 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22956}
22957
a6ebc39a
AH
22958/* Define how to find the value returned by a function.
22959 VALTYPE is the data type of the value (as a tree).
22960 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22961 otherwise, FUNC is 0.
22962
22963 On the SPE, both FPs and vectors are returned in r3.
22964
22965 On RS/6000 an integer value is in r3 and a floating-point value is in
22966 fp1, unless -msoft-float. */
22967
22968rtx
586de218 22969rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22970{
22971 enum machine_mode mode;
2a8fa26c 22972 unsigned int regno;
a6ebc39a 22973
594a51fe
SS
22974 /* Special handling for structs in darwin64. */
22975 if (rs6000_darwin64_abi
22976 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22977 && TREE_CODE (valtype) == RECORD_TYPE
22978 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22979 {
22980 CUMULATIVE_ARGS valcum;
22981 rtx valret;
22982
0b5383eb 22983 valcum.words = 0;
594a51fe
SS
22984 valcum.fregno = FP_ARG_MIN_REG;
22985 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22986 /* Do a trial code generation as if this were going to be passed as
22987 an argument; if any part goes in memory, we return NULL. */
22988 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22989 if (valret)
22990 return valret;
22991 /* Otherwise fall through to standard ABI rules. */
22992 }
22993
0e67400a
FJ
22994 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22995 {
 22996	 /* Long long return values need to be split for the 32-bit -mpowerpc64 ABI.  */
22997 return gen_rtx_PARALLEL (DImode,
22998 gen_rtvec (2,
22999 gen_rtx_EXPR_LIST (VOIDmode,
23000 gen_rtx_REG (SImode, GP_ARG_RETURN),
23001 const0_rtx),
23002 gen_rtx_EXPR_LIST (VOIDmode,
23003 gen_rtx_REG (SImode,
23004 GP_ARG_RETURN + 1),
23005 GEN_INT (4))));
23006 }
0f086e42
FJ
23007 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
23008 {
23009 return gen_rtx_PARALLEL (DCmode,
23010 gen_rtvec (4,
23011 gen_rtx_EXPR_LIST (VOIDmode,
23012 gen_rtx_REG (SImode, GP_ARG_RETURN),
23013 const0_rtx),
23014 gen_rtx_EXPR_LIST (VOIDmode,
23015 gen_rtx_REG (SImode,
23016 GP_ARG_RETURN + 1),
23017 GEN_INT (4)),
23018 gen_rtx_EXPR_LIST (VOIDmode,
23019 gen_rtx_REG (SImode,
23020 GP_ARG_RETURN + 2),
23021 GEN_INT (8)),
23022 gen_rtx_EXPR_LIST (VOIDmode,
23023 gen_rtx_REG (SImode,
23024 GP_ARG_RETURN + 3),
23025 GEN_INT (12))));
23026 }

  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    /* _Decimal128 must use an even/odd register pair.  */
    regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS
           && ((TARGET_SINGLE_FLOAT && (mode == SFmode)) || TARGET_DOUBLE_FLOAT))
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
           && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
           && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
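
/* Editor's illustrative sketch, not part of the original source: assuming
   GP_ARG_RETURN is r3, the -m32 -mpowerpc64 DImode case above returns a
   PARALLEL roughly of the form

     (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                   (expr_list (reg:SI 4) (const_int 4))])

   i.e. bytes 0-3 of the long long are returned in r3 and bytes 4-7 in r4;
   the DCmode case extends the same scheme across r3-r6.  */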

/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* A long long return value needs to be split under the 32-bit ABI
         with -mpowerpc64.  */
      return gen_rtx_PARALLEL (DImode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_REG (SImode, GP_ARG_RETURN),
                                      const0_rtx),
                   gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_REG (SImode,
                                                   GP_ARG_RETURN + 1),
                                      GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    /* _Decimal128 must use an even/odd register pair.  */
    regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
  else if (SCALAR_FLOAT_MODE_P (mode)
           && TARGET_HARD_FLOAT && TARGET_FPRS
           && ((TARGET_SINGLE_FLOAT && mode == SFmode) || TARGET_DOUBLE_FLOAT))
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
        offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
             ? info->fixed_size + info->vars_size + info->parm_size
             : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}
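
/* Editor's illustrative sketch, not part of the original source: for a
   routine that allocates no stack (info->push_p == 0) with
   FRAME_GROWS_DOWNWARD, the FRAME_POINTER_REGNUM -> STACK_POINTER_REGNUM
   case above computes

     offset = -total_size + fixed_size + vars_size + parm_size

   which is the constant added to soft-frame-pointer-relative addresses
   when they are rewritten in terms of the stack pointer.  */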

/* Describe, for the DWARF unwinder, how the value of a 64-bit SPE
   register is spread across its 32-bit low half and the synthetic
   high-part register numbered REGNO + 1200.  Returns NULL_RTX for
   registers that need no span.  */
static rtx
rs6000_dwarf_register_span (rtx reg)
{
  rtx parts[8];
  int i, words;
  unsigned regno = REGNO (reg);
  enum machine_mode mode = GET_MODE (reg);

  if (TARGET_SPE
      && regno < 32
      && (SPE_VECTOR_MODE (GET_MODE (reg))
          || (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode)
              && mode != SFmode && mode != SDmode && mode != SCmode)))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  words = (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
  gcc_assert (words <= 4);
  for (i = 0; i < words; i++, regno++)
    {
      if (BYTES_BIG_ENDIAN)
        {
          parts[2 * i] = gen_rtx_REG (SImode, regno + 1200);
          parts[2 * i + 1] = gen_rtx_REG (SImode, regno);
        }
      else
        {
          parts[2 * i] = gen_rtx_REG (SImode, regno);
          parts[2 * i + 1] = gen_rtx_REG (SImode, regno + 1200);
        }
    }

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (words * 2, parts));
}
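
/* Editor's illustrative sketch, not part of the original source: for a
   64-bit SPE value held in r5 on a big-endian target, the span built
   above covers a single word, pairing the synthetic high-part register
   1205 with the ordinary low half:

     (parallel [(reg:SI 1205) (reg:SI 5)])

   rs6000_init_dwarf_reg_sizes_extra below records each high part as
   4 bytes wide, and rs6000_dbx_register_number passes the 12xx numbers
   through to the unwinder.  */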

/* Fill in sizes for SPE register high parts in table used by unwinder.  */

static void
rs6000_init_dwarf_reg_sizes_extra (tree address)
{
  if (TARGET_SPE)
    {
      int i;
      enum machine_mode mode = TYPE_MODE (char_type_node);
      rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      rtx mem = gen_rtx_MEM (BLKmode, addr);
      rtx value = gen_int_mode (4, mode);

      for (i = 1201; i < 1232; i++)
        {
          int column = DWARF_REG_TO_UNWIND_COLUMN (i);
          HOST_WIDE_INT offset
            = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);

          emit_move_insn (adjust_address (mem, mode, offset), value);
        }
    }
}

/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LR_REGNO)
    return 108;
  if (regno == CTR_REGNO)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}
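
/* Editor's illustrative sketch, not part of the original source: with
   DWARF 2 debug info the mapping above gives, for example,

     rs6000_dbx_register_number (LR_REGNO)            => 108
     rs6000_dbx_register_number (CR2_REGNO)           => 88
     rs6000_dbx_register_number (FIRST_ALTIVEC_REGNO) => 1124

   while GPRs and FPRs (registers 0-63) keep their internal numbers.  */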

/* Target hook for eh_return_filter_mode.  */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}

/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
    return true;

  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;

  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;

  else
    return false;
}

/* Target hook for invalid_arg_for_unprototyped_fn.  */
static const char *
invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
{
  return (!rs6000_darwin64_abi
          && typelist == 0
          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
          && (funcdecl == NULL_TREE
              || (TREE_CODE (funcdecl) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
         ? N_("AltiVec argument passed to unprototyped function")
         : NULL;
}
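
/* Editor's illustrative sketch, not part of the original source: the
   message above is issued for a call such as

     extern void foo ();   // no prototype
     vector int v;
     foo (v);              // "AltiVec argument passed to unprototyped
                           //  function"

   outside the darwin64 ABI; calls to machine-specific built-in
   functions (BUILT_IN_MD) are exempt.  */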

/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
   setup by calling the hidden function __stack_chk_fail_local instead
   of __stack_chk_fail.  Otherwise it is better to call __stack_chk_fail
   directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
         ? default_hidden_stack_protect_fail ()
         : default_external_stack_protect_fail ();
}

/* When -mwarn-cell-microcode is in effect, warn as each insn that the
   Cell processor would execute via microcode is emitted.  */
void
rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                           int num_operands ATTRIBUTE_UNUSED)
{
  if (rs6000_warn_cell_microcode)
    {
      const char *temp;
      int insn_code_number = recog_memoized (insn);
      location_t location = locator_location (INSN_LOCATOR (insn));

      /* Punt on insns we cannot recognize.  */
      if (insn_code_number < 0)
        return;

      temp = get_insn_template (insn_code_number, insn);

      if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
        warning_at (location, OPT_mwarn_cell_microcode,
                    "emitting microcode insn %s\t[%s] #%d",
                    temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
      else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
        warning_at (location, OPT_mwarn_cell_microcode,
                    "emitting conditional microcode insn %s\t[%s] #%d",
                    temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
    }
}

#include "gt-rs6000.h"