/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

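/* Illustrative sketch, not part of the compiler proper: the usual way this
   record is consumed elsewhere in this file is to ask rs6000_stack_info ()
   for the current function's layout and then test individual fields, e.g.

     rs6000_stack_t *info = rs6000_stack_info ();
     if (info->lr_save_p)
       ... save the link register at sp + info->lr_save_offset ...

   The field values themselves are filled in only by rs6000_stack_info ().  */
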
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

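/* Usage note (an assumption recorded for documentation, not new code):
   this per-function record is reached through the current function, e.g.

     if (cfun->machine->ra_need_lr)
       ... the prologue must keep the link register available ...

   cfun->machine is allocated lazily by rs6000_init_machine_status.  */
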
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

602ea4d3 159/* Size of long double. */
6fa3f289
ZW
160int rs6000_long_double_type_size;
161
602ea4d3
JJ
162/* IEEE quad extended precision long double. */
163int rs6000_ieeequad;
164
165/* Whether -mabi=altivec has appeared. */
6fa3f289
ZW
166int rs6000_altivec_abi;
167
a3170dc6
AH
168/* Nonzero if we want SPE ABI extensions. */
169int rs6000_spe_abi;
170
5da702b1
AH
171/* Nonzero if floating point operations are done in the GPRs. */
172int rs6000_float_gprs = 0;
173
594a51fe
SS
174/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
175int rs6000_darwin64_abi;
176
a0ab749a 177/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 178static GTY(()) int common_mode_defined;
c81bebd7 179
9878760c
RK
180/* Save information from a "cmpxx" operation until the branch or scc is
181 emitted. */
9878760c
RK
182rtx rs6000_compare_op0, rs6000_compare_op1;
183int rs6000_compare_fp_p;
874a0744 184
874a0744
MM
185/* Label number of label created for -mrelocatable, to call to so we can
186 get the address of the GOT section */
187int rs6000_pic_labelno;
c81bebd7 188
b91da81f 189#ifdef USING_ELFOS_H
c81bebd7 190/* Which abi to adhere to */
9739c90c 191const char *rs6000_abi_name;
d9407988
MM
192
193/* Semantics of the small data area */
194enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
195
196/* Which small data model to use */
815cdc52 197const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
198
199/* Counter for labels which are to be placed in .fixup. */
200int fixuplabelno = 0;
874a0744 201#endif
4697a36c 202
c4501e62
JJ
203/* Bit size of immediate TLS offsets and string from which it is decoded. */
204int rs6000_tls_size = 32;
205const char *rs6000_tls_size_string;
206
b6c9286a
MM
207/* ABI enumeration available for subtarget to use. */
208enum rs6000_abi rs6000_current_abi;
209
85b776df
AM
210/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
211int dot_symbols;
212
38c1f2d7 213/* Debug flags */
815cdc52 214const char *rs6000_debug_name;
38c1f2d7
MM
215int rs6000_debug_stack; /* debug stack applications */
216int rs6000_debug_arg; /* debug argument handling */
217
aabcd309 218/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
219bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
220
58646b77
PB
221/* Built in types. */
222
223tree rs6000_builtin_types[RS6000_BTI_MAX];
224tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 225
57ac7be9
AM
226const char *rs6000_traceback_name;
227static enum {
228 traceback_default = 0,
229 traceback_none,
230 traceback_part,
231 traceback_full
232} rs6000_traceback;
233
38c1f2d7
MM
234/* Flag to say the TOC is initialized */
235int toc_initialized;
9ebbca7d 236char toc_label_name[10];
38c1f2d7 237
d6b5193b
RS
238static GTY(()) section *read_only_data_section;
239static GTY(()) section *private_data_section;
240static GTY(()) section *read_only_private_data_section;
241static GTY(()) section *sdata2_section;
242static GTY(()) section *toc_section;
243
a3c9585f
KH
244/* Control alignment for fields within structures. */
245/* String from -malign-XXXXX. */
025d9908
KH
246int rs6000_alignment_flags;
247
78f5898b
AH
248/* True for any options that were explicitly set. */
249struct {
df01da37 250 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 251 bool alignment; /* True if -malign- was used. */
d3603e8c 252 bool abi; /* True if -mabi=spe/nospe was used. */
78f5898b
AH
253 bool spe; /* True if -mspe= was used. */
254 bool float_gprs; /* True if -mfloat-gprs= was used. */
255 bool isel; /* True if -misel was used. */
256 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 257 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
78f5898b
AH
258} rs6000_explicit_options;
259
a3170dc6
AH
260struct builtin_description
261{
262 /* mask is not const because we're going to alter it below. This
263 nonsense will go away when we rewrite the -march infrastructure
264 to give us more target flag bits. */
265 unsigned int mask;
266 const enum insn_code icode;
267 const char *const name;
268 const enum rs6000_builtins code;
269};
\f
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
};

const struct processor_costs *rs6000_cost;

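/* Illustrative sketch of how this table is meant to be consumed (an
   assumption kept as documentation only): rs6000_override_options points
   rs6000_cost at the entry for the CPU being tuned for, and the RTX cost
   hook then reads it when pricing instructions, roughly

     case MULT:
       *total = rs6000_cost->mulsi;
       break;

   COSTS_N_INSNS (n) expresses a cost of about n simple integer insns.  */
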
288/* Processor costs (relative to an add) */
289
290/* Instruction size costs on 32bit processors. */
291static const
292struct processor_costs size32_cost = {
06a67bdd
RS
293 COSTS_N_INSNS (1), /* mulsi */
294 COSTS_N_INSNS (1), /* mulsi_const */
295 COSTS_N_INSNS (1), /* mulsi_const9 */
296 COSTS_N_INSNS (1), /* muldi */
297 COSTS_N_INSNS (1), /* divsi */
298 COSTS_N_INSNS (1), /* divdi */
299 COSTS_N_INSNS (1), /* fp */
300 COSTS_N_INSNS (1), /* dmul */
301 COSTS_N_INSNS (1), /* sdiv */
302 COSTS_N_INSNS (1), /* ddiv */
8b897cfa
RS
303};
304
305/* Instruction size costs on 64bit processors. */
306static const
307struct processor_costs size64_cost = {
06a67bdd
RS
308 COSTS_N_INSNS (1), /* mulsi */
309 COSTS_N_INSNS (1), /* mulsi_const */
310 COSTS_N_INSNS (1), /* mulsi_const9 */
311 COSTS_N_INSNS (1), /* muldi */
312 COSTS_N_INSNS (1), /* divsi */
313 COSTS_N_INSNS (1), /* divdi */
314 COSTS_N_INSNS (1), /* fp */
315 COSTS_N_INSNS (1), /* dmul */
316 COSTS_N_INSNS (1), /* sdiv */
317 COSTS_N_INSNS (1), /* ddiv */
8b897cfa
RS
318};
319
320/* Instruction costs on RIOS1 processors. */
321static const
322struct processor_costs rios1_cost = {
06a67bdd
RS
323 COSTS_N_INSNS (5), /* mulsi */
324 COSTS_N_INSNS (4), /* mulsi_const */
325 COSTS_N_INSNS (3), /* mulsi_const9 */
326 COSTS_N_INSNS (5), /* muldi */
327 COSTS_N_INSNS (19), /* divsi */
328 COSTS_N_INSNS (19), /* divdi */
329 COSTS_N_INSNS (2), /* fp */
330 COSTS_N_INSNS (2), /* dmul */
331 COSTS_N_INSNS (19), /* sdiv */
332 COSTS_N_INSNS (19), /* ddiv */
8b897cfa
RS
333};
334
335/* Instruction costs on RIOS2 processors. */
336static const
337struct processor_costs rios2_cost = {
06a67bdd
RS
338 COSTS_N_INSNS (2), /* mulsi */
339 COSTS_N_INSNS (2), /* mulsi_const */
340 COSTS_N_INSNS (2), /* mulsi_const9 */
341 COSTS_N_INSNS (2), /* muldi */
342 COSTS_N_INSNS (13), /* divsi */
343 COSTS_N_INSNS (13), /* divdi */
344 COSTS_N_INSNS (2), /* fp */
345 COSTS_N_INSNS (2), /* dmul */
346 COSTS_N_INSNS (17), /* sdiv */
347 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
348};
349
350/* Instruction costs on RS64A processors. */
351static const
352struct processor_costs rs64a_cost = {
06a67bdd
RS
353 COSTS_N_INSNS (20), /* mulsi */
354 COSTS_N_INSNS (12), /* mulsi_const */
355 COSTS_N_INSNS (8), /* mulsi_const9 */
356 COSTS_N_INSNS (34), /* muldi */
357 COSTS_N_INSNS (65), /* divsi */
358 COSTS_N_INSNS (67), /* divdi */
359 COSTS_N_INSNS (4), /* fp */
360 COSTS_N_INSNS (4), /* dmul */
361 COSTS_N_INSNS (31), /* sdiv */
362 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
363};
364
365/* Instruction costs on MPCCORE processors. */
366static const
367struct processor_costs mpccore_cost = {
06a67bdd
RS
368 COSTS_N_INSNS (2), /* mulsi */
369 COSTS_N_INSNS (2), /* mulsi_const */
370 COSTS_N_INSNS (2), /* mulsi_const9 */
371 COSTS_N_INSNS (2), /* muldi */
372 COSTS_N_INSNS (6), /* divsi */
373 COSTS_N_INSNS (6), /* divdi */
374 COSTS_N_INSNS (4), /* fp */
375 COSTS_N_INSNS (5), /* dmul */
376 COSTS_N_INSNS (10), /* sdiv */
377 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
378};
379
380/* Instruction costs on PPC403 processors. */
381static const
382struct processor_costs ppc403_cost = {
06a67bdd
RS
383 COSTS_N_INSNS (4), /* mulsi */
384 COSTS_N_INSNS (4), /* mulsi_const */
385 COSTS_N_INSNS (4), /* mulsi_const9 */
386 COSTS_N_INSNS (4), /* muldi */
387 COSTS_N_INSNS (33), /* divsi */
388 COSTS_N_INSNS (33), /* divdi */
389 COSTS_N_INSNS (11), /* fp */
390 COSTS_N_INSNS (11), /* dmul */
391 COSTS_N_INSNS (11), /* sdiv */
392 COSTS_N_INSNS (11), /* ddiv */
8b897cfa
RS
393};
394
395/* Instruction costs on PPC405 processors. */
396static const
397struct processor_costs ppc405_cost = {
06a67bdd
RS
398 COSTS_N_INSNS (5), /* mulsi */
399 COSTS_N_INSNS (4), /* mulsi_const */
400 COSTS_N_INSNS (3), /* mulsi_const9 */
401 COSTS_N_INSNS (5), /* muldi */
402 COSTS_N_INSNS (35), /* divsi */
403 COSTS_N_INSNS (35), /* divdi */
404 COSTS_N_INSNS (11), /* fp */
405 COSTS_N_INSNS (11), /* dmul */
406 COSTS_N_INSNS (11), /* sdiv */
407 COSTS_N_INSNS (11), /* ddiv */
8b897cfa
RS
408};
409
410/* Instruction costs on PPC440 processors. */
411static const
412struct processor_costs ppc440_cost = {
06a67bdd
RS
413 COSTS_N_INSNS (3), /* mulsi */
414 COSTS_N_INSNS (2), /* mulsi_const */
415 COSTS_N_INSNS (2), /* mulsi_const9 */
416 COSTS_N_INSNS (3), /* muldi */
417 COSTS_N_INSNS (34), /* divsi */
418 COSTS_N_INSNS (34), /* divdi */
419 COSTS_N_INSNS (5), /* fp */
420 COSTS_N_INSNS (5), /* dmul */
421 COSTS_N_INSNS (19), /* sdiv */
422 COSTS_N_INSNS (33), /* ddiv */
8b897cfa
RS
423};
424
425/* Instruction costs on PPC601 processors. */
426static const
427struct processor_costs ppc601_cost = {
06a67bdd
RS
428 COSTS_N_INSNS (5), /* mulsi */
429 COSTS_N_INSNS (5), /* mulsi_const */
430 COSTS_N_INSNS (5), /* mulsi_const9 */
431 COSTS_N_INSNS (5), /* muldi */
432 COSTS_N_INSNS (36), /* divsi */
433 COSTS_N_INSNS (36), /* divdi */
434 COSTS_N_INSNS (4), /* fp */
435 COSTS_N_INSNS (5), /* dmul */
436 COSTS_N_INSNS (17), /* sdiv */
437 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
438};
439
440/* Instruction costs on PPC603 processors. */
441static const
442struct processor_costs ppc603_cost = {
06a67bdd
RS
443 COSTS_N_INSNS (5), /* mulsi */
444 COSTS_N_INSNS (3), /* mulsi_const */
445 COSTS_N_INSNS (2), /* mulsi_const9 */
446 COSTS_N_INSNS (5), /* muldi */
447 COSTS_N_INSNS (37), /* divsi */
448 COSTS_N_INSNS (37), /* divdi */
449 COSTS_N_INSNS (3), /* fp */
450 COSTS_N_INSNS (4), /* dmul */
451 COSTS_N_INSNS (18), /* sdiv */
452 COSTS_N_INSNS (33), /* ddiv */
8b897cfa
RS
453};
454
455/* Instruction costs on PPC604 processors. */
456static const
457struct processor_costs ppc604_cost = {
06a67bdd
RS
458 COSTS_N_INSNS (4), /* mulsi */
459 COSTS_N_INSNS (4), /* mulsi_const */
460 COSTS_N_INSNS (4), /* mulsi_const9 */
461 COSTS_N_INSNS (4), /* muldi */
462 COSTS_N_INSNS (20), /* divsi */
463 COSTS_N_INSNS (20), /* divdi */
464 COSTS_N_INSNS (3), /* fp */
465 COSTS_N_INSNS (3), /* dmul */
466 COSTS_N_INSNS (18), /* sdiv */
467 COSTS_N_INSNS (32), /* ddiv */
8b897cfa
RS
468};
469
470/* Instruction costs on PPC604e processors. */
471static const
472struct processor_costs ppc604e_cost = {
06a67bdd
RS
473 COSTS_N_INSNS (2), /* mulsi */
474 COSTS_N_INSNS (2), /* mulsi_const */
475 COSTS_N_INSNS (2), /* mulsi_const9 */
476 COSTS_N_INSNS (2), /* muldi */
477 COSTS_N_INSNS (20), /* divsi */
478 COSTS_N_INSNS (20), /* divdi */
479 COSTS_N_INSNS (3), /* fp */
480 COSTS_N_INSNS (3), /* dmul */
481 COSTS_N_INSNS (18), /* sdiv */
482 COSTS_N_INSNS (32), /* ddiv */
8b897cfa
RS
483};
484
f0517163 485/* Instruction costs on PPC620 processors. */
8b897cfa
RS
486static const
487struct processor_costs ppc620_cost = {
06a67bdd
RS
488 COSTS_N_INSNS (5), /* mulsi */
489 COSTS_N_INSNS (4), /* mulsi_const */
490 COSTS_N_INSNS (3), /* mulsi_const9 */
491 COSTS_N_INSNS (7), /* muldi */
492 COSTS_N_INSNS (21), /* divsi */
493 COSTS_N_INSNS (37), /* divdi */
494 COSTS_N_INSNS (3), /* fp */
495 COSTS_N_INSNS (3), /* dmul */
496 COSTS_N_INSNS (18), /* sdiv */
497 COSTS_N_INSNS (32), /* ddiv */
f0517163
RS
498};
499
500/* Instruction costs on PPC630 processors. */
501static const
502struct processor_costs ppc630_cost = {
06a67bdd
RS
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (4), /* mulsi_const */
505 COSTS_N_INSNS (3), /* mulsi_const9 */
506 COSTS_N_INSNS (7), /* muldi */
507 COSTS_N_INSNS (21), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (3), /* dmul */
511 COSTS_N_INSNS (17), /* sdiv */
512 COSTS_N_INSNS (21), /* ddiv */
8b897cfa
RS
513};
514
515/* Instruction costs on PPC750 and PPC7400 processors. */
516static const
517struct processor_costs ppc750_cost = {
06a67bdd
RS
518 COSTS_N_INSNS (5), /* mulsi */
519 COSTS_N_INSNS (3), /* mulsi_const */
520 COSTS_N_INSNS (2), /* mulsi_const9 */
521 COSTS_N_INSNS (5), /* muldi */
522 COSTS_N_INSNS (17), /* divsi */
523 COSTS_N_INSNS (17), /* divdi */
524 COSTS_N_INSNS (3), /* fp */
525 COSTS_N_INSNS (3), /* dmul */
526 COSTS_N_INSNS (17), /* sdiv */
527 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
528};
529
530/* Instruction costs on PPC7450 processors. */
531static const
532struct processor_costs ppc7450_cost = {
06a67bdd
RS
533 COSTS_N_INSNS (4), /* mulsi */
534 COSTS_N_INSNS (3), /* mulsi_const */
535 COSTS_N_INSNS (3), /* mulsi_const9 */
536 COSTS_N_INSNS (4), /* muldi */
537 COSTS_N_INSNS (23), /* divsi */
538 COSTS_N_INSNS (23), /* divdi */
539 COSTS_N_INSNS (5), /* fp */
540 COSTS_N_INSNS (5), /* dmul */
541 COSTS_N_INSNS (21), /* sdiv */
542 COSTS_N_INSNS (35), /* ddiv */
8b897cfa 543};
a3170dc6 544
8b897cfa
RS
545/* Instruction costs on PPC8540 processors. */
546static const
547struct processor_costs ppc8540_cost = {
06a67bdd
RS
548 COSTS_N_INSNS (4), /* mulsi */
549 COSTS_N_INSNS (4), /* mulsi_const */
550 COSTS_N_INSNS (4), /* mulsi_const9 */
551 COSTS_N_INSNS (4), /* muldi */
552 COSTS_N_INSNS (19), /* divsi */
553 COSTS_N_INSNS (19), /* divdi */
554 COSTS_N_INSNS (4), /* fp */
555 COSTS_N_INSNS (4), /* dmul */
556 COSTS_N_INSNS (29), /* sdiv */
557 COSTS_N_INSNS (29), /* ddiv */
8b897cfa
RS
558};
559
560/* Instruction costs on POWER4 and POWER5 processors. */
561static const
562struct processor_costs power4_cost = {
06a67bdd
RS
563 COSTS_N_INSNS (3), /* mulsi */
564 COSTS_N_INSNS (2), /* mulsi_const */
565 COSTS_N_INSNS (2), /* mulsi_const9 */
566 COSTS_N_INSNS (4), /* muldi */
567 COSTS_N_INSNS (18), /* divsi */
568 COSTS_N_INSNS (34), /* divdi */
569 COSTS_N_INSNS (3), /* fp */
570 COSTS_N_INSNS (3), /* dmul */
571 COSTS_N_INSNS (17), /* sdiv */
572 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
573};
574
575\f
a2369ed3 576static bool rs6000_function_ok_for_sibcall (tree, tree);
2ffa9a0c 577static const char *rs6000_invalid_within_doloop (rtx);
a2369ed3
DJ
578static rtx rs6000_generate_compare (enum rtx_code);
579static void rs6000_maybe_dead (rtx);
580static void rs6000_emit_stack_tie (void);
581static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
582static rtx spe_synthesize_frame_save (rtx);
583static bool spe_func_has_64bit_regs_p (void);
b20a9cca 584static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 585 int, HOST_WIDE_INT);
a2369ed3
DJ
586static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
587static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
588static unsigned rs6000_hash_constant (rtx);
589static unsigned toc_hash_function (const void *);
590static int toc_hash_eq (const void *, const void *);
591static int constant_pool_expr_1 (rtx, int *, int *);
592static bool constant_pool_expr_p (rtx);
d04b6e6e 593static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3 594static bool legitimate_indexed_address_p (rtx, int);
a2369ed3
DJ
595static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
596static struct machine_function * rs6000_init_machine_status (void);
597static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 598static bool no_global_regs_above (int);
5add3202 599#ifdef HAVE_GAS_HIDDEN
a2369ed3 600static void rs6000_assemble_visibility (tree, int);
5add3202 601#endif
a2369ed3
DJ
602static int rs6000_ra_ever_killed (void);
603static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 604static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
77ccdfed
EC
605static bool rs6000_ms_bitfield_layout_p (tree);
606static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 607static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
f18eca82 608static const char *rs6000_mangle_fundamental_type (tree);
b86fe7b4 609extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3
DJ
610static void rs6000_set_default_type_attributes (tree);
611static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
612static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
613static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
614 tree);
a2369ed3 615static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
c6e8c921 616static bool rs6000_return_in_memory (tree, tree);
a2369ed3 617static void rs6000_file_start (void);
7c262518 618#if TARGET_ELF
a2369ed3
DJ
619static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
620static void rs6000_elf_asm_out_constructor (rtx, int);
621static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 622static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b
RS
623static void rs6000_elf_asm_init_sections (void);
624static section *rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
a2369ed3 625static void rs6000_elf_unique_section (tree, int);
d6b5193b
RS
626static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
627 unsigned HOST_WIDE_INT);
a56d7372 628static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 629 ATTRIBUTE_UNUSED;
7c262518 630#endif
aacd3885 631static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
cbaaba19 632#if TARGET_XCOFF
0d5817b2 633static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 634static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 635static void rs6000_xcoff_asm_init_sections (void);
8210e4c4 636static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 637static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 638 unsigned HOST_WIDE_INT);
d6b5193b
RS
639static void rs6000_xcoff_unique_section (tree, int);
640static section *rs6000_xcoff_select_rtx_section
641 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
642static const char * rs6000_xcoff_strip_name_encoding (const char *);
643static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
644static void rs6000_xcoff_file_start (void);
645static void rs6000_xcoff_file_end (void);
f1384257 646#endif
a2369ed3
DJ
647static int rs6000_variable_issue (FILE *, int, rtx, int);
648static bool rs6000_rtx_costs (rtx, int, int, int *);
649static int rs6000_adjust_cost (rtx, rtx, rtx, int);
cbe26ab8 650static bool is_microcoded_insn (rtx);
79ae11c4 651static int is_dispatch_slot_restricted (rtx);
cbe26ab8
DN
652static bool is_cracked_insn (rtx);
653static bool is_branch_slot_insn (rtx);
a2369ed3
DJ
654static int rs6000_adjust_priority (rtx, int);
655static int rs6000_issue_rate (void);
569fa502 656static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
cbe26ab8
DN
657static rtx get_next_active_insn (rtx, rtx);
658static bool insn_terminates_group_p (rtx , enum group_termination);
659static bool is_costly_group (rtx *, rtx);
660static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
661static int redefine_groups (FILE *, int, rtx, rtx);
662static int pad_groups (FILE *, int, rtx, rtx);
663static void rs6000_sched_finish (FILE *, int);
a2369ed3 664static int rs6000_use_sched_lookahead (void);
7ccf35ed 665static tree rs6000_builtin_mask_for_load (void);
a2369ed3 666
58646b77 667static void def_builtin (int, const char *, tree, int);
a2369ed3
DJ
668static void rs6000_init_builtins (void);
669static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
670static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
671static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
672static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
673static void altivec_init_builtins (void);
674static void rs6000_common_init_builtins (void);
c15c90bb 675static void rs6000_init_libfuncs (void);
a2369ed3 676
b20a9cca
AM
677static void enable_mask_for_builtins (struct builtin_description *, int,
678 enum rs6000_builtins,
679 enum rs6000_builtins);
7c62e993 680static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
681static void spe_init_builtins (void);
682static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 683static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
684static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
685static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
686static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
687static rs6000_stack_t *rs6000_stack_info (void);
688static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
689
690static rtx altivec_expand_builtin (tree, rtx, bool *);
691static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
692static rtx altivec_expand_st_builtin (tree, rtx, bool *);
693static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
694static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 695static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 696 const char *, tree, rtx);
b4a62fa0 697static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 698static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
699static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
700static rtx altivec_expand_vec_set_builtin (tree);
701static rtx altivec_expand_vec_ext_builtin (tree, rtx);
702static int get_element_number (tree, tree);
78f5898b 703static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 704static void rs6000_parse_tls_size_option (void);
5da702b1 705static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
706static int first_altivec_reg_to_save (void);
707static unsigned int compute_vrsave_mask (void);
9390387d 708static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
709static void is_altivec_return_reg (rtx, void *);
710static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
711int easy_vector_constant (rtx, enum machine_mode);
58646b77 712static bool rs6000_is_opaque_type (tree);
a2369ed3
DJ
713static rtx rs6000_dwarf_register_span (rtx);
714static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 715static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
716static rtx rs6000_tls_get_addr (void);
717static rtx rs6000_got_sym (void);
9390387d 718static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
719static const char *rs6000_get_some_local_dynamic_name (void);
720static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 721static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 722static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 723 enum machine_mode, tree);
0b5383eb
DJ
724static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
725 HOST_WIDE_INT);
726static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
727 tree, HOST_WIDE_INT);
728static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
729 HOST_WIDE_INT,
730 rtx[], int *);
731static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
732 tree, HOST_WIDE_INT,
733 rtx[], int *);
734static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
ec6376ab 735static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 736static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
737static void setup_incoming_varargs (CUMULATIVE_ARGS *,
738 enum machine_mode, tree,
739 int *, int);
8cd5a4e0
RH
740static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
741 tree, bool);
78a52f11
RH
742static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
743 tree, bool);
4d3e6fae 744static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
efdba735
SH
745#if TARGET_MACHO
746static void macho_branch_islands (void);
747static void add_compiler_branch_island (tree, tree, int);
748static int no_previous_def (tree function_name);
749static tree get_prev_label (tree function_name);
c4e18b1c 750static void rs6000_darwin_file_start (void);
efdba735
SH
751#endif
752
c35d187f 753static tree rs6000_build_builtin_va_list (void);
23a60a04 754static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
fe984136 755static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
00b79d54 756static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 757static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 758static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 759 enum machine_mode);
94ff898d 760static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
761 enum machine_mode);
762static int get_vsel_insn (enum machine_mode);
763static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 764static tree rs6000_stack_protect_fail (void);
21213b4c
DP
765
766const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
767static enum machine_mode rs6000_eh_return_filter_mode (void);
768
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
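/* Usage sketch (an assumption recorded for documentation only): entries are
   interned through the libiberty hashtab API using toc_hash_function and
   toc_hash_eq as callbacks, roughly

     struct toc_hash_struct tmp, **slot;
     tmp.key = x;  tmp.key_mode = mode;
     slot = (struct toc_hash_struct **)
            htab_find_slot (toc_hash_table, &tmp, INSERT);

   so that identical constants of the same mode share one TOC label.  */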
c81bebd7
MM
781\f
782/* Default register names. */
783char rs6000_reg_names[][8] =
784{
802a0058
MM
785 "0", "1", "2", "3", "4", "5", "6", "7",
786 "8", "9", "10", "11", "12", "13", "14", "15",
787 "16", "17", "18", "19", "20", "21", "22", "23",
788 "24", "25", "26", "27", "28", "29", "30", "31",
789 "0", "1", "2", "3", "4", "5", "6", "7",
790 "8", "9", "10", "11", "12", "13", "14", "15",
791 "16", "17", "18", "19", "20", "21", "22", "23",
792 "24", "25", "26", "27", "28", "29", "30", "31",
793 "mq", "lr", "ctr","ap",
794 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
795 "xer",
796 /* AltiVec registers. */
0cd5e3a1
AH
797 "0", "1", "2", "3", "4", "5", "6", "7",
798 "8", "9", "10", "11", "12", "13", "14", "15",
799 "16", "17", "18", "19", "20", "21", "22", "23",
800 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
801 "vrsave", "vscr",
802 /* SPE registers. */
7d5175e1
JJ
803 "spe_acc", "spefscr",
804 /* Soft frame pointer. */
805 "sfp"
c81bebd7
MM
806};
807
808#ifdef TARGET_REGNAMES
8b60264b 809static const char alt_reg_names[][8] =
c81bebd7 810{
802a0058
MM
811 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
812 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
813 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
814 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
815 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
816 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
817 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
818 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
819 "mq", "lr", "ctr", "ap",
820 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 821 "xer",
59a4c851 822 /* AltiVec registers. */
0ac081f6 823 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
824 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
825 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
826 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
827 "vrsave", "vscr",
828 /* SPE registers. */
7d5175e1
JJ
829 "spe_acc", "spefscr",
830 /* Soft frame pointer. */
831 "sfp"
c81bebd7
MM
832};
833#endif
9878760c 834\f
daf11973
MM
835#ifndef MASK_STRICT_ALIGN
836#define MASK_STRICT_ALIGN 0
837#endif
ffcfcb5f
AM
838#ifndef TARGET_PROFILE_KERNEL
839#define TARGET_PROFILE_KERNEL 0
840#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
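/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000, the bit
   for %v0, and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001,
   the bit for %v31, matching the layout described above.  */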
\f
845/* Initialize the GCC target structure. */
91d231cb
JM
846#undef TARGET_ATTRIBUTE_TABLE
847#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
848#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
849#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 850
301d03af
RS
851#undef TARGET_ASM_ALIGNED_DI_OP
852#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
853
854/* Default unaligned ops are only provided for ELF. Find the ops needed
855 for non-ELF systems. */
856#ifndef OBJECT_FORMAT_ELF
cbaaba19 857#if TARGET_XCOFF
ae6c1efd 858/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
859 64-bit targets. */
860#undef TARGET_ASM_UNALIGNED_HI_OP
861#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
862#undef TARGET_ASM_UNALIGNED_SI_OP
863#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
864#undef TARGET_ASM_UNALIGNED_DI_OP
865#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
866#else
867/* For Darwin. */
868#undef TARGET_ASM_UNALIGNED_HI_OP
869#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
870#undef TARGET_ASM_UNALIGNED_SI_OP
871#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
872#undef TARGET_ASM_UNALIGNED_DI_OP
873#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
874#undef TARGET_ASM_ALIGNED_DI_OP
875#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
876#endif
877#endif
878
879/* This hook deals with fixups for relocatable code and DI-mode objects
880 in 64-bit code. */
881#undef TARGET_ASM_INTEGER
882#define TARGET_ASM_INTEGER rs6000_assemble_integer
883
93638d7a
AM
884#ifdef HAVE_GAS_HIDDEN
885#undef TARGET_ASM_ASSEMBLE_VISIBILITY
886#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
887#endif
888
c4501e62
JJ
889#undef TARGET_HAVE_TLS
890#define TARGET_HAVE_TLS HAVE_AS_TLS
891
892#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 893#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 894
08c148a8
NB
895#undef TARGET_ASM_FUNCTION_PROLOGUE
896#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
897#undef TARGET_ASM_FUNCTION_EPILOGUE
898#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
899
b54cf83a
DE
900#undef TARGET_SCHED_VARIABLE_ISSUE
901#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
902
c237e94a
ZW
903#undef TARGET_SCHED_ISSUE_RATE
904#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
905#undef TARGET_SCHED_ADJUST_COST
906#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
907#undef TARGET_SCHED_ADJUST_PRIORITY
908#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 909#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 910#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
cbe26ab8
DN
911#undef TARGET_SCHED_FINISH
912#define TARGET_SCHED_FINISH rs6000_sched_finish
c237e94a 913
be12c2b0
VM
914#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
915#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
916
7ccf35ed
DN
917#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
918#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
919
0ac081f6
AH
920#undef TARGET_INIT_BUILTINS
921#define TARGET_INIT_BUILTINS rs6000_init_builtins
922
923#undef TARGET_EXPAND_BUILTIN
924#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
925
f18eca82
ZL
926#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
927#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
928
c15c90bb
ZW
929#undef TARGET_INIT_LIBFUNCS
930#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
931
f1384257 932#if TARGET_MACHO
0e5dbd9b 933#undef TARGET_BINDS_LOCAL_P
31920d83 934#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 935#endif
0e5dbd9b 936
77ccdfed
EC
937#undef TARGET_MS_BITFIELD_LAYOUT_P
938#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
939
3961e8fe
RH
940#undef TARGET_ASM_OUTPUT_MI_THUNK
941#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
942
3961e8fe 943#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5b71a4e7 944#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
00b960c7 945
4977bab6
ZW
946#undef TARGET_FUNCTION_OK_FOR_SIBCALL
947#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
948
2e3f0db6
DJ
949#undef TARGET_INVALID_WITHIN_DOLOOP
950#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 951
3c50106f
RH
952#undef TARGET_RTX_COSTS
953#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
954#undef TARGET_ADDRESS_COST
955#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 956
c8e4f0e9 957#undef TARGET_VECTOR_OPAQUE_P
58646b77 958#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 959
96714395
AH
960#undef TARGET_DWARF_REGISTER_SPAN
961#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
962
c6e8c921
GK
963/* On rs6000, function arguments are promoted, as are function return
964 values. */
965#undef TARGET_PROMOTE_FUNCTION_ARGS
966#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
967#undef TARGET_PROMOTE_FUNCTION_RETURN
968#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
969
c6e8c921
GK
970#undef TARGET_RETURN_IN_MEMORY
971#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
972
973#undef TARGET_SETUP_INCOMING_VARARGS
974#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
975
976/* Always strict argument naming on rs6000. */
977#undef TARGET_STRICT_ARGUMENT_NAMING
978#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
979#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
980#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130
RH
981#undef TARGET_SPLIT_COMPLEX_ARG
982#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
fe984136
RH
983#undef TARGET_MUST_PASS_IN_STACK
984#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
985#undef TARGET_PASS_BY_REFERENCE
986#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
987#undef TARGET_ARG_PARTIAL_BYTES
988#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 989
c35d187f
RH
990#undef TARGET_BUILD_BUILTIN_VA_LIST
991#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
992
cd3ce9b4
JM
993#undef TARGET_GIMPLIFY_VA_ARG_EXPR
994#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
995
93f90be6
FJ
996#undef TARGET_EH_RETURN_FILTER_MODE
997#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
998
00b79d54
BE
999#undef TARGET_SCALAR_MODE_SUPPORTED_P
1000#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1001
f676971a
EC
1002#undef TARGET_VECTOR_MODE_SUPPORTED_P
1003#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1004
4d3e6fae
FJ
1005#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1006#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1007
78f5898b
AH
1008#undef TARGET_HANDLE_OPTION
1009#define TARGET_HANDLE_OPTION rs6000_handle_option
1010
1011#undef TARGET_DEFAULT_TARGET_FLAGS
1012#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1013 (TARGET_DEFAULT)
78f5898b 1014
3aebbe5f
JJ
1015#undef TARGET_STACK_PROTECT_FAIL
1016#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1017
445cf5eb
JM
1018/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1019 The PowerPC architecture requires only weak consistency among
1020 processors--that is, memory accesses between processors need not be
1021 sequentially consistent and memory accesses among processors can occur
1022 in any order. The ability to order memory accesses weakly provides
1023 opportunities for more efficient use of the system bus. Unless a
1024 dependency exists, the 604e allows read operations to precede store
1025 operations. */
1026#undef TARGET_RELAXED_ORDERING
1027#define TARGET_RELAXED_ORDERING true
1028
fdbe66f2
EB
1029#ifdef HAVE_AS_TLS
1030#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1031#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1032#endif
1033
/* Use a 32-bit anchor range.  This leads to sequences like:

     addis tmp,anchor,high
     add   dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

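/* Note on TARGET_MIN_ANCHOR_OFFSET above: it is spelled -0x7fffffff - 1
   rather than -0x80000000 because 0x80000000 does not fit in a plain int
   and would be given an unsigned type before the negation is applied; the
   two-token spelling keeps the value an ordinary negative constant.  */
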
struct gcc_target targetm = TARGET_INITIALIZER;
\f

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This also excludes decimal float modes.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && !DECIMAL_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except in a general register, and it
     must be able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

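/* A few concrete consequences of the predicate above, kept as
   documentation only: the GPRs accept any mode that fits entirely at or
   below r31; DFmode fits in any FP register, and DImode is allowed there
   too through the MODE_INT clause; the condition registers accept nothing
   but MODE_CC modes; and the AltiVec registers accept only the AltiVec
   vector modes.  */
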
c1e55850
GK
1104/* If not otherwise specified by a target, make 'long double' equivalent to
1105 'double'. */
1106
1107#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1108#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1109#endif
1110
5248c961
RK
1111/* Override command line options. Mostly we process the processor
1112 type and sometimes adjust other TARGET_ options. */
1113
1114void
d779d0dc 1115rs6000_override_options (const char *default_cpu)
5248c961 1116{
c4d38ccb 1117 size_t i, j;
8e3f41e7 1118 struct rs6000_cpu_select *ptr;
66188a7e 1119 int set_masks;
5248c961 1120
66188a7e 1121 /* Simplifications for entries below. */
85638c0d 1122
66188a7e
GK
1123 enum {
1124 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1125 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1126 };
85638c0d 1127
66188a7e
GK
1128 /* This table occasionally claims that a processor does not support
1129 a particular feature even though it does, but the feature is slower
1130 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1131 complete description of the processor's support.
66188a7e
GK
1132
1133 Please keep this list in order, and don't forget to update the
1134 documentation in invoke.texi when adding a new processor or
1135 flag. */
5248c961
RK
1136 static struct ptt
1137 {
8b60264b
KG
1138 const char *const name; /* Canonical processor name. */
1139 const enum processor_type processor; /* Processor type enum value. */
1140 const int target_enable; /* Target flags to enable. */
8b60264b 1141 } const processor_target_table[]
66188a7e 1142 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1143 {"403", PROCESSOR_PPC403,
66188a7e 1144 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1145 {"405", PROCESSOR_PPC405,
716019c0
JM
1146 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1147 {"405fp", PROCESSOR_PPC405,
1148 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1149 {"440", PROCESSOR_PPC440,
716019c0
JM
1150 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1151 {"440fp", PROCESSOR_PPC440,
1152 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1153 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1154 {"601", PROCESSOR_PPC601,
66188a7e
GK
1155 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1156 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1157 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1158 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1159 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1160 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1161 {"620", PROCESSOR_PPC620,
1162 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1163 {"630", PROCESSOR_PPC630,
1164 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1165 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1166 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1167 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1168 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1169 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1170 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1171 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1172 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
4d4cbc0e
AH
1173 /* 8548 has a dummy entry for now. */
1174 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1175 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1176 {"970", PROCESSOR_POWER4,
66188a7e
GK
1177 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1178 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1179 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1180 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1181 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1182 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1183 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1184 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1185 {"power2", PROCESSOR_POWER,
1186 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1187 {"power3", PROCESSOR_PPC630,
1188 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1189 {"power4", PROCESSOR_POWER4,
fc091c8e 1190 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1191 {"power5", PROCESSOR_POWER5,
432218ba
DE
1192 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1193 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1194 {"power5+", PROCESSOR_POWER5,
1195 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1196 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
e118597e
PS
1197 {"power6", PROCESSOR_POWER5,
1198 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
1199 | MASK_FPRND},
66188a7e
GK
1200 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1201 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1202 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1203 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1204 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1205 {"rios2", PROCESSOR_RIOS2,
1206 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1207 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1208 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1209 {"rs64", PROCESSOR_RS64A,
1210 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1211 };
5248c961 1212
ca7558fc 1213 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1214
66188a7e
GK
1215 /* Some OSs don't support saving the high part of 64-bit registers on
1216 context switch. Other OSs don't support saving Altivec registers.
1217 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1218 settings; if the user wants either, the user must explicitly specify
1219 them and we won't interfere with the user's specification. */
1220
1221 enum {
1222 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
f676971a 1223 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
66188a7e 1224 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0
JM
1225 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
1226 | MASK_DLMZB)
66188a7e 1227 };
0d1fbc8c
AH
1228
1229 rs6000_init_hard_regno_mode_ok ();
1230
c4ad648e 1231 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1232#ifdef OS_MISSING_POWERPC64
1233 if (OS_MISSING_POWERPC64)
1234 set_masks &= ~MASK_POWERPC64;
1235#endif
1236#ifdef OS_MISSING_ALTIVEC
1237 if (OS_MISSING_ALTIVEC)
1238 set_masks &= ~MASK_ALTIVEC;
1239#endif
1240
768875a8
AM
1241 /* Don't override by the processor default if given explicitly. */
1242 set_masks &= ~target_flags_explicit;
957211c3 1243
a4f6c312 1244 /* Identify the processor type. */
8e3f41e7 1245 rs6000_select[0].string = default_cpu;
3cb999d8 1246 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1247
b6a1cbae 1248 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1249 {
8e3f41e7
MM
1250 ptr = &rs6000_select[i];
1251 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1252 {
8e3f41e7
MM
1253 for (j = 0; j < ptt_size; j++)
1254 if (! strcmp (ptr->string, processor_target_table[j].name))
1255 {
1256 if (ptr->set_tune_p)
1257 rs6000_cpu = processor_target_table[j].processor;
1258
1259 if (ptr->set_arch_p)
1260 {
66188a7e
GK
1261 target_flags &= ~set_masks;
1262 target_flags |= (processor_target_table[j].target_enable
1263 & set_masks);
8e3f41e7
MM
1264 }
1265 break;
1266 }
1267
4406229e 1268 if (j == ptt_size)
8e3f41e7 1269 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1270 }
1271 }
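
  /* Worked example, derived from the rs6000_select table above:
     "-mcpu=power4" matches the "power4" row with both set_tune_p and
     set_arch_p nonzero, so it selects the POWER4 scheduling model and
     installs that row's architecture mask bits in target_flags (except
     bits the user set explicitly), while "-mtune=power4" has set_arch_p
     zero and therefore only changes rs6000_cpu, leaving target_flags
     alone.  */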
8a61d227 1272
993f19a8 1273 if (TARGET_E500)
a3170dc6
AH
1274 rs6000_isel = 1;
1275
dff9f1b6
DE
1276 /* If we are optimizing big endian systems for space, use the load/store
1277 multiple and string instructions. */
ef792183 1278 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1279 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1280
a4f6c312
SS
1281 /* Don't allow -mmultiple or -mstring on little endian systems
1282 unless the cpu is a 750, because the hardware doesn't support the
1283 instructions used in little endian mode, and causes an alignment
1284 trap. The 750 does not cause an alignment trap (except when the
1285 target is unaligned). */
bef84347 1286
b21fb038 1287 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1288 {
1289 if (TARGET_MULTIPLE)
1290 {
1291 target_flags &= ~MASK_MULTIPLE;
b21fb038 1292 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1293 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1294 }
1295
1296 if (TARGET_STRING)
1297 {
1298 target_flags &= ~MASK_STRING;
b21fb038 1299 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1300 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1301 }
1302 }
3933e0e1 1303
38c1f2d7
MM
1304 /* Set debug flags */
1305 if (rs6000_debug_name)
1306 {
bfc79d3b 1307 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1308 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1309 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1310 rs6000_debug_stack = 1;
bfc79d3b 1311 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1312 rs6000_debug_arg = 1;
1313 else
c725bd79 1314 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1315 }
1316
57ac7be9
AM
1317 if (rs6000_traceback_name)
1318 {
1319 if (! strncmp (rs6000_traceback_name, "full", 4))
1320 rs6000_traceback = traceback_full;
1321 else if (! strncmp (rs6000_traceback_name, "part", 4))
1322 rs6000_traceback = traceback_part;
1323 else if (! strncmp (rs6000_traceback_name, "no", 2))
1324 rs6000_traceback = traceback_none;
1325 else
9e637a26 1326 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1327 rs6000_traceback_name);
1328 }
1329
78f5898b
AH
1330 if (!rs6000_explicit_options.long_double)
1331 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1332
602ea4d3 1333#ifndef POWERPC_LINUX
d3603e8c 1334 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1335 rs6000_ieeequad = 1;
1336#endif
1337
6d0ef01e
HP
1338 /* Set Altivec ABI as default for powerpc64 linux. */
1339 if (TARGET_ELF && TARGET_64BIT)
1340 {
1341 rs6000_altivec_abi = 1;
78f5898b 1342 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1343 }
1344
594a51fe
SS
1345 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1346 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1347 {
1348 rs6000_darwin64_abi = 1;
9c7956fd 1349#if TARGET_MACHO
6ac49599 1350 darwin_one_byte_bool = 1;
9c7956fd 1351#endif
d9168963
SS
1352 /* Default to natural alignment, for better performance. */
1353 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1354 }
1355
194c524a
DE
1356 /* Place FP constants in the constant pool instead of TOC
 1357	     if section anchors are enabled.  */
1358 if (flag_section_anchors)
1359 TARGET_NO_FP_IN_TOC = 1;
1360
c4501e62
JJ
1361 /* Handle -mtls-size option. */
1362 rs6000_parse_tls_size_option ();
1363
a7ae18e2
AH
1364#ifdef SUBTARGET_OVERRIDE_OPTIONS
1365 SUBTARGET_OVERRIDE_OPTIONS;
1366#endif
1367#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1368 SUBSUBTARGET_OVERRIDE_OPTIONS;
1369#endif
4d4cbc0e
AH
1370#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1371 SUB3TARGET_OVERRIDE_OPTIONS;
1372#endif
a7ae18e2 1373
5da702b1
AH
1374 if (TARGET_E500)
1375 {
e4463bf1
AH
1376 if (TARGET_ALTIVEC)
1377 error ("AltiVec and E500 instructions cannot coexist");
1378
5da702b1
AH
1379 /* The e500 does not have string instructions, and we set
1380 MASK_STRING above when optimizing for size. */
1381 if ((target_flags & MASK_STRING) != 0)
1382 target_flags = target_flags & ~MASK_STRING;
1383 }
1384 else if (rs6000_select[1].string != NULL)
1385 {
1386 /* For the powerpc-eabispe configuration, we set all these by
1387 default, so let's unset them if we manually set another
1388 CPU that is not the E500. */
78f5898b 1389 if (!rs6000_explicit_options.abi)
5da702b1 1390 rs6000_spe_abi = 0;
78f5898b 1391 if (!rs6000_explicit_options.spe)
5da702b1 1392 rs6000_spe = 0;
78f5898b 1393 if (!rs6000_explicit_options.float_gprs)
5da702b1 1394 rs6000_float_gprs = 0;
78f5898b 1395 if (!rs6000_explicit_options.isel)
5da702b1 1396 rs6000_isel = 0;
78f5898b 1397 if (!rs6000_explicit_options.long_double)
c1e55850 1398 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
5da702b1 1399 }
b5044283 1400
ec507f2d
DE
1401 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1402 && rs6000_cpu != PROCESSOR_POWER5);
1403 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1404 || rs6000_cpu == PROCESSOR_POWER5);
1405
ec507f2d
DE
1406 rs6000_sched_restricted_insns_priority
1407 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1408
569fa502 1409 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1410 rs6000_sched_costly_dep
1411 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1412
569fa502
DN
1413 if (rs6000_sched_costly_dep_str)
1414 {
f676971a 1415 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1416 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1417 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1418 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1419 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1420 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1421 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1422 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1423 else
c4ad648e 1424 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1425 }
1426
1427 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1428 rs6000_sched_insert_nops
1429 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1430
cbe26ab8
DN
1431 if (rs6000_sched_insert_nops_str)
1432 {
1433 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1434 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1435 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1436 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1437 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1438 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1439 else
c4ad648e 1440 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1441 }
1442
c81bebd7 1443#ifdef TARGET_REGNAMES
a4f6c312
SS
1444 /* If the user desires alternate register names, copy in the
1445 alternate names now. */
c81bebd7 1446 if (TARGET_REGNAMES)
4e135bdd 1447 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1448#endif
1449
df01da37 1450 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1451 If -maix-struct-return or -msvr4-struct-return was explicitly
1452 used, don't override with the ABI default. */
df01da37
DE
1453 if (!rs6000_explicit_options.aix_struct_ret)
1454 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1455
602ea4d3 1456 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1457 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1458
f676971a 1459 if (TARGET_TOC)
9ebbca7d 1460 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1461
301d03af
RS
1462 /* We can only guarantee the availability of DI pseudo-ops when
1463 assembling for 64-bit targets. */
ae6c1efd 1464 if (!TARGET_64BIT)
301d03af
RS
1465 {
1466 targetm.asm_out.aligned_op.di = NULL;
1467 targetm.asm_out.unaligned_op.di = NULL;
1468 }
1469
1494c534
DE
1470 /* Set branch target alignment, if not optimizing for size. */
1471 if (!optimize_size)
1472 {
1473 if (rs6000_sched_groups)
1474 {
1475 if (align_functions <= 0)
1476 align_functions = 16;
1477 if (align_jumps <= 0)
1478 align_jumps = 16;
1479 if (align_loops <= 0)
1480 align_loops = 16;
1481 }
1482 if (align_jumps_max_skip <= 0)
1483 align_jumps_max_skip = 15;
1484 if (align_loops_max_skip <= 0)
1485 align_loops_max_skip = 15;
1486 }
2792d578 1487
71f123ca
FS
1488 /* Arrange to save and restore machine status around nested functions. */
1489 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1490
1491 /* We should always be splitting complex arguments, but we can't break
1492 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1493 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1494 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1495
1496 /* Initialize rs6000_cost with the appropriate target costs. */
1497 if (optimize_size)
1498 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1499 else
1500 switch (rs6000_cpu)
1501 {
1502 case PROCESSOR_RIOS1:
1503 rs6000_cost = &rios1_cost;
1504 break;
1505
1506 case PROCESSOR_RIOS2:
1507 rs6000_cost = &rios2_cost;
1508 break;
1509
1510 case PROCESSOR_RS64A:
1511 rs6000_cost = &rs64a_cost;
1512 break;
1513
1514 case PROCESSOR_MPCCORE:
1515 rs6000_cost = &mpccore_cost;
1516 break;
1517
1518 case PROCESSOR_PPC403:
1519 rs6000_cost = &ppc403_cost;
1520 break;
1521
1522 case PROCESSOR_PPC405:
1523 rs6000_cost = &ppc405_cost;
1524 break;
1525
1526 case PROCESSOR_PPC440:
1527 rs6000_cost = &ppc440_cost;
1528 break;
1529
1530 case PROCESSOR_PPC601:
1531 rs6000_cost = &ppc601_cost;
1532 break;
1533
1534 case PROCESSOR_PPC603:
1535 rs6000_cost = &ppc603_cost;
1536 break;
1537
1538 case PROCESSOR_PPC604:
1539 rs6000_cost = &ppc604_cost;
1540 break;
1541
1542 case PROCESSOR_PPC604e:
1543 rs6000_cost = &ppc604e_cost;
1544 break;
1545
1546 case PROCESSOR_PPC620:
8b897cfa
RS
1547 rs6000_cost = &ppc620_cost;
1548 break;
1549
f0517163
RS
1550 case PROCESSOR_PPC630:
1551 rs6000_cost = &ppc630_cost;
1552 break;
1553
8b897cfa
RS
1554 case PROCESSOR_PPC750:
1555 case PROCESSOR_PPC7400:
1556 rs6000_cost = &ppc750_cost;
1557 break;
1558
1559 case PROCESSOR_PPC7450:
1560 rs6000_cost = &ppc7450_cost;
1561 break;
1562
1563 case PROCESSOR_PPC8540:
1564 rs6000_cost = &ppc8540_cost;
1565 break;
1566
1567 case PROCESSOR_POWER4:
1568 case PROCESSOR_POWER5:
1569 rs6000_cost = &power4_cost;
1570 break;
1571
1572 default:
37409796 1573 gcc_unreachable ();
8b897cfa 1574 }
5248c961 1575}
5accd822 1576
7ccf35ed
DN
1577/* Implement targetm.vectorize.builtin_mask_for_load. */
1578static tree
1579rs6000_builtin_mask_for_load (void)
1580{
1581 if (TARGET_ALTIVEC)
1582 return altivec_builtin_mask_for_load;
1583 else
1584 return 0;
1585}
1586
5da702b1
AH
1587/* Handle generic options of the form -mfoo=yes/no.
1588 NAME is the option name.
1589 VALUE is the option value.
1590 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1591 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1592static void
5da702b1 1593rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1594{
5da702b1 1595 if (value == 0)
993f19a8 1596 return;
5da702b1
AH
1597 else if (!strcmp (value, "yes"))
1598 *flag = 1;
1599 else if (!strcmp (value, "no"))
1600 *flag = 0;
08b57fb3 1601 else
5da702b1 1602 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1603}
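/* Editor's note: a minimal usage sketch, not part of the original source.
   It shows how the per-option handlers further down route "-mfoo=yes/no"
   strings through the helper above; rs6000_isel is the real flag used by
   the OPT_misel_ case, while the wrapper function name is hypothetical.  */
#if 0
static void
example_parse_isel_option (void)
{
  rs6000_parse_yes_no_option ("isel", "yes", &rs6000_isel);   /* sets rs6000_isel to 1 */
  rs6000_parse_yes_no_option ("isel", "no", &rs6000_isel);    /* sets rs6000_isel to 0 */
  rs6000_parse_yes_no_option ("isel", "bogus", &rs6000_isel); /* reports "unknown -misel= option" */
}
#endif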
1604
c4501e62
JJ
1605/* Validate and record the size specified with the -mtls-size option. */
1606
1607static void
863d938c 1608rs6000_parse_tls_size_option (void)
c4501e62
JJ
1609{
1610 if (rs6000_tls_size_string == 0)
1611 return;
1612 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1613 rs6000_tls_size = 16;
1614 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1615 rs6000_tls_size = 32;
1616 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1617 rs6000_tls_size = 64;
1618 else
9e637a26 1619 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1620}
1621
5accd822 1622void
a2369ed3 1623optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1624{
2e3f0db6
DJ
1625 if (DEFAULT_ABI == ABI_DARWIN)
1626 /* The Darwin libraries never set errno, so we might as well
1627 avoid calling them when that's the only reason we would. */
1628 flag_errno_math = 0;
59d6560b
DE
1629
1630 /* Double growth factor to counter reduced min jump length. */
1631 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1632
1633 /* Enable section anchors by default.
1634 Skip section anchors for Objective C and Objective C++
 1635	     until the front ends are fixed.  */
23f99493 1636 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 1637 flag_section_anchors = 1;
5accd822 1638}
78f5898b
AH
1639
1640/* Implement TARGET_HANDLE_OPTION. */
1641
1642static bool
1643rs6000_handle_option (size_t code, const char *arg, int value)
1644{
1645 switch (code)
1646 {
1647 case OPT_mno_power:
1648 target_flags &= ~(MASK_POWER | MASK_POWER2
1649 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
1650 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1651 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
1652 break;
1653 case OPT_mno_powerpc:
1654 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1655 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
1656 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1657 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
1658 break;
1659 case OPT_mfull_toc:
d2894ab5
DE
1660 target_flags &= ~MASK_MINIMAL_TOC;
1661 TARGET_NO_FP_IN_TOC = 0;
1662 TARGET_NO_SUM_IN_TOC = 0;
1663 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1664#ifdef TARGET_USES_SYSV4_OPT
 1665	      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
1666 just the same as -mminimal-toc. */
1667 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1668 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1669#endif
1670 break;
1671
1672#ifdef TARGET_USES_SYSV4_OPT
1673 case OPT_mtoc:
1674 /* Make -mtoc behave like -mminimal-toc. */
1675 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1676 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1677 break;
1678#endif
1679
1680#ifdef TARGET_USES_AIX64_OPT
1681 case OPT_maix64:
1682#else
1683 case OPT_m64:
1684#endif
2c9c9afd
AM
1685 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1686 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1687 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
1688 break;
1689
1690#ifdef TARGET_USES_AIX64_OPT
1691 case OPT_maix32:
1692#else
1693 case OPT_m32:
1694#endif
1695 target_flags &= ~MASK_POWERPC64;
c2dba4ab 1696 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
1697 break;
1698
1699 case OPT_minsert_sched_nops_:
1700 rs6000_sched_insert_nops_str = arg;
1701 break;
1702
1703 case OPT_mminimal_toc:
1704 if (value == 1)
1705 {
d2894ab5
DE
1706 TARGET_NO_FP_IN_TOC = 0;
1707 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
1708 }
1709 break;
1710
1711 case OPT_mpower:
1712 if (value == 1)
c2dba4ab
AH
1713 {
1714 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1715 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1716 }
78f5898b
AH
1717 break;
1718
1719 case OPT_mpower2:
1720 if (value == 1)
c2dba4ab
AH
1721 {
1722 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1723 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1724 }
78f5898b
AH
1725 break;
1726
1727 case OPT_mpowerpc_gpopt:
1728 case OPT_mpowerpc_gfxopt:
1729 if (value == 1)
c2dba4ab
AH
1730 {
1731 target_flags |= MASK_POWERPC;
1732 target_flags_explicit |= MASK_POWERPC;
1733 }
78f5898b
AH
1734 break;
1735
df01da37
DE
1736 case OPT_maix_struct_return:
1737 case OPT_msvr4_struct_return:
1738 rs6000_explicit_options.aix_struct_ret = true;
1739 break;
1740
78f5898b
AH
1741 case OPT_mvrsave_:
1742 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1743 break;
78f5898b
AH
1744
1745 case OPT_misel_:
1746 rs6000_explicit_options.isel = true;
1747 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1748 break;
1749
1750 case OPT_mspe_:
1751 rs6000_explicit_options.spe = true;
1752 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1753 /* No SPE means 64-bit long doubles, even if an E500. */
1754 if (!rs6000_spe)
1755 rs6000_long_double_type_size = 64;
1756 break;
1757
1758 case OPT_mdebug_:
1759 rs6000_debug_name = arg;
1760 break;
1761
1762#ifdef TARGET_USES_SYSV4_OPT
1763 case OPT_mcall_:
1764 rs6000_abi_name = arg;
1765 break;
1766
1767 case OPT_msdata_:
1768 rs6000_sdata_name = arg;
1769 break;
1770
1771 case OPT_mtls_size_:
1772 rs6000_tls_size_string = arg;
1773 break;
1774
1775 case OPT_mrelocatable:
1776 if (value == 1)
c2dba4ab 1777 {
e0bf274f
AM
1778 target_flags |= MASK_MINIMAL_TOC;
1779 target_flags_explicit |= MASK_MINIMAL_TOC;
1780 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 1781 }
78f5898b
AH
1782 break;
1783
1784 case OPT_mrelocatable_lib:
1785 if (value == 1)
c2dba4ab 1786 {
e0bf274f
AM
1787 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1788 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1789 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 1790 }
78f5898b 1791 else
c2dba4ab
AH
1792 {
1793 target_flags &= ~MASK_RELOCATABLE;
1794 target_flags_explicit |= MASK_RELOCATABLE;
1795 }
78f5898b
AH
1796 break;
1797#endif
1798
1799 case OPT_mabi_:
78f5898b
AH
1800 if (!strcmp (arg, "altivec"))
1801 {
d3603e8c 1802 rs6000_explicit_options.abi = true;
78f5898b
AH
1803 rs6000_altivec_abi = 1;
1804 rs6000_spe_abi = 0;
1805 }
1806 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
1807 {
1808 /* ??? Don't set rs6000_explicit_options.abi here, to allow
1809 the default for rs6000_spe_abi to be chosen later. */
1810 rs6000_altivec_abi = 0;
1811 }
78f5898b
AH
1812 else if (! strcmp (arg, "spe"))
1813 {
d3603e8c 1814 rs6000_explicit_options.abi = true;
78f5898b
AH
1815 rs6000_spe_abi = 1;
1816 rs6000_altivec_abi = 0;
1817 if (!TARGET_SPE_ABI)
1818 error ("not configured for ABI: '%s'", arg);
1819 }
1820 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
1821 {
1822 rs6000_explicit_options.abi = true;
1823 rs6000_spe_abi = 0;
1824 }
78f5898b
AH
1825
 1826	      /* These are here for testing during development only; please do not
 1827		 document them in the manual.  */
1828 else if (! strcmp (arg, "d64"))
1829 {
1830 rs6000_darwin64_abi = 1;
1831 warning (0, "Using darwin64 ABI");
1832 }
1833 else if (! strcmp (arg, "d32"))
1834 {
1835 rs6000_darwin64_abi = 0;
1836 warning (0, "Using old darwin ABI");
1837 }
1838
602ea4d3
JJ
1839 else if (! strcmp (arg, "ibmlongdouble"))
1840 {
d3603e8c 1841 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
1842 rs6000_ieeequad = 0;
1843 warning (0, "Using IBM extended precision long double");
1844 }
1845 else if (! strcmp (arg, "ieeelongdouble"))
1846 {
d3603e8c 1847 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
1848 rs6000_ieeequad = 1;
1849 warning (0, "Using IEEE extended precision long double");
1850 }
1851
78f5898b
AH
1852 else
1853 {
1854 error ("unknown ABI specified: '%s'", arg);
1855 return false;
1856 }
1857 break;
1858
1859 case OPT_mcpu_:
1860 rs6000_select[1].string = arg;
1861 break;
1862
1863 case OPT_mtune_:
1864 rs6000_select[2].string = arg;
1865 break;
1866
1867 case OPT_mtraceback_:
1868 rs6000_traceback_name = arg;
1869 break;
1870
1871 case OPT_mfloat_gprs_:
1872 rs6000_explicit_options.float_gprs = true;
1873 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1874 rs6000_float_gprs = 1;
1875 else if (! strcmp (arg, "double"))
1876 rs6000_float_gprs = 2;
1877 else if (! strcmp (arg, "no"))
1878 rs6000_float_gprs = 0;
1879 else
1880 {
1881 error ("invalid option for -mfloat-gprs: '%s'", arg);
1882 return false;
1883 }
1884 break;
1885
1886 case OPT_mlong_double_:
1887 rs6000_explicit_options.long_double = true;
1888 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1889 if (value != 64 && value != 128)
1890 {
 1891	  error ("unknown switch -mlong-double-%s", arg);
1892 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1893 return false;
1894 }
1895 else
1896 rs6000_long_double_type_size = value;
1897 break;
1898
1899 case OPT_msched_costly_dep_:
1900 rs6000_sched_costly_dep_str = arg;
1901 break;
1902
1903 case OPT_malign_:
1904 rs6000_explicit_options.alignment = true;
1905 if (! strcmp (arg, "power"))
1906 {
1907 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1908 some C library functions, so warn about it. The flag may be
1909 useful for performance studies from time to time though, so
1910 don't disable it entirely. */
1911 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1912 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1913 " it is incompatible with the installed C and C++ libraries");
1914 rs6000_alignment_flags = MASK_ALIGN_POWER;
1915 }
1916 else if (! strcmp (arg, "natural"))
1917 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1918 else
1919 {
1920 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1921 return false;
1922 }
1923 break;
1924 }
1925 return true;
1926}
3cfa4909
MM
1927\f
1928/* Do anything needed at the start of the asm file. */
1929
1bc7c5b6 1930static void
863d938c 1931rs6000_file_start (void)
3cfa4909 1932{
c4d38ccb 1933 size_t i;
3cfa4909 1934 char buffer[80];
d330fd93 1935 const char *start = buffer;
3cfa4909 1936 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
1937 const char *default_cpu = TARGET_CPU_DEFAULT;
1938 FILE *file = asm_out_file;
1939
1940 default_file_start ();
1941
1942#ifdef TARGET_BI_ARCH
1943 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1944 default_cpu = 0;
1945#endif
3cfa4909
MM
1946
1947 if (flag_verbose_asm)
1948 {
1949 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1950 rs6000_select[0].string = default_cpu;
1951
b6a1cbae 1952 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
1953 {
1954 ptr = &rs6000_select[i];
1955 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1956 {
1957 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1958 start = "";
1959 }
1960 }
1961
9c6b4ed9 1962 if (PPC405_ERRATUM77)
b0bfee6e 1963 {
9c6b4ed9 1964 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
1965 start = "";
1966 }
b0bfee6e 1967
b91da81f 1968#ifdef USING_ELFOS_H
3cfa4909
MM
1969 switch (rs6000_sdata)
1970 {
1971 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1972 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1973 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1974 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1975 }
1976
1977 if (rs6000_sdata && g_switch_value)
1978 {
307b599c
MK
1979 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1980 g_switch_value);
3cfa4909
MM
1981 start = "";
1982 }
1983#endif
1984
1985 if (*start == '\0')
949ea356 1986 putc ('\n', file);
3cfa4909 1987 }
b723e82f
JJ
1988
1989 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1990 {
d6b5193b
RS
1991 switch_to_section (toc_section);
1992 switch_to_section (text_section);
b723e82f 1993 }
3cfa4909 1994}
c4e18b1c 1995
5248c961 1996\f
a0ab749a 1997/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
1998
1999int
863d938c 2000direct_return (void)
9878760c 2001{
4697a36c
MM
2002 if (reload_completed)
2003 {
2004 rs6000_stack_t *info = rs6000_stack_info ();
2005
2006 if (info->first_gp_reg_save == 32
2007 && info->first_fp_reg_save == 64
00b960c7 2008 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2009 && ! info->lr_save_p
2010 && ! info->cr_save_p
00b960c7 2011 && info->vrsave_mask == 0
c81fc13e 2012 && ! info->push_p)
4697a36c
MM
2013 return 1;
2014 }
2015
2016 return 0;
9878760c
RK
2017}
2018
4e74d8ec
MM
2019/* Return the number of instructions it takes to form a constant in an
2020 integer register. */
2021
48d72335 2022int
a2369ed3 2023num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2024{
2025 /* signed constant loadable with {cal|addi} */
547b216d 2026 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2027 return 1;
2028
4e74d8ec 2029 /* constant loadable with {cau|addis} */
547b216d
DE
2030 else if ((value & 0xffff) == 0
2031 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2032 return 1;
2033
5f59ecb7 2034#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2035 else if (TARGET_POWERPC64)
4e74d8ec 2036 {
a65c591c
DE
2037 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2038 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2039
a65c591c 2040 if (high == 0 || high == -1)
4e74d8ec
MM
2041 return 2;
2042
a65c591c 2043 high >>= 1;
4e74d8ec 2044
a65c591c 2045 if (low == 0)
4e74d8ec 2046 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2047 else
2048 return (num_insns_constant_wide (high)
e396202a 2049 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2050 }
2051#endif
2052
2053 else
2054 return 2;
2055}
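/* Editor's note, illustrative only: a few worked examples of the cases above.
     0x7fff  -> 1 insn  (fits the signed 16-bit addi/cal form)
     0x70000 -> 1 insn  (low 16 bits zero, fits addis/cau)
     0x12345 -> 2 insns (e.g. an addis of the high part plus an ori/addi of
		the low part)
   On 64-bit targets the value is split into high and low halves as coded
   above and the two counts are combined.  */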
2056
2057int
a2369ed3 2058num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2059{
37409796 2060 HOST_WIDE_INT low, high;
bb8df8a6 2061
37409796 2062 switch (GET_CODE (op))
0d30d435 2063 {
37409796 2064 case CONST_INT:
0d30d435 2065#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2066 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2067 && mask64_operand (op, mode))
c4ad648e 2068 return 2;
0d30d435
DE
2069 else
2070#endif
2071 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2072
37409796
NS
2073 case CONST_DOUBLE:
2074 if (mode == SFmode)
2075 {
2076 long l;
2077 REAL_VALUE_TYPE rv;
bb8df8a6 2078
37409796
NS
2079 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2080 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2081 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2082 }
a260abc9 2083
37409796
NS
2084 if (mode == VOIDmode || mode == DImode)
2085 {
2086 high = CONST_DOUBLE_HIGH (op);
2087 low = CONST_DOUBLE_LOW (op);
2088 }
2089 else
2090 {
2091 long l[2];
2092 REAL_VALUE_TYPE rv;
bb8df8a6 2093
37409796
NS
2094 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2095 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2096 high = l[WORDS_BIG_ENDIAN == 0];
2097 low = l[WORDS_BIG_ENDIAN != 0];
2098 }
47ad8c61 2099
37409796
NS
2100 if (TARGET_32BIT)
2101 return (num_insns_constant_wide (low)
2102 + num_insns_constant_wide (high));
2103 else
2104 {
2105 if ((high == 0 && low >= 0)
2106 || (high == -1 && low < 0))
2107 return num_insns_constant_wide (low);
bb8df8a6 2108
1990cd79 2109 else if (mask64_operand (op, mode))
37409796 2110 return 2;
bb8df8a6 2111
37409796
NS
2112 else if (low == 0)
2113 return num_insns_constant_wide (high) + 1;
bb8df8a6 2114
37409796
NS
2115 else
2116 return (num_insns_constant_wide (high)
2117 + num_insns_constant_wide (low) + 1);
2118 }
bb8df8a6 2119
37409796
NS
2120 default:
2121 gcc_unreachable ();
4e74d8ec 2122 }
4e74d8ec
MM
2123}
2124
0972012c
RS
2125/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2126 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2127 corresponding element of the vector, but for V4SFmode and V2SFmode,
2128 the corresponding "float" is interpreted as an SImode integer. */
2129
2130static HOST_WIDE_INT
2131const_vector_elt_as_int (rtx op, unsigned int elt)
2132{
2133 rtx tmp = CONST_VECTOR_ELT (op, elt);
2134 if (GET_MODE (op) == V4SFmode
2135 || GET_MODE (op) == V2SFmode)
2136 tmp = gen_lowpart (SImode, tmp);
2137 return INTVAL (tmp);
2138}
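/* Editor's note (illustrative): for a V4SFmode element holding 1.0f the value
   returned above is its IEEE single-precision bit pattern, 0x3f800000, which
   is what the vspltis* checks below operate on.  */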
452a7d36 2139
77ccdfed 2140/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2141 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2142 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2143 all items are set to the same value and contain COPIES replicas of the
2144 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2145 operand and the others are set to the value of the operand's msb. */
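/* Editor's note, a worked example that is not part of the original comment:
   the V4SImode constant { 0x00050005, 0x00050005, 0x00050005, 0x00050005 }
   matches with STEP == 1 and COPIES == 2, since each 32-bit element holds two
   copies of the 16-bit splat operand 5, so it can be generated with
   "vspltish 5".  Conversely, a V16QImode view of "vspltisw 5" (roughly
   { 0,0,0,5, 0,0,0,5, 0,0,0,5, 0,0,0,5 } on a big-endian layout) matches with
   STEP == 4 and COPIES == 1: every fourth element is 5 and the rest carry its
   msb, which is 0.  */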
2146
2147static bool
2148vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2149{
66180ff3
PB
2150 enum machine_mode mode = GET_MODE (op);
2151 enum machine_mode inner = GET_MODE_INNER (mode);
2152
2153 unsigned i;
2154 unsigned nunits = GET_MODE_NUNITS (mode);
2155 unsigned bitsize = GET_MODE_BITSIZE (inner);
2156 unsigned mask = GET_MODE_MASK (inner);
2157
0972012c 2158 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2159 HOST_WIDE_INT splat_val = val;
2160 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2161
2162 /* Construct the value to be splatted, if possible. If not, return 0. */
2163 for (i = 2; i <= copies; i *= 2)
452a7d36 2164 {
66180ff3
PB
2165 HOST_WIDE_INT small_val;
2166 bitsize /= 2;
2167 small_val = splat_val >> bitsize;
2168 mask >>= bitsize;
2169 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2170 return false;
2171 splat_val = small_val;
2172 }
c4ad648e 2173
66180ff3
PB
2174 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2175 if (EASY_VECTOR_15 (splat_val))
2176 ;
2177
2178 /* Also check if we can splat, and then add the result to itself. Do so if
 2179	     the value is positive, or if the splat instruction is using OP's mode;
2180 for splat_val < 0, the splat and the add should use the same mode. */
2181 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2182 && (splat_val >= 0 || (step == 1 && copies == 1)))
2183 ;
2184
2185 else
2186 return false;
2187
2188 /* Check if VAL is present in every STEP-th element, and the
2189 other elements are filled with its most significant bit. */
2190 for (i = 0; i < nunits - 1; ++i)
2191 {
2192 HOST_WIDE_INT desired_val;
2193 if (((i + 1) & (step - 1)) == 0)
2194 desired_val = val;
2195 else
2196 desired_val = msb_val;
2197
0972012c 2198 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2199 return false;
452a7d36 2200 }
66180ff3
PB
2201
2202 return true;
452a7d36
HP
2203}
2204
69ef87e2 2205
77ccdfed 2206/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2207 with a vspltisb, vspltish or vspltisw. */
2208
2209bool
2210easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2211{
66180ff3 2212 unsigned step, copies;
d744e06e 2213
66180ff3
PB
2214 if (mode == VOIDmode)
2215 mode = GET_MODE (op);
2216 else if (mode != GET_MODE (op))
2217 return false;
d744e06e 2218
66180ff3
PB
2219 /* Start with a vspltisw. */
2220 step = GET_MODE_NUNITS (mode) / 4;
2221 copies = 1;
2222
2223 if (vspltis_constant (op, step, copies))
2224 return true;
2225
2226 /* Then try with a vspltish. */
2227 if (step == 1)
2228 copies <<= 1;
2229 else
2230 step >>= 1;
2231
2232 if (vspltis_constant (op, step, copies))
2233 return true;
2234
2235 /* And finally a vspltisb. */
2236 if (step == 1)
2237 copies <<= 1;
2238 else
2239 step >>= 1;
2240
2241 if (vspltis_constant (op, step, copies))
2242 return true;
2243
2244 return false;
d744e06e
AH
2245}
2246
66180ff3
PB
2247/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2248 result is OP. Abort if it is not possible. */
d744e06e 2249
f676971a 2250rtx
66180ff3 2251gen_easy_altivec_constant (rtx op)
452a7d36 2252{
66180ff3
PB
2253 enum machine_mode mode = GET_MODE (op);
2254 int nunits = GET_MODE_NUNITS (mode);
2255 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2256 unsigned step = nunits / 4;
2257 unsigned copies = 1;
2258
2259 /* Start with a vspltisw. */
2260 if (vspltis_constant (op, step, copies))
2261 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2262
2263 /* Then try with a vspltish. */
2264 if (step == 1)
2265 copies <<= 1;
2266 else
2267 step >>= 1;
2268
2269 if (vspltis_constant (op, step, copies))
2270 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2271
2272 /* And finally a vspltisb. */
2273 if (step == 1)
2274 copies <<= 1;
2275 else
2276 step >>= 1;
2277
2278 if (vspltis_constant (op, step, copies))
2279 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2280
2281 gcc_unreachable ();
d744e06e
AH
2282}
2283
2284const char *
a2369ed3 2285output_vec_const_move (rtx *operands)
d744e06e
AH
2286{
2287 int cst, cst2;
2288 enum machine_mode mode;
2289 rtx dest, vec;
2290
2291 dest = operands[0];
2292 vec = operands[1];
d744e06e 2293 mode = GET_MODE (dest);
69ef87e2 2294
d744e06e
AH
2295 if (TARGET_ALTIVEC)
2296 {
66180ff3 2297 rtx splat_vec;
d744e06e
AH
2298 if (zero_constant (vec, mode))
2299 return "vxor %0,%0,%0";
37409796 2300
66180ff3
PB
2301 splat_vec = gen_easy_altivec_constant (vec);
2302 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2303 operands[1] = XEXP (splat_vec, 0);
2304 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2305 return "#";
bb8df8a6 2306
66180ff3 2307 switch (GET_MODE (splat_vec))
98ef3137 2308 {
37409796 2309 case V4SImode:
66180ff3 2310 return "vspltisw %0,%1";
c4ad648e 2311
37409796 2312 case V8HImode:
66180ff3 2313 return "vspltish %0,%1";
c4ad648e 2314
37409796 2315 case V16QImode:
66180ff3 2316 return "vspltisb %0,%1";
bb8df8a6 2317
37409796
NS
2318 default:
2319 gcc_unreachable ();
98ef3137 2320 }
69ef87e2
AH
2321 }
2322
37409796 2323 gcc_assert (TARGET_SPE);
bb8df8a6 2324
37409796
NS
2325 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2326 pattern of V1DI, V4HI, and V2SF.
2327
2328 FIXME: We should probably return # and add post reload
2329 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2330 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2331 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2332 operands[1] = CONST_VECTOR_ELT (vec, 0);
2333 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2334 if (cst == cst2)
2335 return "li %0,%1\n\tevmergelo %0,%0,%0";
2336 else
2337 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2338}
2339
7a4eca66
DE
2340/* Initialize vector TARGET to VALS. */
2341
2342void
2343rs6000_expand_vector_init (rtx target, rtx vals)
2344{
2345 enum machine_mode mode = GET_MODE (target);
2346 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2347 int n_elts = GET_MODE_NUNITS (mode);
2348 int n_var = 0, one_var = -1;
2349 bool all_same = true, all_const_zero = true;
2350 rtx x, mem;
2351 int i;
2352
2353 for (i = 0; i < n_elts; ++i)
2354 {
2355 x = XVECEXP (vals, 0, i);
2356 if (!CONSTANT_P (x))
2357 ++n_var, one_var = i;
2358 else if (x != CONST0_RTX (inner_mode))
2359 all_const_zero = false;
2360
2361 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2362 all_same = false;
2363 }
2364
2365 if (n_var == 0)
2366 {
2367 if (mode != V4SFmode && all_const_zero)
2368 {
2369 /* Zero register. */
2370 emit_insn (gen_rtx_SET (VOIDmode, target,
2371 gen_rtx_XOR (mode, target, target)));
2372 return;
2373 }
66180ff3 2374 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2375 {
2376 /* Splat immediate. */
66180ff3 2377 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2378 return;
2379 }
2380 else if (all_same)
2381 ; /* Splat vector element. */
2382 else
2383 {
2384 /* Load from constant pool. */
2385 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2386 return;
2387 }
2388 }
2389
2390 /* Store value to stack temp. Load vector element. Splat. */
2391 if (all_same)
2392 {
2393 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2394 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2395 XVECEXP (vals, 0, 0));
2396 x = gen_rtx_UNSPEC (VOIDmode,
2397 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2398 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2399 gen_rtvec (2,
2400 gen_rtx_SET (VOIDmode,
2401 target, mem),
2402 x)));
2403 x = gen_rtx_VEC_SELECT (inner_mode, target,
2404 gen_rtx_PARALLEL (VOIDmode,
2405 gen_rtvec (1, const0_rtx)));
2406 emit_insn (gen_rtx_SET (VOIDmode, target,
2407 gen_rtx_VEC_DUPLICATE (mode, x)));
2408 return;
2409 }
2410
2411 /* One field is non-constant. Load constant then overwrite
2412 varying field. */
2413 if (n_var == 1)
2414 {
2415 rtx copy = copy_rtx (vals);
2416
57b51d4d 2417 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2418 varying element. */
2419 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2420 rs6000_expand_vector_init (target, copy);
2421
2422 /* Insert variable. */
2423 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2424 return;
2425 }
2426
2427 /* Construct the vector in memory one field at a time
2428 and load the whole vector. */
2429 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2430 for (i = 0; i < n_elts; i++)
2431 emit_move_insn (adjust_address_nv (mem, inner_mode,
2432 i * GET_MODE_SIZE (inner_mode)),
2433 XVECEXP (vals, 0, i));
2434 emit_move_insn (target, mem);
2435}
2436
2437/* Set field ELT of TARGET to VAL. */
2438
2439void
2440rs6000_expand_vector_set (rtx target, rtx val, int elt)
2441{
2442 enum machine_mode mode = GET_MODE (target);
2443 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2444 rtx reg = gen_reg_rtx (mode);
2445 rtx mask, mem, x;
2446 int width = GET_MODE_SIZE (inner_mode);
2447 int i;
2448
2449 /* Load single variable value. */
2450 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2451 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2452 x = gen_rtx_UNSPEC (VOIDmode,
2453 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2454 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2455 gen_rtvec (2,
2456 gen_rtx_SET (VOIDmode,
2457 reg, mem),
2458 x)));
2459
2460 /* Linear sequence. */
2461 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2462 for (i = 0; i < 16; ++i)
2463 XVECEXP (mask, 0, i) = GEN_INT (i);
2464
2465 /* Set permute mask to insert element into target. */
2466 for (i = 0; i < width; ++i)
2467 XVECEXP (mask, 0, elt*width + i)
2468 = GEN_INT (i + 0x10);
2469 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2470 x = gen_rtx_UNSPEC (mode,
2471 gen_rtvec (3, target, reg,
2472 force_reg (V16QImode, x)),
2473 UNSPEC_VPERM);
2474 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2475}
2476
2477/* Extract field ELT from VEC into TARGET. */
2478
2479void
2480rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2481{
2482 enum machine_mode mode = GET_MODE (vec);
2483 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2484 rtx mem, x;
2485
2486 /* Allocate mode-sized buffer. */
2487 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2488
2489 /* Add offset to field within buffer matching vector element. */
2490 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2491
2492 /* Store single field into mode-sized buffer. */
2493 x = gen_rtx_UNSPEC (VOIDmode,
2494 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2495 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2496 gen_rtvec (2,
2497 gen_rtx_SET (VOIDmode,
2498 mem, vec),
2499 x)));
2500 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2501}
2502
0ba1b2ff
AM
2503/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2504 implement ANDing by the mask IN. */
2505void
a2369ed3 2506build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2507{
2508#if HOST_BITS_PER_WIDE_INT >= 64
2509 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2510 int shift;
2511
37409796 2512 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2513
2514 c = INTVAL (in);
2515 if (c & 1)
2516 {
2517 /* Assume c initially something like 0x00fff000000fffff. The idea
2518 is to rotate the word so that the middle ^^^^^^ group of zeros
2519 is at the MS end and can be cleared with an rldicl mask. We then
2520 rotate back and clear off the MS ^^ group of zeros with a
2521 second rldicl. */
2522 c = ~c; /* c == 0xff000ffffff00000 */
2523 lsb = c & -c; /* lsb == 0x0000000000100000 */
2524 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2525 c = ~c; /* c == 0x00fff000000fffff */
2526 c &= -lsb; /* c == 0x00fff00000000000 */
2527 lsb = c & -c; /* lsb == 0x0000100000000000 */
2528 c = ~c; /* c == 0xff000fffffffffff */
2529 c &= -lsb; /* c == 0xff00000000000000 */
2530 shift = 0;
2531 while ((lsb >>= 1) != 0)
2532 shift++; /* shift == 44 on exit from loop */
2533 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2534 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2535 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2536 }
2537 else
0ba1b2ff
AM
2538 {
2539 /* Assume c initially something like 0xff000f0000000000. The idea
2540 is to rotate the word so that the ^^^ middle group of zeros
2541 is at the LS end and can be cleared with an rldicr mask. We then
2542 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2543 a second rldicr. */
2544 lsb = c & -c; /* lsb == 0x0000010000000000 */
2545 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2546 c = ~c; /* c == 0x00fff0ffffffffff */
2547 c &= -lsb; /* c == 0x00fff00000000000 */
2548 lsb = c & -c; /* lsb == 0x0000100000000000 */
2549 c = ~c; /* c == 0xff000fffffffffff */
2550 c &= -lsb; /* c == 0xff00000000000000 */
2551 shift = 0;
2552 while ((lsb >>= 1) != 0)
2553 shift++; /* shift == 44 on exit from loop */
2554 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2555 m1 >>= shift; /* m1 == 0x0000000000000fff */
2556 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2557 }
2558
2559 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2560 masks will be all 1's. We are guaranteed more than one transition. */
2561 out[0] = GEN_INT (64 - shift);
2562 out[1] = GEN_INT (m1);
2563 out[2] = GEN_INT (shift);
2564 out[3] = GEN_INT (m2);
2565#else
045572c7
GK
2566 (void)in;
2567 (void)out;
37409796 2568 gcc_unreachable ();
0ba1b2ff 2569#endif
a260abc9
DE
2570}
2571
54b695e7 2572/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2573
2574bool
54b695e7
AH
2575invalid_e500_subreg (rtx op, enum machine_mode mode)
2576{
2577 /* Reject (subreg:SI (reg:DF)). */
2578 if (GET_CODE (op) == SUBREG
2579 && mode == SImode
2580 && REG_P (SUBREG_REG (op))
2581 && GET_MODE (SUBREG_REG (op)) == DFmode)
2582 return true;
2583
2584 /* Reject (subreg:DF (reg:DI)). */
2585 if (GET_CODE (op) == SUBREG
2586 && mode == DFmode
2587 && REG_P (SUBREG_REG (op))
2588 && GET_MODE (SUBREG_REG (op)) == DImode)
2589 return true;
2590
2591 return false;
2592}
2593
58182de3 2594/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
2595 field is an FP double while the FP fields remain word aligned. */
2596
19d66194 2597unsigned int
fa5b0972
AM
2598rs6000_special_round_type_align (tree type, unsigned int computed,
2599 unsigned int specified)
95727fb8 2600{
fa5b0972 2601 unsigned int align = MAX (computed, specified);
95727fb8 2602 tree field = TYPE_FIELDS (type);
95727fb8 2603
bb8df8a6 2604	  /* Skip all non-field decls.  */
85962ac8 2605 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2606 field = TREE_CHAIN (field);
2607
fa5b0972
AM
2608 if (field != NULL && field != type)
2609 {
2610 type = TREE_TYPE (field);
2611 while (TREE_CODE (type) == ARRAY_TYPE)
2612 type = TREE_TYPE (type);
2613
2614 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2615 align = MAX (align, 64);
2616 }
95727fb8 2617
fa5b0972 2618 return align;
95727fb8
AP
2619}
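/* Editor's note (illustrative): under the rule above, an AIX record such as
   struct { double d; int i; } is raised to 64-bit (doubleword) alignment
   because its first field is a DFmode double, while a record whose double is
   not the first field keeps the alignment it already had.  */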
2620
58182de3
GK
2621/* Darwin increases record alignment to the natural alignment of
2622 the first field. */
2623
2624unsigned int
2625darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
2626 unsigned int specified)
2627{
2628 unsigned int align = MAX (computed, specified);
2629
2630 if (TYPE_PACKED (type))
2631 return align;
2632
2633 /* Find the first field, looking down into aggregates. */
2634 do {
2635 tree field = TYPE_FIELDS (type);
 2637	    /* Skip all non-field decls.  */
2637 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2638 field = TREE_CHAIN (field);
2639 if (! field)
2640 break;
2641 type = TREE_TYPE (field);
2642 while (TREE_CODE (type) == ARRAY_TYPE)
2643 type = TREE_TYPE (type);
2644 } while (AGGREGATE_TYPE_P (type));
2645
2646 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
2647 align = MAX (align, TYPE_ALIGN (type));
2648
2649 return align;
2650}
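/* Editor's note (illustrative): unlike the AIX rule above, this search
   descends through nested aggregates and arrays, so the record alignment is
   driven by the first scalar field found, e.g. the double inside
   struct { struct { double d; } inner; char c; }.  */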
2651
a4f6c312 2652/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
2653
2654int
f676971a 2655small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 2656 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 2657{
38c1f2d7 2658#if TARGET_ELF
5f59ecb7 2659 rtx sym_ref;
7509c759 2660
d9407988 2661 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 2662 return 0;
a54d04b7 2663
f607bc57 2664 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
2665 return 0;
2666
88228c4b
MM
2667 if (GET_CODE (op) == SYMBOL_REF)
2668 sym_ref = op;
2669
2670 else if (GET_CODE (op) != CONST
2671 || GET_CODE (XEXP (op, 0)) != PLUS
2672 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2673 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
2674 return 0;
2675
88228c4b 2676 else
dbf55e53
MM
2677 {
2678 rtx sum = XEXP (op, 0);
2679 HOST_WIDE_INT summand;
2680
2681 /* We have to be careful here, because it is the referenced address
c4ad648e 2682 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 2683 summand = INTVAL (XEXP (sum, 1));
307b599c 2684 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 2685 return 0;
dbf55e53
MM
2686
2687 sym_ref = XEXP (sum, 0);
2688 }
88228c4b 2689
20bfcd69 2690 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
2691#else
2692 return 0;
2693#endif
7509c759 2694}
46c07df8 2695
3a1f863f 2696/* Return true if either operand is a general purpose register. */
46c07df8 2697
3a1f863f
DE
2698bool
2699gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 2700{
3a1f863f
DE
2701 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2702 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
2703}
2704
9ebbca7d 2705\f
4d588c14
RH
2706/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2707
f676971a
EC
2708static int
2709constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 2710{
9390387d 2711 switch (GET_CODE (op))
9ebbca7d
GK
2712 {
2713 case SYMBOL_REF:
c4501e62
JJ
2714 if (RS6000_SYMBOL_REF_TLS_P (op))
2715 return 0;
2716 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
2717 {
2718 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2719 {
2720 *have_sym = 1;
2721 return 1;
2722 }
2723 else
2724 return 0;
2725 }
2726 else if (! strcmp (XSTR (op, 0), toc_label_name))
2727 {
2728 *have_toc = 1;
2729 return 1;
2730 }
2731 else
2732 return 0;
9ebbca7d
GK
2733 case PLUS:
2734 case MINUS:
c1f11548
DE
2735 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2736 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 2737 case CONST:
a4f6c312 2738 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 2739 case CONST_INT:
a4f6c312 2740 return 1;
9ebbca7d 2741 default:
a4f6c312 2742 return 0;
9ebbca7d
GK
2743 }
2744}
2745
4d588c14 2746static bool
a2369ed3 2747constant_pool_expr_p (rtx op)
9ebbca7d
GK
2748{
2749 int have_sym = 0;
2750 int have_toc = 0;
2751 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2752}
2753
48d72335 2754bool
a2369ed3 2755toc_relative_expr_p (rtx op)
9ebbca7d 2756{
4d588c14
RH
2757 int have_sym = 0;
2758 int have_toc = 0;
2759 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2760}
2761
4d588c14 2762bool
a2369ed3 2763legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
2764{
2765 return (TARGET_TOC
2766 && GET_CODE (x) == PLUS
2767 && GET_CODE (XEXP (x, 0)) == REG
2768 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2769 && constant_pool_expr_p (XEXP (x, 1)));
2770}
2771
d04b6e6e
EB
2772static bool
2773legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
2774{
2775 return (DEFAULT_ABI == ABI_V4
2776 && !flag_pic && !TARGET_TOC
2777 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2778 && small_data_operand (x, mode));
2779}
2780
60cdabab
DE
2781/* SPE offset addressing is limited to 5-bits worth of double words. */
2782#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
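/* Editor's note (illustrative): the test above holds only when X has no bits
   set outside bit positions 3-7, i.e. for the offsets 0, 8, 16, ..., 248 --
   a 5-bit count of doublewords.  E.g. SPE_CONST_OFFSET_OK (0xf8) holds,
   while SPE_CONST_OFFSET_OK (0x100) and SPE_CONST_OFFSET_OK (4) do not.  */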
2783
76d2b81d
DJ
2784bool
2785rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2786{
2787 unsigned HOST_WIDE_INT offset, extra;
2788
2789 if (GET_CODE (x) != PLUS)
2790 return false;
2791 if (GET_CODE (XEXP (x, 0)) != REG)
2792 return false;
2793 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2794 return false;
60cdabab
DE
2795 if (legitimate_constant_pool_address_p (x))
2796 return true;
4d588c14
RH
2797 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2798 return false;
2799
2800 offset = INTVAL (XEXP (x, 1));
2801 extra = 0;
2802 switch (mode)
2803 {
2804 case V16QImode:
2805 case V8HImode:
2806 case V4SFmode:
2807 case V4SImode:
7a4eca66
DE
2808 /* AltiVec vector modes. Only reg+reg addressing is valid and
2809 constant offset zero should not occur due to canonicalization.
2810 Allow any offset when not strict before reload. */
2811 return !strict;
4d588c14
RH
2812
2813 case V4HImode:
2814 case V2SImode:
2815 case V1DImode:
2816 case V2SFmode:
2817 /* SPE vector modes. */
2818 return SPE_CONST_OFFSET_OK (offset);
2819
2820 case DFmode:
4d4cbc0e
AH
2821 if (TARGET_E500_DOUBLE)
2822 return SPE_CONST_OFFSET_OK (offset);
2823
4d588c14 2824 case DImode:
54b695e7
AH
2825 /* On e500v2, we may have:
2826
2827 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2828
2829 Which gets addressed with evldd instructions. */
2830 if (TARGET_E500_DOUBLE)
2831 return SPE_CONST_OFFSET_OK (offset);
2832
3364872d 2833 if (mode == DFmode || !TARGET_POWERPC64)
4d588c14
RH
2834 extra = 4;
2835 else if (offset & 3)
2836 return false;
2837 break;
2838
2839 case TFmode:
2840 case TImode:
3364872d 2841 if (mode == TFmode || !TARGET_POWERPC64)
4d588c14
RH
2842 extra = 12;
2843 else if (offset & 3)
2844 return false;
2845 else
2846 extra = 8;
2847 break;
2848
2849 default:
2850 break;
2851 }
2852
b1917422
AM
2853 offset += 0x8000;
2854 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
2855}
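/* Editor's note: a minimal standalone sketch, not part of the original
   source, of the biased range check used just above.  Adding 0x8000 maps the
   signed 16-bit displacement range [-0x8000, 0x7fff] onto [0, 0xffff], and
   EXTRA guards the highest word touched by a multi-word access.  The helper
   name is hypothetical.  */
#if 0
static bool
example_offset_and_extra_fit_16bit (HOST_WIDE_INT offset,
				    unsigned HOST_WIDE_INT extra)
{
  unsigned HOST_WIDE_INT biased = (unsigned HOST_WIDE_INT) offset + 0x8000;
  /* e.g. offset == -4     -> biased == 0x7ffc,  accepted;
	  offset == 0x9000 -> biased == 0x11000, rejected.  */
  return biased < 0x10000 && biased + extra < 0x10000;
}
#endif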
2856
2857static bool
a2369ed3 2858legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
2859{
2860 rtx op0, op1;
2861
2862 if (GET_CODE (x) != PLUS)
2863 return false;
850e8d3d 2864
4d588c14
RH
2865 op0 = XEXP (x, 0);
2866 op1 = XEXP (x, 1);
2867
bf00cc0f 2868 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
2869 replaced with proper base and index regs. */
2870 if (!strict
2871 && reload_in_progress
2872 && (REG_P (op0) || GET_CODE (op0) == PLUS)
2873 && REG_P (op1))
2874 return true;
2875
2876 return (REG_P (op0) && REG_P (op1)
2877 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
2878 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2879 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2880 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
2881}
2882
48d72335 2883inline bool
a2369ed3 2884legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
2885{
2886 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2887}
2888
48d72335 2889bool
4c81e946
FJ
2890macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2891{
c4ad648e 2892 if (!TARGET_MACHO || !flag_pic
9390387d 2893 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
2894 return false;
2895 x = XEXP (x, 0);
4c81e946
FJ
2896
2897 if (GET_CODE (x) != LO_SUM)
2898 return false;
2899 if (GET_CODE (XEXP (x, 0)) != REG)
2900 return false;
2901 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2902 return false;
2903 x = XEXP (x, 1);
2904
2905 return CONSTANT_P (x);
2906}
2907
4d588c14 2908static bool
a2369ed3 2909legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2910{
2911 if (GET_CODE (x) != LO_SUM)
2912 return false;
2913 if (GET_CODE (XEXP (x, 0)) != REG)
2914 return false;
2915 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2916 return false;
54b695e7
AH
2917 /* Restrict addressing for DI because of our SUBREG hackery. */
2918 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
f82f556d 2919 return false;
4d588c14
RH
2920 x = XEXP (x, 1);
2921
8622e235 2922 if (TARGET_ELF || TARGET_MACHO)
4d588c14 2923 {
a29077da 2924 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
2925 return false;
2926 if (TARGET_TOC)
2927 return false;
2928 if (GET_MODE_NUNITS (mode) != 1)
2929 return false;
5e5f01b9 2930 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
2931 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2932 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
2933 return false;
2934
2935 return CONSTANT_P (x);
2936 }
2937
2938 return false;
2939}
2940
2941
9ebbca7d
GK
2942/* Try machine-dependent ways of modifying an illegitimate address
2943 to be legitimate. If we find one, return the new, valid address.
2944 This is used from only one place: `memory_address' in explow.c.
2945
a4f6c312
SS
2946 OLDX is the address as it was before break_out_memory_refs was
2947 called. In some cases it is useful to look at this to decide what
2948 needs to be done.
9ebbca7d 2949
a4f6c312 2950 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 2951
a4f6c312
SS
2952 It is always safe for this function to do nothing. It exists to
2953 recognize opportunities to optimize the output.
9ebbca7d
GK
2954
2955 On RS/6000, first check for the sum of a register with a constant
2956 integer that is out of range. If so, generate code to add the
2957 constant with the low-order 16 bits masked to the register and force
2958 this result into another register (this can be done with `cau').
2959 Then generate an address of REG+(CONST&0xffff), allowing for the
2960 possibility of bit 16 being a one.
2961
2962 Then check for the sum of a register and something not constant, try to
2963 load the other things into a register and return the sum. */
4d588c14 2964
9ebbca7d 2965rtx
a2369ed3
DJ
2966rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2967 enum machine_mode mode)
0ac081f6 2968{
c4501e62
JJ
2969 if (GET_CODE (x) == SYMBOL_REF)
2970 {
2971 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2972 if (model != 0)
2973 return rs6000_legitimize_tls_address (x, model);
2974 }
2975
f676971a 2976 if (GET_CODE (x) == PLUS
9ebbca7d
GK
2977 && GET_CODE (XEXP (x, 0)) == REG
2978 && GET_CODE (XEXP (x, 1)) == CONST_INT
2979 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 2980 {
9ebbca7d
GK
2981 HOST_WIDE_INT high_int, low_int;
2982 rtx sum;
a65c591c
DE
2983 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2984 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
2985 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2986 GEN_INT (high_int)), 0);
2987 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2988 }
f676971a 2989 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
2990 && GET_CODE (XEXP (x, 0)) == REG
2991 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 2992 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
2993 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2994 || TARGET_POWERPC64
54b695e7
AH
2995 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2996 && mode != TFmode))
9ebbca7d
GK
2997 && (TARGET_POWERPC64 || mode != DImode)
2998 && mode != TImode)
2999 {
3000 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3001 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3002 }
0ac081f6
AH
3003 else if (ALTIVEC_VECTOR_MODE (mode))
3004 {
3005 rtx reg;
3006
3007 /* Make sure both operands are registers. */
3008 if (GET_CODE (x) == PLUS)
9f85ed45 3009 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3010 force_reg (Pmode, XEXP (x, 1)));
3011
3012 reg = force_reg (Pmode, x);
3013 return reg;
3014 }
4d4cbc0e 3015 else if (SPE_VECTOR_MODE (mode)
54b695e7
AH
3016 || (TARGET_E500_DOUBLE && (mode == DFmode
3017 || mode == DImode)))
a3170dc6 3018 {
54b695e7
AH
3019 if (mode == DImode)
3020 return NULL_RTX;
a3170dc6
AH
3021 /* We accept [reg + reg] and [reg + OFFSET]. */
3022
3023 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3024 {
3025 rtx op1 = XEXP (x, 0);
3026 rtx op2 = XEXP (x, 1);
a3170dc6 3027
c4ad648e 3028 op1 = force_reg (Pmode, op1);
a3170dc6 3029
c4ad648e
AM
3030 if (GET_CODE (op2) != REG
3031 && (GET_CODE (op2) != CONST_INT
3032 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3033 op2 = force_reg (Pmode, op2);
a3170dc6 3034
c4ad648e
AM
3035 return gen_rtx_PLUS (Pmode, op1, op2);
3036 }
a3170dc6
AH
3037
3038 return force_reg (Pmode, x);
3039 }
f1384257
AM
3040 else if (TARGET_ELF
3041 && TARGET_32BIT
3042 && TARGET_NO_TOC
3043 && ! flag_pic
9ebbca7d 3044 && GET_CODE (x) != CONST_INT
f676971a 3045 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3046 && CONSTANT_P (x)
6ac7bf2c
GK
3047 && GET_MODE_NUNITS (mode) == 1
3048 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3049 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3050 {
3051 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3052 emit_insn (gen_elf_high (reg, x));
3053 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3054 }
ee890fe2
SS
3055 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3056 && ! flag_pic
ab82a49f
AP
3057#if TARGET_MACHO
3058 && ! MACHO_DYNAMIC_NO_PIC_P
3059#endif
ee890fe2 3060 && GET_CODE (x) != CONST_INT
f676971a 3061 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3062 && CONSTANT_P (x)
f82f556d 3063 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3064 && mode != DImode
ee890fe2
SS
3065 && mode != TImode)
3066 {
3067 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3068 emit_insn (gen_macho_high (reg, x));
3069 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3070 }
f676971a 3071 else if (TARGET_TOC
4d588c14 3072 && constant_pool_expr_p (x)
a9098fd0 3073 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3074 {
3075 return create_TOC_reference (x);
3076 }
3077 else
3078 return NULL_RTX;
3079}
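/* Editor's note, a worked example (illustrative) of the high/low split done
   above for reg + large constant.  For INTVAL (XEXP (x, 1)) == 0x1a000:
     low_int  = ((0x1a000 & 0xffff) ^ 0x8000) - 0x8000 = -0x6000
     high_int = 0x1a000 - (-0x6000)                    = 0x20000
   so the address is rebuilt as (reg + 0x20000) + (-0x6000): the 0x20000 part
   is added with an addis while the final displacement stays within the
   signed 16-bit range that a d-form load/store can encode.  */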
258bfae2 3080
fdbe66f2 3081/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3082 We need to emit DTP-relative relocations. */
3083
fdbe66f2 3084static void
c973d557
JJ
3085rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3086{
3087 switch (size)
3088 {
3089 case 4:
3090 fputs ("\t.long\t", file);
3091 break;
3092 case 8:
3093 fputs (DOUBLE_INT_ASM_OP, file);
3094 break;
3095 default:
37409796 3096 gcc_unreachable ();
c973d557
JJ
3097 }
3098 output_addr_const (file, x);
3099 fputs ("@dtprel+0x8000", file);
3100}
3101
c4501e62
JJ
3102/* Construct the SYMBOL_REF for the tls_get_addr function. */
3103
3104static GTY(()) rtx rs6000_tls_symbol;
3105static rtx
863d938c 3106rs6000_tls_get_addr (void)
c4501e62
JJ
3107{
3108 if (!rs6000_tls_symbol)
3109 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3110
3111 return rs6000_tls_symbol;
3112}
3113
3114/* Construct the SYMBOL_REF for TLS GOT references. */
3115
3116static GTY(()) rtx rs6000_got_symbol;
3117static rtx
863d938c 3118rs6000_got_sym (void)
c4501e62
JJ
3119{
3120 if (!rs6000_got_symbol)
3121 {
3122 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3123 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3124 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3125 }
c4501e62
JJ
3126
3127 return rs6000_got_symbol;
3128}
3129
3130/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3131 this (thread-local) address. */
3132
3133static rtx
a2369ed3 3134rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3135{
3136 rtx dest, insn;
3137
3138 dest = gen_reg_rtx (Pmode);
3139 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3140 {
3141 rtx tlsreg;
3142
3143 if (TARGET_64BIT)
3144 {
3145 tlsreg = gen_rtx_REG (Pmode, 13);
3146 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3147 }
3148 else
3149 {
3150 tlsreg = gen_rtx_REG (Pmode, 2);
3151 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3152 }
3153 emit_insn (insn);
3154 }
3155 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3156 {
3157 rtx tlsreg, tmp;
3158
3159 tmp = gen_reg_rtx (Pmode);
3160 if (TARGET_64BIT)
3161 {
3162 tlsreg = gen_rtx_REG (Pmode, 13);
3163 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3164 }
3165 else
3166 {
3167 tlsreg = gen_rtx_REG (Pmode, 2);
3168 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3169 }
3170 emit_insn (insn);
3171 if (TARGET_64BIT)
3172 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3173 else
3174 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3175 emit_insn (insn);
3176 }
3177 else
3178 {
3179 rtx r3, got, tga, tmp1, tmp2, eqv;
3180
4fed8f8f
AM
3181 /* We currently use relocations like @got@tlsgd for tls, which
3182 means the linker will handle allocation of tls entries, placing
3183 them in the .got section. So use a pointer to the .got section,
3184 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3185 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3186 if (TARGET_64BIT)
972f427b 3187 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3188 else
3189 {
3190 if (flag_pic == 1)
3191 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3192 else
3193 {
3194 rtx gsym = rs6000_got_sym ();
3195 got = gen_reg_rtx (Pmode);
3196 if (flag_pic == 0)
3197 rs6000_emit_move (got, gsym, Pmode);
3198 else
3199 {
ccbca5e4 3200 rtx tempLR, tmp3, mem;
c4501e62
JJ
3201 rtx first, last;
3202
c4501e62
JJ
3203 tempLR = gen_reg_rtx (Pmode);
3204 tmp1 = gen_reg_rtx (Pmode);
3205 tmp2 = gen_reg_rtx (Pmode);
3206 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3207 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3208
ccbca5e4 3209 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
c4501e62
JJ
3210 emit_move_insn (tmp1, tempLR);
3211 emit_move_insn (tmp2, mem);
3212 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3213 last = emit_move_insn (got, tmp3);
3214 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3215 REG_NOTES (last));
3216 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3217 REG_NOTES (first));
3218 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3219 REG_NOTES (last));
3220 }
3221 }
3222 }
3223
3224 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3225 {
3226 r3 = gen_rtx_REG (Pmode, 3);
3227 if (TARGET_64BIT)
3228 insn = gen_tls_gd_64 (r3, got, addr);
3229 else
3230 insn = gen_tls_gd_32 (r3, got, addr);
3231 start_sequence ();
3232 emit_insn (insn);
3233 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3234 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3235 insn = emit_call_insn (insn);
3236 CONST_OR_PURE_CALL_P (insn) = 1;
3237 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3238 insn = get_insns ();
3239 end_sequence ();
3240 emit_libcall_block (insn, dest, r3, addr);
3241 }
3242 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3243 {
3244 r3 = gen_rtx_REG (Pmode, 3);
3245 if (TARGET_64BIT)
3246 insn = gen_tls_ld_64 (r3, got);
3247 else
3248 insn = gen_tls_ld_32 (r3, got);
3249 start_sequence ();
3250 emit_insn (insn);
3251 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3252 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3253 insn = emit_call_insn (insn);
3254 CONST_OR_PURE_CALL_P (insn) = 1;
3255 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3256 insn = get_insns ();
3257 end_sequence ();
3258 tmp1 = gen_reg_rtx (Pmode);
3259 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3260 UNSPEC_TLSLD);
3261 emit_libcall_block (insn, tmp1, r3, eqv);
3262 if (rs6000_tls_size == 16)
3263 {
3264 if (TARGET_64BIT)
3265 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3266 else
3267 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3268 }
3269 else if (rs6000_tls_size == 32)
3270 {
3271 tmp2 = gen_reg_rtx (Pmode);
3272 if (TARGET_64BIT)
3273 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3274 else
3275 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3276 emit_insn (insn);
3277 if (TARGET_64BIT)
3278 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3279 else
3280 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3281 }
3282 else
3283 {
3284 tmp2 = gen_reg_rtx (Pmode);
3285 if (TARGET_64BIT)
3286 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3287 else
3288 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3289 emit_insn (insn);
3290 insn = gen_rtx_SET (Pmode, dest,
3291 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3292 }
3293 emit_insn (insn);
3294 }
3295 else
3296 {
3297 /* IE, or 64 bit offset LE. */
3298 tmp2 = gen_reg_rtx (Pmode);
3299 if (TARGET_64BIT)
3300 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3301 else
3302 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3303 emit_insn (insn);
3304 if (TARGET_64BIT)
3305 insn = gen_tls_tls_64 (dest, tmp2, addr);
3306 else
3307 insn = gen_tls_tls_32 (dest, tmp2, addr);
3308 emit_insn (insn);
3309 }
3310 }
3311
3312 return dest;
3313}
3314
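
A user-level sketch of what feeds into rs6000_legitimize_tls_address (not part of rs6000.c): every access to the __thread variable below gets its address built by one of the sequences generated above, with r13 as the thread pointer on 64-bit and r2 on 32-bit, exactly as the local-exec branches do. Which model is picked depends on -fpic and on whether the variable is defined locally; local-exec is only the typical non-PIC case.

#include <stdio.h>

__thread int counter;        /* thread-local; exercises the code above */

int
bump_counter (void)
{
  /* The address of `counter' is formed from the thread pointer
     (r13 on 64-bit, r2 on 32-bit) plus a tprel-style offset in the
     usual non-PIC (local-exec) case; -fPIC shared objects go through
     the global-dynamic/local-dynamic/__tls_get_addr paths instead.  */
  return ++counter;
}

int
main (void)
{
  printf ("%d\n", bump_counter ());
  return 0;
}
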
c4501e62
JJ
3315/* Return 1 if X contains a thread-local symbol. */
3316
3317bool
a2369ed3 3318rs6000_tls_referenced_p (rtx x)
c4501e62 3319{
cd413cab
AP
3320 if (! TARGET_HAVE_TLS)
3321 return false;
3322
c4501e62
JJ
3323 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3324}
3325
3326/* Return 1 if *X is a thread-local symbol. This is the same as
3327 rs6000_tls_symbol_ref except for the type of the unused argument. */
3328
9390387d 3329static int
a2369ed3 3330rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3331{
3332 return RS6000_SYMBOL_REF_TLS_P (*x);
3333}
3334
24ea750e
DJ
3335/* The convention appears to be to define this wherever it is used.
3336 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3337 is now used here. */
3338#ifndef REG_MODE_OK_FOR_BASE_P
3339#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3340#endif
3341
3342/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3343 replace the input X, or the original X if no replacement is called for.
3344 The output parameter *WIN is 1 if the calling macro should goto WIN,
3345 0 if it should not.
3346
3347 For RS/6000, we wish to handle large displacements off a base
3348 register by splitting the addend across an addi/addis and the mem insn.
3349 This cuts the number of extra insns needed from 3 to 1.
3350
3351 On Darwin, we use this to generate code for floating point constants.
3352 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3353 The Darwin code is inside #if TARGET_MACHO because only then is
3354 machopic_function_base_name() defined. */
3355rtx
f676971a 3356rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3357 int opnum, int type,
3358 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3359{
f676971a 3360 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3361 if (GET_CODE (x) == PLUS
3362 && GET_CODE (XEXP (x, 0)) == PLUS
3363 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3364 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3365 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3366 {
3367 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3368 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3369 opnum, (enum reload_type)type);
24ea750e
DJ
3370 *win = 1;
3371 return x;
3372 }
3deb2758 3373
24ea750e
DJ
3374#if TARGET_MACHO
3375 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3376 && GET_CODE (x) == LO_SUM
3377 && GET_CODE (XEXP (x, 0)) == PLUS
3378 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3379 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3380 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3381 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3382 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3383 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3384 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3385 {
3386 /* Result of a previous invocation of this function on a Darwin
6f317ef3 3387 floating point constant. */
24ea750e 3388 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3389 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3390 opnum, (enum reload_type)type);
24ea750e
DJ
3391 *win = 1;
3392 return x;
3393 }
3394#endif
4937d02d
DE
3395
3396 /* Force ld/std non-word aligned offset into base register by wrapping
3397 in offset 0. */
3398 if (GET_CODE (x) == PLUS
3399 && GET_CODE (XEXP (x, 0)) == REG
3400 && REGNO (XEXP (x, 0)) < 32
3401 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3402 && GET_CODE (XEXP (x, 1)) == CONST_INT
3403 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3404 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3405 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3406 && TARGET_POWERPC64)
3407 {
3408 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3409 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3410 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3411 opnum, (enum reload_type) type);
3412 *win = 1;
3413 return x;
3414 }
3415
24ea750e
DJ
3416 if (GET_CODE (x) == PLUS
3417 && GET_CODE (XEXP (x, 0)) == REG
3418 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3419 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3420 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3421 && !SPE_VECTOR_MODE (mode)
54b695e7
AH
3422 && !(TARGET_E500_DOUBLE && (mode == DFmode
3423 || mode == DImode))
78c875e8 3424 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3425 {
3426 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3427 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3428 HOST_WIDE_INT high
c4ad648e 3429 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3430
3431 /* Check for 32-bit overflow. */
3432 if (high + low != val)
c4ad648e 3433 {
24ea750e
DJ
3434 *win = 0;
3435 return x;
3436 }
3437
3438 /* Reload the high part into a base reg; leave the low part
c4ad648e 3439 in the mem directly. */
24ea750e
DJ
3440
3441 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3442 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3443 GEN_INT (high)),
3444 GEN_INT (low));
24ea750e
DJ
3445
3446 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3447 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3448 opnum, (enum reload_type)type);
24ea750e
DJ
3449 *win = 1;
3450 return x;
3451 }
4937d02d 3452
24ea750e 3453 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3454 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3455 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3456#if TARGET_MACHO
3457 && DEFAULT_ABI == ABI_DARWIN
a29077da 3458 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3459#else
3460 && DEFAULT_ABI == ABI_V4
3461 && !flag_pic
3462#endif
0d8c1c97 3463 /* Don't do this for TFmode, since the result isn't offsettable.
7b5d92b2
AM
3464 The same goes for DImode without 64-bit gprs and DFmode
3465 without fprs. */
0d8c1c97 3466 && mode != TFmode
7b5d92b2
AM
3467 && (mode != DImode || TARGET_POWERPC64)
3468 && (mode != DFmode || TARGET_POWERPC64
3469 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3470 {
8308679f 3471#if TARGET_MACHO
a29077da
GK
3472 if (flag_pic)
3473 {
3474 rtx offset = gen_rtx_CONST (Pmode,
3475 gen_rtx_MINUS (Pmode, x,
11abc112 3476 machopic_function_base_sym ()));
a29077da
GK
3477 x = gen_rtx_LO_SUM (GET_MODE (x),
3478 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3479 gen_rtx_HIGH (Pmode, offset)), offset);
3480 }
3481 else
8308679f 3482#endif
a29077da 3483 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3484 gen_rtx_HIGH (Pmode, x), x);
a29077da 3485
24ea750e 3486 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3487 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3488 opnum, (enum reload_type)type);
24ea750e
DJ
3489 *win = 1;
3490 return x;
3491 }
4937d02d 3492
dec1f3aa
DE
3493 /* Reload an offset address wrapped by an AND that represents the
3494 masking of the lower bits. Strip the outer AND and let reload
3495 convert the offset address into an indirect address. */
3496 if (TARGET_ALTIVEC
3497 && ALTIVEC_VECTOR_MODE (mode)
3498 && GET_CODE (x) == AND
3499 && GET_CODE (XEXP (x, 0)) == PLUS
3500 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3501 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3502 && GET_CODE (XEXP (x, 1)) == CONST_INT
3503 && INTVAL (XEXP (x, 1)) == -16)
3504 {
3505 x = XEXP (x, 0);
3506 *win = 1;
3507 return x;
3508 }
3509
24ea750e 3510 if (TARGET_TOC
4d588c14 3511 && constant_pool_expr_p (x)
c1f11548 3512 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3513 {
194c524a 3514 x = create_TOC_reference (x);
24ea750e
DJ
3515 *win = 1;
3516 return x;
3517 }
3518 *win = 0;
3519 return x;
f676971a 3520}
24ea750e 3521
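
The displacement split used above is easiest to check with concrete numbers: the low 16 bits are kept, sign-extended, as the offset in the mem insn, and the remainder is folded into the base register via addis. Below is a standalone sketch, not GCC code; long long stands in for HOST_WIDE_INT and the displacement value is arbitrary.

#include <stdio.h>

int
main (void)
{
  long long val = 0x12345678;                            /* example displacement */
  long long low = ((val & 0xffff) ^ 0x8000) - 0x8000;    /* sign-extended low 16 bits */
  long long high = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

  /* `high' goes into the base register via addis; `low' stays as the
     16-bit offset of the ld/std itself.  The sum only fails to give
     back `val' on 32-bit overflow, which the code above rejects.  */
  printf ("val  = 0x%llx\n", (unsigned long long) val);
  printf ("high = 0x%llx, low = %lld\n", (unsigned long long) high, low);
  printf ("high + low == val: %s\n", high + low == val ? "yes" : "no");
  return 0;
}
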
258bfae2
FS
3522/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3523 that is a valid memory address for an instruction.
3524 The MODE argument is the machine mode for the MEM expression
3525 that wants to use this address.
3526
3527 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3528 refers to a constant pool entry of an address (or the sum of it
3529 plus a constant), a short (16-bit signed) constant plus a register,
3530 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3531 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3532 we must ensure that both words are addressable or PowerPC64 with offset
3533 word aligned.
3534
3535 For modes spanning multiple registers (DFmode in 32-bit GPRs,
76d2b81d 3536 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
258bfae2
FS
3537 adjacent memory cells are accessed by adding word-sized offsets
3538 during assembly output. */
3539int
a2369ed3 3540rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3541{
850e8d3d
DN
3542 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3543 if (TARGET_ALTIVEC
3544 && ALTIVEC_VECTOR_MODE (mode)
3545 && GET_CODE (x) == AND
3546 && GET_CODE (XEXP (x, 1)) == CONST_INT
3547 && INTVAL (XEXP (x, 1)) == -16)
3548 x = XEXP (x, 0);
3549
c4501e62
JJ
3550 if (RS6000_SYMBOL_REF_TLS_P (x))
3551 return 0;
4d588c14 3552 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3553 return 1;
3554 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3555 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3556 && !SPE_VECTOR_MODE (mode)
429ec7dc 3557 && mode != TFmode
54b695e7
AH
3558 /* Restrict addressing for DI because of our SUBREG hackery. */
3559 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
258bfae2 3560 && TARGET_UPDATE
4d588c14 3561 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3562 return 1;
d04b6e6e 3563 if (legitimate_small_data_p (mode, x))
258bfae2 3564 return 1;
4d588c14 3565 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3566 return 1;
3567 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3568 if (! reg_ok_strict
3569 && GET_CODE (x) == PLUS
3570 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3571 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3572 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3573 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3574 return 1;
76d2b81d 3575 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3576 return 1;
3577 if (mode != TImode
76d2b81d 3578 && mode != TFmode
a3170dc6
AH
3579 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3580 || TARGET_POWERPC64
4d4cbc0e 3581 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3582 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3583 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3584 return 1;
4d588c14 3585 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3586 return 1;
3587 return 0;
3588}
4d588c14
RH
3589
3590/* Go to LABEL if ADDR (a legitimate address expression)
3591 has an effect that depends on the machine mode it is used for.
3592
3593 On the RS/6000 this is true of all integral offsets (since AltiVec
3594 modes don't allow them) and of any pre-increment or decrement.
3595
3596 ??? Except that due to conceptual problems in offsettable_address_p
3597 we can't really report the problems of integral offsets. So leave
f676971a 3598 this assuming that the adjustable offset must be valid for the
4d588c14
RH
3599 sub-words of a TFmode operand, which is what we had before. */
3600
3601bool
a2369ed3 3602rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
3603{
3604 switch (GET_CODE (addr))
3605 {
3606 case PLUS:
3607 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3608 {
3609 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3610 return val + 12 + 0x8000 >= 0x10000;
3611 }
3612 break;
3613
3614 case LO_SUM:
3615 return true;
3616
3617 case PRE_INC:
3618 case PRE_DEC:
3619 return TARGET_UPDATE;
3620
3621 default:
3622 break;
3623 }
3624
3625 return false;
3626}
d8ecbcdb 3627
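
The `val + 12 + 0x8000 >= 0x10000' test above asks whether the offset of the last word of a 16-byte (TFmode) operand still fits a signed 16-bit displacement. A standalone sketch with made-up offsets (not GCC code):

#include <stdio.h>

/* Mirrors the PLUS case above: an offset is mode-dependent when
   adding 12 (the last word of a 16-byte TFmode value) pushes it out
   of the signed 16-bit displacement range.  */
static int
offset_is_mode_dependent (long long val)
{
  return (unsigned long long) (val + 12 + 0x8000) >= 0x10000;
}

int
main (void)
{
  printf ("%d\n", offset_is_mode_dependent (32752));    /* 32752 + 12 fits -> 0 */
  printf ("%d\n", offset_is_mode_dependent (32756));    /* 32756 + 12 = 32768 -> 1 */
  printf ("%d\n", offset_is_mode_dependent (-32768));   /* most negative offset -> 0 */
  return 0;
}
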
d04b6e6e
EB
3628/* More elaborate version of recog's offsettable_memref_p predicate
3629 that works around the ??? note of rs6000_mode_dependent_address.
3630 In particular it accepts
3631
3632 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3633
3634 in 32-bit mode, that the recog predicate rejects. */
3635
3636bool
3637rs6000_offsettable_memref_p (rtx op)
3638{
3639 if (!MEM_P (op))
3640 return false;
3641
3642 /* First mimic offsettable_memref_p. */
3643 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3644 return true;
3645
3646 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3647 the latter predicate knows nothing about the mode of the memory
3648 reference and, therefore, assumes that it is the largest supported
3649 mode (TFmode). As a consequence, legitimate offsettable memory
3650 references are rejected. rs6000_legitimate_offset_address_p contains
3651 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
3652 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3653}
3654
d8ecbcdb
AH
3655/* Return number of consecutive hard regs needed starting at reg REGNO
3656 to hold something of mode MODE.
3657 This is ordinarily the length in words of a value of mode MODE
3658 but can be less for certain modes in special long registers.
3659
3660 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3661 scalar instructions. The upper 32 bits are only available to the
3662 SIMD instructions.
3663
3664 POWER and PowerPC GPRs hold 32 bits worth;
3665 PowerPC64 GPRs and FPRs hold 64 bits worth. */
3666
3667int
3668rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3669{
3670 if (FP_REGNO_P (regno))
3671 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3672
4d4cbc0e
AH
3673 if (TARGET_E500_DOUBLE && mode == DFmode)
3674 return 1;
3675
d8ecbcdb
AH
3676 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3677 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3678
3679 if (ALTIVEC_REGNO_P (regno))
3680 return
3681 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3682
3683 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3684}
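
The divisions above are plain round-up divisions by the register width. A minimal sketch with the 32-bit unit sizes (4-byte GPRs, 8-byte FPRs, 16-byte AltiVec registers), not GCC code:

#include <stdio.h>

/* Round-up division used above: regs = ceil (mode size / unit size).  */
static int
nregs (int mode_size, int unit_size)
{
  return (mode_size + unit_size - 1) / unit_size;
}

int
main (void)
{
  printf ("DFmode (8 bytes) in 32-bit GPRs : %d regs\n", nregs (8, 4));
  printf ("DFmode (8 bytes) in FPRs        : %d reg\n", nregs (8, 8));
  printf ("V4SImode (16 bytes) in VRs      : %d reg\n", nregs (16, 16));
  return 0;
}
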
2aa4498c
AH
3685
3686/* Change register usage conditional on target flags. */
3687void
3688rs6000_conditional_register_usage (void)
3689{
3690 int i;
3691
3692 /* Set MQ register fixed (already call_used) if not POWER
3693 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3694 be allocated. */
3695 if (! TARGET_POWER)
3696 fixed_regs[64] = 1;
3697
7c9ac5c0 3698 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
3699 if (TARGET_64BIT)
3700 fixed_regs[13] = call_used_regs[13]
3701 = call_really_used_regs[13] = 1;
3702
3703 /* Conditionally disable FPRs. */
3704 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3705 for (i = 32; i < 64; i++)
3706 fixed_regs[i] = call_used_regs[i]
c4ad648e 3707 = call_really_used_regs[i] = 1;
2aa4498c 3708
7c9ac5c0
PH
3709 /* The TOC register is not killed across calls in a way that is
3710 visible to the compiler. */
3711 if (DEFAULT_ABI == ABI_AIX)
3712 call_really_used_regs[2] = 0;
3713
2aa4498c
AH
3714 if (DEFAULT_ABI == ABI_V4
3715 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3716 && flag_pic == 2)
3717 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3718
3719 if (DEFAULT_ABI == ABI_V4
3720 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3721 && flag_pic == 1)
3722 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3723 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3724 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3725
3726 if (DEFAULT_ABI == ABI_DARWIN
3727 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 3728 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
3729 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3730 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3731
b4db40bf
JJ
3732 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3733 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3734 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3735
2aa4498c
AH
3736 if (TARGET_ALTIVEC)
3737 global_regs[VSCR_REGNO] = 1;
3738
3739 if (TARGET_SPE)
3740 {
3741 global_regs[SPEFSCR_REGNO] = 1;
3742 fixed_regs[FIXED_SCRATCH]
c4ad648e 3743 = call_used_regs[FIXED_SCRATCH]
2aa4498c
AH
3744 = call_really_used_regs[FIXED_SCRATCH] = 1;
3745 }
3746
3747 if (! TARGET_ALTIVEC)
3748 {
3749 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3750 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3751 call_really_used_regs[VRSAVE_REGNO] = 1;
3752 }
3753
3754 if (TARGET_ALTIVEC_ABI)
3755 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3756 call_used_regs[i] = call_really_used_regs[i] = 1;
3757}
fb4d4348 3758\f
a4f6c312
SS
3759/* Try to output insns to set TARGET equal to the constant C if it can
3760 be done in less than N insns. Do all computations in MODE.
3761 Returns the place where the output has been placed if it can be
3762 done and the insns have been emitted. If it would take more than N
3763 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
3764
3765rtx
f676971a 3766rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 3767 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 3768{
af8cb5c5 3769 rtx result, insn, set;
2bfcf297
DB
3770 HOST_WIDE_INT c0, c1;
3771
37409796 3772 switch (mode)
2bfcf297 3773 {
37409796
NS
3774 case QImode:
3775 case HImode:
2bfcf297 3776 if (dest == NULL)
c4ad648e 3777 dest = gen_reg_rtx (mode);
2bfcf297
DB
3778 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3779 return dest;
bb8df8a6 3780
37409796 3781 case SImode:
af8cb5c5 3782 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
bb8df8a6 3783
af8cb5c5
DE
3784 emit_insn (gen_rtx_SET (VOIDmode, result,
3785 GEN_INT (INTVAL (source)
3786 & (~ (HOST_WIDE_INT) 0xffff))));
3787 emit_insn (gen_rtx_SET (VOIDmode, dest,
3788 gen_rtx_IOR (SImode, result,
3789 GEN_INT (INTVAL (source) & 0xffff))));
3790 result = dest;
37409796
NS
3791 break;
3792
3793 case DImode:
3794 switch (GET_CODE (source))
af8cb5c5 3795 {
37409796 3796 case CONST_INT:
af8cb5c5
DE
3797 c0 = INTVAL (source);
3798 c1 = -(c0 < 0);
37409796 3799 break;
bb8df8a6 3800
37409796 3801 case CONST_DOUBLE:
2bfcf297 3802#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
3803 c0 = CONST_DOUBLE_LOW (source);
3804 c1 = -(c0 < 0);
2bfcf297 3805#else
af8cb5c5
DE
3806 c0 = CONST_DOUBLE_LOW (source);
3807 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 3808#endif
37409796
NS
3809 break;
3810
3811 default:
3812 gcc_unreachable ();
af8cb5c5 3813 }
af8cb5c5
DE
3814
3815 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
3816 break;
3817
3818 default:
3819 gcc_unreachable ();
2bfcf297 3820 }
2bfcf297 3821
af8cb5c5
DE
3822 insn = get_last_insn ();
3823 set = single_set (insn);
3824 if (! CONSTANT_P (SET_SRC (set)))
3825 set_unique_reg_note (insn, REG_EQUAL, source);
3826
3827 return result;
2bfcf297
DB
3828}
3829
3830/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3831 fall back to a straightforward decomposition. We do this to avoid
3832 exponential run times encountered when looking for longer sequences
3833 with rs6000_emit_set_const. */
3834static rtx
a2369ed3 3835rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
3836{
3837 if (!TARGET_POWERPC64)
3838 {
3839 rtx operand1, operand2;
3840
3841 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3842 DImode);
3843 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3844 DImode);
3845 emit_move_insn (operand1, GEN_INT (c1));
3846 emit_move_insn (operand2, GEN_INT (c2));
3847 }
3848 else
3849 {
bc06712d 3850 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 3851
bc06712d 3852 ud1 = c1 & 0xffff;
f921c9c9 3853 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 3854#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 3855 c2 = c1 >> 32;
2bfcf297 3856#endif
bc06712d 3857 ud3 = c2 & 0xffff;
f921c9c9 3858 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 3859
f676971a 3860 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 3861 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 3862 {
bc06712d 3863 if (ud1 & 0x8000)
b78d48dd 3864 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
3865 else
3866 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 3867 }
2bfcf297 3868
f676971a 3869 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 3870 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 3871 {
bc06712d 3872 if (ud2 & 0x8000)
f676971a 3873 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 3874 - 0x80000000));
252b88f7 3875 else
bc06712d
TR
3876 emit_move_insn (dest, GEN_INT (ud2 << 16));
3877 if (ud1 != 0)
3878 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
252b88f7 3879 }
f676971a 3880 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
3881 || (ud4 == 0 && ! (ud3 & 0x8000)))
3882 {
3883 if (ud3 & 0x8000)
f676971a 3884 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
3885 - 0x80000000));
3886 else
3887 emit_move_insn (dest, GEN_INT (ud3 << 16));
3888
3889 if (ud2 != 0)
3890 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3891 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3892 if (ud1 != 0)
3893 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3894 }
f676971a 3895 else
bc06712d
TR
3896 {
3897 if (ud4 & 0x8000)
f676971a 3898 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
3899 - 0x80000000));
3900 else
3901 emit_move_insn (dest, GEN_INT (ud4 << 16));
3902
3903 if (ud3 != 0)
3904 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2bfcf297 3905
bc06712d
TR
3906 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3907 if (ud2 != 0)
f676971a
EC
3908 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3909 GEN_INT (ud2 << 16)));
bc06712d
TR
3910 if (ud1 != 0)
3911 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3912 }
3913 }
2bfcf297
DB
3914 return dest;
3915}
3916
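
The instruction counts implied by the four cases above can be reproduced with ordinary arithmetic. The sketch below is not GCC code; it repeats the ud1..ud4 split and returns how many li/lis/ori/oris/sldi instructions the decomposition would emit, for a few arbitrary constants.

#include <stdio.h>

/* Repeats the ud1..ud4 split above and returns the number of
   instructions the decomposition needs for constant C.  */
static int
insns_for_const (long long c)
{
  unsigned long long u = (unsigned long long) c;
  unsigned int ud1 = u & 0xffff;
  unsigned int ud2 = (u >> 16) & 0xffff;
  unsigned int ud3 = (u >> 32) & 0xffff;
  unsigned int ud4 = (u >> 48) & 0xffff;

  if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
      || (ud4 == 0 && ud3 == 0 && ud2 == 0 && !(ud1 & 0x8000)))
    return 1;                                        /* li */
  if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
      || (ud4 == 0 && ud3 == 0 && !(ud2 & 0x8000)))
    return 1 + (ud1 != 0);                           /* lis [+ ori] */
  if ((ud4 == 0xffff && (ud3 & 0x8000))
      || (ud4 == 0 && !(ud3 & 0x8000)))
    return 2 + (ud2 != 0) + (ud1 != 0);              /* lis, [ori,] sldi 16 [, ori] */
  return 2 + (ud3 != 0) + (ud2 != 0) + (ud1 != 0);   /* lis, [ori,] sldi 32 [, oris] [, ori] */
}

int
main (void)
{
  long long tests[] = { 42, -1, 0x7fffLL, 0x12340000LL, 0x123456789abcdef0LL };
  unsigned int i;

  for (i = 0; i < sizeof tests / sizeof tests[0]; i++)
    printf ("%#llx needs %d insn(s)\n", (unsigned long long) tests[i],
            insns_for_const (tests[i]));
  return 0;
}
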
76d2b81d
DJ
3917/* Helper for the following. Get rid of [r+r] memory refs
3918 in cases where it won't work (TImode, TFmode). */
3919
3920static void
3921rs6000_eliminate_indexed_memrefs (rtx operands[2])
3922{
3923 if (GET_CODE (operands[0]) == MEM
3924 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 3925 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
3926 && ! reload_in_progress)
3927 operands[0]
3928 = replace_equiv_address (operands[0],
3929 copy_addr_to_reg (XEXP (operands[0], 0)));
3930
3931 if (GET_CODE (operands[1]) == MEM
3932 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 3933 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
3934 && ! reload_in_progress)
3935 operands[1]
3936 = replace_equiv_address (operands[1],
3937 copy_addr_to_reg (XEXP (operands[1], 0)));
3938}
3939
fb4d4348
GK
3940/* Emit a move from SOURCE to DEST in mode MODE. */
3941void
a2369ed3 3942rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
3943{
3944 rtx operands[2];
3945 operands[0] = dest;
3946 operands[1] = source;
f676971a 3947
fb4d4348
GK
3948 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3949 if (GET_CODE (operands[1]) == CONST_DOUBLE
3950 && ! FLOAT_MODE_P (mode)
3951 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3952 {
3953 /* FIXME. This should never happen. */
3954 /* Since it seems that it does, do the safe thing and convert
3955 to a CONST_INT. */
2496c7bd 3956 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 3957 }
37409796
NS
3958 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3959 || FLOAT_MODE_P (mode)
3960 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3961 || CONST_DOUBLE_LOW (operands[1]) < 0)
3962 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3963 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 3964
c9e8cb32
DD
3965 /* Check if GCC is setting up a block move that will end up using FP
3966 registers as temporaries. We must make sure this is acceptable. */
3967 if (GET_CODE (operands[0]) == MEM
3968 && GET_CODE (operands[1]) == MEM
3969 && mode == DImode
41543739
GK
3970 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3971 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3972 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3973 ? 32 : MEM_ALIGN (operands[0])))
3974 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 3975 ? 32
41543739
GK
3976 : MEM_ALIGN (operands[1]))))
3977 && ! MEM_VOLATILE_P (operands [0])
3978 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 3979 {
41543739
GK
3980 emit_move_insn (adjust_address (operands[0], SImode, 0),
3981 adjust_address (operands[1], SImode, 0));
3982 emit_move_insn (adjust_address (operands[0], SImode, 4),
3983 adjust_address (operands[1], SImode, 4));
c9e8cb32
DD
3984 return;
3985 }
630d42a0 3986
55aa0757 3987 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
c9dbf840 3988 && !gpc_reg_operand (operands[1], mode))
f6219a5e 3989 operands[1] = force_reg (mode, operands[1]);
a9098fd0 3990
a3170dc6
AH
3991 if (mode == SFmode && ! TARGET_POWERPC
3992 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 3993 && GET_CODE (operands[0]) == MEM)
fb4d4348 3994 {
ffc14f31
GK
3995 int regnum;
3996
3997 if (reload_in_progress || reload_completed)
3998 regnum = true_regnum (operands[1]);
3999 else if (GET_CODE (operands[1]) == REG)
4000 regnum = REGNO (operands[1]);
4001 else
4002 regnum = -1;
f676971a 4003
fb4d4348
GK
4004 /* If operands[1] is a register, on POWER it may have
4005 double-precision data in it, so truncate it to single
4006 precision. */
4007 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4008 {
4009 rtx newreg;
4010 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4011 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4012 operands[1] = newreg;
4013 }
4014 }
4015
c4501e62
JJ
4016 /* Recognize the case where operand[1] is a reference to thread-local
4017 data and load its address to a register. */
84f52ebd 4018 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4019 {
84f52ebd
RH
4020 enum tls_model model;
4021 rtx tmp = operands[1];
4022 rtx addend = NULL;
4023
4024 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4025 {
4026 addend = XEXP (XEXP (tmp, 0), 1);
4027 tmp = XEXP (XEXP (tmp, 0), 0);
4028 }
4029
4030 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4031 model = SYMBOL_REF_TLS_MODEL (tmp);
4032 gcc_assert (model != 0);
4033
4034 tmp = rs6000_legitimize_tls_address (tmp, model);
4035 if (addend)
4036 {
4037 tmp = gen_rtx_PLUS (mode, tmp, addend);
4038 tmp = force_operand (tmp, operands[0]);
4039 }
4040 operands[1] = tmp;
c4501e62
JJ
4041 }
4042
8f4e6caf
RH
4043 /* Handle the case where reload calls us with an invalid address. */
4044 if (reload_in_progress && mode == Pmode
69ef87e2 4045 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4046 || ! nonimmediate_operand (operands[0], mode)))
4047 goto emit_set;
4048
a9baceb1
GK
4049 /* 128-bit constant floating-point values on Darwin should really be
4050 loaded as two parts. */
602ea4d3 4051 if (!TARGET_IEEEQUAD
a9baceb1
GK
4052 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4053 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4054 {
4055 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4056 know how to get a DFmode SUBREG of a TFmode. */
4057 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4058 simplify_gen_subreg (DImode, operands[1], mode, 0),
4059 DImode);
4060 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4061 GET_MODE_SIZE (DImode)),
4062 simplify_gen_subreg (DImode, operands[1], mode,
4063 GET_MODE_SIZE (DImode)),
4064 DImode);
4065 return;
4066 }
4067
fb4d4348
GK
4068 /* FIXME: In the long term, this switch statement should go away
4069 and be replaced by a sequence of tests based on things like
4070 mode == Pmode. */
4071 switch (mode)
4072 {
4073 case HImode:
4074 case QImode:
4075 if (CONSTANT_P (operands[1])
4076 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4077 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4078 break;
4079
06f4e019 4080 case TFmode:
76d2b81d
DJ
4081 rs6000_eliminate_indexed_memrefs (operands);
4082 /* fall through */
4083
fb4d4348
GK
4084 case DFmode:
4085 case SFmode:
f676971a 4086 if (CONSTANT_P (operands[1])
fb4d4348 4087 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4088 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4089 break;
f676971a 4090
0ac081f6
AH
4091 case V16QImode:
4092 case V8HImode:
4093 case V4SFmode:
4094 case V4SImode:
a3170dc6
AH
4095 case V4HImode:
4096 case V2SFmode:
4097 case V2SImode:
00a892b8 4098 case V1DImode:
69ef87e2 4099 if (CONSTANT_P (operands[1])
d744e06e 4100 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4101 operands[1] = force_const_mem (mode, operands[1]);
4102 break;
f676971a 4103
fb4d4348 4104 case SImode:
a9098fd0 4105 case DImode:
fb4d4348
GK
4106 /* Use default pattern for address of ELF small data */
4107 if (TARGET_ELF
a9098fd0 4108 && mode == Pmode
f607bc57 4109 && DEFAULT_ABI == ABI_V4
f676971a 4110 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4111 || GET_CODE (operands[1]) == CONST)
4112 && small_data_operand (operands[1], mode))
fb4d4348
GK
4113 {
4114 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4115 return;
4116 }
4117
f607bc57 4118 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4119 && mode == Pmode && mode == SImode
4120 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4121 {
4122 emit_insn (gen_movsi_got (operands[0], operands[1]));
4123 return;
4124 }
4125
ee890fe2 4126 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4127 && TARGET_NO_TOC
4128 && ! flag_pic
a9098fd0 4129 && mode == Pmode
fb4d4348
GK
4130 && CONSTANT_P (operands[1])
4131 && GET_CODE (operands[1]) != HIGH
4132 && GET_CODE (operands[1]) != CONST_INT)
4133 {
a9098fd0 4134 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
fb4d4348
GK
4135
4136 /* If this is a function address on -mcall-aixdesc,
4137 convert it to the address of the descriptor. */
4138 if (DEFAULT_ABI == ABI_AIX
4139 && GET_CODE (operands[1]) == SYMBOL_REF
4140 && XSTR (operands[1], 0)[0] == '.')
4141 {
4142 const char *name = XSTR (operands[1], 0);
4143 rtx new_ref;
4144 while (*name == '.')
4145 name++;
4146 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4147 CONSTANT_POOL_ADDRESS_P (new_ref)
4148 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4149 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4150 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4151 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4152 operands[1] = new_ref;
4153 }
7509c759 4154
ee890fe2
SS
4155 if (DEFAULT_ABI == ABI_DARWIN)
4156 {
ab82a49f
AP
4157#if TARGET_MACHO
4158 if (MACHO_DYNAMIC_NO_PIC_P)
4159 {
4160 /* Take care of any required data indirection. */
4161 operands[1] = rs6000_machopic_legitimize_pic_address (
4162 operands[1], mode, operands[0]);
4163 if (operands[0] != operands[1])
4164 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4165 operands[0], operands[1]));
ab82a49f
AP
4166 return;
4167 }
4168#endif
b8a55285
AP
4169 emit_insn (gen_macho_high (target, operands[1]));
4170 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4171 return;
4172 }
4173
fb4d4348
GK
4174 emit_insn (gen_elf_high (target, operands[1]));
4175 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4176 return;
4177 }
4178
a9098fd0
GK
4179 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4180 and we have put it in the TOC, we just need to make a TOC-relative
4181 reference to it. */
4182 if (TARGET_TOC
4183 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4184 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4185 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4186 get_pool_mode (operands[1])))
fb4d4348 4187 {
a9098fd0 4188 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4189 }
a9098fd0
GK
4190 else if (mode == Pmode
4191 && CONSTANT_P (operands[1])
38886f37
AO
4192 && ((GET_CODE (operands[1]) != CONST_INT
4193 && ! easy_fp_constant (operands[1], mode))
4194 || (GET_CODE (operands[1]) == CONST_INT
4195 && num_insns_constant (operands[1], mode) > 2)
4196 || (GET_CODE (operands[0]) == REG
4197 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4198 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4199 && ! legitimate_constant_pool_address_p (operands[1])
4200 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4201 {
4202 /* Emit a USE operation so that the constant isn't deleted if
4203 expensive optimizations are turned on because nobody
4204 references it. This should only be done for operands that
4205 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4206 This should not be done for operands that contain LABEL_REFs.
4207 For now, we just handle the obvious case. */
4208 if (GET_CODE (operands[1]) != LABEL_REF)
4209 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4210
c859cda6 4211#if TARGET_MACHO
ee890fe2 4212 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4213 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4214 {
ee890fe2
SS
4215 operands[1] =
4216 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4217 operands[0]);
4218 if (operands[0] != operands[1])
4219 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4220 return;
4221 }
c859cda6 4222#endif
ee890fe2 4223
fb4d4348
GK
4224 /* If we are to limit the number of things we put in the TOC and
4225 this is a symbol plus a constant we can add in one insn,
4226 just put the symbol in the TOC and add the constant. Don't do
4227 this if reload is in progress. */
4228 if (GET_CODE (operands[1]) == CONST
4229 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4230 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4231 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4232 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4233 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4234 && ! side_effects_p (operands[0]))
4235 {
a4f6c312
SS
4236 rtx sym =
4237 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4238 rtx other = XEXP (XEXP (operands[1], 0), 1);
4239
a9098fd0
GK
4240 sym = force_reg (mode, sym);
4241 if (mode == SImode)
4242 emit_insn (gen_addsi3 (operands[0], sym, other));
4243 else
4244 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4245 return;
4246 }
4247
a9098fd0 4248 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4249
f676971a 4250 if (TARGET_TOC
4d588c14 4251 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4252 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4253 get_pool_constant (XEXP (operands[1], 0)),
4254 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4255 {
ba4828e0 4256 operands[1]
542a8afa 4257 = gen_const_mem (mode,
c4ad648e 4258 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4259 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4260 }
fb4d4348
GK
4261 }
4262 break;
a9098fd0 4263
fb4d4348 4264 case TImode:
76d2b81d
DJ
4265 rs6000_eliminate_indexed_memrefs (operands);
4266
27dc0551
DE
4267 if (TARGET_POWER)
4268 {
4269 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4270 gen_rtvec (2,
4271 gen_rtx_SET (VOIDmode,
4272 operands[0], operands[1]),
4273 gen_rtx_CLOBBER (VOIDmode,
4274 gen_rtx_SCRATCH (SImode)))));
4275 return;
4276 }
fb4d4348
GK
4277 break;
4278
4279 default:
37409796 4280 gcc_unreachable ();
fb4d4348
GK
4281 }
4282
a9098fd0
GK
4283 /* Above, we may have called force_const_mem which may have returned
4284 an invalid address. If we can, fix this up; otherwise, reload will
4285 have to deal with it. */
8f4e6caf
RH
4286 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4287 operands[1] = validize_mem (operands[1]);
a9098fd0 4288
8f4e6caf 4289 emit_set:
fb4d4348
GK
4290 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4291}
4697a36c 4292\f
2858f73a
GK
4293/* Nonzero if we can use a floating-point register to pass this arg. */
4294#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4295 (SCALAR_FLOAT_MODE_P (MODE) \
00b79d54 4296 && !DECIMAL_FLOAT_MODE_P (MODE) \
2858f73a
GK
4297 && (CUM)->fregno <= FP_ARG_MAX_REG \
4298 && TARGET_HARD_FLOAT && TARGET_FPRS)
4299
4300/* Nonzero if we can use an AltiVec register to pass this arg. */
4301#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4302 (ALTIVEC_VECTOR_MODE (MODE) \
4303 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4304 && TARGET_ALTIVEC_ABI \
83953138 4305 && (NAMED))
2858f73a 4306
c6e8c921
GK
4307/* Return a nonzero value if the function value must be returned in
4308 memory, just as large structures always are. TYPE will be
4309 the data type of the value, and FNTYPE will be the type of the
4310 function doing the returning, or @code{NULL} for libcalls.
4311
4312 The AIX ABI for the RS/6000 specifies that all structures are
4313 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4314 specifies that structures <= 8 bytes are returned in r3/r4, but a
4315 draft put them in memory, and GCC used to implement the draft
df01da37 4316 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4317 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4318 compatibility can change DRAFT_V4_STRUCT_RET to override the
4319 default, and -m switches get the final word. See
4320 rs6000_override_options for more details.
4321
4322 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4323 long double support is enabled. These values are returned in memory.
4324
4325 int_size_in_bytes returns -1 for variable size objects, which go in
4326 memory always. The cast to unsigned makes -1 > 8. */
4327
4328static bool
4329rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4330{
594a51fe
SS
4331 /* In the darwin64 abi, try to use registers for larger structs
4332 if possible. */
0b5383eb 4333 if (rs6000_darwin64_abi
594a51fe 4334 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4335 && int_size_in_bytes (type) > 0)
4336 {
4337 CUMULATIVE_ARGS valcum;
4338 rtx valret;
4339
4340 valcum.words = 0;
4341 valcum.fregno = FP_ARG_MIN_REG;
4342 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4343 /* Do a trial code generation as if this were going to be passed
4344 as an argument; if any part goes in memory, we return NULL. */
4345 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4346 if (valret)
4347 return false;
4348 /* Otherwise fall through to more conventional ABI rules. */
4349 }
594a51fe 4350
c6e8c921 4351 if (AGGREGATE_TYPE_P (type)
df01da37 4352 && (aix_struct_return
c6e8c921
GK
4353 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4354 return true;
b693336b 4355
bada2eb8
DE
4356 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4357 modes only exist for GCC vector types if -maltivec. */
4358 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4359 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4360 return false;
4361
b693336b
PB
4362 /* Return synthetic vectors in memory. */
4363 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4364 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4365 {
4366 static bool warned_for_return_big_vectors = false;
4367 if (!warned_for_return_big_vectors)
4368 {
d4ee4d25 4369 warning (0, "GCC vector returned by reference: "
b693336b
PB
4370 "non-standard ABI extension with no compatibility guarantee");
4371 warned_for_return_big_vectors = true;
4372 }
4373 return true;
4374 }
4375
602ea4d3 4376 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4377 return true;
ad630bef 4378
c6e8c921
GK
4379 return false;
4380}
4381
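
A small source-level illustration of the rule just described (not part of rs6000.c): under the AIX and Darwin ABIs both functions below return through memory, while under the 32-bit SVR4 ABI, with aix_struct_return clear, the 8-byte struct comes back in r3/r4 and the 12-byte one still uses a hidden return pointer.

#include <stdio.h>

struct pair   { int a, b;    };   /*  8 bytes */
struct triple { int a, b, c; };   /* 12 bytes */

struct pair
make_pair (void)
{
  struct pair p = { 1, 2 };
  return p;                       /* r3/r4 under 32-bit SVR4, memory under AIX/Darwin */
}

struct triple
make_triple (void)
{
  struct triple t = { 1, 2, 3 };
  return t;                       /* always returned via a hidden pointer */
}

int
main (void)
{
  struct pair p = make_pair ();
  struct triple t = make_triple ();
  printf ("%d %d %d %d %d\n", p.a, p.b, t.a, t.b, t.c);
  return 0;
}
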
4697a36c
MM
4382/* Initialize a variable CUM of type CUMULATIVE_ARGS
4383 for a call to a function whose data type is FNTYPE.
4384 For a library call, FNTYPE is 0.
4385
4386 For incoming args we set the number of arguments in the prototype large
1c20ae99 4387 so we never return a PARALLEL. */
4697a36c
MM
4388
4389void
f676971a 4390init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4391 rtx libname ATTRIBUTE_UNUSED, int incoming,
4392 int libcall, int n_named_args)
4697a36c
MM
4393{
4394 static CUMULATIVE_ARGS zero_cumulative;
4395
4396 *cum = zero_cumulative;
4397 cum->words = 0;
4398 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4399 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4400 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4401 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4402 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4403 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4404 cum->stdarg = fntype
4405 && (TYPE_ARG_TYPES (fntype) != 0
4406 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4407 != void_type_node));
4697a36c 4408
0f6937fe
AM
4409 cum->nargs_prototype = 0;
4410 if (incoming || cum->prototype)
4411 cum->nargs_prototype = n_named_args;
4697a36c 4412
a5c76ee6 4413 /* Check for a longcall attribute. */
3eb4e360
AM
4414 if ((!fntype && rs6000_default_long_calls)
4415 || (fntype
4416 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4417 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4418 cum->call_cookie |= CALL_LONG;
6a4cee5f 4419
4697a36c
MM
4420 if (TARGET_DEBUG_ARG)
4421 {
4422 fprintf (stderr, "\ninit_cumulative_args:");
4423 if (fntype)
4424 {
4425 tree ret_type = TREE_TYPE (fntype);
4426 fprintf (stderr, " ret code = %s,",
4427 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4428 }
4429
6a4cee5f
MM
4430 if (cum->call_cookie & CALL_LONG)
4431 fprintf (stderr, " longcall,");
4432
4697a36c
MM
4433 fprintf (stderr, " proto = %d, nargs = %d\n",
4434 cum->prototype, cum->nargs_prototype);
4435 }
f676971a 4436
c4ad648e
AM
4437 if (fntype
4438 && !TARGET_ALTIVEC
4439 && TARGET_ALTIVEC_ABI
4440 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4441 {
c85ce869 4442 error ("cannot return value in vector register because"
c4ad648e 4443 " altivec instructions are disabled, use -maltivec"
c85ce869 4444 " to enable them");
c4ad648e 4445 }
4697a36c
MM
4446}
4447\f
fe984136
RH
4448/* Return true if TYPE must be passed on the stack and not in registers. */
4449
4450static bool
4451rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4452{
4453 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4454 return must_pass_in_stack_var_size (mode, type);
4455 else
4456 return must_pass_in_stack_var_size_or_pad (mode, type);
4457}
4458
c229cba9
DE
4459/* If defined, a C expression which determines whether, and in which
4460 direction, to pad out an argument with extra space. The value
4461 should be of type `enum direction': either `upward' to pad above
4462 the argument, `downward' to pad below, or `none' to inhibit
4463 padding.
4464
4465 For the AIX ABI structs are always stored left shifted in their
4466 argument slot. */
4467
9ebbca7d 4468enum direction
a2369ed3 4469function_arg_padding (enum machine_mode mode, tree type)
c229cba9 4470{
6e985040
AM
4471#ifndef AGGREGATE_PADDING_FIXED
4472#define AGGREGATE_PADDING_FIXED 0
4473#endif
4474#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4475#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4476#endif
4477
4478 if (!AGGREGATE_PADDING_FIXED)
4479 {
4480 /* GCC used to pass structures of the same size as integer types as
4481 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4482 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4483 passed padded downward, except that -mstrict-align further
4484 muddied the water in that multi-component structures of 2 and 4
4485 bytes in size were passed padded upward.
4486
4487 The following arranges for best compatibility with previous
4488 versions of gcc, but removes the -mstrict-align dependency. */
4489 if (BYTES_BIG_ENDIAN)
4490 {
4491 HOST_WIDE_INT size = 0;
4492
4493 if (mode == BLKmode)
4494 {
4495 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4496 size = int_size_in_bytes (type);
4497 }
4498 else
4499 size = GET_MODE_SIZE (mode);
4500
4501 if (size == 1 || size == 2 || size == 4)
4502 return downward;
4503 }
4504 return upward;
4505 }
4506
4507 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4508 {
4509 if (type != 0 && AGGREGATE_TYPE_P (type))
4510 return upward;
4511 }
c229cba9 4512
d3704c46
KH
4513 /* Fall back to the default. */
4514 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4515}
4516
b6c9286a 4517/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4518 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4519 PARM_BOUNDARY is used for all arguments.
f676971a 4520
84e9ad15
AM
4521 V.4 wants long longs and doubles to be double word aligned. Just
4522 testing the mode size is a boneheaded way to do this as it means
4523 that other types such as complex int are also double word aligned.
4524 However, we're stuck with this because changing the ABI might break
4525 existing library interfaces.
4526
b693336b
PB
4527 Doubleword align SPE vectors.
4528 Quadword align Altivec vectors.
4529 Quadword align large synthetic vector types. */
b6c9286a
MM
4530
4531int
b693336b 4532function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4533{
84e9ad15
AM
4534 if (DEFAULT_ABI == ABI_V4
4535 && (GET_MODE_SIZE (mode) == 8
4536 || (TARGET_HARD_FLOAT
4537 && TARGET_FPRS
4538 && mode == TFmode)))
4ed78545 4539 return 64;
ad630bef
DE
4540 else if (SPE_VECTOR_MODE (mode)
4541 || (type && TREE_CODE (type) == VECTOR_TYPE
4542 && int_size_in_bytes (type) >= 8
4543 && int_size_in_bytes (type) < 16))
e1f83b4d 4544 return 64;
ad630bef
DE
4545 else if (ALTIVEC_VECTOR_MODE (mode)
4546 || (type && TREE_CODE (type) == VECTOR_TYPE
4547 && int_size_in_bytes (type) >= 16))
0ac081f6 4548 return 128;
0b5383eb
DJ
4549 else if (rs6000_darwin64_abi && mode == BLKmode
4550 && type && TYPE_ALIGN (type) > 64)
4551 return 128;
9ebbca7d 4552 else
b6c9286a 4553 return PARM_BOUNDARY;
b6c9286a 4554}
c53bdcf5 4555
294bd182
AM
4556/* For a function parm of MODE and TYPE, return the starting word in
4557 the parameter area. NWORDS of the parameter area are already used. */
4558
4559static unsigned int
4560rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4561{
4562 unsigned int align;
4563 unsigned int parm_offset;
4564
4565 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4566 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4567 return nwords + (-(parm_offset + nwords) & align);
4568}
4569
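
The return expression above rounds the parameter word up so that parm_offset + word is a multiple of the argument boundary. Below is a standalone sketch (not GCC code); the parm_offset values 2 and 6 come from the function above, while the nwords and align inputs are invented examples.

#include <stdio.h>

/* Same arithmetic as above: the boundary is (align + 1) words and must
   be a power of two; (-x) & align is the distance from x up to the
   next multiple of the boundary.  */
static unsigned int
parm_start (unsigned int nwords, unsigned int parm_offset, unsigned int align)
{
  return nwords + (-(parm_offset + nwords) & align);
}

int
main (void)
{
  /* 64-bit AIX/ELF layout: the parameter area sits 6 words into the
     frame, so a 16-byte (2-word) aligned argument arriving after 3
     words of arguments must skip one word (6 + 4 is even).  */
  printf ("start word = %u\n", parm_start (3, 6, 1));   /* prints 4 */
  /* 32-bit SVR4 layout: parameter area offset is 2 words.  */
  printf ("start word = %u\n", parm_start (1, 2, 1));   /* prints 2 */
  return 0;
}
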
c53bdcf5
AM
4570/* Compute the size (in words) of a function argument. */
4571
4572static unsigned long
4573rs6000_arg_size (enum machine_mode mode, tree type)
4574{
4575 unsigned long size;
4576
4577 if (mode != BLKmode)
4578 size = GET_MODE_SIZE (mode);
4579 else
4580 size = int_size_in_bytes (type);
4581
4582 if (TARGET_32BIT)
4583 return (size + 3) >> 2;
4584 else
4585 return (size + 7) >> 3;
4586}
b6c9286a 4587\f
0b5383eb 4588/* Use this to flush pending int fields. */
594a51fe
SS
4589
4590static void
0b5383eb
DJ
4591rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4592 HOST_WIDE_INT bitpos)
594a51fe 4593{
0b5383eb
DJ
4594 unsigned int startbit, endbit;
4595 int intregs, intoffset;
4596 enum machine_mode mode;
594a51fe 4597
0b5383eb
DJ
4598 if (cum->intoffset == -1)
4599 return;
594a51fe 4600
0b5383eb
DJ
4601 intoffset = cum->intoffset;
4602 cum->intoffset = -1;
4603
4604 if (intoffset % BITS_PER_WORD != 0)
4605 {
4606 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4607 MODE_INT, 0);
4608 if (mode == BLKmode)
594a51fe 4609 {
0b5383eb
DJ
4610 /* We couldn't find an appropriate mode, which happens,
4611 e.g., in packed structs when there are 3 bytes to load.
4612 Back intoffset back to the beginning of the word in this
4613 case. */
4614 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 4615 }
594a51fe 4616 }
0b5383eb
DJ
4617
4618 startbit = intoffset & -BITS_PER_WORD;
4619 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4620 intregs = (endbit - startbit) / BITS_PER_WORD;
4621 cum->words += intregs;
4622}
4623
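
The rounding in the flush above can be checked standalone (not GCC code): round the pending [intoffset, bitpos) range out to word boundaries and count the 64-bit words it covers. The intoffset and bitpos values below are invented; BITS_PER_WORD is 64 because the darwin64 ABI is 64-bit only.

#include <stdio.h>

#define BITS_PER_WORD 64   /* darwin64 is a 64-bit ABI */

int
main (void)
{
  int intoffset = 32;      /* pending ints started mid-word (invented) */
  int bitpos = 160;        /* flush point, e.g. a double follows (invented) */
  int startbit = intoffset & -BITS_PER_WORD;
  int endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  int intregs = (endbit - startbit) / BITS_PER_WORD;

  printf ("startbit = %d, endbit = %d, intregs = %d\n",
          startbit, endbit, intregs);                    /* 0, 192, 3 */
  return 0;
}
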
4624/* The darwin64 ABI calls for us to recurse down through structs,
4625 looking for elements passed in registers. Unfortunately, we have
4626 to track int register count here also because of misalignments
4627 in powerpc alignment mode. */
4628
4629static void
4630rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4631 tree type,
4632 HOST_WIDE_INT startbitpos)
4633{
4634 tree f;
4635
4636 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4637 if (TREE_CODE (f) == FIELD_DECL)
4638 {
4639 HOST_WIDE_INT bitpos = startbitpos;
4640 tree ftype = TREE_TYPE (f);
70fb00df
AP
4641 enum machine_mode mode;
4642 if (ftype == error_mark_node)
4643 continue;
4644 mode = TYPE_MODE (ftype);
0b5383eb
DJ
4645
4646 if (DECL_SIZE (f) != 0
4647 && host_integerp (bit_position (f), 1))
4648 bitpos += int_bit_position (f);
4649
4650 /* ??? FIXME: else assume zero offset. */
4651
4652 if (TREE_CODE (ftype) == RECORD_TYPE)
4653 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4654 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4655 {
4656 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4657 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4658 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4659 }
4660 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4661 {
4662 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4663 cum->vregno++;
4664 cum->words += 2;
4665 }
4666 else if (cum->intoffset == -1)
4667 cum->intoffset = bitpos;
4668 }
594a51fe
SS
4669}
4670
4697a36c
MM
4671/* Update the data in CUM to advance over an argument
4672 of mode MODE and data type TYPE.
b2d04ecf
AM
4673 (TYPE is null for libcalls where that information may not be available.)
4674
4675 Note that for args passed by reference, function_arg will be called
4676 with MODE and TYPE set to that of the pointer to the arg, not the arg
4677 itself. */
4697a36c
MM
4678
4679void
f676971a 4680function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 4681 tree type, int named, int depth)
4697a36c 4682{
0b5383eb
DJ
4683 int size;
4684
594a51fe
SS
4685 /* Only tick off an argument if we're not recursing. */
4686 if (depth == 0)
4687 cum->nargs_prototype--;
4697a36c 4688
ad630bef
DE
4689 if (TARGET_ALTIVEC_ABI
4690 && (ALTIVEC_VECTOR_MODE (mode)
4691 || (type && TREE_CODE (type) == VECTOR_TYPE
4692 && int_size_in_bytes (type) == 16)))
0ac081f6 4693 {
4ed78545
AM
4694 bool stack = false;
4695
2858f73a 4696 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 4697 {
6d0ef01e
HP
4698 cum->vregno++;
4699 if (!TARGET_ALTIVEC)
c85ce869 4700 error ("cannot pass argument in vector register because"
6d0ef01e 4701 " altivec instructions are disabled, use -maltivec"
c85ce869 4702 " to enable them");
4ed78545
AM
4703
4704 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 4705 even if it is going to be passed in a vector register.
4ed78545
AM
4706 Darwin does the same for variable-argument functions. */
4707 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4708 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4709 stack = true;
6d0ef01e 4710 }
4ed78545
AM
4711 else
4712 stack = true;
4713
4714 if (stack)
c4ad648e 4715 {
a594a19c 4716 int align;
f676971a 4717
2858f73a
GK
4718 /* Vector parameters must be 16-byte aligned. This places
4719 them at 2 mod 4 in terms of words in 32-bit mode, since
4720 the parameter save area starts at offset 24 from the
4721 stack. In 64-bit mode, they just have to start on an
4722 even word, since the parameter save area is 16-byte
4723 aligned. Space for GPRs is reserved even if the argument
4724 will be passed in memory. */
4725 if (TARGET_32BIT)
4ed78545 4726 align = (2 - cum->words) & 3;
2858f73a
GK
4727 else
4728 align = cum->words & 1;
c53bdcf5 4729 cum->words += align + rs6000_arg_size (mode, type);
f676971a 4730
a594a19c
GK
4731 if (TARGET_DEBUG_ARG)
4732 {
f676971a 4733 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
4734 cum->words, align);
4735 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 4736 cum->nargs_prototype, cum->prototype,
2858f73a 4737 GET_MODE_NAME (mode));
a594a19c
GK
4738 }
4739 }
0ac081f6 4740 }
a4b0320c 4741 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
4742 && !cum->stdarg
4743 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 4744 cum->sysv_gregno++;
594a51fe
SS
4745
4746 else if (rs6000_darwin64_abi
4747 && mode == BLKmode
0b5383eb
DJ
4748 && TREE_CODE (type) == RECORD_TYPE
4749 && (size = int_size_in_bytes (type)) > 0)
4750 {
4751 /* Variable sized types have size == -1 and are
4752 treated as if consisting entirely of ints.
4753 Pad to 16 byte boundary if needed. */
4754 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4755 && (cum->words % 2) != 0)
4756 cum->words++;
4757 /* For varargs, we can just go up by the size of the struct. */
4758 if (!named)
4759 cum->words += (size + 7) / 8;
4760 else
4761 {
4762 /* It is tempting to say int register count just goes up by
4763 sizeof(type)/8, but this is wrong in a case such as
4764 { int; double; int; } [powerpc alignment]. We have to
4765 grovel through the fields for these too. */
4766 cum->intoffset = 0;
4767 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 4768 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
4769 size * BITS_PER_UNIT);
4770 }
4771 }
f607bc57 4772 else if (DEFAULT_ABI == ABI_V4)
4697a36c 4773 {
a3170dc6 4774 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
4775 && (mode == SFmode || mode == DFmode
4776 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 4777 {
602ea4d3
JJ
4778 if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
4779 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
4780 else
4781 {
602ea4d3
JJ
4782 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4783 if (mode == DFmode || mode == TFmode)
c4ad648e 4784 cum->words += cum->words & 1;
c53bdcf5 4785 cum->words += rs6000_arg_size (mode, type);
4cc833b7 4786 }
4697a36c 4787 }
4cc833b7
RH
4788 else
4789 {
b2d04ecf 4790 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
4791 int gregno = cum->sysv_gregno;
4792
4ed78545
AM
4793 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4794	 (r7,r8) or (r9,r10), as is any other 2-word item such
4795 as complex int due to a historical mistake. */
4796 if (n_words == 2)
4797 gregno += (1 - gregno) & 1;
4cc833b7 4798
4ed78545 4799 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
4800 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4801 {
4ed78545
AM
4802 /* Long long and SPE vectors are aligned on the stack.
4803 So are other 2 word items such as complex int due to
4804 a historical mistake. */
4cc833b7
RH
4805 if (n_words == 2)
4806 cum->words += cum->words & 1;
4807 cum->words += n_words;
4808 }
4697a36c 4809
4cc833b7
RH
4810	 /* Note: we continue to accumulate gregno even after we have started
4811	 spilling to the stack; a gregno beyond GP_ARG_MAX_REG is what tells
4812	 expand_builtin_saveregs that spilling has begun. */
4813 cum->sysv_gregno = gregno + n_words;
4814 }
4697a36c 4815
4cc833b7
RH
4816 if (TARGET_DEBUG_ARG)
4817 {
4818 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4819 cum->words, cum->fregno);
4820 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4821 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4822 fprintf (stderr, "mode = %4s, named = %d\n",
4823 GET_MODE_NAME (mode), named);
4824 }
4697a36c
MM
4825 }
4826 else
4cc833b7 4827 {
b2d04ecf 4828 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
4829 int start_words = cum->words;
4830 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 4831
294bd182 4832 cum->words = align_words + n_words;
4697a36c 4833
ebb109ad 4834 if (SCALAR_FLOAT_MODE_P (mode)
00b79d54 4835 && !DECIMAL_FLOAT_MODE_P (mode)
a3170dc6 4836 && TARGET_HARD_FLOAT && TARGET_FPRS)
c53bdcf5 4837 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
4838
4839 if (TARGET_DEBUG_ARG)
4840 {
4841 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4842 cum->words, cum->fregno);
4843 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4844 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 4845 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 4846 named, align_words - start_words, depth);
4cc833b7
RH
4847 }
4848 }
4697a36c 4849}
a6c9bed4 4850
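/* A small worked example of the 32-bit alignment arithmetic above, for
   illustration only: with cum->words == 3, align = (2 - 3) & 3 == 3, so
   the vector argument starts at word 6.  The parameter save area begins
   at stack offset 24, so word 6 sits at byte 24 + 6*4 == 48, which is
   16-byte aligned as required.  */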
f82f556d
AH
4851static rtx
4852spe_build_register_parallel (enum machine_mode mode, int gregno)
4853{
54b695e7 4854 rtx r1, r3;
f82f556d 4855
37409796 4856 switch (mode)
f82f556d 4857 {
37409796 4858 case DFmode:
54b695e7
AH
4859 r1 = gen_rtx_REG (DImode, gregno);
4860 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4861 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
4862
4863 case DCmode:
54b695e7
AH
4864 r1 = gen_rtx_REG (DImode, gregno);
4865 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4866 r3 = gen_rtx_REG (DImode, gregno + 2);
4867 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4868 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796
NS
4869
4870 default:
4871 gcc_unreachable ();
f82f556d 4872 }
f82f556d 4873}
b78d48dd 4874
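/* Rough illustration of the PARALLEL built above, assuming gregno == 5:
   a DCmode argument is described as two DImode pieces, (reg:DI 5) at
   byte offset 0 and (reg:DI 7) at byte offset 8, i.e. each 64-bit half
   sits in one full-width E500 GPR while accounting for a two-slot GPR
   pair in the ABI numbering.  */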
f82f556d 4875/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 4876static rtx
f676971a 4877rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 4878 tree type)
a6c9bed4 4879{
f82f556d
AH
4880 int gregno = cum->sysv_gregno;
4881
4882	 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 4883	 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
18f63bfa 4884 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
f82f556d 4885 {
b5870bee
AH
4886 int n_words = rs6000_arg_size (mode, type);
4887
f82f556d 4888 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
4889 if (mode == DFmode)
4890 gregno += (1 - gregno) & 1;
f82f556d 4891
b5870bee
AH
4892 /* Multi-reg args are not split between registers and stack. */
4893 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
4894 return NULL_RTX;
4895
4896 return spe_build_register_parallel (mode, gregno);
4897 }
a6c9bed4
AH
4898 if (cum->stdarg)
4899 {
c53bdcf5 4900 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
4901
4902 /* SPE vectors are put in odd registers. */
4903 if (n_words == 2 && (gregno & 1) == 0)
4904 gregno += 1;
4905
4906 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4907 {
4908 rtx r1, r2;
4909 enum machine_mode m = SImode;
4910
4911 r1 = gen_rtx_REG (m, gregno);
4912 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4913 r2 = gen_rtx_REG (m, gregno + 1);
4914 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4915 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4916 }
4917 else
b78d48dd 4918 return NULL_RTX;
a6c9bed4
AH
4919 }
4920 else
4921 {
f82f556d
AH
4922 if (gregno <= GP_ARG_MAX_REG)
4923 return gen_rtx_REG (mode, gregno);
a6c9bed4 4924 else
b78d48dd 4925 return NULL_RTX;
a6c9bed4
AH
4926 }
4927}
4928
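/* Worked example of the odd-register rule above, for illustration: a
   two-word SPE vector in a stdarg list with gregno == 4 is bumped to
   gregno == 5 and so lands in the r5/r6 pair; with gregno == 5 it stays
   put.  The (1 - gregno) & 1 adjustment used for E500 doubles has the
   same effect: it adds 1 when gregno is even and 0 when it is odd.  */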
0b5383eb
DJ
4929/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4930 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 4931
0b5383eb 4932static void
bb8df8a6 4933rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 4934 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 4935{
0b5383eb
DJ
4936 enum machine_mode mode;
4937 unsigned int regno;
4938 unsigned int startbit, endbit;
4939 int this_regno, intregs, intoffset;
4940 rtx reg;
594a51fe 4941
0b5383eb
DJ
4942 if (cum->intoffset == -1)
4943 return;
4944
4945 intoffset = cum->intoffset;
4946 cum->intoffset = -1;
4947
4948 /* If this is the trailing part of a word, try to only load that
4949 much into the register. Otherwise load the whole register. Note
4950 that in the latter case we may pick up unwanted bits. It's not a
4951	 problem at the moment, but we may wish to revisit this. */
4952
4953 if (intoffset % BITS_PER_WORD != 0)
594a51fe 4954 {
0b5383eb
DJ
4955 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4956 MODE_INT, 0);
4957 if (mode == BLKmode)
4958 {
4959 /* We couldn't find an appropriate mode, which happens,
4960 e.g., in packed structs when there are 3 bytes to load.
4961 Back intoffset back to the beginning of the word in this
4962 case. */
4963 intoffset = intoffset & -BITS_PER_WORD;
4964 mode = word_mode;
4965 }
4966 }
4967 else
4968 mode = word_mode;
4969
4970 startbit = intoffset & -BITS_PER_WORD;
4971 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4972 intregs = (endbit - startbit) / BITS_PER_WORD;
4973 this_regno = cum->words + intoffset / BITS_PER_WORD;
4974
4975 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4976 cum->use_stack = 1;
bb8df8a6 4977
0b5383eb
DJ
4978 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4979 if (intregs <= 0)
4980 return;
4981
4982 intoffset /= BITS_PER_UNIT;
4983 do
4984 {
4985 regno = GP_ARG_MIN_REG + this_regno;
4986 reg = gen_rtx_REG (mode, regno);
4987 rvec[(*k)++] =
4988 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4989
4990 this_regno += 1;
4991 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4992 mode = word_mode;
4993 intregs -= 1;
4994 }
4995 while (intregs > 0);
4996}
4997
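/* Worked example of the BLKmode fallback above, for illustration
   (64-bit words): if intoffset is 40 bits into a doubleword, we ask
   mode_for_size for a 24-bit integer mode, which does not exist, so
   BLKmode comes back; intoffset is then rounded down to the start of
   the doubleword and a whole word_mode register is loaded, picking up
   the unwanted leading bits mentioned in the comment.  */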
4998/* Recursive workhorse for the following. */
4999
5000static void
bb8df8a6 5001rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
0b5383eb
DJ
5002 HOST_WIDE_INT startbitpos, rtx rvec[],
5003 int *k)
5004{
5005 tree f;
5006
5007 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5008 if (TREE_CODE (f) == FIELD_DECL)
5009 {
5010 HOST_WIDE_INT bitpos = startbitpos;
5011 tree ftype = TREE_TYPE (f);
70fb00df
AP
5012 enum machine_mode mode;
5013 if (ftype == error_mark_node)
5014 continue;
5015 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5016
5017 if (DECL_SIZE (f) != 0
5018 && host_integerp (bit_position (f), 1))
5019 bitpos += int_bit_position (f);
5020
5021 /* ??? FIXME: else assume zero offset. */
5022
5023 if (TREE_CODE (ftype) == RECORD_TYPE)
5024 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5025 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5026 {
0b5383eb
DJ
5027#if 0
5028 switch (mode)
594a51fe 5029 {
0b5383eb
DJ
5030 case SCmode: mode = SFmode; break;
5031 case DCmode: mode = DFmode; break;
5032 case TCmode: mode = TFmode; break;
5033 default: break;
594a51fe 5034 }
0b5383eb
DJ
5035#endif
5036 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5037 rvec[(*k)++]
bb8df8a6 5038 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5039 gen_rtx_REG (mode, cum->fregno++),
5040 GEN_INT (bitpos / BITS_PER_UNIT));
5041 if (mode == TFmode)
5042 cum->fregno++;
594a51fe 5043 }
0b5383eb
DJ
5044 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5045 {
5046 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5047 rvec[(*k)++]
bb8df8a6
EC
5048 = gen_rtx_EXPR_LIST (VOIDmode,
5049 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5050 GEN_INT (bitpos / BITS_PER_UNIT));
5051 }
5052 else if (cum->intoffset == -1)
5053 cum->intoffset = bitpos;
5054 }
5055}
594a51fe 5056
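/* Rough illustration of what the recursion above produces for the
   { int; double; int; } layout mentioned earlier (PowerPC alignment:
   int at byte 0, double at byte 8, int at byte 16): the leading int
   records intoffset == 0; the double forces a flush of bytes 0..7 into
   a GPR entry at offset 0 and then appends an FPR entry at offset 8;
   the trailing int sets intoffset to bit 128, and the caller's final
   flush emits one more GPR entry at offset 16.  */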
0b5383eb
DJ
5057/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5058 the register(s) to be used for each field and subfield of a struct
5059 being passed by value, along with the offset of where the
5060 register's value may be found in the block. FP fields go in FP
5061	 registers, vector fields go in vector registers, and everything
bb8df8a6 5062 else goes in int registers, packed as in memory.
8ff40a74 5063
0b5383eb
DJ
5064 This code is also used for function return values. RETVAL indicates
5065 whether this is the case.
8ff40a74 5066
a4d05547 5067 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5068 calling convention. */
594a51fe 5069
0b5383eb
DJ
5070static rtx
5071rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
5072 int named, bool retval)
5073{
5074 rtx rvec[FIRST_PSEUDO_REGISTER];
5075 int k = 1, kbase = 1;
5076 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5077 /* This is a copy; modifications are not visible to our caller. */
5078 CUMULATIVE_ARGS copy_cum = *orig_cum;
5079 CUMULATIVE_ARGS *cum = &copy_cum;
5080
5081 /* Pad to 16 byte boundary if needed. */
5082 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5083 && (cum->words % 2) != 0)
5084 cum->words++;
5085
5086 cum->intoffset = 0;
5087 cum->use_stack = 0;
5088 cum->named = named;
5089
5090 /* Put entries into rvec[] for individual FP and vector fields, and
5091 for the chunks of memory that go in int regs. Note we start at
5092 element 1; 0 is reserved for an indication of using memory, and
5093 may or may not be filled in below. */
5094 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5095 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5096
5097 /* If any part of the struct went on the stack put all of it there.
5098 This hack is because the generic code for
5099 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5100 parts of the struct are not at the beginning. */
5101 if (cum->use_stack)
5102 {
5103 if (retval)
5104 return NULL_RTX; /* doesn't go in registers at all */
5105 kbase = 0;
5106 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5107 }
5108 if (k > 1 || cum->use_stack)
5109 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5110 else
5111 return NULL_RTX;
5112}
5113
b78d48dd
FJ
5114/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5115
5116static rtx
ec6376ab 5117rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5118{
ec6376ab
AM
5119 int n_units;
5120 int i, k;
5121 rtx rvec[GP_ARG_NUM_REG + 1];
5122
5123 if (align_words >= GP_ARG_NUM_REG)
5124 return NULL_RTX;
5125
5126 n_units = rs6000_arg_size (mode, type);
5127
5128 /* Optimize the simple case where the arg fits in one gpr, except in
5129 the case of BLKmode due to assign_parms assuming that registers are
5130 BITS_PER_WORD wide. */
5131 if (n_units == 0
5132 || (n_units == 1 && mode != BLKmode))
5133 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5134
5135 k = 0;
5136 if (align_words + n_units > GP_ARG_NUM_REG)
5137 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5138 using a magic NULL_RTX component.
79773478
AM
5139 This is not strictly correct. Only some of the arg belongs in
5140 memory, not all of it. However, the normal scheme using
5141 function_arg_partial_nregs can result in unusual subregs, eg.
5142 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5143 store the whole arg to memory is often more efficient than code
5144 to store pieces, and we know that space is available in the right
5145 place for the whole arg. */
ec6376ab
AM
5146 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5147
5148 i = 0;
5149 do
36a454e1 5150 {
ec6376ab
AM
5151 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5152 rtx off = GEN_INT (i++ * 4);
5153 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5154 }
ec6376ab
AM
5155 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5156
5157 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5158}
5159
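/* Worked example, for illustration: a DImode (long long) argument
   under -m32 -mpowerpc64 with align_words == 7 has n_units == 2, so
   align_words + n_units exceeds GP_ARG_NUM_REG.  The PARALLEL then
   holds the NULL_RTX memory marker plus a single (reg:SI 10) piece at
   offset 0: one half of the value travels in r10 and the other half
   in the parameter save area.  */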
4697a36c
MM
5160/* Determine where to put an argument to a function.
5161 Value is zero to push the argument on the stack,
5162 or a hard register in which to store the argument.
5163
5164 MODE is the argument's machine mode.
5165 TYPE is the data type of the argument (as a tree).
5166 This is null for libcalls where that information may
5167 not be available.
5168 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5169 the preceding args and about the function being called. It is
5170 not modified in this routine.
4697a36c
MM
5171 NAMED is nonzero if this argument is a named parameter
5172 (otherwise it is an extra parameter matching an ellipsis).
5173
5174 On RS/6000 the first eight words of non-FP are normally in registers
5175 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5176 Under V.4, the first 8 FP args are in registers.
5177
5178 If this is floating-point and no prototype is specified, we use
5179 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5180 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5181 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5182 doesn't support PARALLEL anyway.
5183
5184 Note that for args passed by reference, function_arg will be called
5185 with MODE and TYPE set to that of the pointer to the arg, not the arg
5186 itself. */
4697a36c 5187
9390387d 5188rtx
f676971a 5189function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5190 tree type, int named)
4697a36c 5191{
4cc833b7 5192 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5193
a4f6c312
SS
5194 /* Return a marker to indicate whether CR1 needs to set or clear the
5195 bit that V.4 uses to say fp args were passed in registers.
5196 Assume that we don't need the marker for software floating point,
5197 or compiler generated library calls. */
4697a36c
MM
5198 if (mode == VOIDmode)
5199 {
f607bc57 5200 if (abi == ABI_V4
b9599e46 5201 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5202 && (cum->stdarg
5203 || (cum->nargs_prototype < 0
5204 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5205 {
a3170dc6
AH
5206 /* For the SPE, we need to crxor CR6 always. */
5207 if (TARGET_SPE_ABI)
5208 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5209 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5210 return GEN_INT (cum->call_cookie
5211 | ((cum->fregno == FP_ARG_MIN_REG)
5212 ? CALL_V4_SET_FP_ARGS
5213 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5214 }
4697a36c 5215
7509c759 5216 return GEN_INT (cum->call_cookie);
4697a36c
MM
5217 }
5218
0b5383eb
DJ
5219 if (rs6000_darwin64_abi && mode == BLKmode
5220 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5221 {
0b5383eb 5222 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5223 if (rslt != NULL_RTX)
5224 return rslt;
5225 /* Else fall through to usual handling. */
5226 }
5227
2858f73a 5228 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5229 if (TARGET_64BIT && ! cum->prototype)
5230 {
c4ad648e
AM
5231 /* Vector parameters get passed in vector register
5232 and also in GPRs or memory, in absence of prototype. */
5233 int align_words;
5234 rtx slot;
5235 align_words = (cum->words + 1) & ~1;
5236
5237 if (align_words >= GP_ARG_NUM_REG)
5238 {
5239 slot = NULL_RTX;
5240 }
5241 else
5242 {
5243 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5244 }
5245 return gen_rtx_PARALLEL (mode,
5246 gen_rtvec (2,
5247 gen_rtx_EXPR_LIST (VOIDmode,
5248 slot, const0_rtx),
5249 gen_rtx_EXPR_LIST (VOIDmode,
5250 gen_rtx_REG (mode, cum->vregno),
5251 const0_rtx)));
c72d6c26
HP
5252 }
5253 else
5254 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5255 else if (TARGET_ALTIVEC_ABI
5256 && (ALTIVEC_VECTOR_MODE (mode)
5257 || (type && TREE_CODE (type) == VECTOR_TYPE
5258 && int_size_in_bytes (type) == 16)))
0ac081f6 5259 {
2858f73a 5260 if (named || abi == ABI_V4)
a594a19c 5261 return NULL_RTX;
0ac081f6 5262 else
a594a19c
GK
5263 {
5264 /* Vector parameters to varargs functions under AIX or Darwin
5265 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5266 int align, align_words, n_words;
5267 enum machine_mode part_mode;
a594a19c
GK
5268
5269 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5270 2 mod 4 in terms of words in 32-bit mode, since the parameter
5271 save area starts at offset 24 from the stack. In 64-bit mode,
5272 they just have to start on an even word, since the parameter
5273 save area is 16-byte aligned. */
5274 if (TARGET_32BIT)
4ed78545 5275 align = (2 - cum->words) & 3;
2858f73a
GK
5276 else
5277 align = cum->words & 1;
a594a19c
GK
5278 align_words = cum->words + align;
5279
5280 /* Out of registers? Memory, then. */
5281 if (align_words >= GP_ARG_NUM_REG)
5282 return NULL_RTX;
ec6376ab
AM
5283
5284 if (TARGET_32BIT && TARGET_POWERPC64)
5285 return rs6000_mixed_function_arg (mode, type, align_words);
5286
2858f73a
GK
5287 /* The vector value goes in GPRs. Only the part of the
5288 value in GPRs is reported here. */
ec6376ab
AM
5289 part_mode = mode;
5290 n_words = rs6000_arg_size (mode, type);
5291 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5292	 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
5293 is either wholly in GPRs or half in GPRs and half not. */
5294 part_mode = DImode;
ec6376ab
AM
5295
5296 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5297 }
0ac081f6 5298 }
f82f556d
AH
5299 else if (TARGET_SPE_ABI && TARGET_SPE
5300 && (SPE_VECTOR_MODE (mode)
18f63bfa
AH
5301 || (TARGET_E500_DOUBLE && (mode == DFmode
5302 || mode == DCmode))))
a6c9bed4 5303 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5304
f607bc57 5305 else if (abi == ABI_V4)
4697a36c 5306 {
a3170dc6 5307 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
5308 && (mode == SFmode || mode == DFmode
5309 || (mode == TFmode && !TARGET_IEEEQUAD)))
4cc833b7 5310 {
602ea4d3 5311 if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5312 return gen_rtx_REG (mode, cum->fregno);
5313 else
b78d48dd 5314 return NULL_RTX;
4cc833b7
RH
5315 }
5316 else
5317 {
b2d04ecf 5318 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5319 int gregno = cum->sysv_gregno;
5320
4ed78545
AM
5321 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5322	 (r7,r8) or (r9,r10), as is any other 2-word item such
5323 as complex int due to a historical mistake. */
5324 if (n_words == 2)
5325 gregno += (1 - gregno) & 1;
4cc833b7 5326
4ed78545 5327 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5328 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5329 return NULL_RTX;
ec6376ab
AM
5330
5331 if (TARGET_32BIT && TARGET_POWERPC64)
5332 return rs6000_mixed_function_arg (mode, type,
5333 gregno - GP_ARG_MIN_REG);
5334 return gen_rtx_REG (mode, gregno);
4cc833b7 5335 }
4697a36c 5336 }
4cc833b7
RH
5337 else
5338 {
294bd182 5339 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5340
2858f73a 5341 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5342 {
ec6376ab
AM
5343 rtx rvec[GP_ARG_NUM_REG + 1];
5344 rtx r;
5345 int k;
c53bdcf5
AM
5346 bool needs_psave;
5347 enum machine_mode fmode = mode;
c53bdcf5
AM
5348 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5349
5350 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5351 {
c53bdcf5
AM
5352 /* Currently, we only ever need one reg here because complex
5353 doubles are split. */
37409796 5354 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
ec6376ab
AM
5355
5356 /* Long double split over regs and memory. */
5357 fmode = DFmode;
c53bdcf5 5358 }
c53bdcf5
AM
5359
5360 /* Do we also need to pass this arg in the parameter save
5361 area? */
5362 needs_psave = (type
5363 && (cum->nargs_prototype <= 0
5364 || (DEFAULT_ABI == ABI_AIX
de17c25f 5365 && TARGET_XL_COMPAT
c53bdcf5
AM
5366 && align_words >= GP_ARG_NUM_REG)));
5367
5368 if (!needs_psave && mode == fmode)
ec6376ab 5369 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5370
ec6376ab 5371 k = 0;
c53bdcf5
AM
5372 if (needs_psave)
5373 {
ec6376ab 5374 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5375 This piece must come first, before the fprs. */
c53bdcf5
AM
5376 if (align_words < GP_ARG_NUM_REG)
5377 {
5378 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5379
5380 if (align_words + n_words > GP_ARG_NUM_REG
5381 || (TARGET_32BIT && TARGET_POWERPC64))
5382 {
5383 /* If this is partially on the stack, then we only
5384 include the portion actually in registers here. */
5385 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5386 rtx off;
79773478
AM
5387 int i = 0;
5388 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
5389 /* Not all of the arg fits in gprs. Say that it
5390 goes in memory too, using a magic NULL_RTX
5391 component. Also see comment in
5392 rs6000_mixed_function_arg for why the normal
5393 function_arg_partial_nregs scheme doesn't work
5394 in this case. */
5395 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5396 const0_rtx);
ec6376ab
AM
5397 do
5398 {
5399 r = gen_rtx_REG (rmode,
5400 GP_ARG_MIN_REG + align_words);
2e6c9641 5401 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5402 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5403 }
5404 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5405 }
5406 else
5407 {
5408 /* The whole arg fits in gprs. */
5409 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5410 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5411 }
c53bdcf5 5412 }
ec6376ab
AM
5413 else
5414 /* It's entirely in memory. */
5415 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5416 }
5417
ec6376ab
AM
5418 /* Describe where this piece goes in the fprs. */
5419 r = gen_rtx_REG (fmode, cum->fregno);
5420 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5421
5422 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5423 }
5424 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5425 {
ec6376ab
AM
5426 if (TARGET_32BIT && TARGET_POWERPC64)
5427 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5428
4eeca74f
AM
5429 if (mode == BLKmode)
5430 mode = Pmode;
5431
b2d04ecf
AM
5432 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5433 }
4cc833b7
RH
5434 else
5435 return NULL_RTX;
4697a36c 5436 }
4697a36c
MM
5437}
5438\f
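/* Illustration of the dual FP/GPR description above, assuming 64-bit
   AIX, an unprototyped callee, a double in the third argument slot
   (align_words == 2) and no earlier FP arguments: needs_psave is true,
   the value fits in the GPR part, and the resulting PARALLEL pairs
   (reg:DF 5) at offset 0 for the parameter save area image with
   (reg:DF 33) at offset 0 for the FP copy, i.e. r5 plus f1 (FP hard
   registers start at 32).  */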
ec6376ab 5439/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5440 the number of bytes passed in registers. For args passed entirely in
5441 registers or entirely in memory, zero. When an arg is described by a
5442 PARALLEL, perhaps using more than one register type, this function
5443 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5444
78a52f11
RH
5445static int
5446rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5447 tree type, bool named)
4697a36c 5448{
c53bdcf5 5449 int ret = 0;
ec6376ab 5450 int align_words;
c53bdcf5 5451
f607bc57 5452 if (DEFAULT_ABI == ABI_V4)
4697a36c 5453 return 0;
4697a36c 5454
c53bdcf5
AM
5455 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5456 && cum->nargs_prototype >= 0)
5457 return 0;
5458
0b5383eb
DJ
5459 /* In this complicated case we just disable the partial_nregs code. */
5460 if (rs6000_darwin64_abi && mode == BLKmode
5461 && TREE_CODE (type) == RECORD_TYPE
5462 && int_size_in_bytes (type) > 0)
5463 return 0;
5464
294bd182 5465 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 5466
79773478
AM
5467 if (USE_FP_FOR_ARG_P (cum, mode, type))
5468 {
fb63c729
AM
5469 /* If we are passing this arg in the fixed parameter save area
5470 (gprs or memory) as well as fprs, then this function should
79773478
AM
5471 return the number of partial bytes passed in the parameter
5472 save area rather than partial bytes passed in fprs. */
5473 if (type
5474 && (cum->nargs_prototype <= 0
5475 || (DEFAULT_ABI == ABI_AIX
5476 && TARGET_XL_COMPAT
5477 && align_words >= GP_ARG_NUM_REG)))
5478 return 0;
5479 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5480 > FP_ARG_MAX_REG + 1)
ac7e839c 5481 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5482 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5483 return 0;
5484 }
5485
ec6376ab
AM
5486 if (align_words < GP_ARG_NUM_REG
5487 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5488 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5489
c53bdcf5 5490 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5491 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5492
c53bdcf5 5493 return ret;
4697a36c
MM
5494}
5495\f
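/* Worked example, for illustration: under the 64-bit AIX ABI a 16-byte
   BLKmode struct starting at align_words == 7 occupies words 7 and 8,
   but only word 7 is still a register (GP_ARG_NUM_REG == 8), so this
   function returns (8 - 7) * 8 == 8 bytes in registers; the remaining
   8 bytes go in the parameter save area.  */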
5496/* A C expression that indicates when an argument must be passed by
5497 reference. If nonzero for an argument, a copy of that argument is
5498 made in memory and a pointer to the argument is passed instead of
5499 the argument itself. The pointer is passed in whatever way is
5500 appropriate for passing a pointer to that type.
5501
b2d04ecf
AM
5502 Under V.4, aggregates and long double are passed by reference.
5503
5504 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5505 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5506
5507 As an extension to all ABIs, variable sized types are passed by
5508 reference. */
4697a36c 5509
8cd5a4e0 5510static bool
f676971a 5511rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
bada2eb8
DE
5512 enum machine_mode mode, tree type,
5513 bool named ATTRIBUTE_UNUSED)
4697a36c 5514{
602ea4d3 5515 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
5516 {
5517 if (TARGET_DEBUG_ARG)
bada2eb8
DE
5518 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5519 return 1;
5520 }
5521
5522 if (!type)
5523 return 0;
4697a36c 5524
bada2eb8
DE
5525 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5526 {
5527 if (TARGET_DEBUG_ARG)
5528 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5529 return 1;
5530 }
5531
5532 if (int_size_in_bytes (type) < 0)
5533 {
5534 if (TARGET_DEBUG_ARG)
5535 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5536 return 1;
5537 }
5538
5539 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5540 modes only exist for GCC vector types if -maltivec. */
5541 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5542 {
5543 if (TARGET_DEBUG_ARG)
5544 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
5545 return 1;
5546 }
b693336b
PB
5547
5548 /* Pass synthetic vectors in memory. */
bada2eb8 5549 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5550 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5551 {
5552 static bool warned_for_pass_big_vectors = false;
5553 if (TARGET_DEBUG_ARG)
5554 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5555 if (!warned_for_pass_big_vectors)
5556 {
d4ee4d25 5557 warning (0, "GCC vector passed by reference: "
b693336b
PB
5558 "non-standard ABI extension with no compatibility guarantee");
5559 warned_for_pass_big_vectors = true;
5560 }
5561 return 1;
5562 }
5563
b2d04ecf 5564 return 0;
4697a36c 5565}
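/* A few concrete cases of the rules above, for illustration: under the
   V.4 ABI any aggregate, e.g. struct { int a[4]; }, goes by reference;
   a variable-sized type goes by reference under every ABI; and a
   32-byte generic vector such as int __attribute__ ((vector_size (32)))
   goes by reference (with a one-time warning) because it exceeds what
   can be passed in registers (16 bytes with the AltiVec ABI, 8 bytes
   otherwise).  */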
5985c7a6
FJ
5566
5567static void
2d9db8eb 5568rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
5569{
5570 int i;
5571 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5572
5573 if (nregs == 0)
5574 return;
5575
c4ad648e 5576 for (i = 0; i < nregs; i++)
5985c7a6 5577 {
9390387d 5578 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 5579 if (reload_completed)
c4ad648e
AM
5580 {
5581 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5582 tem = NULL_RTX;
5583 else
5584 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 5585 i * GET_MODE_SIZE (reg_mode));
c4ad648e 5586 }
5985c7a6
FJ
5587 else
5588 tem = replace_equiv_address (tem, XEXP (tem, 0));
5589
37409796 5590 gcc_assert (tem);
5985c7a6
FJ
5591
5592 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5593 }
5594}
4697a36c
MM
5595\f
5596/* Perform any actions needed for a function that is receiving a
f676971a 5597 variable number of arguments.
4697a36c
MM
5598
5599 CUM is as above.
5600
5601 MODE and TYPE are the mode and type of the current parameter.
5602
5603 PRETEND_SIZE is a variable that should be set to the amount of stack
5604 that must be pushed by the prolog to pretend that our caller pushed
5605 it.
5606
5607 Normally, this macro will push all remaining incoming registers on the
5608 stack and set PRETEND_SIZE to the length of the registers pushed. */
5609
c6e8c921 5610static void
f676971a 5611setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
5612 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5613 int no_rtl)
4697a36c 5614{
4cc833b7
RH
5615 CUMULATIVE_ARGS next_cum;
5616 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 5617 rtx save_area = NULL_RTX, mem;
dfafc897 5618 int first_reg_offset, set;
4697a36c 5619
f31bf321 5620 /* Skip the last named argument. */
d34c5b80 5621 next_cum = *cum;
594a51fe 5622 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 5623
f607bc57 5624 if (DEFAULT_ABI == ABI_V4)
d34c5b80 5625 {
5b667039
JJ
5626 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5627
60e2d0ca 5628 if (! no_rtl)
5b667039
JJ
5629 {
5630 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5631 HOST_WIDE_INT offset = 0;
5632
5633 /* Try to optimize the size of the varargs save area.
5634 The ABI requires that ap.reg_save_area is doubleword
5635 aligned, but we don't need to allocate space for all
5636	 the bytes, only those into which we will actually save
5637 anything. */
5638 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5639 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5640 if (TARGET_HARD_FLOAT && TARGET_FPRS
5641 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5642 && cfun->va_list_fpr_size)
5643 {
5644 if (gpr_reg_num)
5645 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5646 * UNITS_PER_FP_WORD;
5647 if (cfun->va_list_fpr_size
5648 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5649 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5650 else
5651 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5652 * UNITS_PER_FP_WORD;
5653 }
5654 if (gpr_reg_num)
5655 {
5656 offset = -((first_reg_offset * reg_size) & ~7);
5657 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5658 {
5659 gpr_reg_num = cfun->va_list_gpr_size;
5660 if (reg_size == 4 && (first_reg_offset & 1))
5661 gpr_reg_num++;
5662 }
5663 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5664 }
5665 else if (fpr_size)
5666 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5667 * UNITS_PER_FP_WORD
5668 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 5669
5b667039
JJ
5670 if (gpr_size + fpr_size)
5671 {
5672 rtx reg_save_area
5673 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5674 gcc_assert (GET_CODE (reg_save_area) == MEM);
5675 reg_save_area = XEXP (reg_save_area, 0);
5676 if (GET_CODE (reg_save_area) == PLUS)
5677 {
5678 gcc_assert (XEXP (reg_save_area, 0)
5679 == virtual_stack_vars_rtx);
5680 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5681 offset += INTVAL (XEXP (reg_save_area, 1));
5682 }
5683 else
5684 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5685 }
5686
5687 cfun->machine->varargs_save_offset = offset;
5688 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5689 }
4697a36c 5690 }
60e2d0ca 5691 else
4697a36c 5692 {
d34c5b80 5693 first_reg_offset = next_cum.words;
4cc833b7 5694 save_area = virtual_incoming_args_rtx;
4697a36c 5695
fe984136 5696 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 5697 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 5698 }
4697a36c 5699
dfafc897 5700 set = get_varargs_alias_set ();
9d30f3c1
JJ
5701 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5702 && cfun->va_list_gpr_size)
4cc833b7 5703 {
9d30f3c1
JJ
5704 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5705
5706 if (va_list_gpr_counter_field)
5707 {
5708 /* V4 va_list_gpr_size counts number of registers needed. */
5709 if (nregs > cfun->va_list_gpr_size)
5710 nregs = cfun->va_list_gpr_size;
5711 }
5712 else
5713 {
5714 /* char * va_list instead counts number of bytes needed. */
5715 if (nregs > cfun->va_list_gpr_size / reg_size)
5716 nregs = cfun->va_list_gpr_size / reg_size;
5717 }
5718
dfafc897 5719 mem = gen_rtx_MEM (BLKmode,
c4ad648e 5720 plus_constant (save_area,
13e2e16e
DE
5721 first_reg_offset * reg_size));
5722 MEM_NOTRAP_P (mem) = 1;
ba4828e0 5723 set_mem_alias_set (mem, set);
8ac61af7 5724 set_mem_align (mem, BITS_PER_WORD);
dfafc897 5725
f676971a 5726 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 5727 nregs);
4697a36c
MM
5728 }
5729
4697a36c 5730 /* Save FP registers if needed. */
f607bc57 5731 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
5732 && TARGET_HARD_FLOAT && TARGET_FPRS
5733 && ! no_rtl
9d30f3c1
JJ
5734 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5735 && cfun->va_list_fpr_size)
4697a36c 5736 {
9d30f3c1 5737 int fregno = next_cum.fregno, nregs;
9ebbca7d 5738 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 5739 rtx lab = gen_label_rtx ();
5b667039
JJ
5740 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5741 * UNITS_PER_FP_WORD);
4697a36c 5742
c4ad648e
AM
5743 emit_jump_insn
5744 (gen_rtx_SET (VOIDmode,
5745 pc_rtx,
5746 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 5747 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 5748 const0_rtx),
39403d82 5749 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
5750 pc_rtx)));
5751
9d30f3c1
JJ
5752 for (nregs = 0;
5753 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 5754 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 5755 {
5496b36f 5756 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 5757 MEM_NOTRAP_P (mem) = 1;
c4ad648e 5758 set_mem_alias_set (mem, set);
94ff898d 5759 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 5760 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 5761 }
4cc833b7
RH
5762
5763 emit_label (lab);
4697a36c 5764 }
4697a36c 5765}
4697a36c 5766
dfafc897 5767/* Create the va_list data type. */
2c4974b7 5768
c35d187f
RH
5769static tree
5770rs6000_build_builtin_va_list (void)
dfafc897 5771{
64c2816f 5772 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 5773
9ebbca7d
GK
5774 /* For AIX, prefer 'char *' because that's what the system
5775 header files like. */
f607bc57 5776 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 5777 return build_pointer_type (char_type_node);
dfafc897 5778
f1e639b1 5779 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 5780 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 5781
f676971a 5782 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 5783 unsigned_char_type_node);
f676971a 5784 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 5785 unsigned_char_type_node);
64c2816f
DT
5786 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5787 every user file. */
5788 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5789 short_unsigned_type_node);
dfafc897
FS
5790 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5791 ptr_type_node);
5792 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5793 ptr_type_node);
5794
9d30f3c1
JJ
5795 va_list_gpr_counter_field = f_gpr;
5796 va_list_fpr_counter_field = f_fpr;
5797
dfafc897
FS
5798 DECL_FIELD_CONTEXT (f_gpr) = record;
5799 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 5800 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
5801 DECL_FIELD_CONTEXT (f_ovf) = record;
5802 DECL_FIELD_CONTEXT (f_sav) = record;
5803
bab45a51
FS
5804 TREE_CHAIN (record) = type_decl;
5805 TYPE_NAME (record) = type_decl;
dfafc897
FS
5806 TYPE_FIELDS (record) = f_gpr;
5807 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
5808 TREE_CHAIN (f_fpr) = f_res;
5809 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
5810 TREE_CHAIN (f_ovf) = f_sav;
5811
5812 layout_type (record);
5813
5814 /* The correct type is an array type of one element. */
5815 return build_array_type (record, build_index_type (size_zero_node));
5816}
5817
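/* For reference, the V.4 va_list record built above corresponds to a C
   structure of roughly this shape (field names mirror the FIELD_DECLs
   created here; illustrative only):

	struct __va_list_tag
	{
	  unsigned char gpr;		   gp arg registers consumed so far
	  unsigned char fpr;		   fp arg registers consumed so far
	  unsigned short reserved;	   the named two bytes of padding
	  void *overflow_arg_area;	   arguments that went on the stack
	  void *reg_save_area;		   register save area filled by the prologue
	};

   and va_list itself is an array of one such record, matching the
   build_array_type call at the end.  */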
5818/* Implement va_start. */
5819
5820void
a2369ed3 5821rs6000_va_start (tree valist, rtx nextarg)
4697a36c 5822{
dfafc897 5823 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 5824 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 5825 tree gpr, fpr, ovf, sav, t;
2c4974b7 5826
dfafc897 5827 /* Only SVR4 needs something special. */
f607bc57 5828 if (DEFAULT_ABI != ABI_V4)
dfafc897 5829 {
e5faf155 5830 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
5831 return;
5832 }
5833
973a648b 5834 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 5835 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
5836 f_res = TREE_CHAIN (f_fpr);
5837 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
5838 f_sav = TREE_CHAIN (f_ovf);
5839
872a65b5 5840 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
5841 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5842 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5843 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5844 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
5845
5846 /* Count number of gp and fp argument registers used. */
4cc833b7 5847 words = current_function_args_info.words;
987732e0
DE
5848 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5849 GP_ARG_NUM_REG);
5850 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5851 FP_ARG_NUM_REG);
dfafc897
FS
5852
5853 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
5854 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5855 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5856 words, n_gpr, n_fpr);
dfafc897 5857
9d30f3c1
JJ
5858 if (cfun->va_list_gpr_size)
5859 {
47a25a46
RG
5860 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5861 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
5862 TREE_SIDE_EFFECTS (t) = 1;
5863 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5864 }
58c8adc1 5865
9d30f3c1
JJ
5866 if (cfun->va_list_fpr_size)
5867 {
47a25a46
RG
5868 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5869 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
5870 TREE_SIDE_EFFECTS (t) = 1;
5871 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5872 }
dfafc897
FS
5873
5874 /* Find the overflow area. */
5875 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5876 if (words != 0)
47a25a46
RG
5877 t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
5878 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
5879 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
5880 TREE_SIDE_EFFECTS (t) = 1;
5881 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5882
9d30f3c1
JJ
5883 /* If there were no va_arg invocations, don't set up the register
5884 save area. */
5885 if (!cfun->va_list_gpr_size
5886 && !cfun->va_list_fpr_size
5887 && n_gpr < GP_ARG_NUM_REG
5888 && n_fpr < FP_ARG_V4_MAX_REG)
5889 return;
5890
dfafc897
FS
5891 /* Find the register save area. */
5892 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 5893 if (cfun->machine->varargs_save_offset)
47a25a46
RG
5894 t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
5895 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
5896 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
5897 TREE_SIDE_EFFECTS (t) = 1;
5898 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5899}
5900
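/* Worked example, for illustration: for a V.4 function such as
   int f (int a, int b, ...), the two named ints consume r3 and r4, so
   words == 0, n_gpr == 2 and n_fpr == 0.  va_start stores 2 and 0 into
   the gpr and fpr counters, points overflow_arg_area at the incoming
   stack arguments, and points reg_save_area at the block where the
   remaining argument registers (r5-r10 here, plus f1-f8 when FP
   arguments were passed in registers) were spilled by the prologue.  */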
5901/* Implement va_arg. */
5902
23a60a04
JM
5903tree
5904rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 5905{
cd3ce9b4
JM
5906 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5907 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 5908 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
5909 tree lab_false, lab_over, addr;
5910 int align;
5911 tree ptrtype = build_pointer_type (type);
5912
08b0dc1b
RH
5913 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5914 {
5915 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 5916 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
5917 }
5918
cd3ce9b4
JM
5919 if (DEFAULT_ABI != ABI_V4)
5920 {
08b0dc1b 5921 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
5922 {
5923 tree elem_type = TREE_TYPE (type);
5924 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5925 int elem_size = GET_MODE_SIZE (elem_mode);
5926
5927 if (elem_size < UNITS_PER_WORD)
5928 {
23a60a04 5929 tree real_part, imag_part;
cd3ce9b4
JM
5930 tree post = NULL_TREE;
5931
23a60a04
JM
5932 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5933 &post);
5934 /* Copy the value into a temporary, lest the formal temporary
5935 be reused out from under us. */
5936 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
5937 append_to_statement_list (post, pre_p);
5938
23a60a04
JM
5939 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5940 post_p);
cd3ce9b4 5941
47a25a46 5942 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
5943 }
5944 }
5945
23a60a04 5946 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
5947 }
5948
5949 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5950 f_fpr = TREE_CHAIN (f_gpr);
5951 f_res = TREE_CHAIN (f_fpr);
5952 f_ovf = TREE_CHAIN (f_res);
5953 f_sav = TREE_CHAIN (f_ovf);
5954
872a65b5 5955 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
5956 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5957 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5958 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5959 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
5960
5961 size = int_size_in_bytes (type);
5962 rsize = (size + 3) / 4;
5963 align = 1;
5964
08b0dc1b 5965 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
5966 && (TYPE_MODE (type) == SFmode
5967 || TYPE_MODE (type) == DFmode
5968 || TYPE_MODE (type) == TFmode))
cd3ce9b4
JM
5969 {
5970 /* FP args go in FP registers, if present. */
cd3ce9b4 5971 reg = fpr;
602ea4d3 5972 n_reg = (size + 7) / 8;
cd3ce9b4
JM
5973 sav_ofs = 8*4;
5974 sav_scale = 8;
602ea4d3 5975 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
5976 align = 8;
5977 }
5978 else
5979 {
5980 /* Otherwise into GP registers. */
cd3ce9b4
JM
5981 reg = gpr;
5982 n_reg = rsize;
5983 sav_ofs = 0;
5984 sav_scale = 4;
5985 if (n_reg == 2)
5986 align = 8;
5987 }
5988
5989 /* Pull the value out of the saved registers.... */
5990
5991 lab_over = NULL;
5992 addr = create_tmp_var (ptr_type_node, "addr");
5993 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5994
5995 /* AltiVec vectors never go in registers when -mabi=altivec. */
5996 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5997 align = 16;
5998 else
5999 {
6000 lab_false = create_artificial_label ();
6001 lab_over = create_artificial_label ();
6002
6003 /* Long long and SPE vectors are aligned in the registers.
6004	 As is any other 2-gpr item, such as complex int, due to a
6005 historical mistake. */
6006 u = reg;
602ea4d3 6007 if (n_reg == 2 && reg == gpr)
cd3ce9b4
JM
6008 {
6009 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
95674810 6010 size_int (n_reg - 1));
cd3ce9b4
JM
6011 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6012 }
6013
95674810 6014 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6015 t = build2 (GE_EXPR, boolean_type_node, u, t);
6016 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6017 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6018 gimplify_and_add (t, pre_p);
6019
6020 t = sav;
6021 if (sav_ofs)
95674810 6022 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6023
95674810 6024 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
cd3ce9b4 6025 u = build1 (CONVERT_EXPR, integer_type_node, u);
95674810 6026 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
cd3ce9b4
JM
6027 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
6028
6029 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
6030 gimplify_and_add (t, pre_p);
6031
6032 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6033 gimplify_and_add (t, pre_p);
6034
6035 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6036 append_to_statement_list (t, pre_p);
6037
9a74f8ee 6038 if ((n_reg == 2 && reg != gpr) || n_reg > 2)
cd3ce9b4
JM
6039 {
6040 /* Ensure that we don't find any more args in regs.
56438901 6041 Alignment has taken care of the n_reg == 2 gpr case. */
47a25a46 6042 t = build2 (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6043 gimplify_and_add (t, pre_p);
6044 }
6045 }
6046
6047 /* ... otherwise out of the overflow area. */
6048
6049 /* Care for on-stack alignment if needed. */
6050 t = ovf;
6051 if (align != 1)
6052 {
95674810 6053 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
4a90aeeb 6054 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
7d60be94 6055 build_int_cst (NULL_TREE, -align));
cd3ce9b4
JM
6056 }
6057 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6058
6059 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
6060 gimplify_and_add (u, pre_p);
6061
95674810 6062 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
cd3ce9b4
JM
6063 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6064 gimplify_and_add (t, pre_p);
6065
6066 if (lab_over)
6067 {
6068 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6069 append_to_statement_list (t, pre_p);
6070 }
6071
08b0dc1b 6072 addr = fold_convert (ptrtype, addr);
872a65b5 6073 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6074}
6075
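/* Worked example of the register-area addressing above, for
   illustration: fetching a double under V.4 hard float uses reg == fpr,
   n_reg == 1, sav_ofs == 32 and sav_scale == 8.  If three FP argument
   registers were already consumed (fpr == 3), the computed address is
   reg_save_area + 32 + 3*8 == reg_save_area + 56, i.e. the save slot
   of f4; the fixed 32 bytes skip the eight 4-byte GPR slots at the
   start of the save area.  */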
0ac081f6
AH
6076/* Builtins. */
6077
58646b77
PB
6078static void
6079def_builtin (int mask, const char *name, tree type, int code)
6080{
6081 if (mask & target_flags)
6082 {
6083 if (rs6000_builtin_decls[code])
6084 abort ();
6085
6086 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6087 add_builtin_function (name, type, code, BUILT_IN_MD,
6088 NULL, NULL_TREE);
58646b77
PB
6089 }
6090}
0ac081f6 6091
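/* Typical use, for illustration (the function type shown is one of the
   type trees assembled later during builtin initialization; the exact
   variable name is incidental):

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
		  v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   The builtin is registered only when its mask bit is present in
   target_flags, and its decl is remembered in rs6000_builtin_decls so
   it can be looked up again by code number.  */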
24408032
AH
6092/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6093
2212663f 6094static const struct builtin_description bdesc_3arg[] =
24408032
AH
6095{
6096 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6097 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6098 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6099 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6100 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6101 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6102 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6103 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6104 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6105 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6106 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6107 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6108 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6109 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6110 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6111 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6112 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6113 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6114 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6115 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6116 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6117 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6118 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6119
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 6135};
2212663f 6136
95385cbb
AH
6137/* DST operations: void foo (void *, const int, const char). */
6138
6139static const struct builtin_description bdesc_dst[] =
6140{
6141 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6142 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6143 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6144 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6145
6146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6150};
6151
2212663f 6152/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6153
a3170dc6 6154static struct builtin_description bdesc_2arg[] =
0ac081f6 6155{
f18c054f
DB
6156 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6157 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6158 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6159 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6160 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6161 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6162 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6163 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6164 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6165 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6166 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6167 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6168 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6169 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6170 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6171 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6172 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6173 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6174 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6175 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6176 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6177 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6178 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6179 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6180 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6181 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6182 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6183 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6184 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6185 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6186 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6187 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6188 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6189 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6190 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6191 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6192 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6193 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6194 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6195 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6196 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6197 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6198 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6199 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6200 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6201 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6202 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6203 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6204 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6205 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6206 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6207 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6208 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6209 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6210 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6211 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6212 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6213 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6214 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6215 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6216 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6217 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6218 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6219 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6220 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6221 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6222 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6223 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6224 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6225 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6226 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6227 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6228 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6229 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6230 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6231 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6232 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6233 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6234 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6235 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6236 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6237 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6238 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6239 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6240 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6241 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6242 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6243 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6244 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6245 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6246 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6247 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6248 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6249 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6250 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6251 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6252 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6253 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6254 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6255 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6256 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6257 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6258 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6259 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6260 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6261 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6262 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6263 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6264 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6265 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6266 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6267
6268 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6269 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6270 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6271 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6272 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6273 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6274 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6275 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6276 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6277 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6278 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6279 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6280 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6281 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6282 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6283 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6284 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6285 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6286 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6287 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6288 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6289 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6290 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6291 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6292 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6293 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6294 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6295 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6296 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6297 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6298 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6299 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6300 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6301 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6302 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6303 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6304 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6305 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6306 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6307 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6308 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6309 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6310 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6311 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6312 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6313 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6314 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6315 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6316 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6317 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6318 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6319 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6320 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6321 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6322 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6323 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6324 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6325 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6326 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6327 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6328 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6329 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6330 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6331 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6332 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6333 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6334 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6335 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6336 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6337 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6338 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6339 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6340 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6341 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6342 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6343 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6344 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6345 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6346 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6347 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6348 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6349 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6350 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6351 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6352 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6353 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6354 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6355 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6356 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6357 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6358 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6359 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6360 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6361 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6362 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6363 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6364 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6365 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6366 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6367 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6368 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6369 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6370 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6371 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6372 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6373 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6374 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6375 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6376 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6377 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6378 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6395
 6396 /* Place-holder. Leave as first binary SPE builtin. */
6397 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6398 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6399 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6400 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6401 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6402 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6403 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6404 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6405 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6406 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6407 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6408 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6409 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6410 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6411 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6412 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6413 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6414 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6415 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6416 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6417 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6418 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6419 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6420 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6421 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6422 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6423 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6424 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6425 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6426 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6427 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6428 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6429 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6430 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6431 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6432 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6433 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6434 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6435 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6436 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6437 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6438 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6439 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6440 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6441 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6442 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6443 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6444 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6445 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6446 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6447 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6448 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6449 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6450 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6451 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6452 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6453 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6454 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6455 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6456 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6457 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6458 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6459 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6460 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6461 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6462 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6463 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6464 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6465 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6466 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6467 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6468 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6469 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6470 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6471 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6472 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6473 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6474 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6475 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6476 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6477 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6478 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6479 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6480 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6481 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6482 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6483 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6484 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6485 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6486 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6487 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6488 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6489 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6490 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6491 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6492 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6493 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6494 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6495 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6496 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6497 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6498 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6499 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6500 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6501 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6502 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6503 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6504 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6505 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6506
6507 /* SPE binary operations expecting a 5-bit unsigned literal. */
6508 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6509
6510 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6511 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6512 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6513 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6514 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6515 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6516 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6517 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6518 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6519 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6520 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6521 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6522 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6523 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6524 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6525 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6526 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6527 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6528 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6529 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6530 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6531 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6532 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6533 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6534 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6535 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6536
6537 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 6538 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6539};
6540
6541/* AltiVec predicates. */
6542
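/* Describes one AltiVec predicate builtin: the target mask it
   requires, the comparison insn used to implement it, the opcode
   string handed to the predicate pattern, and the builtin's name and
   function code.  */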
6543struct builtin_description_predicates
6544{
6545 const unsigned int mask;
6546 const enum insn_code icode;
6547 const char *opcode;
6548 const char *const name;
6549 const enum rs6000_builtins code;
6550};
6551
6552static const struct builtin_description_predicates bdesc_altivec_preds[] =
6553{
6554 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6555 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6556 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6557 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6558 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6559 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6560 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6561 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6562 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6563 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6564 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6565 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6566 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6567
6568 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6569 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6570 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 6571};
24408032 6572
6573/* SPE predicates. */
6574static struct builtin_description bdesc_spe_predicates[] =
6575{
6576 /* Place-holder. Leave as first. */
6577 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6578 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6579 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6580 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6581 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6582 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6583 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6584 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6585 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6586 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6587 /* Place-holder. Leave as last. */
6588 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6589};
6590
6591/* SPE evsel predicates. */
6592static struct builtin_description bdesc_spe_evsel[] =
6593{
6594 /* Place-holder. Leave as first. */
6595 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6596 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6597 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6598 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6599 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6600 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6601 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6602 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6603 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6604 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6605 /* Place-holder. Leave as last. */
6606 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6607};
6608
b6d08ca1 6609/* ABS* operations. */
6610
6611static const struct builtin_description bdesc_abs[] =
6612{
6613 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6614 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6615 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6616 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6617 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6618 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6619 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6620};
6621
6622/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6623 foo (VECa). */
24408032 6624
a3170dc6 6625static struct builtin_description bdesc_1arg[] =
2212663f 6626{
6627 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6628 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6629 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6630 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6631 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6632 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6633 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6634 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6635 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6636 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6637 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6638 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6639 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6640 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6641 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6642 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6643 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 6644
6645 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6646 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6647 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6648 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6649 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6650 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6651 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6652 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6653 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6654 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6655 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6656 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6657 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6658 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6659 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6660 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6661 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6662 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6663 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6664
6665 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6666 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6667 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6668 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6669 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6670 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6671 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6672 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6673 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6674 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6675 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6676 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6677 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6678 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6679 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6680 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6681 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6682 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6683 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6684 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6685 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6686 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6687 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6688 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6689 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 6690 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6691 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6692 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6693 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6694 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6695
6696 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 6697 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6698};
6699
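/* Expand a builtin that maps to a single unary insn.  ICODE is the
   insn code to use, ARGLIST supplies the one operand, and TARGET is a
   suggested place for the result (a new register is used when it does
   not fit).  Returns 0 when the builtin is not supported on this
   processor or no insn could be generated, and const0_rtx when the
   arguments are invalid.  */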
6700static rtx
a2369ed3 6701rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6702{
6703 rtx pat;
6704 tree arg0 = TREE_VALUE (arglist);
84217346 6705 rtx op0 = expand_normal (arg0);
6706 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6707 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6708
6709 if (icode == CODE_FOR_nothing)
6710 /* Builtin not supported on this processor. */
6711 return 0;
6712
6713 /* If we got invalid arguments bail out before generating bad rtl. */
6714 if (arg0 == error_mark_node)
9a171fcd 6715 return const0_rtx;
20e26713 6716
6717 if (icode == CODE_FOR_altivec_vspltisb
6718 || icode == CODE_FOR_altivec_vspltish
6719 || icode == CODE_FOR_altivec_vspltisw
6720 || icode == CODE_FOR_spe_evsplatfi
6721 || icode == CODE_FOR_spe_evsplati)
6722 {
6723 /* Only allow 5-bit *signed* literals. */
b44140e7 6724 if (GET_CODE (op0) != CONST_INT
6725 || INTVAL (op0) > 15
6726 || INTVAL (op0) < -16)
6727 {
6728 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 6729 return const0_rtx;
b44140e7 6730 }
6731 }
6732
c62f2db5 6733 if (target == 0
6734 || GET_MODE (target) != tmode
6735 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6736 target = gen_reg_rtx (tmode);
6737
6738 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6739 op0 = copy_to_mode_reg (mode0, op0);
6740
6741 pat = GEN_FCN (icode) (target, op0);
6742 if (! pat)
6743 return 0;
6744 emit_insn (pat);
0ac081f6 6745
6746 return target;
6747}
ae4b4a02 6748
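/* Expand one of the AltiVec ABS/ABSS builtins.  These differ from the
   generic unary expander in that their insn patterns take two extra
   scratch registers.  */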
100c4561 6749static rtx
a2369ed3 6750altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6751{
6752 rtx pat, scratch1, scratch2;
6753 tree arg0 = TREE_VALUE (arglist);
84217346 6754 rtx op0 = expand_normal (arg0);
6755 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6756 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6757
6758 /* If we have invalid arguments, bail out before generating bad rtl. */
6759 if (arg0 == error_mark_node)
9a171fcd 6760 return const0_rtx;
6761
6762 if (target == 0
6763 || GET_MODE (target) != tmode
6764 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6765 target = gen_reg_rtx (tmode);
6766
6767 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6768 op0 = copy_to_mode_reg (mode0, op0);
6769
6770 scratch1 = gen_reg_rtx (mode0);
6771 scratch2 = gen_reg_rtx (mode0);
6772
6773 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6774 if (! pat)
6775 return 0;
6776 emit_insn (pat);
6777
6778 return target;
6779}
6780
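/* Expand a builtin that maps to a single two-operand insn.  This works
   like rs6000_expand_unop_builtin, but additionally verifies that the
   builtins whose second operand is an immediate field really received
   a 5-bit unsigned literal.  */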
0ac081f6 6781static rtx
a2369ed3 6782rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6783{
6784 rtx pat;
6785 tree arg0 = TREE_VALUE (arglist);
6786 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6787 rtx op0 = expand_normal (arg0);
6788 rtx op1 = expand_normal (arg1);
6789 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6790 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6791 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6792
6793 if (icode == CODE_FOR_nothing)
6794 /* Builtin not supported on this processor. */
6795 return 0;
6796
6797 /* If we got invalid arguments bail out before generating bad rtl. */
6798 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6799 return const0_rtx;
20e26713 6800
6801 if (icode == CODE_FOR_altivec_vcfux
6802 || icode == CODE_FOR_altivec_vcfsx
6803 || icode == CODE_FOR_altivec_vctsxs
6804 || icode == CODE_FOR_altivec_vctuxs
6805 || icode == CODE_FOR_altivec_vspltb
6806 || icode == CODE_FOR_altivec_vsplth
6807 || icode == CODE_FOR_altivec_vspltw
6808 || icode == CODE_FOR_spe_evaddiw
6809 || icode == CODE_FOR_spe_evldd
6810 || icode == CODE_FOR_spe_evldh
6811 || icode == CODE_FOR_spe_evldw
6812 || icode == CODE_FOR_spe_evlhhesplat
6813 || icode == CODE_FOR_spe_evlhhossplat
6814 || icode == CODE_FOR_spe_evlhhousplat
6815 || icode == CODE_FOR_spe_evlwhe
6816 || icode == CODE_FOR_spe_evlwhos
6817 || icode == CODE_FOR_spe_evlwhou
6818 || icode == CODE_FOR_spe_evlwhsplat
6819 || icode == CODE_FOR_spe_evlwwsplat
6820 || icode == CODE_FOR_spe_evrlwi
6821 || icode == CODE_FOR_spe_evslwi
6822 || icode == CODE_FOR_spe_evsrwis
f5119d10 6823 || icode == CODE_FOR_spe_evsubifw
0559cc77 6824 || icode == CODE_FOR_spe_evsrwiu)
6825 {
6826 /* Only allow 5-bit unsigned literals. */
8bb418a3 6827 STRIP_NOPS (arg1);
6828 if (TREE_CODE (arg1) != INTEGER_CST
6829 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6830 {
6831 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 6832 return const0_rtx;
b44140e7 6833 }
6834 }
6835
c62f2db5 6836 if (target == 0
6837 || GET_MODE (target) != tmode
6838 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6839 target = gen_reg_rtx (tmode);
6840
6841 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6842 op0 = copy_to_mode_reg (mode0, op0);
6843 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6844 op1 = copy_to_mode_reg (mode1, op1);
6845
6846 pat = GEN_FCN (icode) (target, op0, op1);
6847 if (! pat)
6848 return 0;
6849 emit_insn (pat);
6850
6851 return target;
6852}
6525c0e7 6853
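/* Expand an AltiVec predicate builtin.  OPCODE is handed to the
   comparison pattern as a SYMBOL_REF, and the first element of
   ARGLIST selects which CR6 test is emitted to turn the comparison
   result into an integer value in TARGET.  */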
ae4b4a02 6854static rtx
f676971a 6855altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
a2369ed3 6856 tree arglist, rtx target)
6857{
6858 rtx pat, scratch;
6859 tree cr6_form = TREE_VALUE (arglist);
6860 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6861 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6862 rtx op0 = expand_normal (arg0);
6863 rtx op1 = expand_normal (arg1);
6864 enum machine_mode tmode = SImode;
6865 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6866 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6867 int cr6_form_int;
6868
6869 if (TREE_CODE (cr6_form) != INTEGER_CST)
6870 {
6871 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 6872 return const0_rtx;
6873 }
6874 else
6875 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6876
37409796 6877 gcc_assert (mode0 == mode1);
6878
6879 /* If we have invalid arguments, bail out before generating bad rtl. */
6880 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6881 return const0_rtx;
6882
6883 if (target == 0
6884 || GET_MODE (target) != tmode
6885 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6886 target = gen_reg_rtx (tmode);
6887
6888 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6889 op0 = copy_to_mode_reg (mode0, op0);
6890 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6891 op1 = copy_to_mode_reg (mode1, op1);
6892
6893 scratch = gen_reg_rtx (mode0);
6894
6895 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 6896 gen_rtx_SYMBOL_REF (Pmode, opcode));
6897 if (! pat)
6898 return 0;
6899 emit_insn (pat);
6900
6901 /* The vec_any* and vec_all* predicates use the same opcodes for two
6902 different operations, but the bits in CR6 will be different
6903 depending on what information we want. So we have to play tricks
6904 with CR6 to get the right bits out.
6905
6906 If you think this is disgusting, look at the specs for the
6907 AltiVec predicates. */
6908
6909 switch (cr6_form_int)
6910 {
6911 case 0:
6912 emit_insn (gen_cr6_test_for_zero (target));
6913 break;
6914 case 1:
6915 emit_insn (gen_cr6_test_for_zero_reverse (target));
6916 break;
6917 case 2:
6918 emit_insn (gen_cr6_test_for_lt (target));
6919 break;
6920 case 3:
6921 emit_insn (gen_cr6_test_for_lt_reverse (target));
6922 break;
6923 default:
6924 error ("argument 1 of __builtin_altivec_predicate is out of range");
6925 break;
6926 }
6927
6928 return target;
6929}
6930
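/* Expand an AltiVec load-vector builtin.  The two arguments are an
   offset and a base pointer; they are combined into the memory
   address that the load insn ICODE reads from.  */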
b4a62fa0 6931static rtx
38f391a5 6932altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6933{
6934 rtx pat, addr;
6935 tree arg0 = TREE_VALUE (arglist);
6936 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6937 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6938 enum machine_mode mode0 = Pmode;
6939 enum machine_mode mode1 = Pmode;
6940 rtx op0 = expand_normal (arg0);
6941 rtx op1 = expand_normal (arg1);
6942
6943 if (icode == CODE_FOR_nothing)
6944 /* Builtin not supported on this processor. */
6945 return 0;
6946
6947 /* If we got invalid arguments bail out before generating bad rtl. */
6948 if (arg0 == error_mark_node || arg1 == error_mark_node)
6949 return const0_rtx;
6950
6951 if (target == 0
6952 || GET_MODE (target) != tmode
6953 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6954 target = gen_reg_rtx (tmode);
6955
f676971a 6956 op1 = copy_to_mode_reg (mode1, op1);
6957
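  /* Form the address: a bare (mem op1) when the offset operand is the
     constant zero, otherwise a base-plus-offset address.  */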
6958 if (op0 == const0_rtx)
6959 {
6960 addr = gen_rtx_MEM (tmode, op1);
6961 }
6962 else
6963 {
6964 op0 = copy_to_mode_reg (mode0, op0);
6965 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6966 }
6967
6968 pat = GEN_FCN (icode) (target, addr);
6969
6970 if (! pat)
6971 return 0;
6972 emit_insn (pat);
6973
6974 return target;
6975}
6976
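/* Expand an SPE builtin that stores to memory.  The three operands
   are reordered to match the store pattern, and NULL_RTX is returned
   since there is no result value.  */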
6977static rtx
6978spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6979{
6980 tree arg0 = TREE_VALUE (arglist);
6981 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6982 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6983 rtx op0 = expand_normal (arg0);
6984 rtx op1 = expand_normal (arg1);
6985 rtx op2 = expand_normal (arg2);
6986 rtx pat;
6987 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6988 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6989 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6990
6991 /* Invalid arguments. Bail before doing anything stoopid! */
6992 if (arg0 == error_mark_node
6993 || arg1 == error_mark_node
6994 || arg2 == error_mark_node)
6995 return const0_rtx;
6996
6997 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6998 op0 = copy_to_mode_reg (mode2, op0);
6999 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7000 op1 = copy_to_mode_reg (mode0, op1);
7001 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7002 op2 = copy_to_mode_reg (mode1, op2);
7003
7004 pat = GEN_FCN (icode) (op1, op2, op0);
7005 if (pat)
7006 emit_insn (pat);
7007 return NULL_RTX;
7008}
7009
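/* Expand an AltiVec store-vector builtin.  The value to store comes
   first in ARGLIST, followed by an offset and a base pointer that are
   combined into the memory address written by ICODE.  */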
6525c0e7 7010static rtx
a2369ed3 7011altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
7012{
7013 tree arg0 = TREE_VALUE (arglist);
7014 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7015 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7016 rtx op0 = expand_normal (arg0);
7017 rtx op1 = expand_normal (arg1);
7018 rtx op2 = expand_normal (arg2);
7019 rtx pat, addr;
7020 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7021 enum machine_mode mode1 = Pmode;
7022 enum machine_mode mode2 = Pmode;
7023
7024 /* Invalid arguments. Bail before doing anything stoopid! */
7025 if (arg0 == error_mark_node
7026 || arg1 == error_mark_node
7027 || arg2 == error_mark_node)
9a171fcd 7028 return const0_rtx;
6525c0e7 7029
7030 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7031 op0 = copy_to_mode_reg (tmode, op0);
7032
f676971a 7033 op2 = copy_to_mode_reg (mode2, op2);
7034
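  /* As in the load case, omit the offset from the address when it is
     the constant zero.  */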
7035 if (op1 == const0_rtx)
7036 {
7037 addr = gen_rtx_MEM (tmode, op2);
7038 }
7039 else
7040 {
7041 op1 = copy_to_mode_reg (mode1, op1);
7042 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7043 }
6525c0e7 7044
b4a62fa0 7045 pat = GEN_FCN (icode) (addr, op0);
7046 if (pat)
7047 emit_insn (pat);
7048 return NULL_RTX;
7049}
7050
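/* Expand a builtin that maps to a single three-operand insn,
   verifying that the vsldoi forms are given a 4-bit unsigned literal
   as their third argument.  */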
2212663f 7051static rtx
a2369ed3 7052rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
7053{
7054 rtx pat;
7055 tree arg0 = TREE_VALUE (arglist);
7056 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7057 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
84217346
MD
7058 rtx op0 = expand_normal (arg0);
7059 rtx op1 = expand_normal (arg1);
7060 rtx op2 = expand_normal (arg2);
2212663f
DB
7061 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7062 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7063 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7064 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7065
774b5662
DE
7066 if (icode == CODE_FOR_nothing)
7067 /* Builtin not supported on this processor. */
7068 return 0;
7069
20e26713
AH
7070 /* If we got invalid arguments bail out before generating bad rtl. */
7071 if (arg0 == error_mark_node
7072 || arg1 == error_mark_node
7073 || arg2 == error_mark_node)
9a171fcd 7074 return const0_rtx;
20e26713 7075
aba5fb01
NS
7076 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7077 || icode == CODE_FOR_altivec_vsldoi_v4si
7078 || icode == CODE_FOR_altivec_vsldoi_v8hi
7079 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7080 {
7081 /* Only allow 4-bit unsigned literals. */
8bb418a3 7082 STRIP_NOPS (arg2);
b44140e7
AH
7083 if (TREE_CODE (arg2) != INTEGER_CST
7084 || TREE_INT_CST_LOW (arg2) & ~0xf)
7085 {
7086 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7087 return const0_rtx;
b44140e7 7088 }
b44140e7
AH
7089 }
7090
c62f2db5 7091 if (target == 0
2212663f
DB
7092 || GET_MODE (target) != tmode
7093 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7094 target = gen_reg_rtx (tmode);
7095
7096 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7097 op0 = copy_to_mode_reg (mode0, op0);
7098 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7099 op1 = copy_to_mode_reg (mode1, op1);
7100 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7101 op2 = copy_to_mode_reg (mode2, op2);
7102
7103 pat = GEN_FCN (icode) (target, op0, op1, op2);
7104 if (! pat)
7105 return 0;
7106 emit_insn (pat);
7107
7108 return target;
7109}
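
/* Editorial sketch (added): the vsldoi check above is what enforces the
   4-bit-literal restriction on the user-level vec_sld intrinsic, assuming
   the usual <altivec.h> mapping onto the vsldoi patterns.  With -maltivec:

     vector signed int
     shift_pair (vector signed int a, vector signed int b)
     {
       return vec_sld (a, b, 3);       (accepted: literal in 0..15)
     }

   whereas vec_sld (a, b, 16), or a non-constant third argument, draws the
   "argument 3 must be a 4-bit unsigned literal" error emitted above.  */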
92898235 7110
3a9b8c7e 7111/* Expand the lvx builtins. */
0ac081f6 7112static rtx
a2369ed3 7113altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7114{
0ac081f6
AH
7115 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7116 tree arglist = TREE_OPERAND (exp, 1);
0ac081f6 7117 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7118 tree arg0;
7119 enum machine_mode tmode, mode0;
7c3abc73 7120 rtx pat, op0;
3a9b8c7e 7121 enum insn_code icode;
92898235 7122
0ac081f6
AH
7123 switch (fcode)
7124 {
f18c054f 7125 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7126 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7127 break;
f18c054f 7128 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7129 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7130 break;
7131 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7132 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7133 break;
7134 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7135 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7136 break;
7137 default:
7138 *expandedp = false;
7139 return NULL_RTX;
7140 }
0ac081f6 7141
3a9b8c7e 7142 *expandedp = true;
f18c054f 7143
3a9b8c7e 7144 arg0 = TREE_VALUE (arglist);
84217346 7145 op0 = expand_normal (arg0);
3a9b8c7e
AH
7146 tmode = insn_data[icode].operand[0].mode;
7147 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7148
3a9b8c7e
AH
7149 if (target == 0
7150 || GET_MODE (target) != tmode
7151 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7152 target = gen_reg_rtx (tmode);
24408032 7153
3a9b8c7e
AH
7154 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7155 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7156
3a9b8c7e
AH
7157 pat = GEN_FCN (icode) (target, op0);
7158 if (! pat)
7159 return 0;
7160 emit_insn (pat);
7161 return target;
7162}
f18c054f 7163
3a9b8c7e
AH
7164/* Expand the stvx builtins. */
7165static rtx
f676971a 7166altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7167 bool *expandedp)
3a9b8c7e
AH
7168{
7169 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7170 tree arglist = TREE_OPERAND (exp, 1);
7171 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7172 tree arg0, arg1;
7173 enum machine_mode mode0, mode1;
7c3abc73 7174 rtx pat, op0, op1;
3a9b8c7e 7175 enum insn_code icode;
f18c054f 7176
3a9b8c7e
AH
7177 switch (fcode)
7178 {
7179 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7180 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7181 break;
7182 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7183 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7184 break;
7185 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7186 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7187 break;
7188 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7189 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7190 break;
7191 default:
7192 *expandedp = false;
7193 return NULL_RTX;
7194 }
24408032 7195
3a9b8c7e
AH
7196 arg0 = TREE_VALUE (arglist);
7197 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
84217346
MD
7198 op0 = expand_normal (arg0);
7199 op1 = expand_normal (arg1);
3a9b8c7e
AH
7200 mode0 = insn_data[icode].operand[0].mode;
7201 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7202
3a9b8c7e
AH
7203 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7204 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7205 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7206 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7207
3a9b8c7e
AH
7208 pat = GEN_FCN (icode) (op0, op1);
7209 if (pat)
7210 emit_insn (pat);
f18c054f 7211
3a9b8c7e
AH
7212 *expandedp = true;
7213 return NULL_RTX;
7214}
f18c054f 7215
3a9b8c7e
AH
7216/* Expand the dst builtins. */
7217static rtx
f676971a 7218altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7219 bool *expandedp)
3a9b8c7e
AH
7220{
7221 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7222 tree arglist = TREE_OPERAND (exp, 1);
7223 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7224 tree arg0, arg1, arg2;
7225 enum machine_mode mode0, mode1, mode2;
7c3abc73 7226 rtx pat, op0, op1, op2;
3a9b8c7e 7227 struct builtin_description *d;
a3170dc6 7228 size_t i;
f18c054f 7229
3a9b8c7e 7230 *expandedp = false;
f18c054f 7231
3a9b8c7e
AH
7232 /* Handle DST variants. */
7233 d = (struct builtin_description *) bdesc_dst;
7234 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7235 if (d->code == fcode)
7236 {
7237 arg0 = TREE_VALUE (arglist);
7238 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7239 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
84217346
MD
7240 op0 = expand_normal (arg0);
7241 op1 = expand_normal (arg1);
7242 op2 = expand_normal (arg2);
3a9b8c7e
AH
7243 mode0 = insn_data[d->icode].operand[0].mode;
7244 mode1 = insn_data[d->icode].operand[1].mode;
7245 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7246
3a9b8c7e
AH
7247 /* Invalid arguments, bail out before generating bad rtl. */
7248 if (arg0 == error_mark_node
7249 || arg1 == error_mark_node
7250 || arg2 == error_mark_node)
7251 return const0_rtx;
f18c054f 7252
86e7df90 7253 *expandedp = true;
8bb418a3 7254 STRIP_NOPS (arg2);
3a9b8c7e
AH
7255 if (TREE_CODE (arg2) != INTEGER_CST
7256 || TREE_INT_CST_LOW (arg2) & ~0x3)
7257 {
9e637a26 7258 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7259 return const0_rtx;
7260 }
f18c054f 7261
3a9b8c7e 7262 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7263 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7264 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7265 op1 = copy_to_mode_reg (mode1, op1);
24408032 7266
3a9b8c7e
AH
7267 pat = GEN_FCN (d->icode) (op0, op1, op2);
7268 if (pat != 0)
7269 emit_insn (pat);
f18c054f 7270
3a9b8c7e
AH
7271 return NULL_RTX;
7272 }
f18c054f 7273
3a9b8c7e
AH
7274 return NULL_RTX;
7275}
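
/* Editorial sketch (added): the 2-bit literal checked above is the
   data-stream tag of the dst family.  Assuming the usual <altivec.h>
   mapping and -maltivec, a call looks roughly like:

     void
     start_prefetch (const int *p, int control)
     {
       vec_dst (p, control, 0);        (tag must be a literal 0..3)
     }

   A tag of 4 or more, or a non-constant tag, triggers the
   "must be a 2-bit unsigned literal" diagnostic emitted above.  */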
24408032 7276
7a4eca66
DE
7277/* Expand vec_init builtin. */
7278static rtx
7279altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7280{
7281 enum machine_mode tmode = TYPE_MODE (type);
7282 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7283 int i, n_elt = GET_MODE_NUNITS (tmode);
7284 rtvec v = rtvec_alloc (n_elt);
7285
7286 gcc_assert (VECTOR_MODE_P (tmode));
7287
7288 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7289 {
84217346 7290 rtx x = expand_normal (TREE_VALUE (arglist));
7a4eca66
DE
7291 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7292 }
7293
7294 gcc_assert (arglist == NULL);
7295
7296 if (!target || !register_operand (target, tmode))
7297 target = gen_reg_rtx (tmode);
7298
7299 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7300 return target;
7301}
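
/* Editorial note (added): for a four-element vector the function above
   simply wraps the expanded initializer values in a PARALLEL, roughly
   (parallel [x0 x1 x2 x3]) in TMODE, and lets rs6000_expand_vector_init
   choose the actual splat/merge sequence; TARGET is reused only when it
   already satisfies register_operand.  */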
7302
7303/* Return the integer constant in ARG. Constrain it to be in the range
7304 of the subparts of VEC_TYPE; issue an error if not. */
7305
7306static int
7307get_element_number (tree vec_type, tree arg)
7308{
7309 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7310
7311 if (!host_integerp (arg, 1)
7312 || (elt = tree_low_cst (arg, 1), elt > max))
7313 {
7314 error ("selector must be an integer constant in the range 0..%wi", max);
7315 return 0;
7316 }
7317
7318 return elt;
7319}
7320
7321/* Expand vec_set builtin. */
7322static rtx
7323altivec_expand_vec_set_builtin (tree arglist)
7324{
7325 enum machine_mode tmode, mode1;
7326 tree arg0, arg1, arg2;
7327 int elt;
7328 rtx op0, op1;
7329
7330 arg0 = TREE_VALUE (arglist);
7331 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7332 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7333
7334 tmode = TYPE_MODE (TREE_TYPE (arg0));
7335 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7336 gcc_assert (VECTOR_MODE_P (tmode));
7337
7338 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7339 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7340 elt = get_element_number (TREE_TYPE (arg0), arg2);
7341
7342 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7343 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7344
7345 op0 = force_reg (tmode, op0);
7346 op1 = force_reg (mode1, op1);
7347
7348 rs6000_expand_vector_set (op0, op1, elt);
7349
7350 return op0;
7351}
7352
7353/* Expand vec_ext builtin. */
7354static rtx
7355altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7356{
7357 enum machine_mode tmode, mode0;
7358 tree arg0, arg1;
7359 int elt;
7360 rtx op0;
7361
7362 arg0 = TREE_VALUE (arglist);
7363 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7364
84217346 7365 op0 = expand_normal (arg0);
7a4eca66
DE
7366 elt = get_element_number (TREE_TYPE (arg0), arg1);
7367
7368 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7369 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7370 gcc_assert (VECTOR_MODE_P (mode0));
7371
7372 op0 = force_reg (mode0, op0);
7373
7374 if (optimize || !target || !register_operand (target, tmode))
7375 target = gen_reg_rtx (tmode);
7376
7377 rs6000_expand_vector_extract (target, op0, elt);
7378
7379 return target;
7380}
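
/* Editorial sketch (added): get_element_number above limits the selector
   of the vec_set/vec_ext builtins to the vector's subpart count, so for a
   V4SI vector only 0..3 is accepted.  Assuming the usual <altivec.h>
   mapping of vec_extract onto ALTIVEC_BUILTIN_VEC_EXT_V4SI (an assumption
   about the front end, not shown in this file):

     int
     third_element (vector signed int v)
     {
       return vec_extract (v, 2);
     }

   compiles, while a selector of 4, or a non-constant selector, produces
   the "selector must be an integer constant in the range 0..3" error.  */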
7381
3a9b8c7e
AH
7382/* Expand the builtin in EXP and store the result in TARGET. Store
7383 true in *EXPANDEDP if we found a builtin to expand. */
7384static rtx
a2369ed3 7385altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e
AH
7386{
7387 struct builtin_description *d;
7388 struct builtin_description_predicates *dp;
7389 size_t i;
7390 enum insn_code icode;
7391 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7392 tree arglist = TREE_OPERAND (exp, 1);
7c3abc73
AH
7393 tree arg0;
7394 rtx op0, pat;
7395 enum machine_mode tmode, mode0;
3a9b8c7e 7396 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7397
58646b77
PB
7398 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7399 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7400 {
7401 *expandedp = true;
ea40ba9c 7402 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7403 return const0_rtx;
7404 }
7405
3a9b8c7e
AH
7406 target = altivec_expand_ld_builtin (exp, target, expandedp);
7407 if (*expandedp)
7408 return target;
0ac081f6 7409
3a9b8c7e
AH
7410 target = altivec_expand_st_builtin (exp, target, expandedp);
7411 if (*expandedp)
7412 return target;
7413
7414 target = altivec_expand_dst_builtin (exp, target, expandedp);
7415 if (*expandedp)
7416 return target;
7417
7418 *expandedp = true;
95385cbb 7419
3a9b8c7e
AH
7420 switch (fcode)
7421 {
6525c0e7
AH
7422 case ALTIVEC_BUILTIN_STVX:
7423 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7424 case ALTIVEC_BUILTIN_STVEBX:
7425 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7426 case ALTIVEC_BUILTIN_STVEHX:
7427 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7428 case ALTIVEC_BUILTIN_STVEWX:
7429 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7430 case ALTIVEC_BUILTIN_STVXL:
7431 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3a9b8c7e 7432
95385cbb
AH
7433 case ALTIVEC_BUILTIN_MFVSCR:
7434 icode = CODE_FOR_altivec_mfvscr;
7435 tmode = insn_data[icode].operand[0].mode;
7436
7437 if (target == 0
7438 || GET_MODE (target) != tmode
7439 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7440 target = gen_reg_rtx (tmode);
f676971a 7441
95385cbb 7442 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7443 if (! pat)
7444 return 0;
7445 emit_insn (pat);
95385cbb
AH
7446 return target;
7447
7448 case ALTIVEC_BUILTIN_MTVSCR:
7449 icode = CODE_FOR_altivec_mtvscr;
7450 arg0 = TREE_VALUE (arglist);
84217346 7451 op0 = expand_normal (arg0);
95385cbb
AH
7452 mode0 = insn_data[icode].operand[0].mode;
7453
7454 /* If we got invalid arguments bail out before generating bad rtl. */
7455 if (arg0 == error_mark_node)
9a171fcd 7456 return const0_rtx;
95385cbb
AH
7457
7458 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7459 op0 = copy_to_mode_reg (mode0, op0);
7460
7461 pat = GEN_FCN (icode) (op0);
7462 if (pat)
7463 emit_insn (pat);
7464 return NULL_RTX;
3a9b8c7e 7465
95385cbb
AH
7466 case ALTIVEC_BUILTIN_DSSALL:
7467 emit_insn (gen_altivec_dssall ());
7468 return NULL_RTX;
7469
7470 case ALTIVEC_BUILTIN_DSS:
7471 icode = CODE_FOR_altivec_dss;
7472 arg0 = TREE_VALUE (arglist);
8bb418a3 7473 STRIP_NOPS (arg0);
84217346 7474 op0 = expand_normal (arg0);
95385cbb
AH
7475 mode0 = insn_data[icode].operand[0].mode;
7476
7477 /* If we got invalid arguments bail out before generating bad rtl. */
7478 if (arg0 == error_mark_node)
9a171fcd 7479 return const0_rtx;
95385cbb 7480
b44140e7
AH
7481 if (TREE_CODE (arg0) != INTEGER_CST
7482 || TREE_INT_CST_LOW (arg0) & ~0x3)
7483 {
7484 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7485 return const0_rtx;
b44140e7
AH
7486 }
7487
95385cbb
AH
7488 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7489 op0 = copy_to_mode_reg (mode0, op0);
7490
7491 emit_insn (gen_altivec_dss (op0));
0ac081f6 7492 return NULL_RTX;
7a4eca66
DE
7493
7494 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7495 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7496 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7497 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7498 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7499
7500 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7501 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7502 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7503 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7504 return altivec_expand_vec_set_builtin (arglist);
7505
7506 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7507 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7508 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7509 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7510 return altivec_expand_vec_ext_builtin (arglist, target);
7511
 7512     default:
 7513       break;
 7514       /* Fall through to the abs/predicate/LV handling below.  */
0ac081f6 7515 }
24408032 7516
100c4561
AH
7517 /* Expand abs* operations. */
7518 d = (struct builtin_description *) bdesc_abs;
ca7558fc 7519 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561
AH
7520 if (d->code == fcode)
7521 return altivec_expand_abs_builtin (d->icode, arglist, target);
7522
ae4b4a02
AH
7523 /* Expand the AltiVec predicates. */
7524 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
ca7558fc 7525 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 7526 if (dp->code == fcode)
c4ad648e
AM
7527 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7528 arglist, target);
ae4b4a02 7529
6525c0e7
AH
 7530   /* The LV* builtins are special: they were registered with (offset, pointer) argument pairs, so expand them through altivec_expand_lv_builtin.  */
7531 switch (fcode)
7532 {
7533 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 7534 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
c4ad648e 7535 arglist, target);
6525c0e7 7536 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 7537 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
c4ad648e 7538 arglist, target);
6525c0e7 7539 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 7540 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
c4ad648e 7541 arglist, target);
6525c0e7 7542 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 7543 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
c4ad648e 7544 arglist, target);
6525c0e7 7545 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 7546 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
c4ad648e 7547 arglist, target);
6525c0e7 7548 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 7549 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
c4ad648e 7550 arglist, target);
6525c0e7 7551 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 7552 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
c4ad648e 7553 arglist, target);
6525c0e7
AH
 7554     default:
 7555       break;
 7556       /* Not an LV* builtin; fall through and report it as unexpanded.  */
7557 }
95385cbb 7558
92898235 7559 *expandedp = false;
0ac081f6
AH
7560 return NULL_RTX;
7561}
7562
a3170dc6
AH
7563/* Binops that need to be initialized manually, but can be expanded
7564 automagically by rs6000_expand_binop_builtin. */
7565static struct builtin_description bdesc_2arg_spe[] =
7566{
7567 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7568 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7569 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7570 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7571 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7572 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7573 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7574 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7575 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7576 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7577 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7578 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7579 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7580 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7581 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7582 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7583 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7584 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7585 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7586 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7587 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7588 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7589};
7590
7591/* Expand the builtin in EXP and store the result in TARGET. Store
7592 true in *EXPANDEDP if we found a builtin to expand.
7593
7594 This expands the SPE builtins that are not simple unary and binary
7595 operations. */
7596static rtx
a2369ed3 7597spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6
AH
7598{
7599 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7600 tree arglist = TREE_OPERAND (exp, 1);
7601 tree arg1, arg0;
7602 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7603 enum insn_code icode;
7604 enum machine_mode tmode, mode0;
7605 rtx pat, op0;
7606 struct builtin_description *d;
7607 size_t i;
7608
7609 *expandedp = true;
7610
7611 /* Syntax check for a 5-bit unsigned immediate. */
7612 switch (fcode)
7613 {
7614 case SPE_BUILTIN_EVSTDD:
7615 case SPE_BUILTIN_EVSTDH:
7616 case SPE_BUILTIN_EVSTDW:
7617 case SPE_BUILTIN_EVSTWHE:
7618 case SPE_BUILTIN_EVSTWHO:
7619 case SPE_BUILTIN_EVSTWWE:
7620 case SPE_BUILTIN_EVSTWWO:
7621 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7622 if (TREE_CODE (arg1) != INTEGER_CST
7623 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7624 {
7625 error ("argument 2 must be a 5-bit unsigned literal");
7626 return const0_rtx;
7627 }
7628 break;
7629 default:
7630 break;
7631 }
7632
00332c9f
AH
 7633   /* The evsplat*i builtins are not quite generic: they take a small signed-immediate argument, so expand them through the unop expander directly.  */
7634 switch (fcode)
7635 {
7636 case SPE_BUILTIN_EVSPLATFI:
7637 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7638 arglist, target);
7639 case SPE_BUILTIN_EVSPLATI:
7640 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7641 arglist, target);
7642 default:
7643 break;
7644 }
7645
a3170dc6
AH
7646 d = (struct builtin_description *) bdesc_2arg_spe;
7647 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7648 if (d->code == fcode)
7649 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7650
7651 d = (struct builtin_description *) bdesc_spe_predicates;
7652 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7653 if (d->code == fcode)
7654 return spe_expand_predicate_builtin (d->icode, arglist, target);
7655
7656 d = (struct builtin_description *) bdesc_spe_evsel;
7657 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7658 if (d->code == fcode)
7659 return spe_expand_evsel_builtin (d->icode, arglist, target);
7660
7661 switch (fcode)
7662 {
7663 case SPE_BUILTIN_EVSTDDX:
61bea3b0 7664 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
a3170dc6 7665 case SPE_BUILTIN_EVSTDHX:
61bea3b0 7666 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
a3170dc6 7667 case SPE_BUILTIN_EVSTDWX:
61bea3b0 7668 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
a3170dc6 7669 case SPE_BUILTIN_EVSTWHEX:
61bea3b0 7670 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
a3170dc6 7671 case SPE_BUILTIN_EVSTWHOX:
61bea3b0 7672 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
a3170dc6 7673 case SPE_BUILTIN_EVSTWWEX:
61bea3b0 7674 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
a3170dc6 7675 case SPE_BUILTIN_EVSTWWOX:
61bea3b0 7676 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
a3170dc6 7677 case SPE_BUILTIN_EVSTDD:
61bea3b0 7678 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
a3170dc6 7679 case SPE_BUILTIN_EVSTDH:
61bea3b0 7680 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
a3170dc6 7681 case SPE_BUILTIN_EVSTDW:
61bea3b0 7682 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
a3170dc6 7683 case SPE_BUILTIN_EVSTWHE:
61bea3b0 7684 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
a3170dc6 7685 case SPE_BUILTIN_EVSTWHO:
61bea3b0 7686 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
a3170dc6 7687 case SPE_BUILTIN_EVSTWWE:
61bea3b0 7688 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
a3170dc6 7689 case SPE_BUILTIN_EVSTWWO:
61bea3b0 7690 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
a3170dc6
AH
7691 case SPE_BUILTIN_MFSPEFSCR:
7692 icode = CODE_FOR_spe_mfspefscr;
7693 tmode = insn_data[icode].operand[0].mode;
7694
7695 if (target == 0
7696 || GET_MODE (target) != tmode
7697 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7698 target = gen_reg_rtx (tmode);
f676971a 7699
a3170dc6
AH
7700 pat = GEN_FCN (icode) (target);
7701 if (! pat)
7702 return 0;
7703 emit_insn (pat);
7704 return target;
7705 case SPE_BUILTIN_MTSPEFSCR:
7706 icode = CODE_FOR_spe_mtspefscr;
7707 arg0 = TREE_VALUE (arglist);
84217346 7708 op0 = expand_normal (arg0);
a3170dc6
AH
7709 mode0 = insn_data[icode].operand[0].mode;
7710
7711 if (arg0 == error_mark_node)
7712 return const0_rtx;
7713
7714 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7715 op0 = copy_to_mode_reg (mode0, op0);
7716
7717 pat = GEN_FCN (icode) (op0);
7718 if (pat)
7719 emit_insn (pat);
7720 return NULL_RTX;
7721 default:
7722 break;
7723 }
7724
7725 *expandedp = false;
7726 return NULL_RTX;
7727}
7728
7729static rtx
a2369ed3 7730spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7731{
7732 rtx pat, scratch, tmp;
7733 tree form = TREE_VALUE (arglist);
7734 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7735 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
84217346
MD
7736 rtx op0 = expand_normal (arg0);
7737 rtx op1 = expand_normal (arg1);
a3170dc6
AH
7738 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7739 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7740 int form_int;
7741 enum rtx_code code;
7742
7743 if (TREE_CODE (form) != INTEGER_CST)
7744 {
7745 error ("argument 1 of __builtin_spe_predicate must be a constant");
7746 return const0_rtx;
7747 }
7748 else
7749 form_int = TREE_INT_CST_LOW (form);
7750
37409796 7751 gcc_assert (mode0 == mode1);
a3170dc6
AH
7752
7753 if (arg0 == error_mark_node || arg1 == error_mark_node)
7754 return const0_rtx;
7755
7756 if (target == 0
7757 || GET_MODE (target) != SImode
7758 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7759 target = gen_reg_rtx (SImode);
7760
7761 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7762 op0 = copy_to_mode_reg (mode0, op0);
7763 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7764 op1 = copy_to_mode_reg (mode1, op1);
7765
7766 scratch = gen_reg_rtx (CCmode);
7767
7768 pat = GEN_FCN (icode) (scratch, op0, op1);
7769 if (! pat)
7770 return const0_rtx;
7771 emit_insn (pat);
7772
7773 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7774 _lower_. We use one compare, but look in different bits of the
7775 CR for each variant.
7776
7777 There are 2 elements in each SPE simd type (upper/lower). The CR
7778 bits are set as follows:
7779
7780 BIT0 | BIT 1 | BIT 2 | BIT 3
7781 U | L | (U | L) | (U & L)
7782
7783 So, for an "all" relationship, BIT 3 would be set.
7784 For an "any" relationship, BIT 2 would be set. Etc.
7785
7786 Following traditional nomenclature, these bits map to:
7787
7788 BIT0 | BIT 1 | BIT 2 | BIT 3
7789 LT | GT | EQ | OV
7790
 7791     Later, we will generate rtl to look in the OV, EQ, LT or GT bit,
          depending on whether the form is all, any, upper or lower.
7792 */
7793
7794 switch (form_int)
7795 {
7796 /* All variant. OV bit. */
7797 case 0:
7798 /* We need to get to the OV bit, which is the ORDERED bit. We
7799 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 7800 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
7801 So let's just use another pattern. */
7802 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7803 return target;
7804 /* Any variant. EQ bit. */
7805 case 1:
7806 code = EQ;
7807 break;
7808 /* Upper variant. LT bit. */
7809 case 2:
7810 code = LT;
7811 break;
7812 /* Lower variant. GT bit. */
7813 case 3:
7814 code = GT;
7815 break;
7816 default:
7817 error ("argument 1 of __builtin_spe_predicate is out of range");
7818 return const0_rtx;
7819 }
7820
7821 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7822 emit_move_insn (target, tmp);
7823
7824 return target;
7825}
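
/* Editorial sketch (added; the builtin name is an assumption taken from
   the "__builtin_spe_" naming convention, and the real entries live in
   bdesc_spe_predicates): the first argument selects which CR bit the
   expander above tests, per the comment in the function
   (0 = all, 1 = any, 2 = upper, 3 = lower).  With -mspe, something like:

     int
     any_greater (__ev64_opaque__ a, __ev64_opaque__ b)
     {
       return __builtin_spe_evcmpgts (1, a, b);
     }

   returns nonzero when either element of a compares greater than the
   corresponding element of b (the "any" form, EQ bit).  */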
7826
7827/* The evsel builtins look like this:
7828
7829 e = __builtin_spe_evsel_OP (a, b, c, d);
7830
7831 and work like this:
7832
7833 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7834 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7835*/
7836
7837static rtx
a2369ed3 7838spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7839{
7840 rtx pat, scratch;
7841 tree arg0 = TREE_VALUE (arglist);
7842 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7843 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7844 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
84217346
MD
7845 rtx op0 = expand_normal (arg0);
7846 rtx op1 = expand_normal (arg1);
7847 rtx op2 = expand_normal (arg2);
7848 rtx op3 = expand_normal (arg3);
a3170dc6
AH
7849 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7850 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7851
37409796 7852 gcc_assert (mode0 == mode1);
a3170dc6
AH
7853
7854 if (arg0 == error_mark_node || arg1 == error_mark_node
7855 || arg2 == error_mark_node || arg3 == error_mark_node)
7856 return const0_rtx;
7857
7858 if (target == 0
7859 || GET_MODE (target) != mode0
7860 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7861 target = gen_reg_rtx (mode0);
7862
7863 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7864 op0 = copy_to_mode_reg (mode0, op0);
7865 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7866 op1 = copy_to_mode_reg (mode0, op1);
7867 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7868 op2 = copy_to_mode_reg (mode0, op2);
7869 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7870 op3 = copy_to_mode_reg (mode0, op3);
7871
7872 /* Generate the compare. */
7873 scratch = gen_reg_rtx (CCmode);
7874 pat = GEN_FCN (icode) (scratch, op0, op1);
7875 if (! pat)
7876 return const0_rtx;
7877 emit_insn (pat);
7878
7879 if (mode0 == V2SImode)
7880 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7881 else
7882 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7883
7884 return target;
7885}
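
/* Editorial note (added): the expander above always emits two insns, a
   CR-setting compare followed by evsel (or evsel_fs for V2SF), so the
   user-level form sketched in the comment before the function, e.g.
   __builtin_spe_evsel_gts (a, b, c, d) (the concrete name is an
   assumption; the real names come from bdesc_spe_evsel), picks c[i] or
   d[i] per element according to the a[i] > b[i] comparison.  */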
7886
0ac081f6
AH
7887/* Expand an expression EXP that calls a built-in function,
7888 with result going to TARGET if that's convenient
7889 (and in mode MODE if that's convenient).
7890 SUBTARGET may be used as the target for computing one of EXP's operands.
7891 IGNORE is nonzero if the value is to be ignored. */
7892
7893static rtx
a2369ed3 7894rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
7895 enum machine_mode mode ATTRIBUTE_UNUSED,
7896 int ignore ATTRIBUTE_UNUSED)
0ac081f6 7897{
92898235
AH
7898 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7899 tree arglist = TREE_OPERAND (exp, 1);
7900 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7901 struct builtin_description *d;
7902 size_t i;
7903 rtx ret;
7904 bool success;
f676971a 7905
7ccf35ed
DN
7906 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7907 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7908 {
7909 int icode = (int) CODE_FOR_altivec_lvsr;
7910 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7911 enum machine_mode mode = insn_data[icode].operand[1].mode;
7912 tree arg;
7913 rtx op, addr, pat;
7914
37409796 7915 gcc_assert (TARGET_ALTIVEC);
7ccf35ed
DN
7916
7917 arg = TREE_VALUE (arglist);
37409796 7918 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
7919 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7920 addr = memory_address (mode, op);
7921 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7922 op = addr;
7923 else
7924 {
 7925 	  /* For the load case we need to negate the address.  */
7926 op = gen_reg_rtx (GET_MODE (addr));
7927 emit_insn (gen_rtx_SET (VOIDmode, op,
7928 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 7929 }
7ccf35ed
DN
7930 op = gen_rtx_MEM (mode, op);
7931
7932 if (target == 0
7933 || GET_MODE (target) != tmode
7934 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7935 target = gen_reg_rtx (tmode);
7936
7937 /*pat = gen_altivec_lvsr (target, op);*/
7938 pat = GEN_FCN (icode) (target, op);
7939 if (!pat)
7940 return 0;
7941 emit_insn (pat);
7942
7943 return target;
7944 }
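
  /* Editorial note (added): the block above handles the mask_for_load and
     mask_for_store builtins, which appear to back the auto-vectorizer's
     misaligned-access realignment scheme: it emits an lvsr of the address
     (negated, for loads) to obtain the permute control vector.  */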
7945
0ac081f6 7946 if (TARGET_ALTIVEC)
92898235
AH
7947 {
7948 ret = altivec_expand_builtin (exp, target, &success);
7949
a3170dc6
AH
7950 if (success)
7951 return ret;
7952 }
7953 if (TARGET_SPE)
7954 {
7955 ret = spe_expand_builtin (exp, target, &success);
7956
92898235
AH
7957 if (success)
7958 return ret;
7959 }
7960
37409796 7961 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 7962
37409796
NS
7963 /* Handle simple unary operations. */
7964 d = (struct builtin_description *) bdesc_1arg;
7965 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7966 if (d->code == fcode)
7967 return rs6000_expand_unop_builtin (d->icode, arglist, target);
bb8df8a6 7968
37409796
NS
7969 /* Handle simple binary operations. */
7970 d = (struct builtin_description *) bdesc_2arg;
7971 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7972 if (d->code == fcode)
7973 return rs6000_expand_binop_builtin (d->icode, arglist, target);
0ac081f6 7974
37409796
NS
7975 /* Handle simple ternary operations. */
7976 d = (struct builtin_description *) bdesc_3arg;
7977 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7978 if (d->code == fcode)
7979 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
bb8df8a6 7980
37409796 7981 gcc_unreachable ();
0ac081f6
AH
7982}
7983
7c62e993
PB
7984static tree
7985build_opaque_vector_type (tree node, int nunits)
7986{
7987 node = copy_node (node);
7988 TYPE_MAIN_VARIANT (node) = node;
7989 return build_vector_type (node, nunits);
7990}
7991
0ac081f6 7992static void
863d938c 7993rs6000_init_builtins (void)
0ac081f6 7994{
4a5eab38
PB
7995 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7996 V2SF_type_node = build_vector_type (float_type_node, 2);
7997 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7998 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7999 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8000 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8001 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8002
8003 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8004 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8005 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8006
7c62e993
PB
8007 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8008 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8009 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8010 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8011
8bb418a3
ZL
8012 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8013 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8014 'vector unsigned short'. */
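
  /* Editorial example (added): because these are distinct type copies,
     C++ overload resolution can tell them apart.  For instance,

       void f (vector bool int);
       void f (vector unsigned int);

     declare two different overloads even though the two types have the
     same layout, and "vector pixel" likewise stays distinct from
     "vector unsigned short".  */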
8015
8dd16ecc
NS
8016 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8017 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8018 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8019 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8020
58646b77
PB
8021 long_integer_type_internal_node = long_integer_type_node;
8022 long_unsigned_type_internal_node = long_unsigned_type_node;
8023 intQI_type_internal_node = intQI_type_node;
8024 uintQI_type_internal_node = unsigned_intQI_type_node;
8025 intHI_type_internal_node = intHI_type_node;
8026 uintHI_type_internal_node = unsigned_intHI_type_node;
8027 intSI_type_internal_node = intSI_type_node;
8028 uintSI_type_internal_node = unsigned_intSI_type_node;
8029 float_type_internal_node = float_type_node;
8030 void_type_internal_node = void_type_node;
8031
8bb418a3
ZL
8032 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8033 get_identifier ("__bool char"),
8034 bool_char_type_node));
8035 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8036 get_identifier ("__bool short"),
8037 bool_short_type_node));
8038 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8039 get_identifier ("__bool int"),
8040 bool_int_type_node));
8041 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8042 get_identifier ("__pixel"),
8043 pixel_type_node));
8044
4a5eab38
PB
8045 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8046 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8047 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8048 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8049
8050 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8051 get_identifier ("__vector unsigned char"),
8052 unsigned_V16QI_type_node));
8053 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8054 get_identifier ("__vector signed char"),
8055 V16QI_type_node));
8056 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8057 get_identifier ("__vector __bool char"),
8058 bool_V16QI_type_node));
8059
8060 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8061 get_identifier ("__vector unsigned short"),
8062 unsigned_V8HI_type_node));
8063 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8064 get_identifier ("__vector signed short"),
8065 V8HI_type_node));
8066 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8067 get_identifier ("__vector __bool short"),
8068 bool_V8HI_type_node));
8069
8070 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8071 get_identifier ("__vector unsigned int"),
8072 unsigned_V4SI_type_node));
8073 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8074 get_identifier ("__vector signed int"),
8075 V4SI_type_node));
8076 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8077 get_identifier ("__vector __bool int"),
8078 bool_V4SI_type_node));
8079
8080 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8081 get_identifier ("__vector float"),
8082 V4SF_type_node));
8083 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8084 get_identifier ("__vector __pixel"),
8085 pixel_V8HI_type_node));
8086
a3170dc6 8087 if (TARGET_SPE)
3fdaa45a 8088 spe_init_builtins ();
0ac081f6
AH
8089 if (TARGET_ALTIVEC)
8090 altivec_init_builtins ();
0559cc77
DE
8091 if (TARGET_ALTIVEC || TARGET_SPE)
8092 rs6000_common_init_builtins ();
69ca3549
DE
8093
8094#if TARGET_XCOFF
8095 /* AIX libm provides clog as __clog. */
8096 if (built_in_decls [BUILT_IN_CLOG])
8097 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8098#endif
0ac081f6
AH
8099}
8100
a3170dc6
AH
8101/* Search through a set of builtins and enable the mask bits.
8102 DESC is an array of builtins.
b6d08ca1 8103 SIZE is the total number of builtins.
a3170dc6
AH
8104 START is the builtin enum at which to start.
8105 END is the builtin enum at which to end. */
0ac081f6 8106static void
a2369ed3 8107enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8108 enum rs6000_builtins start,
a2369ed3 8109 enum rs6000_builtins end)
a3170dc6
AH
8110{
8111 int i;
8112
8113 for (i = 0; i < size; ++i)
8114 if (desc[i].code == start)
8115 break;
8116
8117 if (i == size)
8118 return;
8119
8120 for (; i < size; ++i)
8121 {
8122 /* Flip all the bits on. */
8123 desc[i].mask = target_flags;
8124 if (desc[i].code == end)
8125 break;
8126 }
8127}
8128
8129static void
863d938c 8130spe_init_builtins (void)
0ac081f6 8131{
a3170dc6
AH
8132 tree endlink = void_list_node;
8133 tree puint_type_node = build_pointer_type (unsigned_type_node);
8134 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 8135 struct builtin_description *d;
0ac081f6
AH
8136 size_t i;
8137
a3170dc6
AH
8138 tree v2si_ftype_4_v2si
8139 = build_function_type
3fdaa45a
AH
8140 (opaque_V2SI_type_node,
8141 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8142 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8143 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8144 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8145 endlink)))));
8146
8147 tree v2sf_ftype_4_v2sf
8148 = build_function_type
3fdaa45a
AH
8149 (opaque_V2SF_type_node,
8150 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8151 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8152 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8153 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8154 endlink)))));
8155
8156 tree int_ftype_int_v2si_v2si
8157 = build_function_type
8158 (integer_type_node,
8159 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8160 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8161 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8162 endlink))));
8163
8164 tree int_ftype_int_v2sf_v2sf
8165 = build_function_type
8166 (integer_type_node,
8167 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8168 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8169 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8170 endlink))));
8171
8172 tree void_ftype_v2si_puint_int
8173 = build_function_type (void_type_node,
3fdaa45a 8174 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8175 tree_cons (NULL_TREE, puint_type_node,
8176 tree_cons (NULL_TREE,
8177 integer_type_node,
8178 endlink))));
8179
8180 tree void_ftype_v2si_puint_char
8181 = build_function_type (void_type_node,
3fdaa45a 8182 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8183 tree_cons (NULL_TREE, puint_type_node,
8184 tree_cons (NULL_TREE,
8185 char_type_node,
8186 endlink))));
8187
8188 tree void_ftype_v2si_pv2si_int
8189 = build_function_type (void_type_node,
3fdaa45a 8190 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8191 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8192 tree_cons (NULL_TREE,
8193 integer_type_node,
8194 endlink))));
8195
8196 tree void_ftype_v2si_pv2si_char
8197 = build_function_type (void_type_node,
3fdaa45a 8198 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8199 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8200 tree_cons (NULL_TREE,
8201 char_type_node,
8202 endlink))));
8203
8204 tree void_ftype_int
8205 = build_function_type (void_type_node,
8206 tree_cons (NULL_TREE, integer_type_node, endlink));
8207
8208 tree int_ftype_void
36e8d515 8209 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8210
8211 tree v2si_ftype_pv2si_int
3fdaa45a 8212 = build_function_type (opaque_V2SI_type_node,
6035d635 8213 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8214 tree_cons (NULL_TREE, integer_type_node,
8215 endlink)));
8216
8217 tree v2si_ftype_puint_int
3fdaa45a 8218 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8219 tree_cons (NULL_TREE, puint_type_node,
8220 tree_cons (NULL_TREE, integer_type_node,
8221 endlink)));
8222
8223 tree v2si_ftype_pushort_int
3fdaa45a 8224 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8225 tree_cons (NULL_TREE, pushort_type_node,
8226 tree_cons (NULL_TREE, integer_type_node,
8227 endlink)));
8228
00332c9f
AH
8229 tree v2si_ftype_signed_char
8230 = build_function_type (opaque_V2SI_type_node,
8231 tree_cons (NULL_TREE, signed_char_type_node,
8232 endlink));
8233
a3170dc6
AH
8234 /* The initialization of the simple binary and unary builtins is
8235 done in rs6000_common_init_builtins, but we have to enable the
8236 mask bits here manually because we have run out of `target_flags'
8237 bits. We really need to redesign this mask business. */
8238
8239 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8240 ARRAY_SIZE (bdesc_2arg),
8241 SPE_BUILTIN_EVADDW,
8242 SPE_BUILTIN_EVXOR);
8243 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8244 ARRAY_SIZE (bdesc_1arg),
8245 SPE_BUILTIN_EVABS,
8246 SPE_BUILTIN_EVSUBFUSIAAW);
8247 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8248 ARRAY_SIZE (bdesc_spe_predicates),
8249 SPE_BUILTIN_EVCMPEQ,
8250 SPE_BUILTIN_EVFSTSTLT);
8251 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8252 ARRAY_SIZE (bdesc_spe_evsel),
8253 SPE_BUILTIN_EVSEL_CMPGTS,
8254 SPE_BUILTIN_EVSEL_FSTSTEQ);
8255
36252949
AH
8256 (*lang_hooks.decls.pushdecl)
8257 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8258 opaque_V2SI_type_node));
8259
a3170dc6 8260 /* Initialize irregular SPE builtins. */
f676971a 8261
a3170dc6
AH
8262 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8263 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8264 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8265 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8266 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8267 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8268 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8269 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8270 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8271 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8272 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8273 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8274 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8275 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8276 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8277 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8278 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8279 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8280
8281 /* Loads. */
8282 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8283 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8284 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8285 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8286 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8287 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8288 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8289 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8290 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8291 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8292 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8293 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8294 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8295 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8296 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8297 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8298 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8299 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8300 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8301 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8302 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8303 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8304
8305 /* Predicates. */
8306 d = (struct builtin_description *) bdesc_spe_predicates;
8307 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8308 {
8309 tree type;
8310
8311 switch (insn_data[d->icode].operand[1].mode)
8312 {
8313 case V2SImode:
8314 type = int_ftype_int_v2si_v2si;
8315 break;
8316 case V2SFmode:
8317 type = int_ftype_int_v2sf_v2sf;
8318 break;
8319 default:
37409796 8320 gcc_unreachable ();
a3170dc6
AH
8321 }
8322
8323 def_builtin (d->mask, d->name, type, d->code);
8324 }
8325
8326 /* Evsel predicates. */
8327 d = (struct builtin_description *) bdesc_spe_evsel;
8328 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8329 {
8330 tree type;
8331
8332 switch (insn_data[d->icode].operand[1].mode)
8333 {
8334 case V2SImode:
8335 type = v2si_ftype_4_v2si;
8336 break;
8337 case V2SFmode:
8338 type = v2sf_ftype_4_v2sf;
8339 break;
8340 default:
37409796 8341 gcc_unreachable ();
a3170dc6
AH
8342 }
8343
8344 def_builtin (d->mask, d->name, type, d->code);
8345 }
8346}
8347
8348static void
863d938c 8349altivec_init_builtins (void)
a3170dc6
AH
8350{
8351 struct builtin_description *d;
8352 struct builtin_description_predicates *dp;
8353 size_t i;
7a4eca66
DE
8354 tree ftype;
8355
a3170dc6
AH
8356 tree pfloat_type_node = build_pointer_type (float_type_node);
8357 tree pint_type_node = build_pointer_type (integer_type_node);
8358 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8359 tree pchar_type_node = build_pointer_type (char_type_node);
8360
8361 tree pvoid_type_node = build_pointer_type (void_type_node);
8362
0dbc3651
ZW
8363 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8364 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8365 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8366 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8367
8368 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8369
58646b77
PB
8370 tree int_ftype_opaque
8371 = build_function_type_list (integer_type_node,
8372 opaque_V4SI_type_node, NULL_TREE);
8373
8374 tree opaque_ftype_opaque_int
8375 = build_function_type_list (opaque_V4SI_type_node,
8376 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8377 tree opaque_ftype_opaque_opaque_int
8378 = build_function_type_list (opaque_V4SI_type_node,
8379 opaque_V4SI_type_node, opaque_V4SI_type_node,
8380 integer_type_node, NULL_TREE);
8381 tree int_ftype_int_opaque_opaque
8382 = build_function_type_list (integer_type_node,
8383 integer_type_node, opaque_V4SI_type_node,
8384 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8385 tree int_ftype_int_v4si_v4si
8386 = build_function_type_list (integer_type_node,
8387 integer_type_node, V4SI_type_node,
8388 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8389 tree v4sf_ftype_pcfloat
8390 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8391 tree void_ftype_pfloat_v4sf
b4de2f7d 8392 = build_function_type_list (void_type_node,
a3170dc6 8393 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8394 tree v4si_ftype_pcint
8395 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8396 tree void_ftype_pint_v4si
b4de2f7d
AH
8397 = build_function_type_list (void_type_node,
8398 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8399 tree v8hi_ftype_pcshort
8400 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8401 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8402 = build_function_type_list (void_type_node,
8403 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8404 tree v16qi_ftype_pcchar
8405 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8406 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8407 = build_function_type_list (void_type_node,
8408 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8409 tree void_ftype_v4si
b4de2f7d 8410 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8411 tree v8hi_ftype_void
8412 = build_function_type (V8HI_type_node, void_list_node);
8413 tree void_ftype_void
8414 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8415 tree void_ftype_int
8416 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8417
58646b77
PB
8418 tree opaque_ftype_long_pcvoid
8419 = build_function_type_list (opaque_V4SI_type_node,
8420 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8421 tree v16qi_ftype_long_pcvoid
a3170dc6 8422 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8423 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8424 tree v8hi_ftype_long_pcvoid
a3170dc6 8425 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8426 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8427 tree v4si_ftype_long_pcvoid
a3170dc6 8428 = build_function_type_list (V4SI_type_node,
b4a62fa0 8429 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8430
58646b77
PB
8431 tree void_ftype_opaque_long_pvoid
8432 = build_function_type_list (void_type_node,
8433 opaque_V4SI_type_node, long_integer_type_node,
8434 pvoid_type_node, NULL_TREE);
b4a62fa0 8435 tree void_ftype_v4si_long_pvoid
b4de2f7d 8436 = build_function_type_list (void_type_node,
b4a62fa0 8437 V4SI_type_node, long_integer_type_node,
b4de2f7d 8438 pvoid_type_node, NULL_TREE);
b4a62fa0 8439 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8440 = build_function_type_list (void_type_node,
b4a62fa0 8441 V16QI_type_node, long_integer_type_node,
b4de2f7d 8442 pvoid_type_node, NULL_TREE);
b4a62fa0 8443 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8444 = build_function_type_list (void_type_node,
b4a62fa0 8445 V8HI_type_node, long_integer_type_node,
b4de2f7d 8446 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8447 tree int_ftype_int_v8hi_v8hi
8448 = build_function_type_list (integer_type_node,
8449 integer_type_node, V8HI_type_node,
8450 V8HI_type_node, NULL_TREE);
8451 tree int_ftype_int_v16qi_v16qi
8452 = build_function_type_list (integer_type_node,
8453 integer_type_node, V16QI_type_node,
8454 V16QI_type_node, NULL_TREE);
8455 tree int_ftype_int_v4sf_v4sf
8456 = build_function_type_list (integer_type_node,
8457 integer_type_node, V4SF_type_node,
8458 V4SF_type_node, NULL_TREE);
8459 tree v4si_ftype_v4si
8460 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8461 tree v8hi_ftype_v8hi
8462 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8463 tree v16qi_ftype_v16qi
8464 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8465 tree v4sf_ftype_v4sf
8466 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8467 tree void_ftype_pcvoid_int_int
a3170dc6 8468 = build_function_type_list (void_type_node,
0dbc3651 8469 pcvoid_type_node, integer_type_node,
8bb418a3 8470 integer_type_node, NULL_TREE);
8bb418a3 8471
0dbc3651
ZW
8472 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8473 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8474 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8475 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8476 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8477 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8478 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8479 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8480 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8481 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8482 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8483 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8484 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8485 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8486 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8487 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
8488 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8489 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8490 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 8491 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
8492 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8493 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8494 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8495 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8496 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8497 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8498 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8499 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8500 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8501 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8502 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8503 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
8504 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8505 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8506 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8507 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8508 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8509 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8510 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8511 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8512 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8513 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8514 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8515 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8516 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8517 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8518
8519 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8520
8521 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8522 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8523 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8524 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8525 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8526 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8527 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8528 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8529 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8530 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 8531
a3170dc6
AH
8532 /* Add the DST variants. */
8533 d = (struct builtin_description *) bdesc_dst;
8534 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 8535 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
8536
8537 /* Initialize the predicates. */
8538 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8539 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8540 {
8541 enum machine_mode mode1;
8542 tree type;
58646b77
PB
8543 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8544 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 8545
58646b77
PB
8546 if (is_overloaded)
8547 mode1 = VOIDmode;
8548 else
8549 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
8550
8551 switch (mode1)
8552 {
58646b77
PB
8553 case VOIDmode:
8554 type = int_ftype_int_opaque_opaque;
8555 break;
a3170dc6
AH
8556 case V4SImode:
8557 type = int_ftype_int_v4si_v4si;
8558 break;
8559 case V8HImode:
8560 type = int_ftype_int_v8hi_v8hi;
8561 break;
8562 case V16QImode:
8563 type = int_ftype_int_v16qi_v16qi;
8564 break;
8565 case V4SFmode:
8566 type = int_ftype_int_v4sf_v4sf;
8567 break;
8568 default:
37409796 8569 gcc_unreachable ();
a3170dc6 8570 }
f676971a 8571
a3170dc6
AH
8572 def_builtin (dp->mask, dp->name, type, dp->code);
8573 }
8574
8575 /* Initialize the abs* operators. */
8576 d = (struct builtin_description *) bdesc_abs;
8577 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8578 {
8579 enum machine_mode mode0;
8580 tree type;
8581
8582 mode0 = insn_data[d->icode].operand[0].mode;
8583
8584 switch (mode0)
8585 {
8586 case V4SImode:
8587 type = v4si_ftype_v4si;
8588 break;
8589 case V8HImode:
8590 type = v8hi_ftype_v8hi;
8591 break;
8592 case V16QImode:
8593 type = v16qi_ftype_v16qi;
8594 break;
8595 case V4SFmode:
8596 type = v4sf_ftype_v4sf;
8597 break;
8598 default:
37409796 8599 gcc_unreachable ();
a3170dc6 8600 }
f676971a 8601
a3170dc6
AH
8602 def_builtin (d->mask, d->name, type, d->code);
8603 }
7ccf35ed 8604
13c62176
DN
8605 if (TARGET_ALTIVEC)
8606 {
8607 tree decl;
8608
8609 /* Initialize the target builtin that implements
8610 targetm.vectorize.builtin_mask_for_load. */
8611
c79efc4d
RÁE
8612 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
8613 v16qi_ftype_long_pcvoid,
8614 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8615 BUILT_IN_MD, NULL,
8616 tree_cons (get_identifier ("const"),
8617 NULL_TREE, NULL_TREE));
13c62176
DN
8618 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8619 altivec_builtin_mask_for_load = decl;
13c62176 8620 }
7a4eca66
DE
8621
8622 /* Access to the vec_init patterns. */
8623 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8624 integer_type_node, integer_type_node,
8625 integer_type_node, NULL_TREE);
8626 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8627 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8628
8629 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8630 short_integer_type_node,
8631 short_integer_type_node,
8632 short_integer_type_node,
8633 short_integer_type_node,
8634 short_integer_type_node,
8635 short_integer_type_node,
8636 short_integer_type_node, NULL_TREE);
8637 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8638 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8639
8640 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8641 char_type_node, char_type_node,
8642 char_type_node, char_type_node,
8643 char_type_node, char_type_node,
8644 char_type_node, char_type_node,
8645 char_type_node, char_type_node,
8646 char_type_node, char_type_node,
8647 char_type_node, char_type_node,
8648 char_type_node, NULL_TREE);
8649 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8650 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8651
8652 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8653 float_type_node, float_type_node,
8654 float_type_node, NULL_TREE);
8655 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8656 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8657
8658 /* Access to the vec_set patterns. */
8659 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8660 intSI_type_node,
8661 integer_type_node, NULL_TREE);
8662 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8663 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8664
8665 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8666 intHI_type_node,
8667 integer_type_node, NULL_TREE);
8668 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8669 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8670
8671 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
8672 intQI_type_node,
8673 integer_type_node, NULL_TREE);
8674 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8675 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8676
8677 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8678 float_type_node,
8679 integer_type_node, NULL_TREE);
8680 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8681 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8682
8683 /* Access to the vec_extract patterns. */
8684 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8685 integer_type_node, NULL_TREE);
8686 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8687 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8688
8689 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8690 integer_type_node, NULL_TREE);
8691 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8692 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8693
8694 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8695 integer_type_node, NULL_TREE);
8696 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8697 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8698
8699 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8700 integer_type_node, NULL_TREE);
8701 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8702 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
8703}
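/* An illustrative sketch of how the vec_init/vec_set/vec_ext builtins
   defined above are meant to be used from C, assuming -maltivec (the
   argument values are arbitrary):

     vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);
     v = __builtin_vec_set_v4si (v, 42, 0);
     int e = __builtin_vec_ext_v4si (v, 3);

   The second call replaces element 0 with 42 and the third extracts
   element 3; each is expected to expand through the corresponding
   vec_init, vec_set or vec_extract pattern.  */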
8704
8705static void
863d938c 8706rs6000_common_init_builtins (void)
a3170dc6
AH
8707{
8708 struct builtin_description *d;
8709 size_t i;
8710
8711 tree v4sf_ftype_v4sf_v4sf_v16qi
8712 = build_function_type_list (V4SF_type_node,
8713 V4SF_type_node, V4SF_type_node,
8714 V16QI_type_node, NULL_TREE);
8715 tree v4si_ftype_v4si_v4si_v16qi
8716 = build_function_type_list (V4SI_type_node,
8717 V4SI_type_node, V4SI_type_node,
8718 V16QI_type_node, NULL_TREE);
8719 tree v8hi_ftype_v8hi_v8hi_v16qi
8720 = build_function_type_list (V8HI_type_node,
8721 V8HI_type_node, V8HI_type_node,
8722 V16QI_type_node, NULL_TREE);
8723 tree v16qi_ftype_v16qi_v16qi_v16qi
8724 = build_function_type_list (V16QI_type_node,
8725 V16QI_type_node, V16QI_type_node,
8726 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
8727 tree v4si_ftype_int
8728 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8729 tree v8hi_ftype_int
8730 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8731 tree v16qi_ftype_int
8732 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
8733 tree v8hi_ftype_v16qi
8734 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8735 tree v4sf_ftype_v4sf
8736 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8737
8738 tree v2si_ftype_v2si_v2si
2abe3e28
AH
8739 = build_function_type_list (opaque_V2SI_type_node,
8740 opaque_V2SI_type_node,
8741 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8742
8743 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
8744 = build_function_type_list (opaque_V2SF_type_node,
8745 opaque_V2SF_type_node,
8746 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8747
8748 tree v2si_ftype_int_int
2abe3e28 8749 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8750 integer_type_node, integer_type_node,
8751 NULL_TREE);
8752
58646b77
PB
8753 tree opaque_ftype_opaque
8754 = build_function_type_list (opaque_V4SI_type_node,
8755 opaque_V4SI_type_node, NULL_TREE);
8756
a3170dc6 8757 tree v2si_ftype_v2si
2abe3e28
AH
8758 = build_function_type_list (opaque_V2SI_type_node,
8759 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8760
8761 tree v2sf_ftype_v2sf
2abe3e28
AH
8762 = build_function_type_list (opaque_V2SF_type_node,
8763 opaque_V2SF_type_node, NULL_TREE);
f676971a 8764
a3170dc6 8765 tree v2sf_ftype_v2si
2abe3e28
AH
8766 = build_function_type_list (opaque_V2SF_type_node,
8767 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8768
8769 tree v2si_ftype_v2sf
2abe3e28
AH
8770 = build_function_type_list (opaque_V2SI_type_node,
8771 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8772
8773 tree v2si_ftype_v2si_char
2abe3e28
AH
8774 = build_function_type_list (opaque_V2SI_type_node,
8775 opaque_V2SI_type_node,
8776 char_type_node, NULL_TREE);
a3170dc6
AH
8777
8778 tree v2si_ftype_int_char
2abe3e28 8779 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8780 integer_type_node, char_type_node, NULL_TREE);
8781
8782 tree v2si_ftype_char
2abe3e28
AH
8783 = build_function_type_list (opaque_V2SI_type_node,
8784 char_type_node, NULL_TREE);
a3170dc6
AH
8785
8786 tree int_ftype_int_int
8787 = build_function_type_list (integer_type_node,
8788 integer_type_node, integer_type_node,
8789 NULL_TREE);
95385cbb 8790
58646b77
PB
8791 tree opaque_ftype_opaque_opaque
8792 = build_function_type_list (opaque_V4SI_type_node,
8793 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 8794 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
8795 = build_function_type_list (V4SI_type_node,
8796 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 8797 tree v4sf_ftype_v4si_int
b4de2f7d 8798 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
8799 V4SI_type_node, integer_type_node, NULL_TREE);
8800 tree v4si_ftype_v4sf_int
b4de2f7d 8801 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8802 V4SF_type_node, integer_type_node, NULL_TREE);
8803 tree v4si_ftype_v4si_int
b4de2f7d 8804 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8805 V4SI_type_node, integer_type_node, NULL_TREE);
8806 tree v8hi_ftype_v8hi_int
b4de2f7d 8807 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
8808 V8HI_type_node, integer_type_node, NULL_TREE);
8809 tree v16qi_ftype_v16qi_int
b4de2f7d 8810 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
8811 V16QI_type_node, integer_type_node, NULL_TREE);
8812 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
8813 = build_function_type_list (V16QI_type_node,
8814 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
8815 integer_type_node, NULL_TREE);
8816 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
8817 = build_function_type_list (V8HI_type_node,
8818 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
8819 integer_type_node, NULL_TREE);
8820 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
8821 = build_function_type_list (V4SI_type_node,
8822 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
8823 integer_type_node, NULL_TREE);
8824 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
8825 = build_function_type_list (V4SF_type_node,
8826 V4SF_type_node, V4SF_type_node,
b9e4e5d1 8827 integer_type_node, NULL_TREE);
0ac081f6 8828 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
8829 = build_function_type_list (V4SF_type_node,
8830 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
8831 tree opaque_ftype_opaque_opaque_opaque
8832 = build_function_type_list (opaque_V4SI_type_node,
8833 opaque_V4SI_type_node, opaque_V4SI_type_node,
8834 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 8835 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
8836 = build_function_type_list (V4SF_type_node,
8837 V4SF_type_node, V4SF_type_node,
8838 V4SI_type_node, NULL_TREE);
2212663f 8839 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
8840 = build_function_type_list (V4SF_type_node,
8841 V4SF_type_node, V4SF_type_node,
8842 V4SF_type_node, NULL_TREE);
f676971a 8843 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
8844 = build_function_type_list (V4SI_type_node,
8845 V4SI_type_node, V4SI_type_node,
8846 V4SI_type_node, NULL_TREE);
0ac081f6 8847 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
8848 = build_function_type_list (V8HI_type_node,
8849 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 8850 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
8851 = build_function_type_list (V8HI_type_node,
8852 V8HI_type_node, V8HI_type_node,
8853 V8HI_type_node, NULL_TREE);
c4ad648e 8854 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
8855 = build_function_type_list (V4SI_type_node,
8856 V8HI_type_node, V8HI_type_node,
8857 V4SI_type_node, NULL_TREE);
c4ad648e 8858 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
8859 = build_function_type_list (V4SI_type_node,
8860 V16QI_type_node, V16QI_type_node,
8861 V4SI_type_node, NULL_TREE);
0ac081f6 8862 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
8863 = build_function_type_list (V16QI_type_node,
8864 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8865 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
8866 = build_function_type_list (V4SI_type_node,
8867 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 8868 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
8869 = build_function_type_list (V8HI_type_node,
8870 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8871 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
8872 = build_function_type_list (V4SI_type_node,
8873 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8874 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
8875 = build_function_type_list (V8HI_type_node,
8876 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 8877 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
8878 = build_function_type_list (V16QI_type_node,
8879 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8880 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
8881 = build_function_type_list (V4SI_type_node,
8882 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 8883 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
8884 = build_function_type_list (V4SI_type_node,
8885 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8886 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
8887 = build_function_type_list (V4SI_type_node,
8888 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8889 tree v4si_ftype_v8hi
8890 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8891 tree int_ftype_v4si_v4si
8892 = build_function_type_list (integer_type_node,
8893 V4SI_type_node, V4SI_type_node, NULL_TREE);
8894 tree int_ftype_v4sf_v4sf
8895 = build_function_type_list (integer_type_node,
8896 V4SF_type_node, V4SF_type_node, NULL_TREE);
8897 tree int_ftype_v16qi_v16qi
8898 = build_function_type_list (integer_type_node,
8899 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8900 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
8901 = build_function_type_list (integer_type_node,
8902 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8903
6f317ef3 8904 /* Add the simple ternary operators. */
2212663f 8905 d = (struct builtin_description *) bdesc_3arg;
ca7558fc 8906 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 8907 {
2212663f
DB
8908 enum machine_mode mode0, mode1, mode2, mode3;
8909 tree type;
58646b77
PB
8910 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8911 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 8912
58646b77
PB
8913 if (is_overloaded)
8914 {
8915 mode0 = VOIDmode;
8916 mode1 = VOIDmode;
8917 mode2 = VOIDmode;
8918 mode3 = VOIDmode;
8919 }
8920 else
8921 {
8922 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8923 continue;
f676971a 8924
58646b77
PB
8925 mode0 = insn_data[d->icode].operand[0].mode;
8926 mode1 = insn_data[d->icode].operand[1].mode;
8927 mode2 = insn_data[d->icode].operand[2].mode;
8928 mode3 = insn_data[d->icode].operand[3].mode;
8929 }
bb8df8a6 8930
2212663f
DB
8931 /* When all four are of the same mode. */
8932 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8933 {
8934 switch (mode0)
8935 {
58646b77
PB
8936 case VOIDmode:
8937 type = opaque_ftype_opaque_opaque_opaque;
8938 break;
617e0e1d
DB
8939 case V4SImode:
8940 type = v4si_ftype_v4si_v4si_v4si;
8941 break;
2212663f
DB
8942 case V4SFmode:
8943 type = v4sf_ftype_v4sf_v4sf_v4sf;
8944 break;
8945 case V8HImode:
8946 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 8947 break;
2212663f
DB
8948 case V16QImode:
8949 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8950 break;
2212663f 8951 default:
37409796 8952 gcc_unreachable ();
2212663f
DB
8953 }
8954 }
8955 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 8956 {
2212663f
DB
8957 switch (mode0)
8958 {
8959 case V4SImode:
8960 type = v4si_ftype_v4si_v4si_v16qi;
8961 break;
8962 case V4SFmode:
8963 type = v4sf_ftype_v4sf_v4sf_v16qi;
8964 break;
8965 case V8HImode:
8966 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 8967 break;
2212663f
DB
8968 case V16QImode:
8969 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8970 break;
2212663f 8971 default:
37409796 8972 gcc_unreachable ();
2212663f
DB
8973 }
8974 }
f676971a 8975 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 8976 && mode3 == V4SImode)
24408032 8977 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 8978 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 8979 && mode3 == V4SImode)
24408032 8980 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 8981 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 8982 && mode3 == V4SImode)
24408032
AH
8983 type = v4sf_ftype_v4sf_v4sf_v4si;
8984
8985 /* vchar, vchar, vchar, 4 bit literal. */
8986 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8987 && mode3 == QImode)
b9e4e5d1 8988 type = v16qi_ftype_v16qi_v16qi_int;
24408032
AH
8989
8990 /* vshort, vshort, vshort, 4 bit literal. */
8991 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8992 && mode3 == QImode)
b9e4e5d1 8993 type = v8hi_ftype_v8hi_v8hi_int;
24408032
AH
8994
8995 /* vint, vint, vint, 4 bit literal. */
8996 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8997 && mode3 == QImode)
b9e4e5d1 8998 type = v4si_ftype_v4si_v4si_int;
24408032
AH
8999
9000 /* vfloat, vfloat, vfloat, 4 bit literal. */
9001 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9002 && mode3 == QImode)
b9e4e5d1 9003 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9004
2212663f 9005 else
37409796 9006 gcc_unreachable ();
2212663f
DB
9007
9008 def_builtin (d->mask, d->name, type, d->code);
9009 }
9010
0ac081f6 9011 /* Add the simple binary operators. */
00b960c7 9012 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9013 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
9014 {
9015 enum machine_mode mode0, mode1, mode2;
9016 tree type;
58646b77
PB
9017 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9018 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9019
58646b77
PB
9020 if (is_overloaded)
9021 {
9022 mode0 = VOIDmode;
9023 mode1 = VOIDmode;
9024 mode2 = VOIDmode;
9025 }
9026 else
bb8df8a6 9027 {
58646b77
PB
9028 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9029 continue;
f676971a 9030
58646b77
PB
9031 mode0 = insn_data[d->icode].operand[0].mode;
9032 mode1 = insn_data[d->icode].operand[1].mode;
9033 mode2 = insn_data[d->icode].operand[2].mode;
9034 }
0ac081f6
AH
9035
9036 /* When all three operands are of the same mode. */
9037 if (mode0 == mode1 && mode1 == mode2)
9038 {
9039 switch (mode0)
9040 {
58646b77
PB
9041 case VOIDmode:
9042 type = opaque_ftype_opaque_opaque;
9043 break;
0ac081f6
AH
9044 case V4SFmode:
9045 type = v4sf_ftype_v4sf_v4sf;
9046 break;
9047 case V4SImode:
9048 type = v4si_ftype_v4si_v4si;
9049 break;
9050 case V16QImode:
9051 type = v16qi_ftype_v16qi_v16qi;
9052 break;
9053 case V8HImode:
9054 type = v8hi_ftype_v8hi_v8hi;
9055 break;
a3170dc6
AH
9056 case V2SImode:
9057 type = v2si_ftype_v2si_v2si;
9058 break;
9059 case V2SFmode:
9060 type = v2sf_ftype_v2sf_v2sf;
9061 break;
9062 case SImode:
9063 type = int_ftype_int_int;
9064 break;
0ac081f6 9065 default:
37409796 9066 gcc_unreachable ();
0ac081f6
AH
9067 }
9068 }
9069
9070 /* A few other combos we really don't want to do manually. */
9071
9072 /* vint, vfloat, vfloat. */
9073 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9074 type = v4si_ftype_v4sf_v4sf;
9075
9076 /* vshort, vchar, vchar. */
9077 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9078 type = v8hi_ftype_v16qi_v16qi;
9079
9080 /* vint, vshort, vshort. */
9081 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9082 type = v4si_ftype_v8hi_v8hi;
9083
9084 /* vshort, vint, vint. */
9085 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9086 type = v8hi_ftype_v4si_v4si;
9087
9088 /* vchar, vshort, vshort. */
9089 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9090 type = v16qi_ftype_v8hi_v8hi;
9091
9092 /* vint, vchar, vint. */
9093 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9094 type = v4si_ftype_v16qi_v4si;
9095
fa066a23
AH
9096 /* vint, vchar, vchar. */
9097 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9098 type = v4si_ftype_v16qi_v16qi;
9099
0ac081f6
AH
9100 /* vint, vshort, vint. */
9101 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9102 type = v4si_ftype_v8hi_v4si;
f676971a 9103
2212663f
DB
9104 /* vint, vint, 5 bit literal. */
9105 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9106 type = v4si_ftype_v4si_int;
f676971a 9107
2212663f
DB
9108 /* vshort, vshort, 5 bit literal. */
9109 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 9110 type = v8hi_ftype_v8hi_int;
f676971a 9111
2212663f
DB
9112 /* vchar, vchar, 5 bit literal. */
9113 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 9114 type = v16qi_ftype_v16qi_int;
0ac081f6 9115
617e0e1d
DB
9116 /* vfloat, vint, 5 bit literal. */
9117 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9118 type = v4sf_ftype_v4si_int;
f676971a 9119
617e0e1d
DB
9120 /* vint, vfloat, 5 bit literal. */
9121 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 9122 type = v4si_ftype_v4sf_int;
617e0e1d 9123
a3170dc6
AH
9124 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9125 type = v2si_ftype_int_int;
9126
9127 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9128 type = v2si_ftype_v2si_char;
9129
9130 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9131 type = v2si_ftype_int_char;
9132
37409796 9133 else
0ac081f6 9134 {
37409796
NS
9135 /* int, x, x. */
9136 gcc_assert (mode0 == SImode);
0ac081f6
AH
9137 switch (mode1)
9138 {
9139 case V4SImode:
9140 type = int_ftype_v4si_v4si;
9141 break;
9142 case V4SFmode:
9143 type = int_ftype_v4sf_v4sf;
9144 break;
9145 case V16QImode:
9146 type = int_ftype_v16qi_v16qi;
9147 break;
9148 case V8HImode:
9149 type = int_ftype_v8hi_v8hi;
9150 break;
9151 default:
37409796 9152 gcc_unreachable ();
0ac081f6
AH
9153 }
9154 }
9155
2212663f
DB
9156 def_builtin (d->mask, d->name, type, d->code);
9157 }
24408032 9158
2212663f
DB
9159 /* Add the simple unary operators. */
9160 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 9161 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
9162 {
9163 enum machine_mode mode0, mode1;
9164 tree type;
58646b77
PB
9165 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9166 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9167
9168 if (is_overloaded)
9169 {
9170 mode0 = VOIDmode;
9171 mode1 = VOIDmode;
9172 }
9173 else
9174 {
9175 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9176 continue;
bb8df8a6 9177
58646b77
PB
9178 mode0 = insn_data[d->icode].operand[0].mode;
9179 mode1 = insn_data[d->icode].operand[1].mode;
9180 }
2212663f
DB
9181
9182 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 9183 type = v4si_ftype_int;
2212663f 9184 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 9185 type = v8hi_ftype_int;
2212663f 9186 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 9187 type = v16qi_ftype_int;
58646b77
PB
9188 else if (mode0 == VOIDmode && mode1 == VOIDmode)
9189 type = opaque_ftype_opaque;
617e0e1d
DB
9190 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9191 type = v4sf_ftype_v4sf;
20e26713
AH
9192 else if (mode0 == V8HImode && mode1 == V16QImode)
9193 type = v8hi_ftype_v16qi;
9194 else if (mode0 == V4SImode && mode1 == V8HImode)
9195 type = v4si_ftype_v8hi;
a3170dc6
AH
9196 else if (mode0 == V2SImode && mode1 == V2SImode)
9197 type = v2si_ftype_v2si;
9198 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9199 type = v2sf_ftype_v2sf;
9200 else if (mode0 == V2SFmode && mode1 == V2SImode)
9201 type = v2sf_ftype_v2si;
9202 else if (mode0 == V2SImode && mode1 == V2SFmode)
9203 type = v2si_ftype_v2sf;
9204 else if (mode0 == V2SImode && mode1 == QImode)
9205 type = v2si_ftype_char;
2212663f 9206 else
37409796 9207 gcc_unreachable ();
2212663f 9208
0ac081f6
AH
9209 def_builtin (d->mask, d->name, type, d->code);
9210 }
9211}
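/* A sketch of how the mode-driven type selection above plays out for a
   hypothetical bdesc_2arg entry whose insn pattern has operand modes
   (V8HI, V16QI, V16QI): mode0 is V8HImode and mode1 == mode2 == V16QImode,
   so the loop picks v8hi_ftype_v16qi_v16qi and the builtin is registered
   as, roughly, "vector short f (vector char, vector char)".  No per-builtin
   table of prototypes is needed; the prototype is derived entirely from
   insn_data.  */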
9212
c15c90bb
ZW
9213static void
9214rs6000_init_libfuncs (void)
9215{
9216 if (!TARGET_HARD_FLOAT)
9217 return;
9218
602ea4d3
JJ
9219 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9220 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 9221 {
602ea4d3
JJ
9222 /* AIX library routines for float->int conversion. */
9223 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9224 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9225 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9226 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9227 }
c15c90bb 9228
602ea4d3 9229 if (!TARGET_IEEEQUAD)
98c41d98 9230 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
9231 if (!TARGET_XL_COMPAT)
9232 {
9233 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9234 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9235 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9236 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9237 }
9238 else
9239 {
9240 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9241 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9242 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9243 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9244 }
c9034561 9245 else
c15c90bb 9246 {
c9034561 9247 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
9248
9249 set_optab_libfunc (add_optab, TFmode, "_q_add");
9250 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9251 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9252 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9253 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9254 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9255 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9256
c9034561
ZW
9257 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9258 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9259 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9260 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9261 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9262 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9263
85363ca0
ZW
9264 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9265 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9266 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9267 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9268 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9269 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9270 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 9271 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
9272 }
9273}
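/* An illustration of what the set_optab_libfunc calls above accomplish,
   assuming a target where long double is a 128-bit TFmode type: with no
   insn pattern for TFmode addition,

     long double f (long double a, long double b) { return a + b; }

   is expanded as a libcall, "_q_add" on 32-bit SVR4, "__gcc_qadd" on
   AIX/Darwin/64-bit Linux, or "_xlqadd" when XL compatibility is
   requested.  */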
fba73eb1
DE
9274
9275\f
9276/* Expand a block clear operation, and return 1 if successful. Return 0
9277 if we should let the compiler generate normal code.
9278
9279 operands[0] is the destination
9280 operands[1] is the length
57e84f18 9281 operands[3] is the alignment */
fba73eb1
DE
9282
9283int
9284expand_block_clear (rtx operands[])
9285{
9286 rtx orig_dest = operands[0];
9287 rtx bytes_rtx = operands[1];
57e84f18 9288 rtx align_rtx = operands[3];
5514620a
GK
9289 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9290 HOST_WIDE_INT align;
9291 HOST_WIDE_INT bytes;
fba73eb1
DE
9292 int offset;
9293 int clear_bytes;
5514620a 9294 int clear_step;
fba73eb1
DE
9295
9296 /* If this is not a fixed size clear, just call memset.  */
9297 if (! constp)
9298 return 0;
9299
37409796
NS
9300 /* The alignment must be a compile-time constant.  */
9301 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9302 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9303
9304 /* Anything to clear? */
9305 bytes = INTVAL (bytes_rtx);
9306 if (bytes <= 0)
9307 return 1;
9308
5514620a
GK
9309 /* Use the builtin memset after a point, to avoid huge code bloat.
9310 When optimize_size, avoid any significant code bloat; calling
9311 memset is about 4 instructions, so allow for one instruction to
9312 load zero and three to do clearing. */
9313 if (TARGET_ALTIVEC && align >= 128)
9314 clear_step = 16;
9315 else if (TARGET_POWERPC64 && align >= 32)
9316 clear_step = 8;
9317 else
9318 clear_step = 4;
fba73eb1 9319
5514620a
GK
9320 if (optimize_size && bytes > 3 * clear_step)
9321 return 0;
9322 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9323 return 0;
9324
9325 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9326 {
fba73eb1
DE
9327 enum machine_mode mode = BLKmode;
9328 rtx dest;
f676971a 9329
5514620a
GK
9330 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9331 {
9332 clear_bytes = 16;
9333 mode = V4SImode;
9334 }
9335 else if (bytes >= 8 && TARGET_POWERPC64
9336 /* 64-bit loads and stores require word-aligned
9337 displacements. */
9338 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9339 {
9340 clear_bytes = 8;
9341 mode = DImode;
fba73eb1 9342 }
5514620a 9343 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
9344 { /* clear 4 bytes */
9345 clear_bytes = 4;
9346 mode = SImode;
fba73eb1 9347 }
ec53fc93 9348 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
9349 { /* clear 2 bytes */
9350 clear_bytes = 2;
9351 mode = HImode;
fba73eb1
DE
9352 }
9353 else /* clear 1 byte at a time */
9354 {
9355 clear_bytes = 1;
9356 mode = QImode;
fba73eb1 9357 }
f676971a 9358
fba73eb1 9359 dest = adjust_address (orig_dest, mode, offset);
f676971a 9360
5514620a 9361 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9362 }
9363
9364 return 1;
9365}
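/* Worked example of the sizing logic above, assuming TARGET_ALTIVEC and a
   destination aligned to 128 bits: a 40-byte clear uses clear_step = 16,
   which is within both limits (40 <= 3 * 16 for optimize_size and
   40 <= 8 * 16 otherwise), so the loop emits two V4SImode stores of zero
   at offsets 0 and 16 and then clears the remaining 8 bytes with one
   DImode store on a 64-bit target or two SImode stores on a 32-bit one.  */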
9366
35aff10b 9367\f
7e69e155
MM
9368/* Expand a block move operation, and return 1 if successful. Return 0
9369 if we should let the compiler generate normal code.
9370
9371 operands[0] is the destination
9372 operands[1] is the source
9373 operands[2] is the length
9374 operands[3] is the alignment */
9375
3933e0e1
MM
9376#define MAX_MOVE_REG 4
9377
7e69e155 9378int
a2369ed3 9379expand_block_move (rtx operands[])
7e69e155 9380{
b6c9286a
MM
9381 rtx orig_dest = operands[0];
9382 rtx orig_src = operands[1];
7e69e155 9383 rtx bytes_rtx = operands[2];
7e69e155 9384 rtx align_rtx = operands[3];
3933e0e1 9385 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9386 int align;
3933e0e1
MM
9387 int bytes;
9388 int offset;
7e69e155 9389 int move_bytes;
cabfd258
GK
9390 rtx stores[MAX_MOVE_REG];
9391 int num_reg = 0;
7e69e155 9392
3933e0e1 9393 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9394 if (! constp)
3933e0e1
MM
9395 return 0;
9396
37409796
NS
9397 /* The alignment must be a compile-time constant.  */
9398 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9399 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9400
7e69e155 9401 /* Anything to move? */
3933e0e1
MM
9402 bytes = INTVAL (bytes_rtx);
9403 if (bytes <= 0)
7e69e155
MM
9404 return 1;
9405
ea9982a8 9406 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9407 reg_parm_stack_space. */
ea9982a8 9408 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9409 return 0;
9410
cabfd258 9411 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9412 {
cabfd258 9413 union {
70128ad9 9414 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9415 rtx (*mov) (rtx, rtx);
cabfd258
GK
9416 } gen_func;
9417 enum machine_mode mode = BLKmode;
9418 rtx src, dest;
f676971a 9419
5514620a
GK
9420 /* Altivec first, since it will be faster than a string move
9421 when it applies, and usually not significantly larger. */
9422 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9423 {
9424 move_bytes = 16;
9425 mode = V4SImode;
9426 gen_func.mov = gen_movv4si;
9427 }
9428 else if (TARGET_STRING
cabfd258
GK
9429 && bytes > 24 /* move up to 32 bytes at a time */
9430 && ! fixed_regs[5]
9431 && ! fixed_regs[6]
9432 && ! fixed_regs[7]
9433 && ! fixed_regs[8]
9434 && ! fixed_regs[9]
9435 && ! fixed_regs[10]
9436 && ! fixed_regs[11]
9437 && ! fixed_regs[12])
7e69e155 9438 {
cabfd258 9439 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9440 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9441 }
9442 else if (TARGET_STRING
9443 && bytes > 16 /* move up to 24 bytes at a time */
9444 && ! fixed_regs[5]
9445 && ! fixed_regs[6]
9446 && ! fixed_regs[7]
9447 && ! fixed_regs[8]
9448 && ! fixed_regs[9]
9449 && ! fixed_regs[10])
9450 {
9451 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 9452 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
9453 }
9454 else if (TARGET_STRING
9455 && bytes > 8 /* move up to 16 bytes at a time */
9456 && ! fixed_regs[5]
9457 && ! fixed_regs[6]
9458 && ! fixed_regs[7]
9459 && ! fixed_regs[8])
9460 {
9461 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 9462 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
9463 }
9464 else if (bytes >= 8 && TARGET_POWERPC64
9465 /* 64-bit loads and stores require word-aligned
9466 displacements. */
fba73eb1 9467 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
9468 {
9469 move_bytes = 8;
9470 mode = DImode;
9471 gen_func.mov = gen_movdi;
9472 }
9473 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9474 { /* move up to 8 bytes at a time */
9475 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 9476 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 9477 }
cd7d9ca4 9478 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
9479 { /* move 4 bytes */
9480 move_bytes = 4;
9481 mode = SImode;
9482 gen_func.mov = gen_movsi;
9483 }
ec53fc93 9484 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
9485 { /* move 2 bytes */
9486 move_bytes = 2;
9487 mode = HImode;
9488 gen_func.mov = gen_movhi;
9489 }
9490 else if (TARGET_STRING && bytes > 1)
9491 { /* move up to 4 bytes at a time */
9492 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 9493 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
9494 }
9495 else /* move 1 byte at a time */
9496 {
9497 move_bytes = 1;
9498 mode = QImode;
9499 gen_func.mov = gen_movqi;
9500 }
f676971a 9501
cabfd258
GK
9502 src = adjust_address (orig_src, mode, offset);
9503 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
9504
9505 if (mode != BLKmode)
cabfd258
GK
9506 {
9507 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 9508
cabfd258
GK
9509 emit_insn ((*gen_func.mov) (tmp_reg, src));
9510 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 9511 }
3933e0e1 9512
cabfd258
GK
9513 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9514 {
9515 int i;
9516 for (i = 0; i < num_reg; i++)
9517 emit_insn (stores[i]);
9518 num_reg = 0;
9519 }
35aff10b 9520
cabfd258 9521 if (mode == BLKmode)
7e69e155 9522 {
70128ad9 9523 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
9524 patterns require zero offset. */
9525 if (!REG_P (XEXP (src, 0)))
b6c9286a 9526 {
cabfd258
GK
9527 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9528 src = replace_equiv_address (src, src_reg);
b6c9286a 9529 }
cabfd258 9530 set_mem_size (src, GEN_INT (move_bytes));
f676971a 9531
cabfd258 9532 if (!REG_P (XEXP (dest, 0)))
3933e0e1 9533 {
cabfd258
GK
9534 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9535 dest = replace_equiv_address (dest, dest_reg);
7e69e155 9536 }
cabfd258 9537 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 9538
70128ad9 9539 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
9540 GEN_INT (move_bytes & 31),
9541 align_rtx));
7e69e155 9542 }
7e69e155
MM
9543 }
9544
9545 return 1;
9546}
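/* Worked example of the selection above, assuming a 32-bit target without
   TARGET_STRING or AltiVec and operands aligned to 32 bits: a 20-byte copy
   becomes five SImode load/store pairs.  Each load goes through a fresh
   pseudo and the matching store is queued in stores[], which is flushed
   once MAX_MOVE_REG (4) stores have accumulated and again on the final
   piece, so a group of loads is emitted ahead of the corresponding group
   of stores.  */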
9547
d62294f5 9548\f
9caa3eb2
DE
9549/* Return a string to perform a load_multiple operation.
9550 operands[0] is the vector.
9551 operands[1] is the source address.
9552 operands[2] is the first destination register. */
9553
9554const char *
a2369ed3 9555rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
9556{
9557 /* We have to handle the case where the pseudo used to contain the address
9558 is assigned to one of the output registers. */
9559 int i, j;
9560 int words = XVECLEN (operands[0], 0);
9561 rtx xop[10];
9562
9563 if (XVECLEN (operands[0], 0) == 1)
9564 return "{l|lwz} %2,0(%1)";
9565
9566 for (i = 0; i < words; i++)
9567 if (refers_to_regno_p (REGNO (operands[2]) + i,
9568 REGNO (operands[2]) + i + 1, operands[1], 0))
9569 {
9570 if (i == words-1)
9571 {
9572 xop[0] = GEN_INT (4 * (words-1));
9573 xop[1] = operands[1];
9574 xop[2] = operands[2];
9575 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9576 return "";
9577 }
9578 else if (i == 0)
9579 {
9580 xop[0] = GEN_INT (4 * (words-1));
9581 xop[1] = operands[1];
9582 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9583 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9584 return "";
9585 }
9586 else
9587 {
9588 for (j = 0; j < words; j++)
9589 if (j != i)
9590 {
9591 xop[0] = GEN_INT (j * 4);
9592 xop[1] = operands[1];
9593 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9594 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9595 }
9596 xop[0] = GEN_INT (i * 4);
9597 xop[1] = operands[1];
9598 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9599 return "";
9600 }
9601 }
9602
9603 return "{lsi|lswi} %2,%1,%N0";
9604}
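/* Example of the overlap handling above, for a three-word load_multiple
   into r5..r7 whose address register happens to be r7 (the last
   destination): the i == words-1 branch emits, in PowerPC spelling,

     lswi r5,r7,8
     lwz  r7,8(r7)

   so the address register is clobbered only after every other destination
   has been loaded.  */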
9605
9878760c 9606\f
a4f6c312
SS
9607/* A validation routine: say whether CODE, a condition code, and MODE
9608 match. The other alternatives either don't make sense or should
9609 never be generated. */
39a10a29 9610
48d72335 9611void
a2369ed3 9612validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 9613{
37409796
NS
9614 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9615 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9616 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
9617
9618 /* These don't make sense. */
37409796
NS
9619 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9620 || mode != CCUNSmode);
39a10a29 9621
37409796
NS
9622 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9623 || mode == CCUNSmode);
39a10a29 9624
37409796
NS
9625 gcc_assert (mode == CCFPmode
9626 || (code != ORDERED && code != UNORDERED
9627 && code != UNEQ && code != LTGT
9628 && code != UNGT && code != UNLT
9629 && code != UNGE && code != UNLE));
f676971a
EC
9630
9631 /* These should never be generated except for
bc9ec0e0 9632 flag_finite_math_only. */
37409796
NS
9633 gcc_assert (mode != CCFPmode
9634 || flag_finite_math_only
9635 || (code != LE && code != GE
9636 && code != UNEQ && code != LTGT
9637 && code != UNGT && code != UNLT));
39a10a29
GK
9638
9639 /* These are invalid; the information is not there. */
37409796 9640 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
9641}
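/* For example, the assertions above accept (gtu (reg:CCUNS ...) ...) and
   (gt (reg:CC ...) ...) but reject a signed GT applied to a CCUNSmode
   register, an unsigned GTU applied to anything but CCUNSmode, and the
   unordered-style codes applied to anything but CCFPmode.  */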
9642
9878760c
RK
9643\f
9644/* Return 1 if ANDOP is a mask that has no bits on that are not in the
9645 mask required to convert the result of a rotate insn into a shift
b1765bde 9646 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
9647
9648int
a2369ed3 9649includes_lshift_p (rtx shiftop, rtx andop)
9878760c 9650{
e2c953b6
DE
9651 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9652
9653 shift_mask <<= INTVAL (shiftop);
9878760c 9654
b1765bde 9655 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
9656}
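/* Worked example, with SHIFTOP = 16: shift_mask covers bits 16..31 of the
   SImode value, so ANDOP = 0x00ff0000 returns 1 (rotating left by 16 and
   masking gives the same result as shifting left by 16 and masking), while
   ANDOP = 0x00018000 returns 0 because bit 15 of the mask could pick up a
   bit wrapped around by the rotate.  */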
9657
9658/* Similar, but for right shift. */
9659
9660int
a2369ed3 9661includes_rshift_p (rtx shiftop, rtx andop)
9878760c 9662{
a7653a2c 9663 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
9664
9665 shift_mask >>= INTVAL (shiftop);
9666
b1765bde 9667 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
9668}
9669
c5059423
AM
9670/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9671 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 9672 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
9673
9674int
a2369ed3 9675includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 9676{
c5059423
AM
9677 if (GET_CODE (andop) == CONST_INT)
9678 {
02071907 9679 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 9680
c5059423 9681 c = INTVAL (andop);
02071907 9682 if (c == 0 || c == ~0)
c5059423 9683 return 0;
e2c953b6 9684
02071907 9685 shift_mask = ~0;
c5059423
AM
9686 shift_mask <<= INTVAL (shiftop);
9687
b6d08ca1 9688 /* Find the least significant one bit. */
c5059423
AM
9689 lsb = c & -c;
9690
9691 /* It must coincide with the LSB of the shift mask. */
9692 if (-lsb != shift_mask)
9693 return 0;
e2c953b6 9694
c5059423
AM
9695 /* Invert to look for the next transition (if any). */
9696 c = ~c;
9697
9698 /* Remove the low group of ones (originally low group of zeros). */
9699 c &= -lsb;
9700
9701 /* Again find the lsb, and check we have all 1's above. */
9702 lsb = c & -c;
9703 return c == -lsb;
9704 }
9705 else if (GET_CODE (andop) == CONST_DOUBLE
9706 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9707 {
02071907
AM
9708 HOST_WIDE_INT low, high, lsb;
9709 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
9710
9711 low = CONST_DOUBLE_LOW (andop);
9712 if (HOST_BITS_PER_WIDE_INT < 64)
9713 high = CONST_DOUBLE_HIGH (andop);
9714
9715 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 9716 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
9717 return 0;
9718
9719 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9720 {
02071907 9721 shift_mask_high = ~0;
c5059423
AM
9722 if (INTVAL (shiftop) > 32)
9723 shift_mask_high <<= INTVAL (shiftop) - 32;
9724
9725 lsb = high & -high;
9726
9727 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9728 return 0;
9729
9730 high = ~high;
9731 high &= -lsb;
9732
9733 lsb = high & -high;
9734 return high == -lsb;
9735 }
9736
02071907 9737 shift_mask_low = ~0;
c5059423
AM
9738 shift_mask_low <<= INTVAL (shiftop);
9739
9740 lsb = low & -low;
9741
9742 if (-lsb != shift_mask_low)
9743 return 0;
9744
9745 if (HOST_BITS_PER_WIDE_INT < 64)
9746 high = ~high;
9747 low = ~low;
9748 low &= -lsb;
9749
9750 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9751 {
9752 lsb = high & -high;
9753 return high == -lsb;
9754 }
9755
9756 lsb = low & -low;
9757 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9758 }
9759 else
9760 return 0;
9761}
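/* Worked example of the lsb trick above, for the CONST_INT case on a
   64-bit HOST_WIDE_INT host, with SHIFTOP = 8 and ANDOP = 0xff00 (exactly
   eight low zeros, then eight ones): lsb = c & -c is 0x100 and -lsb equals
   shift_mask, so the low zeros match the shift amount; after c = ~c and
   c &= -lsb the value is 0xffffffffffff0000, which equals the negation of
   its own lsb (0x10000), so everything above the stripped ones is again
   all ones and the function returns 1.  A mask such as 0xf0f0 (with
   SHIFTOP = 4) fails that final test because it has a second group of
   ones.  */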
e2c953b6 9762
c5059423
AM
9763/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9764 to perform a left shift. It must have SHIFTOP or more least
c1207243 9765 significant 0's, with the remainder of the word 1's. */
e2c953b6 9766
c5059423 9767int
a2369ed3 9768includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 9769{
e2c953b6 9770 if (GET_CODE (andop) == CONST_INT)
c5059423 9771 {
02071907 9772 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 9773
02071907 9774 shift_mask = ~0;
c5059423
AM
9775 shift_mask <<= INTVAL (shiftop);
9776 c = INTVAL (andop);
9777
c1207243 9778 /* Find the least significant one bit. */
c5059423
AM
9779 lsb = c & -c;
9780
9781 /* It must be covered by the shift mask.
a4f6c312 9782 This test also rejects c == 0. */
c5059423
AM
9783 if ((lsb & shift_mask) == 0)
9784 return 0;
9785
9786 /* Check we have all 1's above the transition, and reject all 1's. */
9787 return c == -lsb && lsb != 1;
9788 }
9789 else if (GET_CODE (andop) == CONST_DOUBLE
9790 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9791 {
02071907 9792 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
9793
9794 low = CONST_DOUBLE_LOW (andop);
9795
9796 if (HOST_BITS_PER_WIDE_INT < 64)
9797 {
02071907 9798 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
9799
9800 high = CONST_DOUBLE_HIGH (andop);
9801
9802 if (low == 0)
9803 {
02071907 9804 shift_mask_high = ~0;
c5059423
AM
9805 if (INTVAL (shiftop) > 32)
9806 shift_mask_high <<= INTVAL (shiftop) - 32;
9807
9808 lsb = high & -high;
9809
9810 if ((lsb & shift_mask_high) == 0)
9811 return 0;
9812
9813 return high == -lsb;
9814 }
9815 if (high != ~0)
9816 return 0;
9817 }
9818
02071907 9819 shift_mask_low = ~0;
c5059423
AM
9820 shift_mask_low <<= INTVAL (shiftop);
9821
9822 lsb = low & -low;
9823
9824 if ((lsb & shift_mask_low) == 0)
9825 return 0;
9826
9827 return low == -lsb && lsb != 1;
9828 }
e2c953b6 9829 else
c5059423 9830 return 0;
9878760c 9831}
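/* For comparison with the rldic case above, with SHIFTOP = 8:
   ANDOP = ~0xfff (twelve low zeros, all ones above) passes, since its lsb
   (0x1000) lies inside shift_mask and c == -lsb, whereas ANDOP = 0xff00
   now fails because the bits above its group of ones are not all ones.  */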
35068b43 9832
11ac38b2
DE
9833/* Return 1 if the operands will generate valid arguments for an rlwimi
9834instruction doing an insert with right shift in 64-bit mode.  The mask may
9835not start on the first bit or stop on the last bit, because the wrap-around
9836effects of the instruction do not correspond to the semantics of the RTL insn. */
9837
9838int
9839insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9840{
429ec7dc
DE
9841 if (INTVAL (startop) > 32
9842 && INTVAL (startop) < 64
9843 && INTVAL (sizeop) > 1
9844 && INTVAL (sizeop) + INTVAL (startop) < 64
9845 && INTVAL (shiftop) > 0
9846 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
9847 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9848 return 1;
9849
9850 return 0;
9851}
9852
35068b43 9853/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 9854 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
9855
9856int
a2369ed3 9857registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
9858{
9859 /* We might have been passed a SUBREG. */
f676971a 9860 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 9861 return 0;
f676971a 9862
90f81f99
AP
9863 /* We might have been passed non floating point registers. */
9864 if (!FP_REGNO_P (REGNO (reg1))
9865 || !FP_REGNO_P (REGNO (reg2)))
9866 return 0;
35068b43
RK
9867
9868 return (REGNO (reg1) == REGNO (reg2) - 1);
9869}
9870
a4f6c312
SS
9871/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9872 addr1 and addr2 must be in consecutive memory locations
9873 (addr2 == addr1 + 8). */
35068b43
RK
9874
9875int
90f81f99 9876mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 9877{
90f81f99 9878 rtx addr1, addr2;
bb8df8a6
EC
9879 unsigned int reg1, reg2;
9880 int offset1, offset2;
35068b43 9881
90f81f99
AP
9882 /* The mems cannot be volatile. */
9883 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9884 return 0;
f676971a 9885
90f81f99
AP
9886 addr1 = XEXP (mem1, 0);
9887 addr2 = XEXP (mem2, 0);
9888
35068b43
RK
9889 /* Extract an offset (if used) from the first addr. */
9890 if (GET_CODE (addr1) == PLUS)
9891 {
9892 /* If not a REG, return zero. */
9893 if (GET_CODE (XEXP (addr1, 0)) != REG)
9894 return 0;
9895 else
9896 {
c4ad648e 9897 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
9898 /* The offset must be constant! */
9899 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
9900 return 0;
9901 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
9902 }
9903 }
9904 else if (GET_CODE (addr1) != REG)
9905 return 0;
9906 else
9907 {
9908 reg1 = REGNO (addr1);
9909 /* This was a simple (mem (reg)) expression. Offset is 0. */
9910 offset1 = 0;
9911 }
9912
bb8df8a6
EC
9913 /* And now for the second addr. */
9914 if (GET_CODE (addr2) == PLUS)
9915 {
9916 /* If not a REG, return zero. */
9917 if (GET_CODE (XEXP (addr2, 0)) != REG)
9918 return 0;
9919 else
9920 {
9921 reg2 = REGNO (XEXP (addr2, 0));
9922 /* The offset must be constant. */
9923 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9924 return 0;
9925 offset2 = INTVAL (XEXP (addr2, 1));
9926 }
9927 }
9928 else if (GET_CODE (addr2) != REG)
35068b43 9929 return 0;
bb8df8a6
EC
9930 else
9931 {
9932 reg2 = REGNO (addr2);
9933 /* This was a simple (mem (reg)) expression. Offset is 0. */
9934 offset2 = 0;
9935 }
35068b43 9936
bb8df8a6
EC
9937 /* Both of these must have the same base register. */
9938 if (reg1 != reg2)
35068b43
RK
9939 return 0;
9940
9941 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 9942 if (offset2 != offset1 + 8)
35068b43
RK
9943 return 0;
9944
9945 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9946 instructions. */
9947 return 1;
9948}
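/* Example of a pair of addresses this accepts: mem1 = (mem:DF (plus r3 16))
   and mem2 = (mem:DF (plus r3 24)) share the base register and differ by
   exactly 8, so, together with registers_ok_for_quad_peep on the two FP
   destinations, the peephole may combine the two loads into a single
   lfq.  */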
9878760c
RK
9949\f
9950/* Return the register class of a scratch register needed to copy IN into
9951 or out of a register in CLASS in MODE. If it can be done directly,
9952 NO_REGS is returned. */
9953
9954enum reg_class
3c4774e0
R
9955rs6000_secondary_reload_class (enum reg_class class,
9956 enum machine_mode mode ATTRIBUTE_UNUSED,
9957 rtx in)
9878760c 9958{
5accd822 9959 int regno;
9878760c 9960
ab82a49f
AP
9961 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9962#if TARGET_MACHO
c4ad648e 9963 && MACHOPIC_INDIRECT
ab82a49f 9964#endif
c4ad648e 9965 ))
46fad5b7
DJ
9966 {
9967 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
9968 other than BASE_REGS for TARGET_ELF. So indicate that a
9969 register from BASE_REGS is needed as an intermediate
9970 register.
f676971a 9971
46fad5b7
DJ
9972 On Darwin, pic addresses require a load from memory, which
9973 needs a base register. */
9974 if (class != BASE_REGS
c4ad648e
AM
9975 && (GET_CODE (in) == SYMBOL_REF
9976 || GET_CODE (in) == HIGH
9977 || GET_CODE (in) == LABEL_REF
9978 || GET_CODE (in) == CONST))
9979 return BASE_REGS;
46fad5b7 9980 }
e7b7998a 9981
5accd822
DE
9982 if (GET_CODE (in) == REG)
9983 {
9984 regno = REGNO (in);
9985 if (regno >= FIRST_PSEUDO_REGISTER)
9986 {
9987 regno = true_regnum (in);
9988 if (regno >= FIRST_PSEUDO_REGISTER)
9989 regno = -1;
9990 }
9991 }
9992 else if (GET_CODE (in) == SUBREG)
9993 {
9994 regno = true_regnum (in);
9995 if (regno >= FIRST_PSEUDO_REGISTER)
9996 regno = -1;
9997 }
9998 else
9999 regno = -1;
10000
9878760c
RK
10001 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10002 into anything. */
10003 if (class == GENERAL_REGS || class == BASE_REGS
10004 || (regno >= 0 && INT_REGNO_P (regno)))
10005 return NO_REGS;
10006
10007 /* Constants, memory, and FP registers can go into FP registers. */
10008 if ((regno == -1 || FP_REGNO_P (regno))
10009 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10010 return NO_REGS;
10011
0ac081f6
AH
10012 /* Memory, and AltiVec registers can go into AltiVec registers. */
10013 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10014 && class == ALTIVEC_REGS)
10015 return NO_REGS;
10016
9878760c
RK
10017 /* We can copy among the CR registers. */
10018 if ((class == CR_REGS || class == CR0_REGS)
10019 && regno >= 0 && CR_REGNO_P (regno))
10020 return NO_REGS;
10021
10022 /* Otherwise, we need GENERAL_REGS. */
10023 return GENERAL_REGS;
10024}
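/* Two examples of what this reports: under TARGET_ELF (or Darwin with
   MACHOPIC_INDIRECT), copying a SYMBOL_REF into any class other than
   BASE_REGS needs a BASE_REGS intermediate; loading a CR field directly
   from memory falls through to the final return and needs a GENERAL_REGS
   scratch.  A plain GPR-to-GPR or FPR-to-FPR copy returns NO_REGS.  */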
10025\f
10026/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 10027 know this is a valid comparison.
9878760c
RK
10028
10029 SCC_P is 1 if this is for an scc. That means that %D will have been
10030 used instead of %C, so the bits will be in different places.
10031
b4ac57ab 10032 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
10033
10034int
a2369ed3 10035ccr_bit (rtx op, int scc_p)
9878760c
RK
10036{
10037 enum rtx_code code = GET_CODE (op);
10038 enum machine_mode cc_mode;
10039 int cc_regnum;
10040 int base_bit;
9ebbca7d 10041 rtx reg;
9878760c 10042
ec8e098d 10043 if (!COMPARISON_P (op))
9878760c
RK
10044 return -1;
10045
9ebbca7d
GK
10046 reg = XEXP (op, 0);
10047
37409796 10048 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
10049
10050 cc_mode = GET_MODE (reg);
10051 cc_regnum = REGNO (reg);
10052 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 10053
39a10a29 10054 validate_condition_mode (code, cc_mode);
c5defebb 10055
b7053a3f
GK
10056 /* When generating a sCOND operation, only positive conditions are
10057 allowed. */
37409796
NS
10058 gcc_assert (!scc_p
10059 || code == EQ || code == GT || code == LT || code == UNORDERED
10060 || code == GTU || code == LTU);
f676971a 10061
9878760c
RK
10062 switch (code)
10063 {
10064 case NE:
10065 return scc_p ? base_bit + 3 : base_bit + 2;
10066 case EQ:
10067 return base_bit + 2;
1c882ea4 10068 case GT: case GTU: case UNLE:
9878760c 10069 return base_bit + 1;
1c882ea4 10070 case LT: case LTU: case UNGE:
9878760c 10071 return base_bit;
1c882ea4
GK
10072 case ORDERED: case UNORDERED:
10073 return base_bit + 3;
9878760c
RK
10074
10075 case GE: case GEU:
39a10a29 10076 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
10077 unordered position. So test that bit. For integer, this is ! LT
10078 unless this is an scc insn. */
39a10a29 10079 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
10080
10081 case LE: case LEU:
39a10a29 10082 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 10083
9878760c 10084 default:
37409796 10085 gcc_unreachable ();
9878760c
RK
10086 }
10087}
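/* Illustrative note (hypothetical operand, not part of the upstream source):
   for a GT comparison whose condition register is CR2 and SCC_P == 0,
   base_bit is 4 * (CR2 - CR0) = 8, and the GT case above returns
   base_bit + 1 = 9, i.e. the branch tests CR bit 9.  An NE comparison on
   the same register would return base_bit + 2 = 10, or base_bit + 3 = 11
   when SCC_P == 1.  */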
1ff7789b 10088\f
8d30c4ee 10089/* Return the GOT register. */
1ff7789b 10090
9390387d 10091rtx
a2369ed3 10092rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 10093{
a4f6c312
SS
10094 /* The second flow pass currently (June 1999) can't update
10095 regs_ever_live without disturbing other parts of the compiler, so
10096 update it here to make the prolog/epilogue code happy. */
1db02437
FS
10097 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
10098 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
1ff7789b 10099
8d30c4ee 10100 current_function_uses_pic_offset_table = 1;
3cb999d8 10101
1ff7789b
MM
10102 return pic_offset_table_rtx;
10103}
a7df97e6 10104\f
e2500fed
GK
10105/* Function to init struct machine_function.
10106 This will be called, via a pointer variable,
10107 from push_function_context. */
a7df97e6 10108
e2500fed 10109static struct machine_function *
863d938c 10110rs6000_init_machine_status (void)
a7df97e6 10111{
e2500fed 10112 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 10113}
9878760c 10114\f
0ba1b2ff
AM
10115/* These macros test for integers and extract the low-order bits. */
10116#define INT_P(X) \
10117((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
10118 && GET_MODE (X) == VOIDmode)
10119
10120#define INT_LOWPART(X) \
10121 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10122
10123int
a2369ed3 10124extract_MB (rtx op)
0ba1b2ff
AM
10125{
10126 int i;
10127 unsigned long val = INT_LOWPART (op);
10128
10129 /* If the high bit is zero, the value is the first 1 bit we find
10130 from the left. */
10131 if ((val & 0x80000000) == 0)
10132 {
37409796 10133 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10134
10135 i = 1;
10136 while (((val <<= 1) & 0x80000000) == 0)
10137 ++i;
10138 return i;
10139 }
10140
10141 /* If the high bit is set and the low bit is not, or the mask is all
10142 1's, the value is zero. */
10143 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10144 return 0;
10145
10146 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10147 from the right. */
10148 i = 31;
10149 while (((val >>= 1) & 1) != 0)
10150 --i;
10151
10152 return i;
10153}
10154
10155int
a2369ed3 10156extract_ME (rtx op)
0ba1b2ff
AM
10157{
10158 int i;
10159 unsigned long val = INT_LOWPART (op);
10160
10161 /* If the low bit is zero, the value is the first 1 bit we find from
10162 the right. */
10163 if ((val & 1) == 0)
10164 {
37409796 10165 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10166
10167 i = 30;
10168 while (((val >>= 1) & 1) == 0)
10169 --i;
10170
10171 return i;
10172 }
10173
10174 /* If the low bit is set and the high bit is not, or the mask is all
10175 1's, the value is 31. */
10176 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10177 return 31;
10178
10179 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10180 from the left. */
10181 i = 0;
10182 while (((val <<= 1) & 0x80000000) != 0)
10183 ++i;
10184
10185 return i;
10186}
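/* Illustrative note (hypothetical mask, not part of the upstream source):
   for the 32-bit mask 0x00ffff00 -- bits 8 through 23 of an rlwinm-style
   mask, counting bit 0 as the most significant bit -- the loops above give
   extract_MB () == 8 and extract_ME () == 23, the MB/ME fields that a
   "rlwinm rD,rS,0,8,23" would use.  */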
10187
c4501e62
JJ
10188/* Locate some local-dynamic symbol still in use by this function
10189 so that we can print its name in some tls_ld pattern. */
10190
10191static const char *
863d938c 10192rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
10193{
10194 rtx insn;
10195
10196 if (cfun->machine->some_ld_name)
10197 return cfun->machine->some_ld_name;
10198
10199 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10200 if (INSN_P (insn)
10201 && for_each_rtx (&PATTERN (insn),
10202 rs6000_get_some_local_dynamic_name_1, 0))
10203 return cfun->machine->some_ld_name;
10204
37409796 10205 gcc_unreachable ();
c4501e62
JJ
10206}
10207
10208/* Helper function for rs6000_get_some_local_dynamic_name. */
10209
10210static int
a2369ed3 10211rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
10212{
10213 rtx x = *px;
10214
10215 if (GET_CODE (x) == SYMBOL_REF)
10216 {
10217 const char *str = XSTR (x, 0);
10218 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10219 {
10220 cfun->machine->some_ld_name = str;
10221 return 1;
10222 }
10223 }
10224
10225 return 0;
10226}
10227
85b776df
AM
10228/* Write out a function code label. */
10229
10230void
10231rs6000_output_function_entry (FILE *file, const char *fname)
10232{
10233 if (fname[0] != '.')
10234 {
10235 switch (DEFAULT_ABI)
10236 {
10237 default:
37409796 10238 gcc_unreachable ();
85b776df
AM
10239
10240 case ABI_AIX:
10241 if (DOT_SYMBOLS)
10242 putc ('.', file);
10243 else
10244 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10245 break;
10246
10247 case ABI_V4:
10248 case ABI_DARWIN:
10249 break;
10250 }
10251 }
10252 if (TARGET_AIX)
10253 RS6000_OUTPUT_BASENAME (file, fname);
10254 else
10255 assemble_name (file, fname);
10256}
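/* Illustrative note (hypothetical name, not part of the upstream source):
   for FNAME == "foo", the AIX ABI with dot symbols emits ".foo" (the code
   entry point), the AIX ABI without dot symbols emits the internal "L."
   prefix before the name instead, and the V4 and Darwin ABIs emit the
   plain "foo".  */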
10257
9878760c
RK
10258/* Print an operand. Recognize special options, documented below. */
10259
38c1f2d7 10260#if TARGET_ELF
d9407988 10261#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10262#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10263#else
10264#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10265#define SMALL_DATA_REG 0
ba5e43aa
MM
10266#endif
10267
9878760c 10268void
a2369ed3 10269print_operand (FILE *file, rtx x, int code)
9878760c
RK
10270{
10271 int i;
a260abc9 10272 HOST_WIDE_INT val;
0ba1b2ff 10273 unsigned HOST_WIDE_INT uval;
9878760c
RK
10274
10275 switch (code)
10276 {
a8b3aeda 10277 case '.':
a85d226b
RK
10278 /* Write out an instruction after the call which may be replaced
10279 with glue code by the loader. This depends on the AIX version. */
10280 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10281 return;
10282
81eace42
GK
10283 /* %a is output_address. */
10284
9854d9ed
RK
10285 case 'A':
10286 /* If X is a constant integer whose low-order 5 bits are zero,
10287 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10288 in the AIX assembler where "sri" with a zero shift count
20e26713 10289 writes a trash instruction. */
9854d9ed 10290 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10291 putc ('l', file);
9854d9ed 10292 else
76229ac8 10293 putc ('r', file);
9854d9ed
RK
10294 return;
10295
10296 case 'b':
e2c953b6
DE
10297 /* If constant, low-order 16 bits of constant, unsigned.
10298 Otherwise, write normally. */
10299 if (INT_P (x))
10300 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10301 else
10302 print_operand (file, x, 0);
cad12a8d
RK
10303 return;
10304
a260abc9
DE
10305 case 'B':
10306 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10307 for 64-bit mask direction. */
9390387d 10308 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10309 return;
a260abc9 10310
81eace42
GK
10311 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10312 output_operand. */
10313
423c1189
AH
10314 case 'c':
10315 /* X is a CR register. Print the number of the GT bit of the CR. */
10316 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10317 output_operand_lossage ("invalid %%c value");
10318 else
10319 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10320 return;
10321
10322 case 'D':
cef6b86c 10323 /* Like 'J' but get to the GT bit only. */
37409796 10324 gcc_assert (GET_CODE (x) == REG);
423c1189 10325
cef6b86c
EB
10326 /* Bit 1 is GT bit. */
10327 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 10328
cef6b86c
EB
10329 /* Add one for shift count in rlinm for scc. */
10330 fprintf (file, "%d", i + 1);
423c1189
AH
10331 return;
10332
9854d9ed 10333 case 'E':
39a10a29 10334 /* X is a CR register. Print the number of the EQ bit of the CR. */
9854d9ed
RK
10335 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10336 output_operand_lossage ("invalid %%E value");
78fbdbf7 10337 else
39a10a29 10338 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10339 return;
9854d9ed
RK
10340
10341 case 'f':
10342 /* X is a CR register. Print the shift count needed to move it
10343 to the high-order four bits. */
10344 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10345 output_operand_lossage ("invalid %%f value");
10346 else
9ebbca7d 10347 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10348 return;
10349
10350 case 'F':
10351 /* Similar, but print the count for the rotate in the opposite
10352 direction. */
10353 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10354 output_operand_lossage ("invalid %%F value");
10355 else
9ebbca7d 10356 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10357 return;
10358
10359 case 'G':
10360 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10361 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10362 if (GET_CODE (x) != CONST_INT)
10363 output_operand_lossage ("invalid %%G value");
10364 else if (INTVAL (x) >= 0)
76229ac8 10365 putc ('z', file);
9854d9ed 10366 else
76229ac8 10367 putc ('m', file);
9854d9ed 10368 return;
e2c953b6 10369
9878760c 10370 case 'h':
a4f6c312
SS
10371 /* If constant, output low-order five bits. Otherwise, write
10372 normally. */
9878760c 10373 if (INT_P (x))
5f59ecb7 10374 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10375 else
10376 print_operand (file, x, 0);
10377 return;
10378
64305719 10379 case 'H':
a4f6c312
SS
10380 /* If constant, output low-order six bits. Otherwise, write
10381 normally. */
64305719 10382 if (INT_P (x))
5f59ecb7 10383 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10384 else
10385 print_operand (file, x, 0);
10386 return;
10387
9854d9ed
RK
10388 case 'I':
10389 /* Print `i' if this is a constant, else nothing. */
9878760c 10390 if (INT_P (x))
76229ac8 10391 putc ('i', file);
9878760c
RK
10392 return;
10393
9854d9ed
RK
10394 case 'j':
10395 /* Write the bit number in CCR for jump. */
10396 i = ccr_bit (x, 0);
10397 if (i == -1)
10398 output_operand_lossage ("invalid %%j code");
9878760c 10399 else
9854d9ed 10400 fprintf (file, "%d", i);
9878760c
RK
10401 return;
10402
9854d9ed
RK
10403 case 'J':
10404 /* Similar, but add one for shift count in rlinm for scc and pass
10405 scc flag to `ccr_bit'. */
10406 i = ccr_bit (x, 1);
10407 if (i == -1)
10408 output_operand_lossage ("invalid %%J code");
10409 else
a0466a68
RK
10410 /* If we want bit 31, write a shift count of zero, not 32. */
10411 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10412 return;
10413
9854d9ed
RK
10414 case 'k':
10415 /* X must be a constant. Write the 1's complement of the
10416 constant. */
9878760c 10417 if (! INT_P (x))
9854d9ed 10418 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10419 else
10420 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10421 return;
10422
81eace42 10423 case 'K':
9ebbca7d
GK
10424 /* X must be a symbolic constant on ELF. Write an
10425 expression suitable for an 'addi' that adds in the low 16
10426 bits of the MEM. */
10427 if (GET_CODE (x) != CONST)
10428 {
10429 print_operand_address (file, x);
10430 fputs ("@l", file);
10431 }
10432 else
10433 {
10434 if (GET_CODE (XEXP (x, 0)) != PLUS
10435 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10436 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10437 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10438 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10439 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10440 fputs ("@l", file);
ed8d2920
MM
10441 /* For GNU as, there must be a non-alphanumeric character
10442 between 'l' and the number. The '-' is added by
10443 print_operand() already. */
10444 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10445 fputs ("+", file);
9ebbca7d
GK
10446 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10447 }
81eace42
GK
10448 return;
10449
10450 /* %l is output_asm_label. */
9ebbca7d 10451
9854d9ed
RK
10452 case 'L':
10453 /* Write second word of DImode or DFmode reference. Works on register
10454 or non-indexed memory only. */
10455 if (GET_CODE (x) == REG)
fb5c67a7 10456 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
10457 else if (GET_CODE (x) == MEM)
10458 {
10459 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 10460 we have already done it, we can just use an offset of one word. */
9854d9ed
RK
10461 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10462 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
10463 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10464 UNITS_PER_WORD));
9854d9ed 10465 else
d7624dc0
RK
10466 output_address (XEXP (adjust_address_nv (x, SImode,
10467 UNITS_PER_WORD),
10468 0));
ed8908e7 10469
ba5e43aa 10470 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10471 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10472 reg_names[SMALL_DATA_REG]);
9854d9ed 10473 }
9878760c 10474 return;
f676971a 10475
9878760c
RK
10476 case 'm':
10477 /* MB value for a mask operand. */
b1765bde 10478 if (! mask_operand (x, SImode))
9878760c
RK
10479 output_operand_lossage ("invalid %%m value");
10480
0ba1b2ff 10481 fprintf (file, "%d", extract_MB (x));
9878760c
RK
10482 return;
10483
10484 case 'M':
10485 /* ME value for a mask operand. */
b1765bde 10486 if (! mask_operand (x, SImode))
a260abc9 10487 output_operand_lossage ("invalid %%M value");
9878760c 10488
0ba1b2ff 10489 fprintf (file, "%d", extract_ME (x));
9878760c
RK
10490 return;
10491
81eace42
GK
10492 /* %n outputs the negative of its operand. */
10493
9878760c
RK
10494 case 'N':
10495 /* Write the number of elements in the vector times 4. */
10496 if (GET_CODE (x) != PARALLEL)
10497 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
10498 else
10499 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
10500 return;
10501
10502 case 'O':
10503 /* Similar, but subtract 1 first. */
10504 if (GET_CODE (x) != PARALLEL)
1427100a 10505 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
10506 else
10507 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
10508 return;
10509
9854d9ed
RK
10510 case 'p':
10511 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10512 if (! INT_P (x)
2bfcf297 10513 || INT_LOWPART (x) < 0
9854d9ed
RK
10514 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10515 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
10516 else
10517 fprintf (file, "%d", i);
9854d9ed
RK
10518 return;
10519
9878760c
RK
10520 case 'P':
10521 /* The operand must be an indirect memory reference. The result
8bb418a3 10522 is the register name. */
9878760c
RK
10523 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10524 || REGNO (XEXP (x, 0)) >= 32)
10525 output_operand_lossage ("invalid %%P value");
e2c953b6 10526 else
fb5c67a7 10527 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
10528 return;
10529
dfbdccdb
GK
10530 case 'q':
10531 /* This outputs the logical code corresponding to a boolean
10532 expression. The expression may have one or both operands
39a10a29 10533 negated (if one, only the first one). For condition register
c4ad648e
AM
10534 logical operations, it will also treat the negated
10535 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 10536 {
63bc1d05 10537 const char *const *t = 0;
dfbdccdb
GK
10538 const char *s;
10539 enum rtx_code code = GET_CODE (x);
10540 static const char * const tbl[3][3] = {
10541 { "and", "andc", "nor" },
10542 { "or", "orc", "nand" },
10543 { "xor", "eqv", "xor" } };
10544
10545 if (code == AND)
10546 t = tbl[0];
10547 else if (code == IOR)
10548 t = tbl[1];
10549 else if (code == XOR)
10550 t = tbl[2];
10551 else
10552 output_operand_lossage ("invalid %%q value");
10553
10554 if (GET_CODE (XEXP (x, 0)) != NOT)
10555 s = t[0];
10556 else
10557 {
10558 if (GET_CODE (XEXP (x, 1)) == NOT)
10559 s = t[2];
10560 else
10561 s = t[1];
10562 }
f676971a 10563
dfbdccdb
GK
10564 fputs (s, file);
10565 }
10566 return;
10567
2c4a9cff
DE
10568 case 'Q':
10569 if (TARGET_MFCRF)
3b6ce0af 10570 fputc (',', file);
5efb1046 10571 /* FALLTHRU */
2c4a9cff
DE
10572 else
10573 return;
10574
9854d9ed
RK
10575 case 'R':
10576 /* X is a CR register. Print the mask for `mtcrf'. */
10577 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10578 output_operand_lossage ("invalid %%R value");
10579 else
9ebbca7d 10580 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 10581 return;
9854d9ed
RK
10582
10583 case 's':
10584 /* Low 5 bits of 32 - value */
10585 if (! INT_P (x))
10586 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
10587 else
10588 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 10589 return;
9854d9ed 10590
a260abc9 10591 case 'S':
0ba1b2ff 10592 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
10593 CONST_INT 32-bit mask is considered sign-extended so any
10594 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 10595 if (! mask64_operand (x, DImode))
a260abc9
DE
10596 output_operand_lossage ("invalid %%S value");
10597
0ba1b2ff 10598 uval = INT_LOWPART (x);
a260abc9 10599
0ba1b2ff 10600 if (uval & 1) /* Clear Left */
a260abc9 10601 {
f099d360
GK
10602#if HOST_BITS_PER_WIDE_INT > 64
10603 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10604#endif
0ba1b2ff 10605 i = 64;
a260abc9 10606 }
0ba1b2ff 10607 else /* Clear Right */
a260abc9 10608 {
0ba1b2ff 10609 uval = ~uval;
f099d360
GK
10610#if HOST_BITS_PER_WIDE_INT > 64
10611 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10612#endif
0ba1b2ff 10613 i = 63;
a260abc9 10614 }
0ba1b2ff
AM
10615 while (uval != 0)
10616 --i, uval >>= 1;
37409796 10617 gcc_assert (i >= 0);
0ba1b2ff
AM
10618 fprintf (file, "%d", i);
10619 return;
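/* Illustrative note (hypothetical mask, not part of the upstream source):
   for the 64-bit mask 0x00000000ffffffff the low bit is set, so the
   clear-left path above runs: i starts at 64 and the loop shifts the value
   right 32 times before it reaches zero, leaving i == 32 -- the MB that an
   rldicl-style mask of the low 32 bits would use.  */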
a260abc9 10620
a3170dc6
AH
10621 case 't':
10622 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 10623 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
10624
10625 /* Bit 3 is OV bit. */
10626 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10627
10628 /* If we want bit 31, write a shift count of zero, not 32. */
10629 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10630 return;
10631
cccf3bdc
DE
10632 case 'T':
10633 /* Print the symbolic name of a branch target register. */
10634 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10635 && REGNO (x) != COUNT_REGISTER_REGNUM))
10636 output_operand_lossage ("invalid %%T value");
e2c953b6 10637 else if (REGNO (x) == LINK_REGISTER_REGNUM)
cccf3bdc
DE
10638 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10639 else
10640 fputs ("ctr", file);
10641 return;
10642
9854d9ed 10643 case 'u':
802a0058 10644 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
10645 if (! INT_P (x))
10646 output_operand_lossage ("invalid %%u value");
e2c953b6 10647 else
f676971a 10648 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 10649 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
10650 return;
10651
802a0058
MM
10652 case 'v':
10653 /* High-order 16 bits of constant for use in signed operand. */
10654 if (! INT_P (x))
10655 output_operand_lossage ("invalid %%v value");
e2c953b6 10656 else
134c32f6
DE
10657 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10658 (INT_LOWPART (x) >> 16) & 0xffff);
10659 return;
802a0058 10660
9854d9ed
RK
10661 case 'U':
10662 /* Print `u' if this has an auto-increment or auto-decrement. */
10663 if (GET_CODE (x) == MEM
10664 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10665 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
76229ac8 10666 putc ('u', file);
9854d9ed 10667 return;
9878760c 10668
e0cd0770
JC
10669 case 'V':
10670 /* Print the trap code for this operand. */
10671 switch (GET_CODE (x))
10672 {
10673 case EQ:
10674 fputs ("eq", file); /* 4 */
10675 break;
10676 case NE:
10677 fputs ("ne", file); /* 24 */
10678 break;
10679 case LT:
10680 fputs ("lt", file); /* 16 */
10681 break;
10682 case LE:
10683 fputs ("le", file); /* 20 */
10684 break;
10685 case GT:
10686 fputs ("gt", file); /* 8 */
10687 break;
10688 case GE:
10689 fputs ("ge", file); /* 12 */
10690 break;
10691 case LTU:
10692 fputs ("llt", file); /* 2 */
10693 break;
10694 case LEU:
10695 fputs ("lle", file); /* 6 */
10696 break;
10697 case GTU:
10698 fputs ("lgt", file); /* 1 */
10699 break;
10700 case GEU:
10701 fputs ("lge", file); /* 5 */
10702 break;
10703 default:
37409796 10704 gcc_unreachable ();
e0cd0770
JC
10705 }
10706 break;
10707
9854d9ed
RK
10708 case 'w':
10709 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10710 normally. */
10711 if (INT_P (x))
f676971a 10712 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 10713 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
10714 else
10715 print_operand (file, x, 0);
9878760c
RK
10716 return;
10717
9854d9ed 10718 case 'W':
e2c953b6 10719 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
10720 val = (GET_CODE (x) == CONST_INT
10721 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10722
10723 if (val < 0)
10724 i = -1;
9854d9ed 10725 else
e2c953b6
DE
10726 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10727 if ((val <<= 1) < 0)
10728 break;
10729
10730#if HOST_BITS_PER_WIDE_INT == 32
10731 if (GET_CODE (x) == CONST_INT && i >= 0)
10732 i += 32; /* zero-extend high-part was all 0's */
10733 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10734 {
10735 val = CONST_DOUBLE_LOW (x);
10736
37409796
NS
10737 gcc_assert (val);
10738 if (val < 0)
e2c953b6
DE
10739 --i;
10740 else
10741 for ( ; i < 64; i++)
10742 if ((val <<= 1) < 0)
10743 break;
10744 }
10745#endif
10746
10747 fprintf (file, "%d", i + 1);
9854d9ed 10748 return;
9878760c 10749
9854d9ed
RK
10750 case 'X':
10751 if (GET_CODE (x) == MEM
4d588c14 10752 && legitimate_indexed_address_p (XEXP (x, 0), 0))
76229ac8 10753 putc ('x', file);
9854d9ed 10754 return;
9878760c 10755
9854d9ed
RK
10756 case 'Y':
10757 /* Like 'L', for third word of TImode */
10758 if (GET_CODE (x) == REG)
fb5c67a7 10759 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 10760 else if (GET_CODE (x) == MEM)
9878760c 10761 {
9854d9ed
RK
10762 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10763 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10764 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 10765 else
d7624dc0 10766 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 10767 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10768 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10769 reg_names[SMALL_DATA_REG]);
9878760c
RK
10770 }
10771 return;
f676971a 10772
9878760c 10773 case 'z':
b4ac57ab
RS
10774 /* X is a SYMBOL_REF. Write out the name preceded by a
10775 period and without any trailing data in brackets. Used for function
4d30c363
MM
10776 names. If we are configured for System V (or the embedded ABI) on
10777 the PowerPC, do not emit the period, since those systems do not use
10778 TOCs and the like. */
37409796 10779 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 10780
c4ad648e
AM
10781 /* Mark the decl as referenced so that cgraph will output the
10782 function. */
9bf6462a 10783 if (SYMBOL_REF_DECL (x))
c4ad648e 10784 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 10785
85b776df 10786 /* For macho, check to see if we need a stub. */
f9da97f0
AP
10787 if (TARGET_MACHO)
10788 {
10789 const char *name = XSTR (x, 0);
a031e781 10790#if TARGET_MACHO
3b48085e 10791 if (MACHOPIC_INDIRECT
11abc112
MM
10792 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10793 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
10794#endif
10795 assemble_name (file, name);
10796 }
85b776df 10797 else if (!DOT_SYMBOLS)
9739c90c 10798 assemble_name (file, XSTR (x, 0));
85b776df
AM
10799 else
10800 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
10801 return;
10802
9854d9ed
RK
10803 case 'Z':
10804 /* Like 'L', for last word of TImode. */
10805 if (GET_CODE (x) == REG)
fb5c67a7 10806 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
10807 else if (GET_CODE (x) == MEM)
10808 {
10809 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10810 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10811 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 10812 else
d7624dc0 10813 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 10814 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10815 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10816 reg_names[SMALL_DATA_REG]);
9854d9ed 10817 }
5c23c401 10818 return;
0ac081f6 10819
a3170dc6 10820 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
10821 case 'y':
10822 {
10823 rtx tmp;
10824
37409796 10825 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
10826
10827 tmp = XEXP (x, 0);
10828
90d3ff1c
AM
10829 /* Ugly hack because %y is overloaded. */
10830 if (TARGET_E500 && GET_MODE_SIZE (GET_MODE (x)) == 8)
a3170dc6
AH
10831 {
10832 /* Handle [reg]. */
10833 if (GET_CODE (tmp) == REG)
10834 {
10835 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10836 break;
10837 }
10838 /* Handle [reg+UIMM]. */
10839 else if (GET_CODE (tmp) == PLUS &&
10840 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10841 {
10842 int x;
10843
37409796 10844 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
10845
10846 x = INTVAL (XEXP (tmp, 1));
10847 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10848 break;
10849 }
10850
10851 /* Fall through. Must be [reg+reg]. */
10852 }
850e8d3d
DN
10853 if (TARGET_ALTIVEC
10854 && GET_CODE (tmp) == AND
10855 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10856 && INTVAL (XEXP (tmp, 1)) == -16)
10857 tmp = XEXP (tmp, 0);
0ac081f6 10858 if (GET_CODE (tmp) == REG)
c62f2db5 10859 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 10860 else
0ac081f6 10861 {
37409796 10862 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
10863 && REG_P (XEXP (tmp, 0))
10864 && REG_P (XEXP (tmp, 1)));
bb8df8a6 10865
0ac081f6
AH
10866 if (REGNO (XEXP (tmp, 0)) == 0)
10867 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10868 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10869 else
10870 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10871 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10872 }
0ac081f6
AH
10873 break;
10874 }
f676971a 10875
9878760c
RK
10876 case 0:
10877 if (GET_CODE (x) == REG)
10878 fprintf (file, "%s", reg_names[REGNO (x)]);
10879 else if (GET_CODE (x) == MEM)
10880 {
10881 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10882 know the width from the mode. */
10883 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
10884 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10885 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10886 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
10887 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10888 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10889 else
a54d04b7 10890 output_address (XEXP (x, 0));
9878760c
RK
10891 }
10892 else
a54d04b7 10893 output_addr_const (file, x);
a85d226b 10894 return;
9878760c 10895
c4501e62
JJ
10896 case '&':
10897 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10898 return;
10899
9878760c
RK
10900 default:
10901 output_operand_lossage ("invalid %%xn code");
10902 }
10903}
10904\f
10905/* Print the address of an operand. */
10906
10907void
a2369ed3 10908print_operand_address (FILE *file, rtx x)
9878760c
RK
10909{
10910 if (GET_CODE (x) == REG)
4697a36c 10911 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
10912 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10913 || GET_CODE (x) == LABEL_REF)
9878760c
RK
10914 {
10915 output_addr_const (file, x);
ba5e43aa 10916 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10917 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10918 reg_names[SMALL_DATA_REG]);
37409796
NS
10919 else
10920 gcc_assert (!TARGET_TOC);
9878760c
RK
10921 }
10922 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10923 {
9024f4b8 10924 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 10925 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
10926 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10927 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 10928 else
4697a36c
MM
10929 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10930 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
10931 }
10932 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
10933 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10934 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
10935#if TARGET_ELF
10936 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 10937 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
10938 {
10939 output_addr_const (file, XEXP (x, 1));
10940 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10941 }
c859cda6
DJ
10942#endif
10943#if TARGET_MACHO
10944 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 10945 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
10946 {
10947 fprintf (file, "lo16(");
10948 output_addr_const (file, XEXP (x, 1));
10949 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10950 }
3cb999d8 10951#endif
4d588c14 10952 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 10953 {
2bfcf297 10954 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 10955 {
2bfcf297
DB
10956 rtx contains_minus = XEXP (x, 1);
10957 rtx minus, symref;
10958 const char *name;
f676971a 10959
9ebbca7d 10960 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 10961 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
10962 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10963 contains_minus = XEXP (contains_minus, 0);
10964
2bfcf297
DB
10965 minus = XEXP (contains_minus, 0);
10966 symref = XEXP (minus, 0);
10967 XEXP (contains_minus, 0) = symref;
10968 if (TARGET_ELF)
10969 {
10970 char *newname;
10971
10972 name = XSTR (symref, 0);
10973 newname = alloca (strlen (name) + sizeof ("@toc"));
10974 strcpy (newname, name);
10975 strcat (newname, "@toc");
10976 XSTR (symref, 0) = newname;
10977 }
10978 output_addr_const (file, XEXP (x, 1));
10979 if (TARGET_ELF)
10980 XSTR (symref, 0) = name;
9ebbca7d
GK
10981 XEXP (contains_minus, 0) = minus;
10982 }
10983 else
10984 output_addr_const (file, XEXP (x, 1));
10985
10986 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10987 }
9878760c 10988 else
37409796 10989 gcc_unreachable ();
9878760c
RK
10990}
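/* Illustrative note (hypothetical operands, not part of the upstream source;
   the exact register spellings come from reg_names[]): a plain register
   address prints as "0(9)", a REG+CONST_INT address as "16(9)", a REG+REG
   address as "9,10", and on ELF a LO_SUM prints the constant followed by
   "@l(9)".  */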
10991\f
88cad84b 10992/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
10993 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10994 is defined. It also needs to handle DI-mode objects on 64-bit
10995 targets. */
10996
10997static bool
a2369ed3 10998rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 10999{
f4f4921e 11000#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11001 /* Special handling for SI values. */
84dcde01 11002 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11003 {
301d03af 11004 static int recurse = 0;
f676971a 11005
301d03af
RS
11006 /* For -mrelocatable, we mark all addresses that need to be fixed up
11007 in the .fixup section. */
11008 if (TARGET_RELOCATABLE
d6b5193b
RS
11009 && in_section != toc_section
11010 && in_section != text_section
4325ca90 11011 && !unlikely_text_section_p (in_section)
301d03af
RS
11012 && !recurse
11013 && GET_CODE (x) != CONST_INT
11014 && GET_CODE (x) != CONST_DOUBLE
11015 && CONSTANT_P (x))
11016 {
11017 char buf[256];
11018
11019 recurse = 1;
11020 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11021 fixuplabelno++;
11022 ASM_OUTPUT_LABEL (asm_out_file, buf);
11023 fprintf (asm_out_file, "\t.long\t(");
11024 output_addr_const (asm_out_file, x);
11025 fprintf (asm_out_file, ")@fixup\n");
11026 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11027 ASM_OUTPUT_ALIGN (asm_out_file, 2);
11028 fprintf (asm_out_file, "\t.long\t");
11029 assemble_name (asm_out_file, buf);
11030 fprintf (asm_out_file, "\n\t.previous\n");
11031 recurse = 0;
11032 return true;
11033 }
11034 /* Remove initial .'s to turn a -mcall-aixdesc function
11035 address into the address of the descriptor, not the function
11036 itself. */
11037 else if (GET_CODE (x) == SYMBOL_REF
11038 && XSTR (x, 0)[0] == '.'
11039 && DEFAULT_ABI == ABI_AIX)
11040 {
11041 const char *name = XSTR (x, 0);
11042 while (*name == '.')
11043 name++;
11044
11045 fprintf (asm_out_file, "\t.long\t%s\n", name);
11046 return true;
11047 }
11048 }
f4f4921e 11049#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
11050 return default_assemble_integer (x, size, aligned_p);
11051}
93638d7a
AM
11052
11053#ifdef HAVE_GAS_HIDDEN
11054/* Emit an assembler directive to set symbol visibility for DECL to
11055 VISIBILITY_TYPE. */
11056
5add3202 11057static void
a2369ed3 11058rs6000_assemble_visibility (tree decl, int vis)
93638d7a 11059{
93638d7a
AM
11060 /* Functions need to have their entry point symbol visibility set as
11061 well as their descriptor symbol visibility. */
85b776df
AM
11062 if (DEFAULT_ABI == ABI_AIX
11063 && DOT_SYMBOLS
11064 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 11065 {
25fdb4dc 11066 static const char * const visibility_types[] = {
c4ad648e 11067 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
11068 };
11069
11070 const char *name, *type;
93638d7a
AM
11071
11072 name = ((* targetm.strip_name_encoding)
11073 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 11074 type = visibility_types[vis];
93638d7a 11075
25fdb4dc
RH
11076 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11077 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 11078 }
25fdb4dc
RH
11079 else
11080 default_assemble_visibility (decl, vis);
93638d7a
AM
11081}
11082#endif
301d03af 11083\f
39a10a29 11084enum rtx_code
a2369ed3 11085rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
11086{
11087 /* Reversal of FP compares takes care -- an ordered compare
11088 becomes an unordered compare and vice versa. */
f676971a 11089 if (mode == CCFPmode
bc9ec0e0
GK
11090 && (!flag_finite_math_only
11091 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11092 || code == UNEQ || code == LTGT))
bab6226b 11093 return reverse_condition_maybe_unordered (code);
39a10a29 11094 else
bab6226b 11095 return reverse_condition (code);
39a10a29
GK
11096}
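/* Illustrative note (not part of the upstream source): under CCFPmode
   without -ffinite-math-only, reversing GE yields UNLT, so the unordered
   case stays on the correct side of the branch; under CCmode the plain
   reversal applies and GE simply becomes LT.  */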
11097
39a10a29
GK
11098/* Generate a compare for CODE. Return a brand-new rtx that
11099 represents the result of the compare. */
a4f6c312 11100
39a10a29 11101static rtx
a2369ed3 11102rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
11103{
11104 enum machine_mode comp_mode;
11105 rtx compare_result;
11106
11107 if (rs6000_compare_fp_p)
11108 comp_mode = CCFPmode;
11109 else if (code == GTU || code == LTU
c4ad648e 11110 || code == GEU || code == LEU)
39a10a29 11111 comp_mode = CCUNSmode;
60934f9c
NS
11112 else if ((code == EQ || code == NE)
11113 && GET_CODE (rs6000_compare_op0) == SUBREG
11114 && GET_CODE (rs6000_compare_op1) == SUBREG
11115 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
11116 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
11117 /* These are unsigned values, perhaps there will be a later
11118 ordering compare that can be shared with this one.
11119 Unfortunately we cannot detect the signedness of the operands
11120 for non-subregs. */
11121 comp_mode = CCUNSmode;
39a10a29
GK
11122 else
11123 comp_mode = CCmode;
11124
11125 /* First, the compare. */
11126 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 11127
cef6b86c 11128 /* E500 FP compare instructions on the GPRs. Yuck! */
993f19a8
AH
11129 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11130 && rs6000_compare_fp_p)
a3170dc6 11131 {
64022b5d 11132 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
11133 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
11134
11135 if (op_mode == VOIDmode)
11136 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 11137
cef6b86c
EB
11138 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
11139 This explains the following mess. */
423c1189 11140
a3170dc6
AH
11141 switch (code)
11142 {
423c1189 11143 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
11144 switch (op_mode)
11145 {
11146 case SFmode:
11147 cmp = flag_unsafe_math_optimizations
11148 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
11149 rs6000_compare_op1)
11150 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
11151 rs6000_compare_op1);
11152 break;
11153
11154 case DFmode:
11155 cmp = flag_unsafe_math_optimizations
11156 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
11157 rs6000_compare_op1)
11158 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
11159 rs6000_compare_op1);
11160 break;
11161
11162 default:
11163 gcc_unreachable ();
11164 }
a3170dc6 11165 break;
bb8df8a6 11166
423c1189 11167 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
11168 switch (op_mode)
11169 {
11170 case SFmode:
11171 cmp = flag_unsafe_math_optimizations
11172 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
11173 rs6000_compare_op1)
11174 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
11175 rs6000_compare_op1);
11176 break;
bb8df8a6 11177
37409796
NS
11178 case DFmode:
11179 cmp = flag_unsafe_math_optimizations
11180 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
11181 rs6000_compare_op1)
11182 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
11183 rs6000_compare_op1);
11184 break;
11185
11186 default:
11187 gcc_unreachable ();
11188 }
a3170dc6 11189 break;
bb8df8a6 11190
423c1189 11191 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
11192 switch (op_mode)
11193 {
11194 case SFmode:
11195 cmp = flag_unsafe_math_optimizations
11196 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11197 rs6000_compare_op1)
11198 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11199 rs6000_compare_op1);
11200 break;
bb8df8a6 11201
37409796
NS
11202 case DFmode:
11203 cmp = flag_unsafe_math_optimizations
11204 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11205 rs6000_compare_op1)
11206 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11207 rs6000_compare_op1);
11208 break;
11209
11210 default:
11211 gcc_unreachable ();
11212 }
a3170dc6 11213 break;
4d4cbc0e 11214 default:
37409796 11215 gcc_unreachable ();
a3170dc6
AH
11216 }
11217
11218 /* Synthesize LE and GE from LT/GT || EQ. */
11219 if (code == LE || code == GE || code == LEU || code == GEU)
11220 {
a3170dc6
AH
11221 emit_insn (cmp);
11222
11223 switch (code)
11224 {
11225 case LE: code = LT; break;
11226 case GE: code = GT; break;
11227 case LEU: code = LT; break;
11228 case GEU: code = GT; break;
37409796 11229 default: gcc_unreachable ();
a3170dc6
AH
11230 }
11231
a3170dc6
AH
11232 compare_result2 = gen_reg_rtx (CCFPmode);
11233
11234 /* Do the EQ. */
37409796
NS
11235 switch (op_mode)
11236 {
11237 case SFmode:
11238 cmp = flag_unsafe_math_optimizations
11239 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11240 rs6000_compare_op1)
11241 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11242 rs6000_compare_op1);
11243 break;
11244
11245 case DFmode:
11246 cmp = flag_unsafe_math_optimizations
11247 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11248 rs6000_compare_op1)
11249 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11250 rs6000_compare_op1);
11251 break;
11252
11253 default:
11254 gcc_unreachable ();
11255 }
a3170dc6
AH
11256 emit_insn (cmp);
11257
a3170dc6 11258 /* OR them together. */
64022b5d
AH
11259 or_result = gen_reg_rtx (CCFPmode);
11260 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11261 compare_result2);
a3170dc6
AH
11262 compare_result = or_result;
11263 code = EQ;
11264 }
11265 else
11266 {
a3170dc6 11267 if (code == NE || code == LTGT)
a3170dc6 11268 code = NE;
423c1189
AH
11269 else
11270 code = EQ;
a3170dc6
AH
11271 }
11272
11273 emit_insn (cmp);
11274 }
11275 else
de17c25f
DE
11276 {
11277 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11278 CLOBBERs to match cmptf_internal2 pattern. */
11279 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11280 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 11281 && !TARGET_IEEEQUAD
de17c25f
DE
11282 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11283 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11284 gen_rtvec (9,
11285 gen_rtx_SET (VOIDmode,
11286 compare_result,
11287 gen_rtx_COMPARE (comp_mode,
11288 rs6000_compare_op0,
11289 rs6000_compare_op1)),
11290 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11291 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11292 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11293 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11294 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11295 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11296 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11297 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11298 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11299 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11300 {
11301 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11302 comp_mode = CCEQmode;
11303 compare_result = gen_reg_rtx (CCEQmode);
11304 if (TARGET_64BIT)
11305 emit_insn (gen_stack_protect_testdi (compare_result,
11306 rs6000_compare_op0, op1));
11307 else
11308 emit_insn (gen_stack_protect_testsi (compare_result,
11309 rs6000_compare_op0, op1));
11310 }
de17c25f
DE
11311 else
11312 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11313 gen_rtx_COMPARE (comp_mode,
11314 rs6000_compare_op0,
11315 rs6000_compare_op1)));
11316 }
f676971a 11317
ca5adc63 11318 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11319 under flag_finite_math_only we don't bother. */
39a10a29 11320 if (rs6000_compare_fp_p
e7108df9
DE
11321 && !flag_finite_math_only
11322 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
39a10a29
GK
11323 && (code == LE || code == GE
11324 || code == UNEQ || code == LTGT
11325 || code == UNGT || code == UNLT))
11326 {
11327 enum rtx_code or1, or2;
11328 rtx or1_rtx, or2_rtx, compare2_rtx;
11329 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11330
39a10a29
GK
11331 switch (code)
11332 {
11333 case LE: or1 = LT; or2 = EQ; break;
11334 case GE: or1 = GT; or2 = EQ; break;
11335 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11336 case LTGT: or1 = LT; or2 = GT; break;
11337 case UNGT: or1 = UNORDERED; or2 = GT; break;
11338 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11339 default: gcc_unreachable ();
39a10a29
GK
11340 }
11341 validate_condition_mode (or1, comp_mode);
11342 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11343 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11344 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11345 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11346 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11347 const_true_rtx);
11348 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11349
11350 compare_result = or_result;
11351 code = EQ;
11352 }
11353
11354 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11355
1c563bed 11356 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11357}
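/* Illustrative note (not part of the upstream source): tracing the OR block
   above for a floating-point LE, or1/or2 become LT/EQ, the two CR bits are
   IORed into a fresh CCEQ register (typically a cror at the assembly level),
   and the caller is handed an EQ test of that CCEQ register.  */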
11358
11359
11360/* Emit the RTL for an sCOND pattern. */
11361
11362void
a2369ed3 11363rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
11364{
11365 rtx condition_rtx;
11366 enum machine_mode op_mode;
b7053a3f 11367 enum rtx_code cond_code;
39a10a29
GK
11368
11369 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
11370 cond_code = GET_CODE (condition_rtx);
11371
423c1189
AH
11372 if (TARGET_E500 && rs6000_compare_fp_p
11373 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11374 {
11375 rtx t;
11376
11377 PUT_MODE (condition_rtx, SImode);
11378 t = XEXP (condition_rtx, 0);
11379
37409796 11380 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
11381
11382 if (cond_code == NE)
64022b5d 11383 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 11384
64022b5d 11385 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
11386 return;
11387 }
11388
b7053a3f
GK
11389 if (cond_code == NE
11390 || cond_code == GE || cond_code == LE
11391 || cond_code == GEU || cond_code == LEU
11392 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11393 {
11394 rtx not_result = gen_reg_rtx (CCEQmode);
11395 rtx not_op, rev_cond_rtx;
11396 enum machine_mode cc_mode;
f676971a 11397
b7053a3f
GK
11398 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11399
1c563bed 11400 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 11401 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
11402 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11403 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11404 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11405 }
39a10a29
GK
11406
11407 op_mode = GET_MODE (rs6000_compare_op0);
11408 if (op_mode == VOIDmode)
11409 op_mode = GET_MODE (rs6000_compare_op1);
11410
11411 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11412 {
11413 PUT_MODE (condition_rtx, DImode);
11414 convert_move (result, condition_rtx, 0);
11415 }
11416 else
11417 {
11418 PUT_MODE (condition_rtx, SImode);
11419 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11420 }
11421}
11422
39a10a29
GK
11423/* Emit a branch of kind CODE to location LOC. */
11424
11425void
a2369ed3 11426rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
11427{
11428 rtx condition_rtx, loc_ref;
11429
11430 condition_rtx = rs6000_generate_compare (code);
11431 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11432 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11433 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11434 loc_ref, pc_rtx)));
11435}
11436
12a4e8c5
GK
11437/* Return the string to output a conditional branch to LABEL, which is
11438 the assembler template for the label operand, or NULL if the branch
f676971a 11439 is really a conditional return.
12a4e8c5
GK
11440
11441 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11442 condition code register and its mode specifies what kind of
11443 comparison we made.
11444
a0ab749a 11445 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
11446
11447 INSN is the insn. */
11448
11449char *
a2369ed3 11450output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
11451{
11452 static char string[64];
11453 enum rtx_code code = GET_CODE (op);
11454 rtx cc_reg = XEXP (op, 0);
11455 enum machine_mode mode = GET_MODE (cc_reg);
11456 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 11457 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
11458 int really_reversed = reversed ^ need_longbranch;
11459 char *s = string;
11460 const char *ccode;
11461 const char *pred;
11462 rtx note;
11463
39a10a29
GK
11464 validate_condition_mode (code, mode);
11465
11466 /* Work out which way this really branches.  We could always use
11467 reverse_condition_maybe_unordered here, but this way makes the
11468 resulting assembler clearer. */
12a4e8c5 11469 if (really_reversed)
de40e1df
DJ
11470 {
11471 /* Reversal of FP compares takes care -- an ordered compare
11472 becomes an unordered compare and vice versa. */
11473 if (mode == CCFPmode)
11474 code = reverse_condition_maybe_unordered (code);
11475 else
11476 code = reverse_condition (code);
11477 }
12a4e8c5 11478
993f19a8 11479 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
11480 {
11481 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11482 to the GT bit. */
37409796
NS
11483 switch (code)
11484 {
11485 case EQ:
11486 /* Opposite of GT. */
11487 code = GT;
11488 break;
11489
11490 case NE:
11491 code = UNLE;
11492 break;
11493
11494 default:
11495 gcc_unreachable ();
11496 }
a3170dc6
AH
11497 }
11498
39a10a29 11499 switch (code)
12a4e8c5
GK
11500 {
11501 /* Not all of these are actually distinct opcodes, but
11502 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
11503 case NE: case LTGT:
11504 ccode = "ne"; break;
11505 case EQ: case UNEQ:
11506 ccode = "eq"; break;
f676971a 11507 case GE: case GEU:
50a0b056 11508 ccode = "ge"; break;
f676971a 11509 case GT: case GTU: case UNGT:
50a0b056 11510 ccode = "gt"; break;
f676971a 11511 case LE: case LEU:
50a0b056 11512 ccode = "le"; break;
f676971a 11513 case LT: case LTU: case UNLT:
50a0b056 11514 ccode = "lt"; break;
12a4e8c5
GK
11515 case UNORDERED: ccode = "un"; break;
11516 case ORDERED: ccode = "nu"; break;
11517 case UNGE: ccode = "nl"; break;
11518 case UNLE: ccode = "ng"; break;
11519 default:
37409796 11520 gcc_unreachable ();
12a4e8c5 11521 }
f676971a
EC
11522
11523 /* Maybe we have a guess as to how likely the branch is.
94a54f47 11524 The old mnemonics don't have a way to specify this information. */
f4857b9b 11525 pred = "";
12a4e8c5
GK
11526 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11527 if (note != NULL_RTX)
11528 {
11529 /* PROB is the difference from 50%. */
11530 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
11531
11532 /* Only hint for highly probable/improbable branches on newer
11533 cpus as static prediction overrides processor dynamic
11534 prediction. For older cpus we may as well always hint, but
11535 assume not taken for branches that are very close to 50% as a
11536 mispredicted taken branch is more expensive than a
f676971a 11537 mispredicted not-taken branch. */
ec507f2d 11538 if (rs6000_always_hint
2c9e13f3
JH
11539 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
11540 && br_prob_note_reliable_p (note)))
f4857b9b
AM
11541 {
11542 if (abs (prob) > REG_BR_PROB_BASE / 20
11543 && ((prob > 0) ^ need_longbranch))
c4ad648e 11544 pred = "+";
f4857b9b
AM
11545 else
11546 pred = "-";
11547 }
12a4e8c5 11548 }
12a4e8c5
GK
11549
11550 if (label == NULL)
94a54f47 11551 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 11552 else
94a54f47 11553 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 11554
37c67319 11555 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 11556 Assume they'd only be the first character.... */
37c67319
GK
11557 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11558 *s++ = '%';
94a54f47 11559 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
11560
11561 if (label != NULL)
11562 {
11563 /* If the branch distance was too far, we may have to use an
11564 unconditional branch to go the distance. */
11565 if (need_longbranch)
44518ddd 11566 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
11567 else
11568 s += sprintf (s, ",%s", label);
11569 }
11570
11571 return string;
11572}
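/* Illustrative note (hypothetical operands, not part of the upstream source):
   for a short, likely-taken EQ branch on CR7 with the label template "%l3",
   the buffer ends up holding something of the form "{beq|beq+} cr7,%l3".
   The "{old|new}" braces are resolved by the mnemonic/dialect handling when
   the insn is finally output, and the register spelling again comes from
   reg_names[].  */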
50a0b056 11573
64022b5d 11574/* Return the string to flip the GT bit on a CR. */
423c1189 11575char *
64022b5d 11576output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
11577{
11578 static char string[64];
11579 int a, b;
11580
37409796
NS
11581 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11582 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 11583
64022b5d
AH
11584 /* GT bit. */
11585 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11586 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
11587
11588 sprintf (string, "crnot %d,%d", a, b);
11589 return string;
11590}
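/* Illustrative note (hypothetical registers, not part of the upstream
   source): with DST == CR2 and SRC == CR3 the computation above gives
   a == 9 and b == 13, so the returned template is "crnot 9,13".  */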
11591
21213b4c
DP
11592/* Return insn index for the vector compare instruction for given CODE,
11593 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11594 not available. */
11595
11596static int
94ff898d 11597get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
11598 enum machine_mode dest_mode,
11599 enum machine_mode op_mode)
11600{
11601 if (!TARGET_ALTIVEC)
11602 return INSN_NOT_AVAILABLE;
11603
11604 switch (code)
11605 {
11606 case EQ:
11607 if (dest_mode == V16QImode && op_mode == V16QImode)
11608 return UNSPEC_VCMPEQUB;
11609 if (dest_mode == V8HImode && op_mode == V8HImode)
11610 return UNSPEC_VCMPEQUH;
11611 if (dest_mode == V4SImode && op_mode == V4SImode)
11612 return UNSPEC_VCMPEQUW;
11613 if (dest_mode == V4SImode && op_mode == V4SFmode)
11614 return UNSPEC_VCMPEQFP;
11615 break;
11616 case GE:
11617 if (dest_mode == V4SImode && op_mode == V4SFmode)
11618 return UNSPEC_VCMPGEFP;
11619 case GT:
11620 if (dest_mode == V16QImode && op_mode == V16QImode)
11621 return UNSPEC_VCMPGTSB;
11622 if (dest_mode == V8HImode && op_mode == V8HImode)
11623 return UNSPEC_VCMPGTSH;
11624 if (dest_mode == V4SImode && op_mode == V4SImode)
11625 return UNSPEC_VCMPGTSW;
11626 if (dest_mode == V4SImode && op_mode == V4SFmode)
11627 return UNSPEC_VCMPGTFP;
11628 break;
11629 case GTU:
11630 if (dest_mode == V16QImode && op_mode == V16QImode)
11631 return UNSPEC_VCMPGTUB;
11632 if (dest_mode == V8HImode && op_mode == V8HImode)
11633 return UNSPEC_VCMPGTUH;
11634 if (dest_mode == V4SImode && op_mode == V4SImode)
11635 return UNSPEC_VCMPGTUW;
11636 break;
11637 default:
11638 break;
11639 }
11640 return INSN_NOT_AVAILABLE;
11641}
11642
11643/* Emit vector compare for operands OP0 and OP1 using code RCODE.
11644 DMODE is the expected destination mode.  This is a recursive function. */
11645
11646static rtx
11647rs6000_emit_vector_compare (enum rtx_code rcode,
11648 rtx op0, rtx op1,
11649 enum machine_mode dmode)
11650{
11651 int vec_cmp_insn;
11652 rtx mask;
11653 enum machine_mode dest_mode;
11654 enum machine_mode op_mode = GET_MODE (op1);
11655
37409796
NS
11656 gcc_assert (TARGET_ALTIVEC);
11657 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
11658
11659 /* Floating point vector compare instructions use V4SImode as the
11660 destination.  Move the destination to the appropriate mode later. */
11661 if (dmode == V4SFmode)
11662 dest_mode = V4SImode;
11663 else
11664 dest_mode = dmode;
11665
11666 mask = gen_reg_rtx (dest_mode);
11667 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11668
11669 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11670 {
11671 bool swap_operands = false;
11672 bool try_again = false;
11673 switch (rcode)
11674 {
11675 case LT:
11676 rcode = GT;
11677 swap_operands = true;
11678 try_again = true;
11679 break;
11680 case LTU:
11681 rcode = GTU;
11682 swap_operands = true;
11683 try_again = true;
11684 break;
11685 case NE:
11686 /* Treat A != B as ~(A==B). */
11687 {
11688 enum insn_code nor_code;
11689 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11690 dest_mode);
94ff898d 11691
21213b4c 11692 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
37409796 11693 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
11694 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11695
11696 if (dmode != dest_mode)
11697 {
11698 rtx temp = gen_reg_rtx (dest_mode);
11699 convert_move (temp, mask, 0);
11700 return temp;
11701 }
11702 return mask;
11703 }
11704 break;
11705 case GE:
11706 case GEU:
11707 case LE:
11708 case LEU:
11709 /* Try GT/GTU/LT/LTU OR EQ */
11710 {
11711 rtx c_rtx, eq_rtx;
11712 enum insn_code ior_code;
11713 enum rtx_code new_code;
11714
37409796
NS
11715 switch (rcode)
11716 {
11717 case GE:
11718 new_code = GT;
11719 break;
11720
11721 case GEU:
11722 new_code = GTU;
11723 break;
11724
11725 case LE:
11726 new_code = LT;
11727 break;
11728
11729 case LEU:
11730 new_code = LTU;
11731 break;
11732
11733 default:
11734 gcc_unreachable ();
11735 }
21213b4c
DP
11736
11737 c_rtx = rs6000_emit_vector_compare (new_code,
11738 op0, op1, dest_mode);
11739 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11740 dest_mode);
11741
11742 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
37409796 11743 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
11744 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11745 if (dmode != dest_mode)
11746 {
11747 rtx temp = gen_reg_rtx (dest_mode);
11748 convert_move (temp, mask, 0);
11749 return temp;
11750 }
11751 return mask;
11752 }
11753 break;
11754 default:
37409796 11755 gcc_unreachable ();
21213b4c
DP
11756 }
11757
11758 if (try_again)
11759 {
11760 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
11761 /* You only get two chances. */
11762 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
11763 }
11764
11765 if (swap_operands)
11766 {
11767 rtx tmp;
11768 tmp = op0;
11769 op0 = op1;
11770 op1 = tmp;
11771 }
11772 }
11773
915167f5
GK
11774 emit_insn (gen_rtx_SET (VOIDmode, mask,
11775 gen_rtx_UNSPEC (dest_mode,
11776 gen_rtvec (2, op0, op1),
11777 vec_cmp_insn)));
21213b4c
DP
11778 if (dmode != dest_mode)
11779 {
11780 rtx temp = gen_reg_rtx (dest_mode);
11781 convert_move (temp, mask, 0);
11782 return temp;
11783 }
11784 return mask;
11785}
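/* Illustrative note (not part of the upstream source): for a V4SImode LE
   comparison the code above first rewrites LE as LT OR EQ; the LT leg has
   no direct AltiVec pattern either and is retried as GT with the operands
   swapped, and the two masks are then combined through ior_optab -- so
   LE (a, b) ends up as vcmpgtsw (b, a) ORed with vcmpequw (a, b).  */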
11786
11787/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
11788 if no valid insn exists for the given mode. */
11789
11790static int
11791get_vsel_insn (enum machine_mode mode)
11792{
11793 switch (mode)
11794 {
11795 case V4SImode:
11796 return UNSPEC_VSEL4SI;
11797 break;
11798 case V4SFmode:
11799 return UNSPEC_VSEL4SF;
11800 break;
11801 case V8HImode:
11802 return UNSPEC_VSEL8HI;
11803 break;
11804 case V16QImode:
11805 return UNSPEC_VSEL16QI;
11806 break;
11807 default:
11808 return INSN_NOT_AVAILABLE;
11809 break;
11810 }
11811 return INSN_NOT_AVAILABLE;
11812}
11813
11814/* Emit vector select insn where DEST is destination using
11815 operands OP1, OP2 and MASK. */
11816
11817static void
11818rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11819{
11820 rtx t, temp;
11821 enum machine_mode dest_mode = GET_MODE (dest);
11822 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11823
11824 temp = gen_reg_rtx (dest_mode);
94ff898d 11825
bb8df8a6 11826   /* For each vector element, select op1 when the mask is 1, otherwise
19f1ebc7 11827 select op2. */
915167f5
GK
11828 t = gen_rtx_SET (VOIDmode, temp,
11829 gen_rtx_UNSPEC (dest_mode,
11830 gen_rtvec (3, op2, op1, mask),
11831 vsel_insn_index));
21213b4c
DP
11832 emit_insn (t);
11833 emit_move_insn (dest, temp);
11834 return;
11835}
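/* Editorial note: an illustrative sketch, not part of the original sources.
   It shows the bitwise effect of the vsel UNSPEC emitted above for one
   lane: bits where MASK is 1 come from OP1, bits where it is 0 come from
   OP2.  The helper name is hypothetical.  */
#if 0
/* Per-bit select: dest = (op2 & ~mask) | (op1 & mask).  */
static unsigned int
lane_vsel (unsigned int op1, unsigned int op2, unsigned int mask)
{
  return (op2 & ~mask) | (op1 & mask);
}
#endif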
11836
94ff898d 11837/* Emit vector conditional expression.
21213b4c
DP
11838 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11839 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11840
11841int
11842rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11843 rtx cond, rtx cc_op0, rtx cc_op1)
11844{
11845 enum machine_mode dest_mode = GET_MODE (dest);
11846 enum rtx_code rcode = GET_CODE (cond);
11847 rtx mask;
11848
11849 if (!TARGET_ALTIVEC)
11850 return 0;
11851
11852 /* Get the vector mask for the given relational operations. */
11853 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11854
11855 rs6000_emit_vector_select (dest, op1, op2, mask);
11856
11857 return 1;
11858}
11859
50a0b056
GK
11860/* Emit a conditional move: move TRUE_COND to DEST if OP of the
11861 operands of the last comparison is nonzero/true, FALSE_COND if it
11862 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 11863
50a0b056 11864int
a2369ed3 11865rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
11866{
11867 enum rtx_code code = GET_CODE (op);
11868 rtx op0 = rs6000_compare_op0;
11869 rtx op1 = rs6000_compare_op1;
11870 REAL_VALUE_TYPE c1;
3148ad6d
DJ
11871 enum machine_mode compare_mode = GET_MODE (op0);
11872 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 11873 rtx temp;
add2402e 11874 bool is_against_zero;
50a0b056 11875
a3c9585f 11876 /* These modes should always match. */
a3170dc6
AH
11877 if (GET_MODE (op1) != compare_mode
11878 /* In the isel case however, we can use a compare immediate, so
11879 op1 may be a small constant. */
11880 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 11881 return 0;
178c3eff 11882 if (GET_MODE (true_cond) != result_mode)
3148ad6d 11883 return 0;
178c3eff 11884 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
11885 return 0;
11886
50a0b056 11887 /* First, work out if the hardware can do this at all, or
a3c9585f 11888 if it's too slow.... */
50a0b056 11889 if (! rs6000_compare_fp_p)
a3170dc6
AH
11890 {
11891 if (TARGET_ISEL)
11892 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11893 return 0;
11894 }
fef98bf2 11895 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 11896 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 11897 return 0;
50a0b056 11898
add2402e 11899 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 11900
add2402e
GK
11901 /* A floating-point subtract might overflow, underflow, or produce
11902 an inexact result, thus changing the floating-point flags, so it
11903 can't be generated if we care about that. It's safe if one side
11904 of the construct is zero, since then no subtract will be
11905 generated. */
ebb109ad 11906 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
11907 && flag_trapping_math && ! is_against_zero)
11908 return 0;
11909
50a0b056
GK
 11910   /* Eliminate half of the comparisons by switching the operands; this
11911 makes the remaining code simpler. */
11912 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 11913 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
11914 {
11915 code = reverse_condition_maybe_unordered (code);
11916 temp = true_cond;
11917 true_cond = false_cond;
11918 false_cond = temp;
11919 }
11920
11921 /* UNEQ and LTGT take four instructions for a comparison with zero,
 11922     so it'll probably be faster to use a branch here too.  */
bc9ec0e0 11923 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 11924 return 0;
f676971a 11925
50a0b056
GK
11926 if (GET_CODE (op1) == CONST_DOUBLE)
11927 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 11928
b6d08ca1 11929 /* We're going to try to implement comparisons by performing
50a0b056
GK
11930 a subtract, then comparing against zero. Unfortunately,
11931 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 11932 know that the operand is finite and the comparison
50a0b056 11933     would treat EQ differently from UNORDERED, we can't do it.  */
bc9ec0e0 11934 if (HONOR_INFINITIES (compare_mode)
50a0b056 11935 && code != GT && code != UNGE
045572c7 11936 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
11937 /* Constructs of the form (a OP b ? a : b) are safe. */
11938 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 11939 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
11940 && ! rtx_equal_p (op1, true_cond))))
11941 return 0;
add2402e 11942
50a0b056
GK
11943 /* At this point we know we can use fsel. */
11944
11945 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
11946 if (! is_against_zero)
11947 {
11948 temp = gen_reg_rtx (compare_mode);
11949 emit_insn (gen_rtx_SET (VOIDmode, temp,
11950 gen_rtx_MINUS (compare_mode, op0, op1)));
11951 op0 = temp;
11952 op1 = CONST0_RTX (compare_mode);
11953 }
50a0b056
GK
11954
11955 /* If we don't care about NaNs we can reduce some of the comparisons
11956 down to faster ones. */
bc9ec0e0 11957 if (! HONOR_NANS (compare_mode))
50a0b056
GK
11958 switch (code)
11959 {
11960 case GT:
11961 code = LE;
11962 temp = true_cond;
11963 true_cond = false_cond;
11964 false_cond = temp;
11965 break;
11966 case UNGE:
11967 code = GE;
11968 break;
11969 case UNEQ:
11970 code = EQ;
11971 break;
11972 default:
11973 break;
11974 }
11975
11976 /* Now, reduce everything down to a GE. */
11977 switch (code)
11978 {
11979 case GE:
11980 break;
11981
11982 case LE:
3148ad6d
DJ
11983 temp = gen_reg_rtx (compare_mode);
11984 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11985 op0 = temp;
11986 break;
11987
11988 case ORDERED:
3148ad6d
DJ
11989 temp = gen_reg_rtx (compare_mode);
11990 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
11991 op0 = temp;
11992 break;
11993
11994 case EQ:
3148ad6d 11995 temp = gen_reg_rtx (compare_mode);
f676971a 11996 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
11997 gen_rtx_NEG (compare_mode,
11998 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
11999 op0 = temp;
12000 break;
12001
12002 case UNGE:
bc9ec0e0 12003 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 12004 temp = gen_reg_rtx (result_mode);
50a0b056 12005 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 12006 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12007 gen_rtx_GE (VOIDmode,
12008 op0, op1),
12009 true_cond, false_cond)));
bc9ec0e0
GK
12010 false_cond = true_cond;
12011 true_cond = temp;
50a0b056 12012
3148ad6d
DJ
12013 temp = gen_reg_rtx (compare_mode);
12014 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12015 op0 = temp;
12016 break;
12017
12018 case GT:
bc9ec0e0 12019 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 12020 temp = gen_reg_rtx (result_mode);
50a0b056 12021 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 12022 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12023 gen_rtx_GE (VOIDmode,
12024 op0, op1),
12025 true_cond, false_cond)));
bc9ec0e0
GK
12026 true_cond = false_cond;
12027 false_cond = temp;
50a0b056 12028
3148ad6d
DJ
12029 temp = gen_reg_rtx (compare_mode);
12030 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12031 op0 = temp;
12032 break;
12033
12034 default:
37409796 12035 gcc_unreachable ();
50a0b056
GK
12036 }
12037
12038 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 12039 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12040 gen_rtx_GE (VOIDmode,
12041 op0, op1),
12042 true_cond, false_cond)));
12043 return 1;
12044}
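/* Editorial note: a hypothetical scalar sketch, not part of the original
   sources.  The fsel primitive that the sequence above is built around
   computes dest = (a >= 0.0) ? b : c, so once everything has been reduced
   to a GE-against-zero test the whole conditional move is one select;
   e.g. the EQ case above becomes -fabs (x) >= 0.0.  This assumes the
   NaN/Inf cases the code bails out on never occur; the helper names are
   made up.  */
#if 0
#include <math.h>

/* Behavioural model of fsel.  */
static double
fsel_model (double a, double b, double c)
{
  return (a >= 0.0) ? b : c;
}

/* EQ reduced to GE as above: x == 0.0 iff -fabs (x) >= 0.0 (no NaNs).  */
static double
cmove_eq_zero (double x, double t, double f)
{
  return fsel_model (-fabs (x), t, f);
}
#endif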
12045
a3170dc6
AH
12046/* Same as above, but for ints (isel). */
12047
12048static int
a2369ed3 12049rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
12050{
12051 rtx condition_rtx, cr;
12052
12053 /* All isel implementations thus far are 32-bits. */
12054 if (GET_MODE (rs6000_compare_op0) != SImode)
12055 return 0;
12056
12057 /* We still have to do the compare, because isel doesn't do a
12058 compare, it just looks at the CRx bits set by a previous compare
12059 instruction. */
12060 condition_rtx = rs6000_generate_compare (GET_CODE (op));
12061 cr = XEXP (condition_rtx, 0);
12062
12063 if (GET_MODE (cr) == CCmode)
12064 emit_insn (gen_isel_signed (dest, condition_rtx,
12065 true_cond, false_cond, cr));
12066 else
12067 emit_insn (gen_isel_unsigned (dest, condition_rtx,
12068 true_cond, false_cond, cr));
12069
12070 return 1;
12071}
12072
12073const char *
a2369ed3 12074output_isel (rtx *operands)
a3170dc6
AH
12075{
12076 enum rtx_code code;
12077
12078 code = GET_CODE (operands[1]);
12079 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12080 {
12081 PUT_CODE (operands[1], reverse_condition (code));
12082 return "isel %0,%3,%2,%j1";
12083 }
12084 else
12085 return "isel %0,%2,%3,%j1";
12086}
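/* Editorial note: an illustrative sketch, not part of the original sources.
   isel selects between two register operands based on a single CR bit,
   roughly dest = crbit ? rA : rB; conditions with no dedicated CR bit
   (GE, GEU, LE, LEU, NE) are handled above by reversing the condition and
   swapping the two data operands.  The helper name is hypothetical.  */
#if 0
/* Behavioural model of the isel instruction emitted above.  */
static long
isel_model (int cr_bit_set, long ra, long rb)
{
  return cr_bit_set ? ra : rb;
}
#endif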
12087
50a0b056 12088void
a2369ed3 12089rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
12090{
12091 enum machine_mode mode = GET_MODE (op0);
5dc8d536 12092 enum rtx_code c;
50a0b056 12093 rtx target;
5dc8d536
AH
12094
12095 if (code == SMAX || code == SMIN)
12096 c = GE;
12097 else
12098 c = GEU;
12099
50a0b056 12100 if (code == SMAX || code == UMAX)
f676971a 12101 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
12102 op0, op1, mode, 0);
12103 else
f676971a 12104 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 12105 op1, op0, mode, 0);
37409796 12106 gcc_assert (target);
50a0b056
GK
12107 if (target != dest)
12108 emit_move_insn (dest, target);
12109}
46c07df8 12110
915167f5
GK
12111/* Emit instructions to perform a load-reserved/store-conditional operation.
12112 The operation performed is an atomic
12113 (set M (CODE:MODE M OP))
12114 If not NULL, BEFORE is atomically set to M before the operation, and
12115 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 12116 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
12117 Either OP or M may be wrapped in a NOT operation. */
12118
12119void
12120rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
12121 rtx m, rtx op, rtx before_param, rtx after_param,
12122 bool sync_p)
12123{
12124 enum machine_mode used_mode;
12125 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
12126 rtx used_m;
12127 rtvec vec;
12128 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12129 rtx shift = NULL_RTX;
bb8df8a6 12130
915167f5
GK
12131 if (sync_p)
12132 emit_insn (gen_memory_barrier ());
bb8df8a6 12133
915167f5
GK
12134 if (GET_CODE (m) == NOT)
12135 used_m = XEXP (m, 0);
12136 else
12137 used_m = m;
12138
12139 /* If this is smaller than SImode, we'll have to use SImode with
12140 adjustments. */
12141 if (mode == QImode || mode == HImode)
12142 {
12143 rtx newop, oldop;
12144
12145 if (MEM_ALIGN (used_m) >= 32)
12146 {
12147 int ishift = 0;
12148 if (BYTES_BIG_ENDIAN)
12149 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 12150
915167f5
GK
12151 shift = GEN_INT (ishift);
12152 }
12153 else
12154 {
12155 rtx addrSI, aligned_addr;
a9c9d3fa 12156 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 12157
915167f5
GK
12158 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
12159 XEXP (used_m, 0)));
12160 shift = gen_reg_rtx (SImode);
12161
12162 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
12163 GEN_INT (shift_mask)));
12164 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
12165
12166 aligned_addr = expand_binop (Pmode, and_optab,
12167 XEXP (used_m, 0),
12168 GEN_INT (-4), NULL_RTX,
12169 1, OPTAB_LIB_WIDEN);
12170 used_m = change_address (used_m, SImode, aligned_addr);
12171 set_mem_align (used_m, 32);
12172 /* It's safe to keep the old alias set of USED_M, because
12173 the operation is atomic and only affects the original
12174 USED_M. */
12175 if (GET_CODE (m) == NOT)
12176 m = gen_rtx_NOT (SImode, used_m);
12177 else
12178 m = used_m;
12179 }
12180
12181 if (GET_CODE (op) == NOT)
12182 {
12183 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
12184 oldop = gen_rtx_NOT (SImode, oldop);
12185 }
12186 else
12187 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 12188
915167f5
GK
12189 switch (code)
12190 {
12191 case IOR:
12192 case XOR:
12193 newop = expand_binop (SImode, and_optab,
12194 oldop, GEN_INT (imask), NULL_RTX,
12195 1, OPTAB_LIB_WIDEN);
12196 emit_insn (gen_ashlsi3 (newop, newop, shift));
12197 break;
12198
12199 case AND:
12200 newop = expand_binop (SImode, ior_optab,
12201 oldop, GEN_INT (~imask), NULL_RTX,
12202 1, OPTAB_LIB_WIDEN);
a9c9d3fa 12203 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
12204 break;
12205
12206 case PLUS:
9f0076e5 12207 case MINUS:
915167f5
GK
12208 {
12209 rtx mask;
bb8df8a6 12210
915167f5
GK
12211 newop = expand_binop (SImode, and_optab,
12212 oldop, GEN_INT (imask), NULL_RTX,
12213 1, OPTAB_LIB_WIDEN);
12214 emit_insn (gen_ashlsi3 (newop, newop, shift));
12215
12216 mask = gen_reg_rtx (SImode);
12217 emit_move_insn (mask, GEN_INT (imask));
12218 emit_insn (gen_ashlsi3 (mask, mask, shift));
12219
9f0076e5
DE
12220 if (code == PLUS)
12221 newop = gen_rtx_PLUS (SImode, m, newop);
12222 else
12223 newop = gen_rtx_MINUS (SImode, m, newop);
12224 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
12225 newop = gen_rtx_IOR (SImode, newop,
12226 gen_rtx_AND (SImode,
12227 gen_rtx_NOT (SImode, mask),
12228 m));
12229 break;
12230 }
12231
12232 default:
12233 gcc_unreachable ();
12234 }
12235
a9c9d3fa
GK
12236 if (GET_CODE (m) == NOT)
12237 {
12238 rtx mask, xorm;
12239
12240 mask = gen_reg_rtx (SImode);
12241 emit_move_insn (mask, GEN_INT (imask));
12242 emit_insn (gen_ashlsi3 (mask, mask, shift));
12243
12244 xorm = gen_rtx_XOR (SImode, used_m, mask);
12245 /* Depending on the value of 'op', the XOR or the operation might
12246 be able to be simplified away. */
12247 newop = simplify_gen_binary (code, SImode, xorm, newop);
12248 }
915167f5
GK
12249 op = newop;
12250 used_mode = SImode;
12251 before = gen_reg_rtx (used_mode);
12252 after = gen_reg_rtx (used_mode);
12253 }
12254 else
12255 {
12256 used_mode = mode;
12257 before = before_param;
12258 after = after_param;
12259
12260 if (before == NULL_RTX)
12261 before = gen_reg_rtx (used_mode);
12262 if (after == NULL_RTX)
12263 after = gen_reg_rtx (used_mode);
12264 }
bb8df8a6 12265
9f0076e5
DE
12266 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12267 && used_mode != mode)
915167f5
GK
12268 the_op = op; /* Computed above. */
12269 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12270 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12271 else
12272 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12273
12274 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12275 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12276 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12277 gen_rtx_UNSPEC (used_mode,
12278 gen_rtvec (1, the_op),
12279 UNSPEC_SYNC_OP));
915167f5
GK
12280 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12281
9f0076e5 12282 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12283 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12284 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12285 else
12286 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12287 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12288
12289 /* Shift and mask the return values properly. */
12290 if (used_mode != mode && before_param)
12291 {
12292 emit_insn (gen_lshrsi3 (before, before, shift));
12293 convert_move (before_param, before, 1);
12294 }
12295
12296 if (used_mode != mode && after_param)
12297 {
12298 emit_insn (gen_lshrsi3 (after, after, shift));
12299 convert_move (after_param, after, 1);
12300 }
12301
12302 /* The previous sequence will end with a branch that's dependent on
12303 the conditional store, so placing an isync will ensure that no
 12304     other instructions (in particular, no load or store instructions)
12305 can start before the atomic operation completes. */
12306 if (sync_p)
12307 emit_insn (gen_isync ());
12308}
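/* Editorial note: a hypothetical sketch, not part of the original sources.
   For the unaligned QImode case above, the rlwinm/xori pair computes the
   left-shift that positions a byte inside its aligned word on a big-endian
   target; the helper name is made up.  */
#if 0
/* shift = ((addr << 3) & 0x18) ^ 0x18, i.e. 24 - 8 * (addr & 3):
   byte 0 of a big-endian word needs the largest shift.  */
static unsigned int
subword_shift_qi (unsigned long addr)
{
  return (unsigned int) (((addr << 3) & 0x18) ^ 0x18);
}
#endif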
12309
b52110d4
DE
12310/* A subroutine of the atomic operation splitters. Jump to LABEL if
12311 COND is true. Mark the jump as unlikely to be taken. */
12312
12313static void
12314emit_unlikely_jump (rtx cond, rtx label)
12315{
12316 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12317 rtx x;
12318
12319 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12320 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12321 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12322}
12323
12324/* A subroutine of the atomic operation splitters. Emit a load-locked
12325 instruction in MODE. */
12326
12327static void
12328emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12329{
12330 rtx (*fn) (rtx, rtx) = NULL;
12331 if (mode == SImode)
12332 fn = gen_load_locked_si;
12333 else if (mode == DImode)
12334 fn = gen_load_locked_di;
12335 emit_insn (fn (reg, mem));
12336}
12337
12338/* A subroutine of the atomic operation splitters. Emit a store-conditional
12339 instruction in MODE. */
12340
12341static void
12342emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12343{
12344 rtx (*fn) (rtx, rtx, rtx) = NULL;
12345 if (mode == SImode)
12346 fn = gen_store_conditional_si;
12347 else if (mode == DImode)
12348 fn = gen_store_conditional_di;
12349
9f0076e5 12350 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12351 if (PPC405_ERRATUM77)
12352 emit_insn (gen_memory_barrier ());
12353
12354 emit_insn (fn (res, mem, val));
12355}
12356
9f0076e5 12357/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
bb8df8a6 12358 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12359 operand of the binary operator. BEFORE and AFTER are optional locations to
 12360   return the value of MEM either before or after the operation.  SCRATCH is
12361 a scratch register. */
12362
12363void
12364rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12365 rtx before, rtx after, rtx scratch)
12366{
12367 enum machine_mode mode = GET_MODE (mem);
12368 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12369
12370 emit_insn (gen_memory_barrier ());
12371
12372 label = gen_label_rtx ();
12373 emit_label (label);
12374 label = gen_rtx_LABEL_REF (VOIDmode, label);
12375
12376 if (before == NULL_RTX)
12377 before = scratch;
12378 emit_load_locked (mode, before, mem);
12379
12380 if (code == NOT)
12381 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12382 else if (code == AND)
12383 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12384 else
12385 x = gen_rtx_fmt_ee (code, mode, before, val);
12386
12387 if (after != NULL_RTX)
12388 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12389 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12390
12391 emit_store_conditional (mode, cond, mem, scratch);
12392
12393 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12394 emit_unlikely_jump (x, label);
12395
12396 emit_insn (gen_isync ());
12397}
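/* Editorial note: a hypothetical sketch, not part of the original sources.
   The load-locked/store-conditional retry loop emitted above has the same
   observable behaviour as a C11 fetch-and-operate; the helper name is made
   up.  */
#if 0
#include <stdatomic.h>

/* C11 analogue of the PLUS case of the splitter above: returns the value
   of the memory BEFORE the update, like the optional BEFORE operand.  */
static int
fetch_add_model (_Atomic int *mem, int val)
{
  return atomic_fetch_add (mem, val);
}
#endif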
12398
b52110d4
DE
12399/* Expand an atomic compare and swap operation. MEM is the memory on which
12400 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12401 value to be stored. SCRATCH is a scratch GPR. */
12402
12403void
12404rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12405 rtx scratch)
12406{
12407 enum machine_mode mode = GET_MODE (mem);
12408 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12409
12410 emit_insn (gen_memory_barrier ());
12411
12412 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12413 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12414 emit_label (XEXP (label1, 0));
12415
12416 emit_load_locked (mode, retval, mem);
12417
12418 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12419 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12420
12421 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12422 emit_unlikely_jump (x, label2);
12423
12424 emit_move_insn (scratch, newval);
12425 emit_store_conditional (mode, cond, mem, scratch);
12426
12427 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12428 emit_unlikely_jump (x, label1);
12429
12430 emit_insn (gen_isync ());
12431 emit_label (XEXP (label2, 0));
12432}
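/* Editorial note: a hypothetical sketch, not part of the original sources.
   The loop above implements the usual compare-and-swap contract, comparable
   to C11 atomic_compare_exchange_strong; the helper name is made up.  */
#if 0
#include <stdatomic.h>

/* Store NEWVAL only if *MEM still equals OLDVAL; the return value is the
   value that was observed in MEM, as RETVAL receives above.  */
static int
cas_model (_Atomic int *mem, int oldval, int newval)
{
  int observed = oldval;
  atomic_compare_exchange_strong (mem, &observed, newval);
  return observed;
}
#endif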
12433
12434/* Expand an atomic test and set operation. MEM is the memory on which
12435 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12436
12437void
12438rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12439{
12440 enum machine_mode mode = GET_MODE (mem);
12441 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12442
12443 emit_insn (gen_memory_barrier ());
12444
12445 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12446 emit_label (XEXP (label, 0));
12447
12448 emit_load_locked (mode, retval, mem);
12449 emit_move_insn (scratch, val);
12450 emit_store_conditional (mode, cond, mem, scratch);
12451
12452 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12453 emit_unlikely_jump (x, label);
12454
12455 emit_insn (gen_isync ());
12456}
12457
9fc75b97
DE
12458void
12459rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
12460{
12461 enum machine_mode mode = GET_MODE (mem);
12462 rtx addrSI, align, wdst, shift, mask;
12463 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
12464 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12465
12466 /* Shift amount for subword relative to aligned word. */
12467 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
12468 shift = gen_reg_rtx (SImode);
12469 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
12470 GEN_INT (shift_mask)));
12471 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
12472
12473 /* Shift and mask old value into position within word. */
12474 oldval = convert_modes (SImode, mode, oldval, 1);
12475 oldval = expand_binop (SImode, and_optab,
12476 oldval, GEN_INT (imask), NULL_RTX,
12477 1, OPTAB_LIB_WIDEN);
12478 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
12479
12480 /* Shift and mask new value into position within word. */
12481 newval = convert_modes (SImode, mode, newval, 1);
12482 newval = expand_binop (SImode, and_optab,
12483 newval, GEN_INT (imask), NULL_RTX,
12484 1, OPTAB_LIB_WIDEN);
12485 emit_insn (gen_ashlsi3 (newval, newval, shift));
12486
12487 /* Mask for insertion. */
12488 mask = gen_reg_rtx (SImode);
12489 emit_move_insn (mask, GEN_INT (imask));
12490 emit_insn (gen_ashlsi3 (mask, mask, shift));
12491
12492 /* Address of aligned word containing subword. */
12493 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
12494 NULL_RTX, 1, OPTAB_LIB_WIDEN);
12495 mem = change_address (mem, SImode, align);
12496 set_mem_align (mem, 32);
12497 MEM_VOLATILE_P (mem) = 1;
12498
12499 wdst = gen_reg_rtx (SImode);
12500 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
12501 oldval, newval, mem));
12502
12503 emit_move_insn (dst, gen_lowpart (mode, wdst));
12504}
12505
12506void
12507rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
12508 rtx oldval, rtx newval, rtx mem,
12509 rtx scratch)
12510{
12511 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12512
12513 emit_insn (gen_memory_barrier ());
12514 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12515 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12516 emit_label (XEXP (label1, 0));
12517
12518 emit_load_locked (SImode, scratch, mem);
12519
12520 /* Mask subword within loaded value for comparison with oldval.
 12521      Use UNSPEC_AND to avoid a clobber.  */
12522 emit_insn (gen_rtx_SET (SImode, dest,
12523 gen_rtx_UNSPEC (SImode,
12524 gen_rtvec (2, scratch, mask),
12525 UNSPEC_AND)));
12526
12527 x = gen_rtx_COMPARE (CCmode, dest, oldval);
12528 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12529
12530 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12531 emit_unlikely_jump (x, label2);
12532
12533 /* Clear subword within loaded value for insertion of new value. */
12534 emit_insn (gen_rtx_SET (SImode, scratch,
12535 gen_rtx_AND (SImode,
12536 gen_rtx_NOT (SImode, mask), scratch)));
12537 emit_insn (gen_iorsi3 (scratch, scratch, newval));
12538 emit_store_conditional (SImode, cond, mem, scratch);
12539
12540 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12541 emit_unlikely_jump (x, label1);
12542
12543 emit_insn (gen_isync ());
12544 emit_label (XEXP (label2, 0));
12545}
12546
12547
b52110d4 12548 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
12549 multi-register moves. It will emit at most one instruction for
12550 each register that is accessed; that is, it won't emit li/lis pairs
12551 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12552 register. */
46c07df8 12553
46c07df8 12554void
a9baceb1 12555rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 12556{
a9baceb1
GK
12557 /* The register number of the first register being moved. */
12558 int reg;
12559 /* The mode that is to be moved. */
12560 enum machine_mode mode;
12561 /* The mode that the move is being done in, and its size. */
12562 enum machine_mode reg_mode;
12563 int reg_mode_size;
12564 /* The number of registers that will be moved. */
12565 int nregs;
12566
12567 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12568 mode = GET_MODE (dst);
c8b622ff 12569 nregs = hard_regno_nregs[reg][mode];
a9baceb1
GK
12570 if (FP_REGNO_P (reg))
12571 reg_mode = DFmode;
12572 else if (ALTIVEC_REGNO_P (reg))
12573 reg_mode = V16QImode;
12574 else
12575 reg_mode = word_mode;
12576 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 12577
37409796 12578 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 12579
a9baceb1
GK
12580 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12581 {
12582 /* Move register range backwards, if we might have destructive
12583 overlap. */
12584 int i;
12585 for (i = nregs - 1; i >= 0; i--)
f676971a 12586 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
12587 simplify_gen_subreg (reg_mode, dst, mode,
12588 i * reg_mode_size),
12589 simplify_gen_subreg (reg_mode, src, mode,
12590 i * reg_mode_size)));
12591 }
46c07df8
HP
12592 else
12593 {
a9baceb1
GK
12594 int i;
12595 int j = -1;
12596 bool used_update = false;
46c07df8 12597
c1e55850 12598 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
12599 {
12600 rtx breg;
3a1f863f 12601
a9baceb1
GK
12602 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12603 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
12604 {
12605 rtx delta_rtx;
a9baceb1 12606 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
12607 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12608 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12609 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
12610 emit_insn (TARGET_32BIT
12611 ? gen_addsi3 (breg, breg, delta_rtx)
12612 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12613 src = replace_equiv_address (src, breg);
3a1f863f 12614 }
d04b6e6e 12615 else if (! rs6000_offsettable_memref_p (src))
c1e55850 12616 {
13e2e16e 12617 rtx basereg;
c1e55850
GK
12618 basereg = gen_rtx_REG (Pmode, reg);
12619 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 12620 src = replace_equiv_address (src, basereg);
c1e55850 12621 }
3a1f863f 12622
0423421f
AM
12623 breg = XEXP (src, 0);
12624 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12625 breg = XEXP (breg, 0);
12626
12627 /* If the base register we are using to address memory is
12628 also a destination reg, then change that register last. */
12629 if (REG_P (breg)
12630 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
12631 && REGNO (breg) < REGNO (dst) + nregs)
12632 j = REGNO (breg) - REGNO (dst);
c4ad648e 12633 }
46c07df8 12634
a9baceb1 12635 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
12636 {
12637 rtx breg;
12638
a9baceb1
GK
12639 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12640 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
12641 {
12642 rtx delta_rtx;
a9baceb1 12643 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
12644 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12645 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12646 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
12647
12648 /* We have to update the breg before doing the store.
12649 Use store with update, if available. */
12650
12651 if (TARGET_UPDATE)
12652 {
a9baceb1 12653 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
12654 emit_insn (TARGET_32BIT
12655 ? (TARGET_POWERPC64
12656 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12657 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12658 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 12659 used_update = true;
3a1f863f
DE
12660 }
12661 else
a9baceb1
GK
12662 emit_insn (TARGET_32BIT
12663 ? gen_addsi3 (breg, breg, delta_rtx)
12664 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12665 dst = replace_equiv_address (dst, breg);
3a1f863f 12666 }
37409796 12667 else
d04b6e6e 12668 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
12669 }
12670
46c07df8 12671 for (i = 0; i < nregs; i++)
f676971a 12672 {
3a1f863f
DE
12673 /* Calculate index to next subword. */
12674 ++j;
f676971a 12675 if (j == nregs)
3a1f863f 12676 j = 0;
46c07df8 12677
112cdef5 12678	  /* If the compiler already emitted the move of the first word by
a9baceb1 12679	     a store with update, no need to do anything for it here.  */
3a1f863f 12680 if (j == 0 && used_update)
a9baceb1 12681 continue;
f676971a 12682
a9baceb1
GK
12683 emit_insn (gen_rtx_SET (VOIDmode,
12684 simplify_gen_subreg (reg_mode, dst, mode,
12685 j * reg_mode_size),
12686 simplify_gen_subreg (reg_mode, src, mode,
12687 j * reg_mode_size)));
3a1f863f 12688 }
46c07df8
HP
12689 }
12690}
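/* Editorial note: an illustrative sketch, not part of the original sources.
   The direction choice above follows the classic overlapping-copy rule:
   when the destination register range starts above the source range, copy
   from the highest register down so that no source register is clobbered
   before it has been read.  The helper name is hypothetical.  */
#if 0
/* memmove-style direction choice for register-sized chunks.  */
static void
copy_regs (long *dst, const long *src, int nregs)
{
  int i;
  if (dst > src)
    for (i = nregs - 1; i >= 0; i--)   /* backwards: dst overlaps above src */
      dst[i] = src[i];
  else
    for (i = 0; i < nregs; i++)        /* forwards is safe otherwise */
      dst[i] = src[i];
}
#endif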
12691
12a4e8c5 12692\f
a4f6c312
SS
12693/* This page contains routines that are used to determine what the
12694 function prologue and epilogue code will do and write them out. */
9878760c 12695
a4f6c312
SS
12696/* Return the first fixed-point register that is required to be
12697 saved. 32 if none. */
9878760c
RK
12698
12699int
863d938c 12700first_reg_to_save (void)
9878760c
RK
12701{
12702 int first_reg;
12703
12704 /* Find lowest numbered live register. */
12705 for (first_reg = 13; first_reg <= 31; first_reg++)
f676971a 12706 if (regs_ever_live[first_reg]
a38d360d 12707 && (! call_used_regs[first_reg]
1db02437 12708 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 12709 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
12710 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12711 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
12712 break;
12713
ee890fe2 12714#if TARGET_MACHO
93638d7a
AM
12715 if (flag_pic
12716 && current_function_uses_pic_offset_table
12717 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 12718 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
12719#endif
12720
9878760c
RK
12721 return first_reg;
12722}
12723
12724/* Similar, for FP regs. */
12725
12726int
863d938c 12727first_fp_reg_to_save (void)
9878760c
RK
12728{
12729 int first_reg;
12730
12731 /* Find lowest numbered live register. */
12732 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12733 if (regs_ever_live[first_reg])
12734 break;
12735
12736 return first_reg;
12737}
00b960c7
AH
12738
12739/* Similar, for AltiVec regs. */
12740
12741static int
863d938c 12742first_altivec_reg_to_save (void)
00b960c7
AH
12743{
12744 int i;
12745
12746 /* Stack frame remains as is unless we are in AltiVec ABI. */
12747 if (! TARGET_ALTIVEC_ABI)
12748 return LAST_ALTIVEC_REGNO + 1;
12749
12750 /* Find lowest numbered live register. */
12751 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12752 if (regs_ever_live[i])
12753 break;
12754
12755 return i;
12756}
12757
12758/* Return a 32-bit mask of the AltiVec registers we need to set in
12759 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12760 the 32-bit word is 0. */
12761
12762static unsigned int
863d938c 12763compute_vrsave_mask (void)
00b960c7
AH
12764{
12765 unsigned int i, mask = 0;
12766
12767 /* First, find out if we use _any_ altivec registers. */
12768 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12769 if (regs_ever_live[i])
12770 mask |= ALTIVEC_REG_BIT (i);
12771
12772 if (mask == 0)
12773 return mask;
12774
00b960c7
AH
12775 /* Next, remove the argument registers from the set. These must
12776 be in the VRSAVE mask set by the caller, so we don't need to add
12777 them in again. More importantly, the mask we compute here is
12778 used to generate CLOBBERs in the set_vrsave insn, and we do not
12779 wish the argument registers to die. */
a6cf80f2 12780 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
12781 mask &= ~ALTIVEC_REG_BIT (i);
12782
12783 /* Similarly, remove the return value from the set. */
12784 {
12785 bool yes = false;
12786 diddle_return_value (is_altivec_return_reg, &yes);
12787 if (yes)
12788 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12789 }
12790
12791 return mask;
12792}
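/* Editorial note: a hypothetical sketch, not part of the original sources.
   With the bit numbering described above (bit 0 of VRSAVE is the most
   significant bit of the 32-bit word and corresponds to V0), marking one
   AltiVec register live is a single shifted-mask OR; the helper name is
   made up.  */
#if 0
/* Set the VRSAVE bit for AltiVec register Vn (0 <= n <= 31).  */
static unsigned int
mark_vr_live (unsigned int mask, unsigned int n)
{
  return mask | (0x80000000u >> n);
}
#endif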
12793
d62294f5 12794/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
12795 size of prologues/epilogues by calling our own save/restore-the-world
12796 routines. */
d62294f5
FJ
12797
12798static void
f57fe068
AM
12799compute_save_world_info (rs6000_stack_t *info_ptr)
12800{
12801 info_ptr->world_save_p = 1;
12802 info_ptr->world_save_p
12803 = (WORLD_SAVE_P (info_ptr)
12804 && DEFAULT_ABI == ABI_DARWIN
12805 && ! (current_function_calls_setjmp && flag_exceptions)
12806 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12807 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12808 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12809 && info_ptr->cr_save_p);
f676971a 12810
d62294f5
FJ
12811 /* This will not work in conjunction with sibcalls. Make sure there
12812 are none. (This check is expensive, but seldom executed.) */
f57fe068 12813 if (WORLD_SAVE_P (info_ptr))
f676971a 12814 {
d62294f5
FJ
12815 rtx insn;
12816 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
12817 if ( GET_CODE (insn) == CALL_INSN
12818 && SIBLING_CALL_P (insn))
12819 {
12820 info_ptr->world_save_p = 0;
12821 break;
12822 }
d62294f5 12823 }
f676971a 12824
f57fe068 12825 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
12826 {
12827 /* Even if we're not touching VRsave, make sure there's room on the
12828 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 12829 will attempt to save it. */
d62294f5
FJ
12830 info_ptr->vrsave_size = 4;
12831
12832 /* "Save" the VRsave register too if we're saving the world. */
12833 if (info_ptr->vrsave_mask == 0)
c4ad648e 12834 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
12835
12836 /* Because the Darwin register save/restore routines only handle
c4ad648e 12837 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 12838 check. */
37409796
NS
12839 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12840 && (info_ptr->first_altivec_reg_save
12841 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 12842 }
f676971a 12843 return;
d62294f5
FJ
12844}
12845
12846
00b960c7 12847static void
a2369ed3 12848is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
12849{
12850 bool *yes = (bool *) xyes;
12851 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12852 *yes = true;
12853}
12854
4697a36c
MM
12855\f
12856/* Calculate the stack information for the current function. This is
12857 complicated by having two separate calling sequences, the AIX calling
12858 sequence and the V.4 calling sequence.
12859
592696dd 12860 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 12861 32-bit 64-bit
4697a36c 12862 SP----> +---------------------------------------+
a260abc9 12863 | back chain to caller | 0 0
4697a36c 12864 +---------------------------------------+
a260abc9 12865 | saved CR | 4 8 (8-11)
4697a36c 12866 +---------------------------------------+
a260abc9 12867 | saved LR | 8 16
4697a36c 12868 +---------------------------------------+
a260abc9 12869 | reserved for compilers | 12 24
4697a36c 12870 +---------------------------------------+
a260abc9 12871 | reserved for binders | 16 32
4697a36c 12872 +---------------------------------------+
a260abc9 12873 | saved TOC pointer | 20 40
4697a36c 12874 +---------------------------------------+
a260abc9 12875 | Parameter save area (P) | 24 48
4697a36c 12876 +---------------------------------------+
a260abc9 12877 | Alloca space (A) | 24+P etc.
802a0058 12878 +---------------------------------------+
a7df97e6 12879 | Local variable space (L) | 24+P+A
4697a36c 12880 +---------------------------------------+
a7df97e6 12881 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 12882 +---------------------------------------+
00b960c7
AH
12883 | Save area for AltiVec registers (W) | 24+P+A+L+X
12884 +---------------------------------------+
12885 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12886 +---------------------------------------+
12887 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 12888 +---------------------------------------+
00b960c7
AH
 12889	 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
12890 +---------------------------------------+
 12891	 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
12892 +---------------------------------------+
12893 old SP->| back chain to caller's caller |
12894 +---------------------------------------+
12895
5376a30c
KR
12896 The required alignment for AIX configurations is two words (i.e., 8
12897 or 16 bytes).
12898
12899
4697a36c
MM
12900 V.4 stack frames look like:
12901
12902 SP----> +---------------------------------------+
12903 | back chain to caller | 0
12904 +---------------------------------------+
5eb387b8 12905 | caller's saved LR | 4
4697a36c
MM
12906 +---------------------------------------+
12907 | Parameter save area (P) | 8
12908 +---------------------------------------+
a7df97e6 12909 | Alloca space (A) | 8+P
f676971a 12910 +---------------------------------------+
a7df97e6 12911 | Varargs save area (V) | 8+P+A
f676971a 12912 +---------------------------------------+
a7df97e6 12913 | Local variable space (L) | 8+P+A+V
f676971a 12914 +---------------------------------------+
a7df97e6 12915 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 12916 +---------------------------------------+
00b960c7
AH
12917 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12918 +---------------------------------------+
12919 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12920 +---------------------------------------+
12921 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12922 +---------------------------------------+
c4ad648e
AM
12923 | SPE: area for 64-bit GP registers |
12924 +---------------------------------------+
12925 | SPE alignment padding |
12926 +---------------------------------------+
00b960c7 12927 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 12928 +---------------------------------------+
00b960c7 12929 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 12930 +---------------------------------------+
00b960c7 12931 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
12932 +---------------------------------------+
12933 old SP->| back chain to caller's caller |
12934 +---------------------------------------+
b6c9286a 12935
5376a30c
KR
12936 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12937 given. (But note below and in sysv4.h that we require only 8 and
12938 may round up the size of our stack frame anyways. The historical
12939 reason is early versions of powerpc-linux which didn't properly
12940 align the stack at program startup. A happy side-effect is that
12941 -mno-eabi libraries can be used with -meabi programs.)
12942
50d440bc 12943 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
12944 the stack alignment requirements may differ. If -mno-eabi is not
12945 given, the required stack alignment is 8 bytes; if -mno-eabi is
12946 given, the required alignment is 16 bytes. (But see V.4 comment
12947 above.) */
4697a36c 12948
61b2fbe7
MM
12949#ifndef ABI_STACK_BOUNDARY
12950#define ABI_STACK_BOUNDARY STACK_BOUNDARY
12951#endif
12952
d1d0c603 12953static rs6000_stack_t *
863d938c 12954rs6000_stack_info (void)
4697a36c 12955{
022123e6 12956 static rs6000_stack_t info;
4697a36c 12957 rs6000_stack_t *info_ptr = &info;
327e5343 12958 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 12959 int ehrd_size;
64045029 12960 int save_align;
44688022 12961 HOST_WIDE_INT non_fixed_size;
4697a36c 12962
022123e6 12963 memset (&info, 0, sizeof (info));
4697a36c 12964
c19de7aa
AH
12965 if (TARGET_SPE)
12966 {
12967 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 12968 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
12969 cfun->machine->insn_chain_scanned_p
12970 = spe_func_has_64bit_regs_p () + 1;
12971 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
12972 }
12973
a4f6c312 12974 /* Select which calling sequence. */
178274da 12975 info_ptr->abi = DEFAULT_ABI;
9878760c 12976
a4f6c312 12977 /* Calculate which registers need to be saved & save area size. */
4697a36c 12978 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 12979 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 12980 even if it currently looks like we won't. */
2bfcf297 12981 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
12982 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12983 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
12984 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12985 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
12986 else
12987 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 12988
a3170dc6
AH
12989 /* For the SPE, we have an additional upper 32-bits on each GPR.
12990 Ideally we should save the entire 64-bits only when the upper
12991 half is used in SIMD instructions. Since we only record
12992 registers live (not the size they are used in), this proves
12993 difficult because we'd have to traverse the instruction chain at
12994 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 12995     so we opt to save the GPRs in 64-bits whenever even one register
 12996     gets used in 64-bits.  Otherwise, all the registers in the frame
12997 get saved in 32-bits.
a3170dc6 12998
c19de7aa 12999     So, when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 13000 traditional GP save area will be empty. */
c19de7aa 13001 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13002 info_ptr->gp_size = 0;
13003
4697a36c
MM
13004 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13005 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13006
00b960c7
AH
13007 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13008 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13009 - info_ptr->first_altivec_reg_save);
13010
592696dd 13011 /* Does this function call anything? */
71f123ca
FS
13012 info_ptr->calls_p = (! current_function_is_leaf
13013 || cfun->machine->ra_needs_full_frame);
b6c9286a 13014
a4f6c312 13015 /* Determine if we need to save the link register. */
022123e6
AM
13016 if ((DEFAULT_ABI == ABI_AIX
13017 && current_function_profile
13018 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
13019#ifdef TARGET_RELOCATABLE
13020 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13021#endif
13022 || (info_ptr->first_fp_reg_save != 64
13023 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 13024 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 13025 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
13026 || info_ptr->calls_p
13027 || rs6000_ra_ever_killed ())
4697a36c
MM
13028 {
13029 info_ptr->lr_save_p = 1;
9ebbca7d 13030 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
4697a36c
MM
13031 }
13032
9ebbca7d 13033 /* Determine if we need to save the condition code registers. */
f676971a 13034 if (regs_ever_live[CR2_REGNO]
9ebbca7d
GK
13035 || regs_ever_live[CR3_REGNO]
13036 || regs_ever_live[CR4_REGNO])
4697a36c
MM
13037 {
13038 info_ptr->cr_save_p = 1;
178274da 13039 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
13040 info_ptr->cr_size = reg_size;
13041 }
13042
83720594
RH
13043 /* If the current function calls __builtin_eh_return, then we need
13044 to allocate stack space for registers that will hold data for
13045 the exception handler. */
13046 if (current_function_calls_eh_return)
13047 {
13048 unsigned int i;
13049 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
13050 continue;
a3170dc6
AH
13051
13052 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
13053 ehrd_size = i * (TARGET_SPE_ABI
13054 && info_ptr->spe_64bit_regs_used != 0
13055 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
13056 }
13057 else
13058 ehrd_size = 0;
13059
592696dd 13060 /* Determine various sizes. */
4697a36c
MM
13061 info_ptr->reg_size = reg_size;
13062 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 13063 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 13064 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 13065 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
13066 if (FRAME_GROWS_DOWNWARD)
13067 info_ptr->vars_size
5b667039
JJ
13068 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
13069 + info_ptr->parm_size,
7d5175e1 13070 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
13071 - (info_ptr->fixed_size + info_ptr->vars_size
13072 + info_ptr->parm_size);
00b960c7 13073
c19de7aa 13074 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13075 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
13076 else
13077 info_ptr->spe_gp_size = 0;
13078
4d774ff8
HP
13079 if (TARGET_ALTIVEC_ABI)
13080 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 13081 else
4d774ff8
HP
13082 info_ptr->vrsave_mask = 0;
13083
13084 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
13085 info_ptr->vrsave_size = 4;
13086 else
13087 info_ptr->vrsave_size = 0;
b6c9286a 13088
d62294f5
FJ
13089 compute_save_world_info (info_ptr);
13090
592696dd 13091 /* Calculate the offsets. */
178274da 13092 switch (DEFAULT_ABI)
4697a36c 13093 {
b6c9286a 13094 case ABI_NONE:
24d304eb 13095 default:
37409796 13096 gcc_unreachable ();
b6c9286a
MM
13097
13098 case ABI_AIX:
ee890fe2 13099 case ABI_DARWIN:
b6c9286a
MM
13100 info_ptr->fp_save_offset = - info_ptr->fp_size;
13101 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
13102
13103 if (TARGET_ALTIVEC_ABI)
13104 {
13105 info_ptr->vrsave_save_offset
13106 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
13107
13108 /* Align stack so vector save area is on a quadword boundary. */
13109 if (info_ptr->altivec_size != 0)
13110 info_ptr->altivec_padding_size
13111 = 16 - (-info_ptr->vrsave_save_offset % 16);
13112 else
13113 info_ptr->altivec_padding_size = 0;
13114
13115 info_ptr->altivec_save_offset
13116 = info_ptr->vrsave_save_offset
13117 - info_ptr->altivec_padding_size
13118 - info_ptr->altivec_size;
13119
13120 /* Adjust for AltiVec case. */
13121 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
13122 }
13123 else
13124 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
13125 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
13126 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
13127 break;
13128
13129 case ABI_V4:
b6c9286a
MM
13130 info_ptr->fp_save_offset = - info_ptr->fp_size;
13131 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 13132 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 13133
c19de7aa 13134 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
13135 {
13136 /* Align stack so SPE GPR save area is aligned on a
13137 double-word boundary. */
13138 if (info_ptr->spe_gp_size != 0)
13139 info_ptr->spe_padding_size
13140 = 8 - (-info_ptr->cr_save_offset % 8);
13141 else
13142 info_ptr->spe_padding_size = 0;
13143
13144 info_ptr->spe_gp_save_offset
13145 = info_ptr->cr_save_offset
13146 - info_ptr->spe_padding_size
13147 - info_ptr->spe_gp_size;
13148
13149 /* Adjust for SPE case. */
022123e6 13150 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 13151 }
a3170dc6 13152 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
13153 {
13154 info_ptr->vrsave_save_offset
13155 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
13156
13157 /* Align stack so vector save area is on a quadword boundary. */
13158 if (info_ptr->altivec_size != 0)
13159 info_ptr->altivec_padding_size
13160 = 16 - (-info_ptr->vrsave_save_offset % 16);
13161 else
13162 info_ptr->altivec_padding_size = 0;
13163
13164 info_ptr->altivec_save_offset
13165 = info_ptr->vrsave_save_offset
13166 - info_ptr->altivec_padding_size
13167 - info_ptr->altivec_size;
13168
13169 /* Adjust for AltiVec case. */
022123e6 13170 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
13171 }
13172 else
022123e6
AM
13173 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
13174 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
13175 info_ptr->lr_save_offset = reg_size;
13176 break;
4697a36c
MM
13177 }
13178
64045029 13179 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
13180 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
13181 + info_ptr->gp_size
13182 + info_ptr->altivec_size
13183 + info_ptr->altivec_padding_size
a3170dc6
AH
13184 + info_ptr->spe_gp_size
13185 + info_ptr->spe_padding_size
00b960c7
AH
13186 + ehrd_size
13187 + info_ptr->cr_size
022123e6 13188 + info_ptr->vrsave_size,
64045029 13189 save_align);
00b960c7 13190
44688022 13191 non_fixed_size = (info_ptr->vars_size
ff381587 13192 + info_ptr->parm_size
5b667039 13193 + info_ptr->save_size);
ff381587 13194
44688022
AM
13195 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
13196 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
13197
13198 /* Determine if we need to allocate any stack frame:
13199
a4f6c312
SS
13200 For AIX we need to push the stack if a frame pointer is needed
13201 (because the stack might be dynamically adjusted), if we are
13202 debugging, if we make calls, or if the sum of fp_save, gp_save,
13203 and local variables are more than the space needed to save all
13204 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
13205 + 18*8 = 288 (GPR13 reserved).
ff381587 13206
a4f6c312
SS
13207 For V.4 we don't have the stack cushion that AIX uses, but assume
13208 that the debugger can handle stackless frames. */
ff381587
MM
13209
13210 if (info_ptr->calls_p)
13211 info_ptr->push_p = 1;
13212
178274da 13213 else if (DEFAULT_ABI == ABI_V4)
44688022 13214 info_ptr->push_p = non_fixed_size != 0;
ff381587 13215
178274da
AM
13216 else if (frame_pointer_needed)
13217 info_ptr->push_p = 1;
13218
13219 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
13220 info_ptr->push_p = 1;
13221
ff381587 13222 else
44688022 13223 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 13224
a4f6c312 13225 /* Zero offsets if we're not saving those registers. */
8dda1a21 13226 if (info_ptr->fp_size == 0)
4697a36c
MM
13227 info_ptr->fp_save_offset = 0;
13228
8dda1a21 13229 if (info_ptr->gp_size == 0)
4697a36c
MM
13230 info_ptr->gp_save_offset = 0;
13231
00b960c7
AH
13232 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
13233 info_ptr->altivec_save_offset = 0;
13234
13235 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
13236 info_ptr->vrsave_save_offset = 0;
13237
c19de7aa
AH
13238 if (! TARGET_SPE_ABI
13239 || info_ptr->spe_64bit_regs_used == 0
13240 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
13241 info_ptr->spe_gp_save_offset = 0;
13242
c81fc13e 13243 if (! info_ptr->lr_save_p)
4697a36c
MM
13244 info_ptr->lr_save_offset = 0;
13245
c81fc13e 13246 if (! info_ptr->cr_save_p)
4697a36c
MM
13247 info_ptr->cr_save_offset = 0;
13248
13249 return info_ptr;
13250}
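/* Editorial note: a hypothetical sketch, not part of the original sources.
   It models the round-up-to-alignment step that RS6000_ALIGN performs
   throughout the function above, assuming the alignment is a power of two;
   the helper name is made up for illustration.  */
#if 0
/* Round N up to the next multiple of ALIGN (ALIGN a power of two), as is
   done for save_size and total_size above.  */
static long
align_up (long n, long align)
{
  return (n + align - 1) & -align;
}
#endif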
13251
c19de7aa
AH
13252/* Return true if the current function uses any GPRs in 64-bit SIMD
13253 mode. */
13254
13255static bool
863d938c 13256spe_func_has_64bit_regs_p (void)
c19de7aa
AH
13257{
13258 rtx insns, insn;
13259
13260 /* Functions that save and restore all the call-saved registers will
13261 need to save/restore the registers in 64-bits. */
13262 if (current_function_calls_eh_return
13263 || current_function_calls_setjmp
13264 || current_function_has_nonlocal_goto)
13265 return true;
13266
13267 insns = get_insns ();
13268
13269 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13270 {
13271 if (INSN_P (insn))
13272 {
13273 rtx i;
13274
b5a5beb9
AH
13275 /* FIXME: This should be implemented with attributes...
13276
13277 (set_attr "spe64" "true")....then,
13278 if (get_spe64(insn)) return true;
13279
13280 It's the only reliable way to do the stuff below. */
13281
c19de7aa 13282 i = PATTERN (insn);
f82f556d
AH
13283 if (GET_CODE (i) == SET)
13284 {
13285 enum machine_mode mode = GET_MODE (SET_SRC (i));
13286
13287 if (SPE_VECTOR_MODE (mode))
13288 return true;
13289 if (TARGET_E500_DOUBLE && mode == DFmode)
13290 return true;
13291 }
c19de7aa
AH
13292 }
13293 }
13294
13295 return false;
13296}
13297
d1d0c603 13298static void
a2369ed3 13299debug_stack_info (rs6000_stack_t *info)
9878760c 13300{
d330fd93 13301 const char *abi_string;
24d304eb 13302
c81fc13e 13303 if (! info)
4697a36c
MM
13304 info = rs6000_stack_info ();
13305
13306 fprintf (stderr, "\nStack information for function %s:\n",
13307 ((current_function_decl && DECL_NAME (current_function_decl))
13308 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13309 : "<unknown>"));
13310
24d304eb
RK
13311 switch (info->abi)
13312 {
b6c9286a
MM
13313 default: abi_string = "Unknown"; break;
13314 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 13315 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 13316 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 13317 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
13318 }
13319
13320 fprintf (stderr, "\tABI = %5s\n", abi_string);
13321
00b960c7
AH
13322 if (TARGET_ALTIVEC_ABI)
13323 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13324
a3170dc6
AH
13325 if (TARGET_SPE_ABI)
13326 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13327
4697a36c
MM
13328 if (info->first_gp_reg_save != 32)
13329 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13330
13331 if (info->first_fp_reg_save != 64)
13332 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 13333
00b960c7
AH
13334 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13335 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13336 info->first_altivec_reg_save);
13337
4697a36c
MM
13338 if (info->lr_save_p)
13339 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 13340
4697a36c
MM
13341 if (info->cr_save_p)
13342 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13343
00b960c7
AH
13344 if (info->vrsave_mask)
13345 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13346
4697a36c
MM
13347 if (info->push_p)
13348 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13349
13350 if (info->calls_p)
13351 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13352
4697a36c
MM
13353 if (info->gp_save_offset)
13354 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13355
13356 if (info->fp_save_offset)
13357 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13358
00b960c7
AH
13359 if (info->altivec_save_offset)
13360 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13361 info->altivec_save_offset);
13362
a3170dc6
AH
13363 if (info->spe_gp_save_offset)
13364 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13365 info->spe_gp_save_offset);
13366
00b960c7
AH
13367 if (info->vrsave_save_offset)
13368 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13369 info->vrsave_save_offset);
13370
4697a36c
MM
13371 if (info->lr_save_offset)
13372 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13373
13374 if (info->cr_save_offset)
13375 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13376
13377 if (info->varargs_save_offset)
13378 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13379
13380 if (info->total_size)
d1d0c603
JJ
13381 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13382 info->total_size);
4697a36c 13383
4697a36c 13384 if (info->vars_size)
d1d0c603
JJ
13385 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13386 info->vars_size);
4697a36c
MM
13387
13388 if (info->parm_size)
13389 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13390
13391 if (info->fixed_size)
13392 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13393
13394 if (info->gp_size)
13395 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13396
a3170dc6
AH
13397 if (info->spe_gp_size)
13398 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13399
4697a36c
MM
13400 if (info->fp_size)
13401 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13402
00b960c7
AH
13403 if (info->altivec_size)
13404 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13405
13406 if (info->vrsave_size)
13407 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13408
13409 if (info->altivec_padding_size)
13410 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13411 info->altivec_padding_size);
13412
a3170dc6
AH
13413 if (info->spe_padding_size)
13414 fprintf (stderr, "\tspe_padding_size = %5d\n",
13415 info->spe_padding_size);
13416
4697a36c
MM
13417 if (info->cr_size)
13418 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13419
13420 if (info->save_size)
13421 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13422
13423 if (info->reg_size != 4)
13424 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13425
13426 fprintf (stderr, "\n");
9878760c 13427}
71f123ca
FS
13428
13429rtx
a2369ed3 13430rs6000_return_addr (int count, rtx frame)
71f123ca 13431{
a4f6c312
SS
13432 /* Currently we don't optimize very well between prologue and body
13433 code, and for PIC code the result can actually be quite bad, so
13434 don't try to be too clever here. */
f1384257 13435 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
13436 {
13437 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
13438
13439 return
13440 gen_rtx_MEM
13441 (Pmode,
13442 memory_address
13443 (Pmode,
13444 plus_constant (copy_to_reg
13445 (gen_rtx_MEM (Pmode,
13446 memory_address (Pmode, frame))),
13447 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
13448 }
13449
8c29550d 13450 cfun->machine->ra_need_lr = 1;
9e2f7ec7 13451 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
71f123ca
FS
13452}
13453
5e1bf043
DJ
13454/* Say whether a function is a candidate for sibcall handling or not.
13455 We do not allow indirect calls to be optimized into sibling calls.
13456 Also, we can't do it if there are any vector parameters; there's
13457 nowhere to put the VRsave code needed to make that work. Note that functions with
13458 vector parameters are required to have a prototype, so the argument
13459 type info must be available here. (The tail recursion case can work
13460 with vector parameters, but there's no way to distinguish here.) */
4977bab6 13461static bool
a2369ed3 13462rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
13463{
13464 tree type;
4977bab6 13465 if (decl)
5e1bf043
DJ
13466 {
13467 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 13468 {
4977bab6 13469 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
13470 type; type = TREE_CHAIN (type))
13471 {
c15b529f 13472 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 13473 return false;
5e1bf043 13474 }
c4ad648e 13475 }
5e1bf043 13476 if (DEFAULT_ABI == ABI_DARWIN
4977bab6 13477 || (*targetm.binds_local_p) (decl))
2bcc50d0 13478 {
4977bab6 13479 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
13480
13481 if (!lookup_attribute ("longcall", attr_list)
13482 || lookup_attribute ("shortcall", attr_list))
4977bab6 13483 return true;
2bcc50d0 13484 }
5e1bf043 13485 }
4977bab6 13486 return false;
5e1bf043
DJ
13487}
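/* A hedged illustration, not part of the original sources: given the
   checks above, a tail call such as

       vector int g (vector int v) { return f (v); }

   is not turned into a sibcall when TARGET_ALTIVEC_VRSAVE is in
   effect, because the vector argument would need VRsave handling for
   which there is no place in a sibcall sequence; an ordinary call is
   emitted instead.  */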
13488
e7e64a25
AS
13489/* Return NULL if INSN is valid within a low-overhead loop.
13490 Otherwise return a string describing why doloop cannot be applied.
9419649c
DE
13491 PowerPC uses the COUNT register for branch on table instructions. */
13492
e7e64a25
AS
13493static const char *
13494rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
13495{
13496 if (CALL_P (insn))
e7e64a25 13497 return "Function call in the loop.";
9419649c
DE
13498
13499 if (JUMP_P (insn)
13500 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13501 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 13502 return "Computed branch in the loop.";
9419649c 13503
e7e64a25 13504 return NULL;
9419649c
DE
13505}
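/* A hedged example, not from the original sources: a loop body that
   contains a function call, or a switch statement lowered to a jump
   table (ADDR_VEC / ADDR_DIFF_VEC), is rejected above, since both of
   those use or clobber the COUNT register that the low-overhead
   bdnz/bdz loop form relies on.  */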
13506
71f123ca 13507static int
863d938c 13508rs6000_ra_ever_killed (void)
71f123ca
FS
13509{
13510 rtx top;
5e1bf043
DJ
13511 rtx reg;
13512 rtx insn;
71f123ca 13513
dd292d0a 13514 if (current_function_is_thunk)
71f123ca 13515 return 0;
eb0424da 13516
36f7e964
AH
13517 /* regs_ever_live has LR marked as used if any sibcalls are present,
13518 but this should not force saving and restoring in the
13519 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 13520 clobbers LR, so that is inappropriate. */
36f7e964 13521
5e1bf043
DJ
13522 /* Also, the prologue can generate a store into LR that
13523 doesn't really count, like this:
36f7e964 13524
5e1bf043
DJ
13525 move LR->R0
13526 bcl to set PIC register
13527 move LR->R31
13528 move R0->LR
36f7e964
AH
13529
13530 When we're called from the epilogue, we need to avoid counting
13531 this as a store. */
f676971a 13532
71f123ca
FS
13533 push_topmost_sequence ();
13534 top = get_insns ();
13535 pop_topmost_sequence ();
5e1bf043 13536 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
71f123ca 13537
5e1bf043
DJ
13538 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13539 {
13540 if (INSN_P (insn))
13541 {
022123e6
AM
13542 if (CALL_P (insn))
13543 {
13544 if (!SIBLING_CALL_P (insn))
13545 return 1;
13546 }
13547 else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
5e1bf043 13548 return 1;
36f7e964
AH
13549 else if (set_of (reg, insn) != NULL_RTX
13550 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
13551 return 1;
13552 }
13553 }
13554 return 0;
71f123ca 13555}
4697a36c 13556\f
8cd8f856
GK
13557/* Add a REG_MAYBE_DEAD note to the insn. */
13558static void
a2369ed3 13559rs6000_maybe_dead (rtx insn)
8cd8f856
GK
13560{
13561 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13562 const0_rtx,
13563 REG_NOTES (insn));
13564}
13565
9ebbca7d 13566/* Emit instructions needed to load the TOC register.
c7ca610e 13567 This is only needed when TARGET_TOC and TARGET_MINIMAL_TOC are set and
9ebbca7d 13568 there is a constant pool, or for SVR4 -fpic. */
c7ca610e
RK
13569
13570void
a2369ed3 13571rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 13572{
027fbf43 13573 rtx dest, insn;
1db02437 13574 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 13575
7f970b70 13576 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 13577 {
7f970b70
AM
13578 char buf[30];
13579 rtx lab, tmp1, tmp2, got, tempLR;
13580
13581 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13582 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13583 if (flag_pic == 2)
13584 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13585 else
13586 got = rs6000_got_sym ();
13587 tmp1 = tmp2 = dest;
13588 if (!fromprolog)
13589 {
13590 tmp1 = gen_reg_rtx (Pmode);
13591 tmp2 = gen_reg_rtx (Pmode);
13592 }
13593 tempLR = (fromprolog
13594 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13595 : gen_reg_rtx (Pmode));
13596 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13597 if (fromprolog)
13598 rs6000_maybe_dead (insn);
13599 insn = emit_move_insn (tmp1, tempLR);
13600 if (fromprolog)
13601 rs6000_maybe_dead (insn);
13602 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13603 if (fromprolog)
13604 rs6000_maybe_dead (insn);
13605 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13606 if (fromprolog)
13607 rs6000_maybe_dead (insn);
13608 }
13609 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13610 {
13611 rtx tempLR = (fromprolog
13612 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13613 : gen_reg_rtx (Pmode));
13614
13615 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
027fbf43
JJ
13616 if (fromprolog)
13617 rs6000_maybe_dead (insn);
7f970b70 13618 insn = emit_move_insn (dest, tempLR);
027fbf43
JJ
13619 if (fromprolog)
13620 rs6000_maybe_dead (insn);
20b71b17
AM
13621 }
13622 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13623 {
13624 char buf[30];
13625 rtx tempLR = (fromprolog
13626 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13627 : gen_reg_rtx (Pmode));
13628 rtx temp0 = (fromprolog
13629 ? gen_rtx_REG (Pmode, 0)
13630 : gen_reg_rtx (Pmode));
20b71b17 13631
20b71b17
AM
13632 if (fromprolog)
13633 {
ccbca5e4 13634 rtx symF, symL;
38c1f2d7 13635
20b71b17
AM
13636 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13637 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 13638
20b71b17
AM
13639 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13640 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13641
13642 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13643 symF)));
13644 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13645 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13646 symL,
13647 symF)));
9ebbca7d
GK
13648 }
13649 else
20b71b17
AM
13650 {
13651 rtx tocsym;
20b71b17
AM
13652
13653 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
ccbca5e4 13654 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
027fbf43
JJ
13655 emit_move_insn (dest, tempLR);
13656 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 13657 }
027fbf43
JJ
13658 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13659 if (fromprolog)
13660 rs6000_maybe_dead (insn);
9ebbca7d 13661 }
20b71b17
AM
13662 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13663 {
13664 /* This is for AIX code running in non-PIC ELF32. */
13665 char buf[30];
13666 rtx realsym;
13667 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13668 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13669
027fbf43
JJ
13670 insn = emit_insn (gen_elf_high (dest, realsym));
13671 if (fromprolog)
13672 rs6000_maybe_dead (insn);
13673 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13674 if (fromprolog)
13675 rs6000_maybe_dead (insn);
20b71b17 13676 }
37409796 13677 else
9ebbca7d 13678 {
37409796 13679 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 13680
9ebbca7d 13681 if (TARGET_32BIT)
027fbf43 13682 insn = emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 13683 else
027fbf43
JJ
13684 insn = emit_insn (gen_load_toc_aix_di (dest));
13685 if (fromprolog)
13686 rs6000_maybe_dead (insn);
9ebbca7d
GK
13687 }
13688}
13689
d1d0c603
JJ
13690/* Emit instructions to restore the link register after determining where
13691 its value has been stored. */
13692
13693void
13694rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13695{
13696 rs6000_stack_t *info = rs6000_stack_info ();
13697 rtx operands[2];
13698
13699 operands[0] = source;
13700 operands[1] = scratch;
13701
13702 if (info->lr_save_p)
13703 {
13704 rtx frame_rtx = stack_pointer_rtx;
13705 HOST_WIDE_INT sp_offset = 0;
13706 rtx tmp;
13707
13708 if (frame_pointer_needed
13709 || current_function_calls_alloca
13710 || info->total_size > 32767)
13711 {
0be76840 13712 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 13713 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
13714 frame_rtx = operands[1];
13715 }
13716 else if (info->push_p)
13717 sp_offset = info->total_size;
13718
13719 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 13720 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
13721 emit_move_insn (tmp, operands[0]);
13722 }
13723 else
13724 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13725}
13726
f103e34d
GK
13727static GTY(()) int set = -1;
13728
f676971a 13729int
863d938c 13730get_TOC_alias_set (void)
9ebbca7d 13731{
f103e34d
GK
13732 if (set == -1)
13733 set = new_alias_set ();
13734 return set;
f676971a 13735}
9ebbca7d 13736
c1207243 13737/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
13738 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13739 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 13740#if TARGET_ELF
3c9eb5f4 13741static int
f676971a 13742uses_TOC (void)
9ebbca7d 13743{
c4501e62 13744 rtx insn;
38c1f2d7 13745
c4501e62
JJ
13746 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13747 if (INSN_P (insn))
13748 {
13749 rtx pat = PATTERN (insn);
13750 int i;
9ebbca7d 13751
f676971a 13752 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
13753 for (i = 0; i < XVECLEN (pat, 0); i++)
13754 {
13755 rtx sub = XVECEXP (pat, 0, i);
13756 if (GET_CODE (sub) == USE)
13757 {
13758 sub = XEXP (sub, 0);
13759 if (GET_CODE (sub) == UNSPEC
13760 && XINT (sub, 1) == UNSPEC_TOC)
13761 return 1;
13762 }
13763 }
13764 }
13765 return 0;
9ebbca7d 13766}
c954844a 13767#endif
38c1f2d7 13768
9ebbca7d 13769rtx
f676971a 13770create_TOC_reference (rtx symbol)
9ebbca7d 13771{
b69542f7
AM
13772 if (no_new_pseudos)
13773 regs_ever_live[TOC_REGISTER] = 1;
f676971a 13774 return gen_rtx_PLUS (Pmode,
a8a05998 13775 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
13776 gen_rtx_CONST (Pmode,
13777 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 13778 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 13779}
38c1f2d7 13780
fc4767bb
JJ
13781/* If _Unwind_* has been called from within the same module,
13782 the TOC register is not guaranteed to be saved to 40(1) on function
13783 entry. Save it there in that case. */
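/* A hedged note, not part of the original sources: the code below
   follows the back chain to the caller's frame, loads the saved
   return address from it, and fetches the instruction at that
   address.  The constants 0x80410014 and 0xE8410028 appear to encode
   "lwz r2,20(r1)" and "ld r2,40(r1)", i.e. the TOC reload a caller
   normally places after a call; if that reload is present, r2 is
   already taken care of, otherwise r2 is stored into the TOC save
   slot here.  */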
c7ca610e 13784
9ebbca7d 13785void
863d938c 13786rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
13787{
13788 rtx mem;
13789 rtx stack_top = gen_reg_rtx (Pmode);
13790 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
13791 rtx opcode = gen_reg_rtx (SImode);
13792 rtx tocompare = gen_reg_rtx (SImode);
13793 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 13794
8308679f 13795 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
13796 emit_move_insn (stack_top, mem);
13797
8308679f
DE
13798 mem = gen_frame_mem (Pmode,
13799 gen_rtx_PLUS (Pmode, stack_top,
13800 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 13801 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
13802 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13803 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 13804 : 0xE8410028, SImode));
9ebbca7d 13805
fc4767bb 13806 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 13807 SImode, NULL_RTX, NULL_RTX,
fc4767bb 13808 no_toc_save_needed);
9ebbca7d 13809
8308679f
DE
13810 mem = gen_frame_mem (Pmode,
13811 gen_rtx_PLUS (Pmode, stack_top,
13812 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
13813 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13814 emit_label (no_toc_save_needed);
9ebbca7d 13815}
38c1f2d7 13816\f
0be76840
DE
13817/* This ties together stack memory (MEM with an alias set of frame_alias_set)
13818 and the change to the stack pointer. */
ba4828e0 13819
9ebbca7d 13820static void
863d938c 13821rs6000_emit_stack_tie (void)
9ebbca7d 13822{
0be76840
DE
13823 rtx mem = gen_frame_mem (BLKmode,
13824 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 13825
9ebbca7d
GK
13826 emit_insn (gen_stack_tie (mem));
13827}
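/* A hedged note (an assumption, not stated in the original sources):
   the stack_tie pattern acts as a barrier on frame memory, so the
   scheduler cannot move saves or restores of the frame across the
   adjacent update of the stack pointer.  */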
38c1f2d7 13828
9ebbca7d
GK
13829/* Emit the correct code for allocating stack space, as insns.
13830 If COPY_R12, make sure a copy of the old frame is left in r12.
13831 The generated code may use hard register 0 as a temporary. */
13832
13833static void
a2369ed3 13834rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 13835{
9ebbca7d
GK
13836 rtx insn;
13837 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13838 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
13839 rtx todec = gen_int_mode (-size, Pmode);
13840
13841 if (INTVAL (todec) != -size)
13842 {
d4ee4d25 13843 warning (0, "stack frame too large");
61168ff1
RS
13844 emit_insn (gen_trap ());
13845 return;
13846 }
a157febd
GK
13847
13848 if (current_function_limit_stack)
13849 {
13850 if (REG_P (stack_limit_rtx)
f676971a 13851 && REGNO (stack_limit_rtx) > 1
a157febd
GK
13852 && REGNO (stack_limit_rtx) <= 31)
13853 {
5b71a4e7 13854 emit_insn (TARGET_32BIT
9ebbca7d
GK
13855 ? gen_addsi3 (tmp_reg,
13856 stack_limit_rtx,
13857 GEN_INT (size))
13858 : gen_adddi3 (tmp_reg,
13859 stack_limit_rtx,
13860 GEN_INT (size)));
5b71a4e7 13861
9ebbca7d
GK
13862 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13863 const0_rtx));
a157febd
GK
13864 }
13865 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 13866 && TARGET_32BIT
f607bc57 13867 && DEFAULT_ABI == ABI_V4)
a157febd 13868 {
9ebbca7d 13869 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
13870 gen_rtx_PLUS (Pmode,
13871 stack_limit_rtx,
9ebbca7d 13872 GEN_INT (size)));
5b71a4e7 13873
9ebbca7d
GK
13874 emit_insn (gen_elf_high (tmp_reg, toload));
13875 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13876 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13877 const0_rtx));
a157febd
GK
13878 }
13879 else
d4ee4d25 13880 warning (0, "stack limit expression is not supported");
a157febd
GK
13881 }
13882
9ebbca7d
GK
13883 if (copy_r12 || ! TARGET_UPDATE)
13884 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13885
38c1f2d7
MM
13886 if (TARGET_UPDATE)
13887 {
9ebbca7d 13888 if (size > 32767)
38c1f2d7 13889 {
9ebbca7d 13890 /* Need a note here so that try_split doesn't get confused. */
9390387d 13891 if (get_last_insn () == NULL_RTX)
2e040219 13892 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
13893 insn = emit_move_insn (tmp_reg, todec);
13894 try_split (PATTERN (insn), insn, 0);
13895 todec = tmp_reg;
38c1f2d7 13896 }
5b71a4e7
DE
13897
13898 insn = emit_insn (TARGET_32BIT
13899 ? gen_movsi_update (stack_reg, stack_reg,
13900 todec, stack_reg)
c4ad648e 13901 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 13902 todec, stack_reg));
38c1f2d7
MM
13903 }
13904 else
13905 {
5b71a4e7
DE
13906 insn = emit_insn (TARGET_32BIT
13907 ? gen_addsi3 (stack_reg, stack_reg, todec)
13908 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
13909 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13910 gen_rtx_REG (Pmode, 12));
13911 }
f676971a 13912
9ebbca7d 13913 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 13914 REG_NOTES (insn) =
9ebbca7d 13915 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 13916 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
13917 gen_rtx_PLUS (Pmode, stack_reg,
13918 GEN_INT (-size))),
13919 REG_NOTES (insn));
13920}
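/* A hedged illustration, not part of the original sources: for a small
   frame on a 32-bit target with TARGET_UPDATE, the sequence emitted
   above amounts to a single store-with-update such as

       stwu r1,-SIZE(r1)

   (stdu on 64-bit targets), which both allocates the frame and writes
   the back chain in one instruction; larger frames first load -SIZE
   into r0 and use the register form of the update store.  */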
13921
a4f6c312
SS
13922/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13923 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13924 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13925 deduce these equivalences by itself so it wasn't necessary to hold
13926 its hand so much. */
9ebbca7d
GK
13927
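/* A hedged illustration, not part of the original sources: if INSN
   stores a register through the scratch frame pointer r12, e.g.

       (set (mem (plus (reg 12) (const_int 8))) (reg 31))

   and VAL is the frame size, the note attached below rewrites r12 as
   (plus (reg 1) (const_int VAL)), so the save is described relative
   to the incoming stack pointer, which is the form
   dwarf2out_frame_debug_expr can turn into unwind information.  */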
13928static void
f676971a 13929rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 13930 rtx reg2, rtx rreg)
9ebbca7d
GK
13931{
13932 rtx real, temp;
13933
e56c4463
JL
13934 /* copy_rtx will not make unique copies of registers, so we need to
13935 ensure we don't have unwanted sharing here. */
13936 if (reg == reg2)
13937 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13938
13939 if (reg == rreg)
13940 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13941
9ebbca7d
GK
13942 real = copy_rtx (PATTERN (insn));
13943
89e7058f
AH
13944 if (reg2 != NULL_RTX)
13945 real = replace_rtx (real, reg2, rreg);
f676971a
EC
13946
13947 real = replace_rtx (real, reg,
9ebbca7d
GK
13948 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13949 STACK_POINTER_REGNUM),
13950 GEN_INT (val)));
f676971a 13951
9ebbca7d
GK
13952 /* We expect that 'real' is either a SET or a PARALLEL containing
13953 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13954 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13955
13956 if (GET_CODE (real) == SET)
13957 {
13958 rtx set = real;
f676971a 13959
9ebbca7d
GK
13960 temp = simplify_rtx (SET_SRC (set));
13961 if (temp)
13962 SET_SRC (set) = temp;
13963 temp = simplify_rtx (SET_DEST (set));
13964 if (temp)
13965 SET_DEST (set) = temp;
13966 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 13967 {
9ebbca7d
GK
13968 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13969 if (temp)
13970 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 13971 }
38c1f2d7 13972 }
37409796 13973 else
9ebbca7d
GK
13974 {
13975 int i;
37409796
NS
13976
13977 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
13978 for (i = 0; i < XVECLEN (real, 0); i++)
13979 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13980 {
13981 rtx set = XVECEXP (real, 0, i);
f676971a 13982
9ebbca7d
GK
13983 temp = simplify_rtx (SET_SRC (set));
13984 if (temp)
13985 SET_SRC (set) = temp;
13986 temp = simplify_rtx (SET_DEST (set));
13987 if (temp)
13988 SET_DEST (set) = temp;
13989 if (GET_CODE (SET_DEST (set)) == MEM)
13990 {
13991 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13992 if (temp)
13993 XEXP (SET_DEST (set), 0) = temp;
13994 }
13995 RTX_FRAME_RELATED_P (set) = 1;
13996 }
13997 }
c19de7aa
AH
13998
13999 if (TARGET_SPE)
14000 real = spe_synthesize_frame_save (real);
14001
9ebbca7d
GK
14002 RTX_FRAME_RELATED_P (insn) = 1;
14003 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14004 real,
14005 REG_NOTES (insn));
38c1f2d7
MM
14006}
14007
c19de7aa
AH
14008/* Given an SPE frame note, return a PARALLEL of SETs with the
14009 original note, plus a synthetic register save. */
14010
14011static rtx
a2369ed3 14012spe_synthesize_frame_save (rtx real)
c19de7aa
AH
14013{
14014 rtx synth, offset, reg, real2;
14015
14016 if (GET_CODE (real) != SET
14017 || GET_MODE (SET_SRC (real)) != V2SImode)
14018 return real;
14019
14020 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
14021 frame-related note. The parallel contains a set of the register
41f3a930 14022 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
14023 This is so we can differentiate between 64-bit and 32-bit saves.
14024 Words cannot describe this nastiness. */
14025
37409796
NS
14026 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14027 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14028 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
14029
14030 /* Transform:
14031 (set (mem (plus (reg x) (const y)))
14032 (reg z))
14033 into:
14034 (set (mem (plus (reg x) (const y+4)))
41f3a930 14035 (reg z+1200))
c19de7aa
AH
14036 */
14037
14038 real2 = copy_rtx (real);
14039 PUT_MODE (SET_DEST (real2), SImode);
14040 reg = SET_SRC (real2);
14041 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14042 synth = copy_rtx (real2);
14043
14044 if (BYTES_BIG_ENDIAN)
14045 {
14046 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14047 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14048 }
14049
14050 reg = SET_SRC (synth);
41f3a930 14051
c19de7aa 14052 synth = replace_rtx (synth, reg,
41f3a930 14053 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
14054
14055 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14056 synth = replace_rtx (synth, offset,
14057 GEN_INT (INTVAL (offset)
14058 + (BYTES_BIG_ENDIAN ? 0 : 4)));
14059
14060 RTX_FRAME_RELATED_P (synth) = 1;
14061 RTX_FRAME_RELATED_P (real2) = 1;
14062 if (BYTES_BIG_ENDIAN)
14063 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14064 else
14065 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14066
14067 return real;
14068}
14069
00b960c7
AH
14070/* Returns an insn that has a vrsave set operation with the
14071 appropriate CLOBBERs. */
14072
14073static rtx
a2369ed3 14074generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
14075{
14076 int nclobs, i;
14077 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 14078 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 14079
a004eb82
AH
14080 clobs[0]
14081 = gen_rtx_SET (VOIDmode,
14082 vrsave,
14083 gen_rtx_UNSPEC_VOLATILE (SImode,
14084 gen_rtvec (2, reg, vrsave),
3aca4bff 14085 UNSPECV_SET_VRSAVE));
00b960c7
AH
14086
14087 nclobs = 1;
14088
9aa86737
AH
14089 /* We need to clobber the registers in the mask so the scheduler
14090 does not move sets to VRSAVE before sets of AltiVec registers.
14091
14092 However, if the function receives nonlocal gotos, reload will set
14093 all call saved registers live. We will end up with:
14094
14095 (set (reg 999) (mem))
14096 (parallel [ (set (reg vrsave) (unspec blah))
14097 (clobber (reg 999))])
14098
14099 The clobber will cause the store into reg 999 to be dead, and
14100 flow will attempt to delete an epilogue insn. In this case, we
14101 need an unspec use/set of the register. */
00b960c7
AH
14102
14103 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 14104 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
14105 {
14106 if (!epiloguep || call_used_regs [i])
14107 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14108 gen_rtx_REG (V4SImode, i));
14109 else
14110 {
14111 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
14112
14113 clobs[nclobs++]
a004eb82
AH
14114 = gen_rtx_SET (VOIDmode,
14115 reg,
14116 gen_rtx_UNSPEC (V4SImode,
14117 gen_rtvec (1, reg), 27));
9aa86737
AH
14118 }
14119 }
00b960c7
AH
14120
14121 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14122
14123 for (i = 0; i < nclobs; ++i)
14124 XVECEXP (insn, 0, i) = clobs[i];
14125
14126 return insn;
14127}
14128
89e7058f
AH
14129/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14130 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
14131
14132static void
f676971a 14133emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 14134 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
14135{
14136 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14137 rtx replacea, replaceb;
14138
14139 int_rtx = GEN_INT (offset);
14140
14141 /* Some cases that need register indexed addressing. */
14142 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 14143 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
14144 || (TARGET_SPE_ABI
14145 && SPE_VECTOR_MODE (mode)
14146 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
14147 {
14148 /* Whoever calls us must make sure r11 is available in the
c4ad648e 14149 flow path of instructions in the prologue. */
89e7058f
AH
14150 offset_rtx = gen_rtx_REG (Pmode, 11);
14151 emit_move_insn (offset_rtx, int_rtx);
14152
14153 replacea = offset_rtx;
14154 replaceb = int_rtx;
14155 }
14156 else
14157 {
14158 offset_rtx = int_rtx;
14159 replacea = NULL_RTX;
14160 replaceb = NULL_RTX;
14161 }
14162
14163 reg = gen_rtx_REG (mode, regno);
14164 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 14165 mem = gen_frame_mem (mode, addr);
89e7058f
AH
14166
14167 insn = emit_move_insn (mem, reg);
14168
14169 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14170}
14171
a3170dc6
AH
14172/* Emit an offset memory reference suitable for a frame store, while
14173 converting to a valid addressing mode. */
14174
14175static rtx
a2369ed3 14176gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
14177{
14178 rtx int_rtx, offset_rtx;
14179
14180 int_rtx = GEN_INT (offset);
14181
4d4cbc0e
AH
14182 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14183 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
14184 {
14185 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14186 emit_move_insn (offset_rtx, int_rtx);
14187 }
14188 else
14189 offset_rtx = int_rtx;
14190
0be76840 14191 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
14192}
14193
6d0a8091
DJ
14194/* Look for user-defined global regs. We should not save and restore these,
14195 and cannot use stmw/lmw if any fall within the range those instructions would cover. */
14196
14197static bool
14198no_global_regs_above (int first_greg)
14199{
14200 int i;
14201 for (i = 0; i < 32 - first_greg; i++)
14202 if (global_regs[first_greg + i])
14203 return false;
14204 return true;
14205}
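/* A hedged example, not from the original sources: if a translation
   unit declares

       register int counter asm ("r30");

   then global_regs[30] is set, no_global_regs_above returns false
   whenever r30 is at or above the first saved GPR, and the prologue
   and epilogue below fall back to saving and restoring the remaining
   GPRs individually instead of using stmw/lmw.  */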
14206
699c914a
MS
14207#ifndef TARGET_FIX_AND_CONTINUE
14208#define TARGET_FIX_AND_CONTINUE 0
14209#endif
14210
9ebbca7d
GK
14211/* Emit function prologue as insns. */
14212
9878760c 14213void
863d938c 14214rs6000_emit_prologue (void)
9878760c 14215{
4697a36c 14216 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 14217 enum machine_mode reg_mode = Pmode;
327e5343 14218 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14219 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14220 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14221 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 14222 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
14223 rtx insn;
14224 int saving_FPRs_inline;
14225 int using_store_multiple;
14226 HOST_WIDE_INT sp_offset = 0;
f676971a 14227
699c914a
MS
14228 if (TARGET_FIX_AND_CONTINUE)
14229 {
14230 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 14231 address by modifying the first 5 instructions of the function
699c914a
MS
14232 to branch to the overriding function. This is necessary to
14233 permit function pointers that point to the old function to
14234 actually forward to the new function. */
14235 emit_insn (gen_nop ());
14236 emit_insn (gen_nop ());
de2ab0ca 14237 emit_insn (gen_nop ());
699c914a
MS
14238 emit_insn (gen_nop ());
14239 emit_insn (gen_nop ());
14240 }
14241
14242 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14243 {
14244 reg_mode = V2SImode;
14245 reg_size = 8;
14246 }
a3170dc6 14247
9ebbca7d 14248 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14249 && (!TARGET_SPE_ABI
14250 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14251 && info->first_gp_reg_save < 31
14252 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14253 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 14254 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 14255 || current_function_calls_eh_return
8c29550d 14256 || cfun->machine->ra_need_lr);
9ebbca7d
GK
14257
14258 /* For V.4, update stack before we do any saving and set back pointer. */
fc4767bb 14259 if (info->push_p
acd0b319
AM
14260 && (DEFAULT_ABI == ABI_V4
14261 || current_function_calls_eh_return))
9ebbca7d
GK
14262 {
14263 if (info->total_size < 32767)
14264 sp_offset = info->total_size;
14265 else
14266 frame_reg_rtx = frame_ptr_rtx;
f676971a 14267 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
14268 (frame_reg_rtx != sp_reg_rtx
14269 && (info->cr_save_p
14270 || info->lr_save_p
14271 || info->first_fp_reg_save < 64
14272 || info->first_gp_reg_save < 32
14273 )));
14274 if (frame_reg_rtx != sp_reg_rtx)
14275 rs6000_emit_stack_tie ();
14276 }
14277
d62294f5 14278 /* Handle world saves specially here. */
f57fe068 14279 if (WORLD_SAVE_P (info))
d62294f5
FJ
14280 {
14281 int i, j, sz;
14282 rtx treg;
14283 rtvec p;
14284
14285 /* save_world expects lr in r0. */
14286 if (info->lr_save_p)
c4ad648e
AM
14287 {
14288 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14289 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14290 RTX_FRAME_RELATED_P (insn) = 1;
14291 }
d62294f5
FJ
14292
14293 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 14294 assumptions about the offsets of various bits of the stack
992d08b1 14295 frame. */
37409796
NS
14296 gcc_assert (info->gp_save_offset == -220
14297 && info->fp_save_offset == -144
14298 && info->lr_save_offset == 8
14299 && info->cr_save_offset == 4
14300 && info->push_p
14301 && info->lr_save_p
14302 && (!current_function_calls_eh_return
14303 || info->ehrd_offset == -432)
14304 && info->vrsave_save_offset == -224
14305 && info->altivec_save_offset == (-224 -16 -192));
d62294f5
FJ
14306
14307 treg = gen_rtx_REG (SImode, 11);
14308 emit_move_insn (treg, GEN_INT (-info->total_size));
14309
14310 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 14311 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
14312
14313 /* Preserve CR2 for save_world prologues */
14314 sz = 6;
14315 sz += 32 - info->first_gp_reg_save;
14316 sz += 64 - info->first_fp_reg_save;
14317 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14318 p = rtvec_alloc (sz);
14319 j = 0;
14320 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
c4ad648e
AM
14321 gen_rtx_REG (Pmode,
14322 LINK_REGISTER_REGNUM));
d62294f5 14323 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14324 gen_rtx_SYMBOL_REF (Pmode,
14325 "*save_world"));
d62294f5 14326 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
14327 properly. */
14328 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14329 {
14330 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14331 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14332 GEN_INT (info->fp_save_offset
14333 + sp_offset + 8 * i));
0be76840 14334 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
14335
14336 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14337 }
d62294f5 14338 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14339 {
14340 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14341 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14342 GEN_INT (info->altivec_save_offset
14343 + sp_offset + 16 * i));
0be76840 14344 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
14345
14346 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14347 }
d62294f5 14348 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14349 {
14350 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14351 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14352 GEN_INT (info->gp_save_offset
14353 + sp_offset + reg_size * i));
0be76840 14354 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14355
14356 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14357 }
14358
14359 {
14360 /* CR register traditionally saved as CR2. */
14361 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14362 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14363 GEN_INT (info->cr_save_offset
14364 + sp_offset));
0be76840 14365 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14366
14367 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14368 }
d62294f5
FJ
14369 /* Prevent any attempt to delete the setting of r0 and treg! */
14370 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14371 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14372 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14373
14374 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14375 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
c4ad648e 14376 NULL_RTX, NULL_RTX);
d62294f5
FJ
14377
14378 if (current_function_calls_eh_return)
c4ad648e
AM
14379 {
14380 unsigned int i;
14381 for (i = 0; ; ++i)
14382 {
14383 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14384 if (regno == INVALID_REGNUM)
14385 break;
14386 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14387 info->ehrd_offset + sp_offset
14388 + reg_size * (int) i,
14389 info->total_size);
14390 }
14391 }
d62294f5
FJ
14392 }
14393
9aa86737 14394 /* Save AltiVec registers if needed. */
f57fe068 14395 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9aa86737
AH
14396 {
14397 int i;
14398
14399 /* There should be a non-inline version of this, for when we
14400 are saving lots of vector registers. */
14401 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14402 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14403 {
14404 rtx areg, savereg, mem;
14405 int offset;
14406
14407 offset = info->altivec_save_offset + sp_offset
14408 + 16 * (i - info->first_altivec_reg_save);
14409
14410 savereg = gen_rtx_REG (V4SImode, i);
14411
14412 areg = gen_rtx_REG (Pmode, 0);
14413 emit_move_insn (areg, GEN_INT (offset));
14414
14415 /* AltiVec addressing mode is [reg+reg]. */
0be76840
DE
14416 mem = gen_frame_mem (V4SImode,
14417 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
9aa86737
AH
14418
14419 insn = emit_move_insn (mem, savereg);
14420
5c242421
SB
14421 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14422 areg, GEN_INT (offset));
9aa86737
AH
14423 }
14424 }
14425
14426 /* VRSAVE is a bit vector representing which AltiVec registers
14427 are used. The OS uses this to determine which vector
14428 registers to save on a context switch. We need to save
14429 VRSAVE on the stack frame, add whatever AltiVec registers we
14430 used in this function, and do the corresponding magic in the
14431 epilogue. */
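/* A hedged illustration, not part of the original sources: if the
   only AltiVec registers touched by this function are v20 and v21,
   info->vrsave_mask has just the bits for those two registers set;
   the code below reads the current VRSAVE value, stores it in the
   frame, ORs the mask into it, and installs the result so the OS
   will preserve v20/v21 across a context switch.  */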
14432
4d774ff8 14433 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
bcb604b6 14434 && info->vrsave_mask != 0)
9aa86737 14435 {
a004eb82 14436 rtx reg, mem, vrsave;
9aa86737
AH
14437 int offset;
14438
eab97e44
AM
14439 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14440 as frame_reg_rtx and r11 as the static chain pointer for
14441 nested functions. */
14442 reg = gen_rtx_REG (SImode, 0);
a004eb82 14443 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
b188f760
AH
14444 if (TARGET_MACHO)
14445 emit_insn (gen_get_vrsave_internal (reg));
14446 else
14447 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
9aa86737 14448
bcb604b6
FJ
14449 if (!WORLD_SAVE_P (info))
14450 {
14451 /* Save VRSAVE. */
14452 offset = info->vrsave_save_offset + sp_offset;
0be76840
DE
14453 mem = gen_frame_mem (SImode,
14454 gen_rtx_PLUS (Pmode, frame_reg_rtx,
14455 GEN_INT (offset)));
bcb604b6
FJ
14456 insn = emit_move_insn (mem, reg);
14457 }
9aa86737
AH
14458
14459 /* Include the registers in the mask. */
14460 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14461
14462 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14463 }
14464
9ebbca7d 14465 /* If we use the link register, get it into r0. */
f57fe068 14466 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8
GK
14467 {
14468 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14469 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14470 RTX_FRAME_RELATED_P (insn) = 1;
14471 }
9ebbca7d
GK
14472
14473 /* If we need to save CR, put it into r12. */
f57fe068 14474 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 14475 {
f8a57be8 14476 rtx set;
f676971a 14477
9ebbca7d 14478 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
14479 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14480 RTX_FRAME_RELATED_P (insn) = 1;
14481 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14482 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14483 But that's OK. All we have to do is specify that _one_ condition
14484 code register is saved in this stack slot. The thrower's epilogue
14485 will then restore all the call-saved registers.
14486 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14487 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14488 gen_rtx_REG (SImode, CR2_REGNO));
14489 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14490 set,
14491 REG_NOTES (insn));
9ebbca7d
GK
14492 }
14493
a4f6c312
SS
14494 /* Do any required saving of fpr's. If only one or two to save, do
14495 it ourselves. Otherwise, call a helper function. */
f57fe068 14496 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
14497 {
14498 int i;
14499 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14500 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d 14501 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
14502 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14503 info->first_fp_reg_save + i,
14504 info->fp_save_offset + sp_offset + 8 * i,
14505 info->total_size);
9ebbca7d 14506 }
f57fe068 14507 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
14508 {
14509 int i;
14510 char rname[30];
520a57c8 14511 const char *alloc_rname;
9ebbca7d
GK
14512 rtvec p;
14513 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
14514
14515 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14516 gen_rtx_REG (Pmode,
9ebbca7d
GK
14517 LINK_REGISTER_REGNUM));
14518 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14519 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 14520 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
14521 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14522 gen_rtx_SYMBOL_REF (Pmode,
14523 alloc_rname));
14524 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14525 {
14526 rtx addr, reg, mem;
14527 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14528 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 14529 GEN_INT (info->fp_save_offset
9ebbca7d 14530 + sp_offset + 8*i));
0be76840 14531 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
14532
14533 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14534 }
14535 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14536 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
14537 NULL_RTX, NULL_RTX);
14538 }
b6c9286a 14539
9ebbca7d
GK
14540 /* Save GPRs. This is done as a PARALLEL if we are using
14541 the store-multiple instructions. */
f57fe068 14542 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 14543 {
308c142a 14544 rtvec p;
9ebbca7d
GK
14545 int i;
14546 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
14547 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14548 {
14549 rtx addr, reg, mem;
14550 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
14551 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14552 GEN_INT (info->gp_save_offset
14553 + sp_offset
9ebbca7d 14554 + reg_size * i));
0be76840 14555 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
14556
14557 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14558 }
14559 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14560 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 14561 NULL_RTX, NULL_RTX);
b6c9286a 14562 }
f57fe068 14563 else if (!WORLD_SAVE_P (info))
b6c9286a 14564 {
9ebbca7d
GK
14565 int i;
14566 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14567 if ((regs_ever_live[info->first_gp_reg_save + i]
14568 && (!call_used_regs[info->first_gp_reg_save + i]
14569 || (i + info->first_gp_reg_save
b4db40bf
JJ
14570 == RS6000_PIC_OFFSET_TABLE_REGNUM
14571 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14572 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14573 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14574 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
a3170dc6
AH
14575 {
14576 rtx addr, reg, mem;
14577 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14578
c19de7aa 14579 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14580 {
14581 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14582 rtx b;
14583
14584 if (!SPE_CONST_OFFSET_OK (offset))
14585 {
14586 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14587 emit_move_insn (b, GEN_INT (offset));
14588 }
14589 else
14590 b = GEN_INT (offset);
14591
14592 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
0be76840 14593 mem = gen_frame_mem (V2SImode, addr);
a3170dc6
AH
14594 insn = emit_move_insn (mem, reg);
14595
14596 if (GET_CODE (b) == CONST_INT)
14597 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14598 NULL_RTX, NULL_RTX);
14599 else
14600 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14601 b, GEN_INT (offset));
14602 }
14603 else
14604 {
f676971a
EC
14605 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14606 GEN_INT (info->gp_save_offset
14607 + sp_offset
a3170dc6 14608 + reg_size * i));
0be76840 14609 mem = gen_frame_mem (reg_mode, addr);
a3170dc6
AH
14610
14611 insn = emit_move_insn (mem, reg);
f676971a 14612 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
a3170dc6
AH
14613 NULL_RTX, NULL_RTX);
14614 }
14615 }
9ebbca7d
GK
14616 }
14617
83720594
RH
14618 /* ??? There's no need to emit actual instructions here, but it's the
14619 easiest way to get the frame unwind information emitted. */
f57fe068 14620 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
83720594 14621 {
78e1b90d
DE
14622 unsigned int i, regno;
14623
fc4767bb
JJ
14624 /* In the AIX ABI we need to pretend we save r2 here. */
14625 if (TARGET_AIX)
14626 {
14627 rtx addr, reg, mem;
14628
14629 reg = gen_rtx_REG (reg_mode, 2);
14630 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14631 GEN_INT (sp_offset + 5 * reg_size));
0be76840 14632 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
14633
14634 insn = emit_move_insn (mem, reg);
f676971a 14635 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
14636 NULL_RTX, NULL_RTX);
14637 PATTERN (insn) = gen_blockage ();
14638 }
14639
83720594
RH
14640 for (i = 0; ; ++i)
14641 {
83720594
RH
14642 regno = EH_RETURN_DATA_REGNO (i);
14643 if (regno == INVALID_REGNUM)
14644 break;
14645
89e7058f
AH
14646 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14647 info->ehrd_offset + sp_offset
14648 + reg_size * (int) i,
14649 info->total_size);
83720594
RH
14650 }
14651 }
14652
9ebbca7d 14653 /* Save lr if we used it. */
f57fe068 14654 if (!WORLD_SAVE_P (info) && info->lr_save_p)
9ebbca7d
GK
14655 {
14656 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14657 GEN_INT (info->lr_save_offset + sp_offset));
14658 rtx reg = gen_rtx_REG (Pmode, 0);
14659 rtx mem = gen_rtx_MEM (Pmode, addr);
0be76840 14660 /* This MEM should not be in the frame alias set, because of
9ebbca7d 14661 __builtin_return_address. */
f676971a 14662
9ebbca7d 14663 insn = emit_move_insn (mem, reg);
f676971a 14664 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14665 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14666 }
14667
14668 /* Save CR if we use any that must be preserved. */
f57fe068 14669 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
14670 {
14671 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14672 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 14673 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
14674 /* See the large comment above about why CR2_REGNO is used. */
14675 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 14676
9ebbca7d
GK
14677 /* If r12 was used to hold the original sp, copy cr into r0 now
14678 that it's free. */
14679 if (REGNO (frame_reg_rtx) == 12)
14680 {
f8a57be8
GK
14681 rtx set;
14682
9ebbca7d 14683 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
14684 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14685 RTX_FRAME_RELATED_P (insn) = 1;
14686 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14687 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14688 set,
14689 REG_NOTES (insn));
f676971a 14690
9ebbca7d
GK
14691 }
14692 insn = emit_move_insn (mem, cr_save_rtx);
14693
f676971a 14694 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14695 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14696 }
14697
f676971a 14698 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 14699 for which it was done previously. */
f57fe068 14700 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 14701 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
9ebbca7d
GK
14702 rs6000_emit_allocate_stack (info->total_size, FALSE);
14703
14704 /* Set frame pointer, if needed. */
14705 if (frame_pointer_needed)
14706 {
7d5175e1 14707 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
14708 sp_reg_rtx);
14709 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 14710 }
9878760c 14711
1db02437 14712 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 14713 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
14714 || (DEFAULT_ABI == ABI_V4
14715 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
1db02437 14716 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
c4ad648e
AM
14717 {
14718 /* If emit_load_toc_table will use the link register, we need to save
14719 it. We use R12 for this purpose because emit_load_toc_table
14720 can use register 0. This allows us to use a plain 'blr' to return
14721 from the procedure more often. */
14722 int save_LR_around_toc_setup = (TARGET_ELF
14723 && DEFAULT_ABI != ABI_AIX
14724 && flag_pic
14725 && ! info->lr_save_p
14726 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14727 if (save_LR_around_toc_setup)
14728 {
14729 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
f8a57be8 14730
c4ad648e
AM
14731 insn = emit_move_insn (frame_ptr_rtx, lr);
14732 rs6000_maybe_dead (insn);
14733 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 14734
c4ad648e 14735 rs6000_emit_load_toc_table (TRUE);
f8a57be8 14736
c4ad648e
AM
14737 insn = emit_move_insn (lr, frame_ptr_rtx);
14738 rs6000_maybe_dead (insn);
14739 RTX_FRAME_RELATED_P (insn) = 1;
14740 }
14741 else
14742 rs6000_emit_load_toc_table (TRUE);
14743 }
ee890fe2 14744
fcce224d 14745#if TARGET_MACHO
ee890fe2
SS
14746 if (DEFAULT_ABI == ABI_DARWIN
14747 && flag_pic && current_function_uses_pic_offset_table)
14748 {
f8a57be8 14749 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11abc112 14750 rtx src = machopic_function_base_sym ();
ee890fe2 14751
6d0a8091
DJ
14752 /* Save and restore LR locally around this call (in R0). */
14753 if (!info->lr_save_p)
14754 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14755
f8a57be8 14756 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
ee890fe2 14757
f676971a 14758 insn = emit_move_insn (gen_rtx_REG (Pmode,
f8a57be8
GK
14759 RS6000_PIC_OFFSET_TABLE_REGNUM),
14760 lr);
14761 rs6000_maybe_dead (insn);
6d0a8091
DJ
14762
14763 if (!info->lr_save_p)
14764 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
ee890fe2 14765 }
fcce224d 14766#endif
9ebbca7d
GK
14767}
14768
9ebbca7d 14769/* Write function prologue. */
a4f6c312 14770
08c148a8 14771static void
f676971a 14772rs6000_output_function_prologue (FILE *file,
a2369ed3 14773 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
14774{
14775 rs6000_stack_t *info = rs6000_stack_info ();
14776
4697a36c
MM
14777 if (TARGET_DEBUG_STACK)
14778 debug_stack_info (info);
9878760c 14779
a4f6c312
SS
14780 /* Write .extern for any function we will call to save and restore
14781 fp values. */
14782 if (info->first_fp_reg_save < 64
14783 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 14784 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 14785 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
14786 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14787 RESTORE_FP_SUFFIX);
9878760c 14788
c764f757
RK
14789 /* Write .extern for AIX common mode routines, if needed. */
14790 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14791 {
f6709c70
JW
14792 fputs ("\t.extern __mulh\n", file);
14793 fputs ("\t.extern __mull\n", file);
14794 fputs ("\t.extern __divss\n", file);
14795 fputs ("\t.extern __divus\n", file);
14796 fputs ("\t.extern __quoss\n", file);
14797 fputs ("\t.extern __quous\n", file);
c764f757
RK
14798 common_mode_defined = 1;
14799 }
9878760c 14800
9ebbca7d 14801 if (! HAVE_prologue)
979721f8 14802 {
9ebbca7d 14803 start_sequence ();
9dda4cc8 14804
a4f6c312
SS
14805 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14806 the "toplevel" insn chain. */
2e040219 14807 emit_note (NOTE_INSN_DELETED);
9ebbca7d 14808 rs6000_emit_prologue ();
2e040219 14809 emit_note (NOTE_INSN_DELETED);
178c3eff 14810
a3c9585f 14811 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
14812 {
14813 rtx insn;
14814 unsigned addr = 0;
14815 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14816 {
14817 INSN_ADDRESSES_NEW (insn, addr);
14818 addr += 4;
14819 }
14820 }
9dda4cc8 14821
9ebbca7d 14822 if (TARGET_DEBUG_STACK)
a4f6c312 14823 debug_rtx_list (get_insns (), 100);
c9d691e9 14824 final (get_insns (), file, FALSE);
9ebbca7d 14825 end_sequence ();
979721f8
MM
14826 }
14827
9ebbca7d
GK
14828 rs6000_pic_labelno++;
14829}
f676971a 14830
9ebbca7d 14831/* Emit function epilogue as insns.
9878760c 14832
9ebbca7d
GK
14833 At present, dwarf2out_frame_debug_expr doesn't understand
14834 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14835 anywhere in the epilogue. Most of the insns below would in any case
14836 need special notes to explain where r11 is in relation to the stack. */
9878760c 14837
9ebbca7d 14838void
a2369ed3 14839rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
14840{
14841 rs6000_stack_t *info;
14842 int restoring_FPRs_inline;
14843 int using_load_multiple;
14844 int using_mfcr_multiple;
14845 int use_backchain_to_restore_sp;
14846 int sp_offset = 0;
14847 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14848 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 14849 enum machine_mode reg_mode = Pmode;
327e5343 14850 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14851 int i;
14852
c19de7aa
AH
14853 info = rs6000_stack_info ();
14854
14855 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14856 {
14857 reg_mode = V2SImode;
14858 reg_size = 8;
14859 }
14860
9ebbca7d 14861 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14862 && (!TARGET_SPE_ABI
14863 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14864 && info->first_gp_reg_save < 31
14865 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14866 restoring_FPRs_inline = (sibcall
83720594 14867 || current_function_calls_eh_return
9ebbca7d
GK
14868 || info->first_fp_reg_save == 64
14869 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 14870 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
14871 || current_function_calls_alloca
14872 || info->total_size > 32767);
14873 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14874 || rs6000_cpu == PROCESSOR_PPC603
14875 || rs6000_cpu == PROCESSOR_PPC750
14876 || optimize_size);
14877
f57fe068 14878 if (WORLD_SAVE_P (info))
d62294f5
FJ
14879 {
14880 int i, j;
14881 char rname[30];
14882 const char *alloc_rname;
14883 rtvec p;
14884
14885 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
14886 stack slot (which is not likely to be our caller).
14887 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14888 rest_world is similar, except any R10 parameter is ignored.
14889 The exception-handling stuff that was here in 2.95 is no
14890 longer necessary. */
d62294f5
FJ
14891
14892 p = rtvec_alloc (9
14893 + 1
f676971a 14894 + 32 - info->first_gp_reg_save
c4ad648e
AM
14895 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14896 + 63 + 1 - info->first_fp_reg_save);
d62294f5 14897
c4ad648e
AM
14898 strcpy (rname, ((current_function_calls_eh_return) ?
14899 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
14900 alloc_rname = ggc_strdup (rname);
14901
14902 j = 0;
14903 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14904 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14905 gen_rtx_REG (Pmode,
14906 LINK_REGISTER_REGNUM));
d62294f5 14907 RTVEC_ELT (p, j++)
c4ad648e 14908 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 14909 /* The instruction pattern requires a clobber here;
c4ad648e 14910 it is shared with the restVEC helper. */
d62294f5 14911 RTVEC_ELT (p, j++)
c4ad648e 14912 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
14913
14914 {
c4ad648e
AM
14915 /* CR register traditionally saved as CR2. */
14916 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14917 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14918 GEN_INT (info->cr_save_offset));
0be76840 14919 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14920
14921 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
14922 }
14923
14924 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14925 {
14926 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14927 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14928 GEN_INT (info->gp_save_offset
14929 + reg_size * i));
0be76840 14930 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14931
14932 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14933 }
d62294f5 14934 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14935 {
14936 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14937 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14938 GEN_INT (info->altivec_save_offset
14939 + 16 * i));
0be76840 14940 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
14941
14942 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14943 }
d62294f5 14944 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
14945 {
14946 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14947 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14948 GEN_INT (info->fp_save_offset
14949 + 8 * i));
0be76840 14950 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
14951
14952 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14953 }
d62294f5 14954 RTVEC_ELT (p, j++)
c4ad648e 14955 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 14956 RTVEC_ELT (p, j++)
c4ad648e 14957 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 14958 RTVEC_ELT (p, j++)
c4ad648e 14959 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 14960 RTVEC_ELT (p, j++)
c4ad648e 14961 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 14962 RTVEC_ELT (p, j++)
c4ad648e 14963 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
14964 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14965
14966 return;
14967 }
14968
9ebbca7d
GK
14969 /* If we have a frame pointer, a call to alloca, or a large stack
14970 frame, restore the old stack pointer using the backchain. Otherwise,
14971 we know what size to update it with. */
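  /* A rough C-level sketch of the two paths handled below (illustrative
     only; the real code emits the equivalent RTL):

	if (use_backchain_to_restore_sp)
	  frame_reg = *(void **) sp;      -- reload the saved SP via the backchain
	else if (info->push_p)
	  sp += info->total_size;         -- or defer it via sp_offset on V.4/EH

     so either the backchain word at 0(r1) or the known frame size undoes
     the prologue's stack adjustment.  */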
14972 if (use_backchain_to_restore_sp)
bacbde18 14973 {
9ebbca7d
GK
14974 /* Under V.4, don't reset the stack pointer until after we're done
14975 loading the saved registers. */
f607bc57 14976 if (DEFAULT_ABI == ABI_V4)
9ebbca7d 14977 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
4697a36c 14978
9ebbca7d
GK
14979 emit_move_insn (frame_reg_rtx,
14980 gen_rtx_MEM (Pmode, sp_reg_rtx));
bacbde18 14981 }
9ebbca7d 14982 else if (info->push_p)
85638c0d 14983 {
fc4767bb
JJ
14984 if (DEFAULT_ABI == ABI_V4
14985 || current_function_calls_eh_return)
9ebbca7d
GK
14986 sp_offset = info->total_size;
14987 else
14988 {
14989 emit_insn (TARGET_32BIT
14990 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14991 GEN_INT (info->total_size))
14992 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14993 GEN_INT (info->total_size)));
14994 }
85638c0d 14995 }
f676971a 14996
9aa86737
AH
14997 /* Restore AltiVec registers if needed. */
14998 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14999 {
15000 int i;
15001
15002 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15003 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15004 {
15005 rtx addr, areg, mem;
15006
15007 areg = gen_rtx_REG (Pmode, 0);
15008 emit_move_insn
15009 (areg, GEN_INT (info->altivec_save_offset
15010 + sp_offset
15011 + 16 * (i - info->first_altivec_reg_save)));
15012
15013 /* AltiVec addressing mode is [reg+reg]. */
15014 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 15015 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
15016
15017 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15018 }
15019 }
15020
15021 /* Restore VRSAVE if needed. */
44688022 15022 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 15023 && info->vrsave_mask != 0)
9aa86737
AH
15024 {
15025 rtx addr, mem, reg;
15026
15027 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15028 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 15029 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
15030 reg = gen_rtx_REG (SImode, 12);
15031 emit_move_insn (reg, mem);
15032
15033 emit_insn (generate_set_vrsave (reg, info, 1));
15034 }
15035
9ebbca7d
GK
15036 /* Get the old lr if we saved it. */
15037 if (info->lr_save_p)
b6c9286a 15038 {
a3170dc6
AH
15039 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15040 info->lr_save_offset + sp_offset);
ba4828e0 15041
9ebbca7d 15042 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 15043 }
f676971a 15044
9ebbca7d
GK
15045 /* Get the old cr if we saved it. */
15046 if (info->cr_save_p)
15047 {
15048 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15049 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15050 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 15051
9ebbca7d
GK
15052 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15053 }
f676971a 15054
9ebbca7d 15055 /* Set LR here to try to overlap restores below. */
4697a36c 15056 if (info->lr_save_p)
9ebbca7d
GK
15057 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
15058 gen_rtx_REG (Pmode, 0));
f676971a 15059
83720594
RH
15060 /* Load exception handler data registers, if needed. */
15061 if (current_function_calls_eh_return)
15062 {
78e1b90d
DE
15063 unsigned int i, regno;
15064
fc4767bb
JJ
15065 if (TARGET_AIX)
15066 {
15067 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15068 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15069 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15070
15071 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15072 }
15073
83720594
RH
15074 for (i = 0; ; ++i)
15075 {
a3170dc6 15076 rtx mem;
83720594
RH
15077
15078 regno = EH_RETURN_DATA_REGNO (i);
15079 if (regno == INVALID_REGNUM)
15080 break;
15081
a3170dc6
AH
15082 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15083 info->ehrd_offset + sp_offset
15084 + reg_size * (int) i);
83720594
RH
15085
15086 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15087 }
15088 }
f676971a 15089
9ebbca7d
GK
15090 /* Restore GPRs. This is done as a PARALLEL if we are using
15091 the load-multiple instructions. */
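  /* With TARGET_MULTIPLE on a 32-bit target the PARALLEL below becomes a
     single lmw reloading r<first_gp_reg_save>..r31; otherwise each live
     GPR is reloaded individually, using V2SImode loads for the 64-bit
     SPE registers.  */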
15092 if (using_load_multiple)
979721f8 15093 {
9ebbca7d
GK
15094 rtvec p;
15095 p = rtvec_alloc (32 - info->first_gp_reg_save);
15096 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 15097 {
f676971a
EC
15098 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15099 GEN_INT (info->gp_save_offset
15100 + sp_offset
9ebbca7d 15101 + reg_size * i));
0be76840 15102 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 15103
f676971a 15104 RTVEC_ELT (p, i) =
9ebbca7d
GK
15105 gen_rtx_SET (VOIDmode,
15106 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15107 mem);
979721f8 15108 }
9ebbca7d 15109 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 15110 }
9ebbca7d
GK
15111 else
15112 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
15113 if ((regs_ever_live[info->first_gp_reg_save + i]
15114 && (!call_used_regs[info->first_gp_reg_save + i]
15115 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
b4db40bf 15116 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 15117 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 15118 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 15119 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9ebbca7d 15120 {
f676971a
EC
15121 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15122 GEN_INT (info->gp_save_offset
15123 + sp_offset
9ebbca7d 15124 + reg_size * i));
0be76840 15125 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 15126
a3170dc6 15127 /* Restore 64-bit quantities for SPE. */
c19de7aa 15128 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15129 {
15130 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
15131 rtx b;
15132
15133 if (!SPE_CONST_OFFSET_OK (offset))
15134 {
15135 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15136 emit_move_insn (b, GEN_INT (offset));
15137 }
15138 else
15139 b = GEN_INT (offset);
15140
15141 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
0be76840 15142 mem = gen_frame_mem (V2SImode, addr);
a3170dc6
AH
15143 }
15144
f676971a 15145 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 15146 info->first_gp_reg_save + i), mem);
9ebbca7d 15147 }
9878760c 15148
9ebbca7d
GK
15149 /* Restore fpr's if we need to do it without calling a function. */
15150 if (restoring_FPRs_inline)
15151 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 15152 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d
GK
15153 && ! call_used_regs[info->first_fp_reg_save+i]))
15154 {
15155 rtx addr, mem;
15156 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
15157 GEN_INT (info->fp_save_offset
15158 + sp_offset
a4f6c312 15159 + 8 * i));
0be76840 15160 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15161
f676971a 15162 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
15163 info->first_fp_reg_save + i),
15164 mem);
15165 }
8d30c4ee 15166
9ebbca7d
GK
15167 /* If we saved cr, restore it here. Just those that were used. */
15168 if (info->cr_save_p)
979721f8 15169 {
9ebbca7d 15170 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 15171 int count = 0;
f676971a 15172
9ebbca7d 15173 if (using_mfcr_multiple)
979721f8 15174 {
9ebbca7d
GK
15175 for (i = 0; i < 8; i++)
15176 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
e35b9579 15177 count++;
37409796 15178 gcc_assert (count);
e35b9579
GK
15179 }
15180
15181 if (using_mfcr_multiple && count > 1)
15182 {
15183 rtvec p;
15184 int ndx;
f676971a 15185
e35b9579 15186 p = rtvec_alloc (count);
9ebbca7d 15187
e35b9579 15188 ndx = 0;
9ebbca7d
GK
15189 for (i = 0; i < 8; i++)
15190 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15191 {
15192 rtvec r = rtvec_alloc (2);
15193 RTVEC_ELT (r, 0) = r12_rtx;
15194 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 15195 RTVEC_ELT (p, ndx) =
f676971a 15196 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 15197 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 15198 ndx++;
9ebbca7d
GK
15199 }
15200 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 15201 gcc_assert (ndx == count);
979721f8
MM
15202 }
15203 else
9ebbca7d
GK
15204 for (i = 0; i < 8; i++)
15205 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
979721f8 15206 {
f676971a 15207 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
15208 CR0_REGNO+i),
15209 r12_rtx));
979721f8 15210 }
979721f8
MM
15211 }
15212
9ebbca7d 15213 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
15214 have been done. */
15215 if (frame_reg_rtx != sp_reg_rtx)
15216 {
15217 /* This blockage is needed so that sched doesn't decide to move
15218 the sp change before the register restores. */
15219 rs6000_emit_stack_tie ();
15220 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15221 }
15222 else if (sp_offset != 0)
15223 emit_insn (TARGET_32BIT
15224 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15225 GEN_INT (sp_offset))
15226 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15227 GEN_INT (sp_offset)));
b6c9286a 15228
83720594
RH
15229 if (current_function_calls_eh_return)
15230 {
15231 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 15232 emit_insn (TARGET_32BIT
83720594
RH
15233 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15234 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15235 }
15236
9ebbca7d
GK
15237 if (!sibcall)
15238 {
15239 rtvec p;
15240 if (! restoring_FPRs_inline)
15241 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15242 else
15243 p = rtvec_alloc (2);
b6c9286a 15244
e35b9579 15245 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
15246 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15247 gen_rtx_REG (Pmode,
9ebbca7d 15248 LINK_REGISTER_REGNUM));
9ebbca7d
GK
15249
15250 /* If we have to restore more than two FP registers, branch to the
15251 restore function. It will return to our caller. */
15252 if (! restoring_FPRs_inline)
15253 {
15254 int i;
15255 char rname[30];
520a57c8 15256 const char *alloc_rname;
979721f8 15257
f676971a 15258 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 15259 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 15260 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15261 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15262 gen_rtx_SYMBOL_REF (Pmode,
15263 alloc_rname));
b6c9286a 15264
9ebbca7d
GK
15265 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15266 {
15267 rtx addr, mem;
15268 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15269 GEN_INT (info->fp_save_offset + 8*i));
0be76840 15270 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15271
f676971a 15272 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
15273 gen_rtx_SET (VOIDmode,
15274 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15275 mem);
b6c9286a
MM
15276 }
15277 }
f676971a 15278
9ebbca7d 15279 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 15280 }
9878760c
RK
15281}
15282
15283/* Write function epilogue. */
15284
08c148a8 15285static void
f676971a 15286rs6000_output_function_epilogue (FILE *file,
a2369ed3 15287 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 15288{
9ebbca7d 15289 if (! HAVE_epilogue)
9878760c 15290 {
9ebbca7d
GK
15291 rtx insn = get_last_insn ();
15292 /* If the last insn was a BARRIER, we don't have to write anything except
15293 the trace table. */
15294 if (GET_CODE (insn) == NOTE)
15295 insn = prev_nonnote_insn (insn);
15296 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15297 {
9ebbca7d
GK
15298 /* This is slightly ugly, but at least we don't have two
15299 copies of the epilogue-emitting code. */
15300 start_sequence ();
15301
15302 /* A NOTE_INSN_DELETED is supposed to be at the start
15303 and end of the "toplevel" insn chain. */
2e040219 15304 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15305 rs6000_emit_epilogue (FALSE);
2e040219 15306 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15307
a3c9585f 15308 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15309 {
15310 rtx insn;
15311 unsigned addr = 0;
15312 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15313 {
15314 INSN_ADDRESSES_NEW (insn, addr);
15315 addr += 4;
15316 }
15317 }
15318
9ebbca7d 15319 if (TARGET_DEBUG_STACK)
a4f6c312 15320 debug_rtx_list (get_insns (), 100);
c9d691e9 15321 final (get_insns (), file, FALSE);
9ebbca7d 15322 end_sequence ();
4697a36c 15323 }
9878760c 15324 }
b4ac57ab 15325
efdba735
SH
15326#if TARGET_MACHO
15327 macho_branch_islands ();
0e5da0be
GK
15328 /* Mach-O doesn't support labels at the end of objects, so if
15329 it looks like we might want one, insert a NOP. */
15330 {
15331 rtx insn = get_last_insn ();
15332 while (insn
15333 && NOTE_P (insn)
15334 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15335 insn = PREV_INSN (insn);
f676971a
EC
15336 if (insn
15337 && (LABEL_P (insn)
0e5da0be
GK
15338 || (NOTE_P (insn)
15339 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15340 fputs ("\tnop\n", file);
15341 }
15342#endif
15343
9b30bae2 15344 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
15345 on its format.
15346
15347 We don't output a traceback table if -finhibit-size-directive was
15348 used. The documentation for -finhibit-size-directive reads
15349 ``don't output a @code{.size} assembler directive, or anything
15350 else that would cause trouble if the function is split in the
15351 middle, and the two halves are placed at locations far apart in
15352 memory.'' The traceback table has this property, since it
15353 includes the offset from the start of the function to the
4d30c363
MM
15354 traceback table itself.
15355
15356 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 15357 different traceback table. */
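 /* The mandatory fields emitted below appear in this order: an all-zero
    word marking the start of the table, the format byte, the language
    byte, two bitfield bytes, a byte combining push_p with the count of
    saved FPRs, a byte holding the count of saved GPRs, then the fixed
    parameter word count and the float parameter count (plus the
    parameters-on-stack bit).  The optional fields -- parm_info, the
    offset back to the function start, the name length, the name itself
    and the alloca register -- follow only when optional_tbtab is set.  */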
57ac7be9 15358 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 15359 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 15360 {
69c75916 15361 const char *fname = NULL;
3ac88239 15362 const char *language_string = lang_hooks.name;
6041bf2f 15363 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 15364 int i;
57ac7be9 15365 int optional_tbtab;
8097c268 15366 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
15367
15368 if (rs6000_traceback == traceback_full)
15369 optional_tbtab = 1;
15370 else if (rs6000_traceback == traceback_part)
15371 optional_tbtab = 0;
15372 else
15373 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 15374
69c75916
AM
15375 if (optional_tbtab)
15376 {
15377 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15378 while (*fname == '.') /* V.4 encodes . in the name */
15379 fname++;
15380
15381 /* Need label immediately before tbtab, so we can compute
15382 its offset from the function start. */
15383 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15384 ASM_OUTPUT_LABEL (file, fname);
15385 }
314fc5a9
ILT
15386
15387 /* The .tbtab pseudo-op can only be used for the first eight
15388 expressions, since it can't handle the possibly variable
15389 length fields that follow. However, if you omit the optional
15390 fields, the assembler outputs zeros for all optional fields
15391 anyway, giving each variable length field its minimum length
15392 (as defined in sys/debug.h). Thus we cannot use the .tbtab
15393 pseudo-op at all. */
15394
15395 /* An all-zero word flags the start of the tbtab, for debuggers
15396 that have to find it by searching forward from the entry
15397 point or from the current pc. */
19d2d16f 15398 fputs ("\t.long 0\n", file);
314fc5a9
ILT
15399
15400 /* Tbtab format type. Use format type 0. */
19d2d16f 15401 fputs ("\t.byte 0,", file);
314fc5a9 15402
5fc921c1
DE
15403 /* Language type. Unfortunately, there does not seem to be any
15404 official way to discover the language being compiled, so we
15405 use language_string.
15406 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
15407 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
15408 a number, so for now use 9. */
5fc921c1 15409 if (! strcmp (language_string, "GNU C"))
314fc5a9 15410 i = 0;
6de9cd9a
DN
15411 else if (! strcmp (language_string, "GNU F77")
15412 || ! strcmp (language_string, "GNU F95"))
314fc5a9 15413 i = 1;
8b83775b 15414 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 15415 i = 2;
5fc921c1
DE
15416 else if (! strcmp (language_string, "GNU Ada"))
15417 i = 3;
56438901
AM
15418 else if (! strcmp (language_string, "GNU C++")
15419 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 15420 i = 9;
9517ead8
AG
15421 else if (! strcmp (language_string, "GNU Java"))
15422 i = 13;
5fc921c1
DE
15423 else if (! strcmp (language_string, "GNU Objective-C"))
15424 i = 14;
314fc5a9 15425 else
37409796 15426 gcc_unreachable ();
314fc5a9
ILT
15427 fprintf (file, "%d,", i);
15428
15429 /* 8 single bit fields: global linkage (not set for C extern linkage,
15430 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15431 from start of procedure stored in tbtab, internal function, function
15432 has controlled storage, function has no toc, function uses fp,
15433 function logs/aborts fp operations. */
15434 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
15435 fprintf (file, "%d,",
15436 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
15437
15438 /* 6 bitfields: function is interrupt handler, name present in
15439 proc table, function calls alloca, on condition directives
15440 (controls stack walks, 3 bits), saves condition reg, saves
15441 link reg. */
15442 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15443 set up as a frame pointer, even when there is no alloca call. */
15444 fprintf (file, "%d,",
6041bf2f
DE
15445 ((optional_tbtab << 6)
15446 | ((optional_tbtab & frame_pointer_needed) << 5)
15447 | (info->cr_save_p << 1)
15448 | (info->lr_save_p)));
314fc5a9 15449
6041bf2f 15450 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
15451 (6 bits). */
15452 fprintf (file, "%d,",
4697a36c 15453 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
15454
15455 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15456 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15457
6041bf2f
DE
15458 if (optional_tbtab)
15459 {
15460 /* Compute the parameter info from the function decl argument
15461 list. */
15462 tree decl;
15463 int next_parm_info_bit = 31;
314fc5a9 15464
6041bf2f
DE
15465 for (decl = DECL_ARGUMENTS (current_function_decl);
15466 decl; decl = TREE_CHAIN (decl))
15467 {
15468 rtx parameter = DECL_INCOMING_RTL (decl);
15469 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 15470
6041bf2f
DE
15471 if (GET_CODE (parameter) == REG)
15472 {
ebb109ad 15473 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
15474 {
15475 int bits;
15476
15477 float_parms++;
15478
37409796
NS
15479 switch (mode)
15480 {
15481 case SFmode:
15482 bits = 0x2;
15483 break;
15484
15485 case DFmode:
15486 case TFmode:
15487 bits = 0x3;
15488 break;
15489
15490 default:
15491 gcc_unreachable ();
15492 }
6041bf2f
DE
15493
15494 /* If only one bit will fit, don't or in this entry. */
15495 if (next_parm_info_bit > 0)
15496 parm_info |= (bits << (next_parm_info_bit - 1));
15497 next_parm_info_bit -= 2;
15498 }
15499 else
15500 {
15501 fixed_parms += ((GET_MODE_SIZE (mode)
15502 + (UNITS_PER_WORD - 1))
15503 / UNITS_PER_WORD);
15504 next_parm_info_bit -= 1;
15505 }
15506 }
15507 }
15508 }
314fc5a9
ILT
15509
15510 /* Number of fixed point parameters. */
15511 /* This is actually the number of words of fixed point parameters; thus
15512 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15513 fprintf (file, "%d,", fixed_parms);
15514
15515 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15516 all on stack. */
15517 /* This is actually the number of fp registers that hold parameters;
15518 and thus the maximum value is 13. */
15519 /* Set parameters on stack bit if parameters are not in their original
15520 registers, regardless of whether they are on the stack? Xlc
15521 seems to set the bit when not optimizing. */
15522 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15523
6041bf2f
DE
15524 if (! optional_tbtab)
15525 return;
15526
314fc5a9
ILT
15527 /* Optional fields follow. Some are variable length. */
15528
15529 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15530 11 double float. */
15531 /* There is an entry for each parameter in a register, in the order that
15532 they occur in the parameter list. Any intervening arguments on the
15533 stack are ignored. If the list overflows a long (max possible length
15534 34 bits) then completely leave off all elements that don't fit. */
15535 /* Only emit this long if there was at least one parameter. */
15536 if (fixed_parms || float_parms)
15537 fprintf (file, "\t.long %d\n", parm_info);
15538
15539 /* Offset from start of code to tb table. */
19d2d16f 15540 fputs ("\t.long ", file);
314fc5a9 15541 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
15542 if (TARGET_AIX)
15543 RS6000_OUTPUT_BASENAME (file, fname);
15544 else
15545 assemble_name (file, fname);
15546 putc ('-', file);
15547 rs6000_output_function_entry (file, fname);
19d2d16f 15548 putc ('\n', file);
314fc5a9
ILT
15549
15550 /* Interrupt handler mask. */
15551 /* Omit this long, since we never set the interrupt handler bit
15552 above. */
15553
15554 /* Number of CTL (controlled storage) anchors. */
15555 /* Omit this long, since the has_ctl bit is never set above. */
15556
15557 /* Displacement into stack of each CTL anchor. */
15558 /* Omit this list of longs, because there are no CTL anchors. */
15559
15560 /* Length of function name. */
69c75916
AM
15561 if (*fname == '*')
15562 ++fname;
296b8152 15563 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
15564
15565 /* Function name. */
15566 assemble_string (fname, strlen (fname));
15567
15568 /* Register for alloca automatic storage; this is always reg 31.
15569 Only emit this if the alloca bit was set above. */
15570 if (frame_pointer_needed)
19d2d16f 15571 fputs ("\t.byte 31\n", file);
b1765bde
DE
15572
15573 fputs ("\t.align 2\n", file);
9b30bae2 15574 }
9878760c 15575}
17167fd8 15576\f
a4f6c312
SS
15577/* A C compound statement that outputs the assembler code for a thunk
15578 function, used to implement C++ virtual function calls with
15579 multiple inheritance. The thunk acts as a wrapper around a virtual
15580 function, adjusting the implicit object parameter before handing
15581 control off to the real function.
15582
15583 First, emit code to add the integer DELTA to the location that
15584 contains the incoming first argument. Assume that this argument
15585 contains a pointer, and is the one used to pass the `this' pointer
15586 in C++. This is the incoming argument *before* the function
15587 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15588 values of all other incoming arguments.
17167fd8
MM
15589
15590 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
15591 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15592 not touch the return address. Hence returning from FUNCTION will
15593 return to whoever called the current `thunk'.
17167fd8 15594
a4f6c312
SS
15595 The effect must be as if FUNCTION had been called directly with the
15596 adjusted first argument. This macro is responsible for emitting
15597 all of the code for a thunk function; output_function_prologue()
15598 and output_function_epilogue() are not invoked.
17167fd8 15599
a4f6c312
SS
15600 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15601 been extracted from it.) It might possibly be useful on some
15602 targets, but probably not.
17167fd8 15603
a4f6c312
SS
15604 If you do not define this macro, the target-independent code in the
15605 C++ frontend will generate a less efficient heavyweight thunk that
15606 calls FUNCTION instead of jumping to it. The generic approach does
15607 not support varargs. */
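/* A rough C-level picture of what the emitted thunk does (purely
   illustrative; the RTL built below is the real definition):

     adjusted = this + delta;
     if (vcall_offset)
       adjusted += *(ptrdiff_t *) (*(char **) adjusted + vcall_offset);
     jump to function (adjusted, remaining arguments untouched);

   i.e. a sibling call that leaves LR, and hence the original return
   address, alone.  */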
17167fd8 15608
3961e8fe 15609static void
f676971a
EC
15610rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15611 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 15612 tree function)
17167fd8 15613{
5b71a4e7 15614 rtx this, insn, funexp;
17167fd8 15615
5b71a4e7 15616 reload_completed = 1;
fe3ad572 15617 epilogue_completed = 1;
5b71a4e7 15618 no_new_pseudos = 1;
6429e3be 15619 reset_block_changes ();
56a7189a 15620
5b71a4e7 15621 /* Mark the end of the (empty) prologue. */
2e040219 15622 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 15623
5b71a4e7
DE
15624 /* Find the "this" pointer. If the function returns a structure,
15625 the structure return pointer is in r3. */
61f71b34 15626 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 15627 this = gen_rtx_REG (Pmode, 4);
56a7189a 15628 else
5b71a4e7 15629 this = gen_rtx_REG (Pmode, 3);
17167fd8 15630
5b71a4e7
DE
15631 /* Apply the constant offset, if required. */
15632 if (delta)
15633 {
15634 rtx delta_rtx = GEN_INT (delta);
15635 emit_insn (TARGET_32BIT
15636 ? gen_addsi3 (this, this, delta_rtx)
15637 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
15638 }
15639
5b71a4e7
DE
15640 /* Apply the offset from the vtable, if required. */
15641 if (vcall_offset)
17167fd8 15642 {
5b71a4e7
DE
15643 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15644 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 15645
5b71a4e7 15646 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
15647 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15648 {
15649 emit_insn (TARGET_32BIT
15650 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15651 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15652 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15653 }
15654 else
15655 {
15656 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15657
15658 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15659 }
5b71a4e7
DE
15660 emit_insn (TARGET_32BIT
15661 ? gen_addsi3 (this, this, tmp)
15662 : gen_adddi3 (this, this, tmp));
17167fd8
MM
15663 }
15664
5b71a4e7
DE
15665 /* Generate a tail call to the target function. */
15666 if (!TREE_USED (function))
15667 {
15668 assemble_external (function);
15669 TREE_USED (function) = 1;
15670 }
15671 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 15672 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
15673
15674#if TARGET_MACHO
ab82a49f 15675 if (MACHOPIC_INDIRECT)
5b71a4e7 15676 funexp = machopic_indirect_call_target (funexp);
ee890fe2 15677#endif
5b71a4e7
DE
15678
15679 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 15680 generate sibcall RTL explicitly. */
5b71a4e7
DE
15681 insn = emit_call_insn (
15682 gen_rtx_PARALLEL (VOIDmode,
15683 gen_rtvec (4,
15684 gen_rtx_CALL (VOIDmode,
15685 funexp, const0_rtx),
15686 gen_rtx_USE (VOIDmode, const0_rtx),
15687 gen_rtx_USE (VOIDmode,
15688 gen_rtx_REG (SImode,
15689 LINK_REGISTER_REGNUM)),
15690 gen_rtx_RETURN (VOIDmode))));
15691 SIBLING_CALL_P (insn) = 1;
15692 emit_barrier ();
15693
15694 /* Run just enough of rest_of_compilation to get the insns emitted.
15695 There's not really enough bulk here to make other passes such as
15696 instruction scheduling worthwhile. Note that use_thunk calls
15697 assemble_start_function and assemble_end_function. */
15698 insn = get_insns ();
a2855205 15699 insn_locators_initialize ();
5b71a4e7
DE
15700 shorten_branches (insn);
15701 final_start_function (insn, file, 1);
c9d691e9 15702 final (insn, file, 1);
5b71a4e7
DE
15703 final_end_function ();
15704
15705 reload_completed = 0;
fe3ad572 15706 epilogue_completed = 0;
5b71a4e7 15707 no_new_pseudos = 0;
9ebbca7d 15708}
9ebbca7d
GK
15709\f
15710/* A quick summary of the various types of 'constant-pool tables'
15711 under PowerPC:
15712
15713   Target            Flags            Name             One table per
15714   AIX               (none)           AIX TOC          object file
15715   AIX               -mfull-toc       AIX TOC          object file
15716   AIX               -mminimal-toc    AIX minimal TOC  translation unit
15717   SVR4/EABI         (none)           SVR4 SDATA       object file
15718   SVR4/EABI         -fpic            SVR4 pic         object file
15719   SVR4/EABI         -fPIC            SVR4 PIC         translation unit
15720   SVR4/EABI         -mrelocatable    EABI TOC         function
15721   SVR4/EABI         -maix            AIX TOC          object file
15722   SVR4/EABI         -maix -mminimal-toc
15723                                      AIX minimal TOC  translation unit
15724
15725   Name              Reg.   Set by    entries   contains:
15726                            made by   addrs?    fp?      sum?
15727
15728   AIX TOC           2      crt0      as        Y        option   option
15729   AIX minimal TOC   30     prolog    gcc       Y        Y        option
15730   SVR4 SDATA        13     crt0      gcc       N        Y        N
15731   SVR4 pic          30     prolog    ld        Y        not yet  N
15732   SVR4 PIC          30     prolog    gcc       Y        option   option
15733   EABI TOC          30     prolog    gcc       Y        option   option
15734
15735*/
15736
9ebbca7d
GK
15737/* Hash functions for the hash table. */
15738
15739static unsigned
a2369ed3 15740rs6000_hash_constant (rtx k)
9ebbca7d 15741{
46b33600
RH
15742 enum rtx_code code = GET_CODE (k);
15743 enum machine_mode mode = GET_MODE (k);
15744 unsigned result = (code << 3) ^ mode;
15745 const char *format;
15746 int flen, fidx;
f676971a 15747
46b33600
RH
15748 format = GET_RTX_FORMAT (code);
15749 flen = strlen (format);
15750 fidx = 0;
9ebbca7d 15751
46b33600
RH
15752 switch (code)
15753 {
15754 case LABEL_REF:
15755 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15756
15757 case CONST_DOUBLE:
15758 if (mode != VOIDmode)
15759 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15760 flen = 2;
15761 break;
15762
15763 case CODE_LABEL:
15764 fidx = 3;
15765 break;
15766
15767 default:
15768 break;
15769 }
9ebbca7d
GK
15770
15771 for (; fidx < flen; fidx++)
15772 switch (format[fidx])
15773 {
15774 case 's':
15775 {
15776 unsigned i, len;
15777 const char *str = XSTR (k, fidx);
15778 len = strlen (str);
15779 result = result * 613 + len;
15780 for (i = 0; i < len; i++)
15781 result = result * 613 + (unsigned) str[i];
17167fd8
MM
15782 break;
15783 }
9ebbca7d
GK
15784 case 'u':
15785 case 'e':
15786 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15787 break;
15788 case 'i':
15789 case 'n':
15790 result = result * 613 + (unsigned) XINT (k, fidx);
15791 break;
15792 case 'w':
15793 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15794 result = result * 613 + (unsigned) XWINT (k, fidx);
15795 else
15796 {
15797 size_t i;
9390387d 15798 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
15799 result = result * 613 + (unsigned) (XWINT (k, fidx)
15800 >> CHAR_BIT * i);
15801 }
15802 break;
09501938
DE
15803 case '0':
15804 break;
9ebbca7d 15805 default:
37409796 15806 gcc_unreachable ();
9ebbca7d 15807 }
46b33600 15808
9ebbca7d
GK
15809 return result;
15810}
15811
15812static unsigned
a2369ed3 15813toc_hash_function (const void *hash_entry)
9ebbca7d 15814{
f676971a 15815 const struct toc_hash_struct *thc =
a9098fd0
GK
15816 (const struct toc_hash_struct *) hash_entry;
15817 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
15818}
15819
15820/* Compare H1 and H2 for equivalence. */
15821
15822static int
a2369ed3 15823toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
15824{
15825 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15826 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15827
a9098fd0
GK
15828 if (((const struct toc_hash_struct *) h1)->key_mode
15829 != ((const struct toc_hash_struct *) h2)->key_mode)
15830 return 0;
15831
5692c7bc 15832 return rtx_equal_p (r1, r2);
9ebbca7d
GK
15833}
15834
28e510bd
MM
15835/* These are the names given by the C++ front-end to vtables, and
15836 vtable-like objects. Ideally, this logic should not be here;
15837 instead, there should be some programmatic way of inquiring as
15838 to whether or not an object is a vtable. */
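/* The prefixes tested below cover the old g++ "_vt." scheme and the
   Itanium C++ ABI manglings: _ZTV (vtable), _ZTT (VTT), _ZTI (typeinfo)
   and _ZTC (construction vtable); e.g. "_ZTV4Base" names the vtable of
   a class called Base.  */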
15839
15840#define VTABLE_NAME_P(NAME) \
9390387d 15841 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
15842 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15843 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 15844 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 15845 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
15846
15847void
a2369ed3 15848rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
15849{
15850 /* Currently C++ toc references to vtables can be emitted before it
15851 is decided whether the vtable is public or private. If this is
15852 the case, then the linker will eventually complain that there is
f676971a 15853 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
15854 we emit the TOC reference to reference the symbol and not the
15855 section. */
15856 const char *name = XSTR (x, 0);
54ee9799 15857
f676971a 15858 if (VTABLE_NAME_P (name))
54ee9799
DE
15859 {
15860 RS6000_OUTPUT_BASENAME (file, name);
15861 }
15862 else
15863 assemble_name (file, name);
28e510bd
MM
15864}
15865
a4f6c312
SS
15866/* Output a TOC entry. We derive the entry name from what is being
15867 written. */
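/* For instance (illustrative; label spelling and spacing vary by target),
   a DFmode constant 1.0 on a 32-bit target without -mminimal-toc comes
   out roughly as

     LC..0:
	     .tc FD_3ff00000_0[TC],0x3ff00000,0x0

   while with -mminimal-toc only the raw ".long" values are written, and
   duplicate entries are commoned through toc_hash_table whenever the
   linker cannot be relied on to merge them.  */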
9878760c
RK
15868
15869void
a2369ed3 15870output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
15871{
15872 char buf[256];
3cce094d 15873 const char *name = buf;
ec940faa 15874 const char *real_name;
9878760c 15875 rtx base = x;
16fdeb48 15876 HOST_WIDE_INT offset = 0;
9878760c 15877
37409796 15878 gcc_assert (!TARGET_NO_TOC);
4697a36c 15879
9ebbca7d
GK
15880 /* When the linker won't eliminate them, don't output duplicate
15881 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
15882 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15883 CODE_LABELs. */
15884 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
15885 {
15886 struct toc_hash_struct *h;
15887 void * * found;
f676971a 15888
17211ab5 15889 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 15890 time because GGC is not initialized at that point. */
17211ab5 15891 if (toc_hash_table == NULL)
f676971a 15892 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
15893 toc_hash_eq, NULL);
15894
9ebbca7d
GK
15895 h = ggc_alloc (sizeof (*h));
15896 h->key = x;
a9098fd0 15897 h->key_mode = mode;
9ebbca7d 15898 h->labelno = labelno;
f676971a 15899
9ebbca7d
GK
15900 found = htab_find_slot (toc_hash_table, h, 1);
15901 if (*found == NULL)
15902 *found = h;
f676971a 15903 else /* This is indeed a duplicate.
9ebbca7d
GK
15904 Set this label equal to that label. */
15905 {
15906 fputs ("\t.set ", file);
15907 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15908 fprintf (file, "%d,", labelno);
15909 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 15910 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
15911 found)->labelno));
15912 return;
15913 }
15914 }
15915
15916 /* If we're going to put a double constant in the TOC, make sure it's
15917 aligned properly when strict alignment is on. */
ff1720ed
RK
15918 if (GET_CODE (x) == CONST_DOUBLE
15919 && STRICT_ALIGNMENT
a9098fd0 15920 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
15921 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15922 ASM_OUTPUT_ALIGN (file, 3);
15923 }
15924
4977bab6 15925 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 15926
37c37a57
RK
15927 /* Handle FP constants specially. Note that if we have a minimal
15928 TOC, things we put here aren't actually in the TOC, so we can allow
15929 FP constants. */
00b79d54
BE
15930 if (GET_CODE (x) == CONST_DOUBLE &&
15931 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
15932 {
15933 REAL_VALUE_TYPE rv;
15934 long k[4];
15935
15936 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
15937 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
15938 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
15939 else
15940 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
15941
15942 if (TARGET_64BIT)
15943 {
15944 if (TARGET_MINIMAL_TOC)
15945 fputs (DOUBLE_INT_ASM_OP, file);
15946 else
15947 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15948 k[0] & 0xffffffff, k[1] & 0xffffffff,
15949 k[2] & 0xffffffff, k[3] & 0xffffffff);
15950 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15951 k[0] & 0xffffffff, k[1] & 0xffffffff,
15952 k[2] & 0xffffffff, k[3] & 0xffffffff);
15953 return;
15954 }
15955 else
15956 {
15957 if (TARGET_MINIMAL_TOC)
15958 fputs ("\t.long ", file);
15959 else
15960 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15961 k[0] & 0xffffffff, k[1] & 0xffffffff,
15962 k[2] & 0xffffffff, k[3] & 0xffffffff);
15963 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15964 k[0] & 0xffffffff, k[1] & 0xffffffff,
15965 k[2] & 0xffffffff, k[3] & 0xffffffff);
15966 return;
15967 }
15968 }
00b79d54
BE
15969 else if (GET_CODE (x) == CONST_DOUBLE &&
15970 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 15971 {
042259f2
DE
15972 REAL_VALUE_TYPE rv;
15973 long k[2];
0adc764e 15974
042259f2 15975 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
15976
15977 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
15978 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
15979 else
15980 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 15981
13ded975
DE
15982 if (TARGET_64BIT)
15983 {
15984 if (TARGET_MINIMAL_TOC)
2bfcf297 15985 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 15986 else
2f0552b6
AM
15987 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15988 k[0] & 0xffffffff, k[1] & 0xffffffff);
15989 fprintf (file, "0x%lx%08lx\n",
15990 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
15991 return;
15992 }
1875cc88 15993 else
13ded975
DE
15994 {
15995 if (TARGET_MINIMAL_TOC)
2bfcf297 15996 fputs ("\t.long ", file);
13ded975 15997 else
2f0552b6
AM
15998 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15999 k[0] & 0xffffffff, k[1] & 0xffffffff);
16000 fprintf (file, "0x%lx,0x%lx\n",
16001 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16002 return;
16003 }
9878760c 16004 }
00b79d54
BE
16005 else if (GET_CODE (x) == CONST_DOUBLE &&
16006 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 16007 {
042259f2
DE
16008 REAL_VALUE_TYPE rv;
16009 long l;
9878760c 16010
042259f2 16011 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16012 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16013 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16014 else
16015 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 16016
31bfaa0b
DE
16017 if (TARGET_64BIT)
16018 {
16019 if (TARGET_MINIMAL_TOC)
2bfcf297 16020 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 16021 else
2f0552b6
AM
16022 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16023 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
16024 return;
16025 }
042259f2 16026 else
31bfaa0b
DE
16027 {
16028 if (TARGET_MINIMAL_TOC)
2bfcf297 16029 fputs ("\t.long ", file);
31bfaa0b 16030 else
2f0552b6
AM
16031 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16032 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
16033 return;
16034 }
042259f2 16035 }
f176e826 16036 else if (GET_MODE (x) == VOIDmode
a9098fd0 16037 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 16038 {
e2c953b6 16039 unsigned HOST_WIDE_INT low;
042259f2
DE
16040 HOST_WIDE_INT high;
16041
16042 if (GET_CODE (x) == CONST_DOUBLE)
16043 {
16044 low = CONST_DOUBLE_LOW (x);
16045 high = CONST_DOUBLE_HIGH (x);
16046 }
16047 else
16048#if HOST_BITS_PER_WIDE_INT == 32
16049 {
16050 low = INTVAL (x);
0858c623 16051 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
16052 }
16053#else
16054 {
c4ad648e
AM
16055 low = INTVAL (x) & 0xffffffff;
16056 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
16057 }
16058#endif
9878760c 16059
a9098fd0
GK
16060 /* TOC entries are always Pmode-sized, but since this
16061 is a big-endian machine, if we're putting smaller
16062 integer constants in the TOC we have to pad them.
16063 (This is still a win over putting the constants in
16064 a separate constant pool, because then we'd have
02a4ec28
FS
16065 to have both a TOC entry _and_ the actual constant.)
16066
16067 For a 32-bit target, CONST_INT values are loaded and shifted
16068 entirely within `low' and can be stored in one TOC entry. */
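	 /* For illustration: an HImode value such as 0x1234 headed for a
	    64-bit TOC is first shifted left by POINTER_SIZE - 16 = 48
	    bits, so its significant bits occupy the big-endian start of
	    the 8-byte entry.  */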
16069
37409796
NS
16070 /* It would be easy to make this work, but it doesn't now. */
16071 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
16072
16073 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
16074 {
16075#if HOST_BITS_PER_WIDE_INT == 32
16076 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16077 POINTER_SIZE, &low, &high, 0);
16078#else
16079 low |= high << 32;
16080 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16081 high = (HOST_WIDE_INT) low >> 32;
16082 low &= 0xffffffff;
16083#endif
16084 }
a9098fd0 16085
13ded975
DE
16086 if (TARGET_64BIT)
16087 {
16088 if (TARGET_MINIMAL_TOC)
2bfcf297 16089 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16090 else
2f0552b6
AM
16091 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16092 (long) high & 0xffffffff, (long) low & 0xffffffff);
16093 fprintf (file, "0x%lx%08lx\n",
16094 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
16095 return;
16096 }
1875cc88 16097 else
13ded975 16098 {
02a4ec28
FS
16099 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16100 {
16101 if (TARGET_MINIMAL_TOC)
2bfcf297 16102 fputs ("\t.long ", file);
02a4ec28 16103 else
2bfcf297 16104 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
16105 (long) high & 0xffffffff, (long) low & 0xffffffff);
16106 fprintf (file, "0x%lx,0x%lx\n",
16107 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 16108 }
13ded975 16109 else
02a4ec28
FS
16110 {
16111 if (TARGET_MINIMAL_TOC)
2bfcf297 16112 fputs ("\t.long ", file);
02a4ec28 16113 else
2f0552b6
AM
16114 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16115 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 16116 }
13ded975
DE
16117 return;
16118 }
9878760c
RK
16119 }
16120
16121 if (GET_CODE (x) == CONST)
16122 {
37409796 16123 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 16124
9878760c
RK
16125 base = XEXP (XEXP (x, 0), 0);
16126 offset = INTVAL (XEXP (XEXP (x, 0), 1));
16127 }
f676971a 16128
37409796
NS
16129 switch (GET_CODE (base))
16130 {
16131 case SYMBOL_REF:
16132 name = XSTR (base, 0);
16133 break;
16134
16135 case LABEL_REF:
16136 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16137 CODE_LABEL_NUMBER (XEXP (base, 0)));
16138 break;
16139
16140 case CODE_LABEL:
16141 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16142 break;
16143
16144 default:
16145 gcc_unreachable ();
16146 }
9878760c 16147
772c5265 16148 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 16149 if (TARGET_MINIMAL_TOC)
2bfcf297 16150 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
16151 else
16152 {
b6c9286a 16153 fprintf (file, "\t.tc %s", real_name);
9878760c 16154
1875cc88 16155 if (offset < 0)
16fdeb48 16156 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 16157 else if (offset)
16fdeb48 16158 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 16159
19d2d16f 16160 fputs ("[TC],", file);
1875cc88 16161 }
581bc4de
MM
16162
16163 /* Currently C++ toc references to vtables can be emitted before it
16164 is decided whether the vtable is public or private. If this is
16165 the case, then the linker will eventually complain that there is
16166 a TOC reference to an unknown section. Thus, for vtables only,
16167 we emit the TOC reference to reference the symbol and not the
16168 section. */
28e510bd 16169 if (VTABLE_NAME_P (name))
581bc4de 16170 {
54ee9799 16171 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 16172 if (offset < 0)
16fdeb48 16173 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 16174 else if (offset > 0)
16fdeb48 16175 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
16176 }
16177 else
16178 output_addr_const (file, x);
19d2d16f 16179 putc ('\n', file);
9878760c
RK
16180}
16181\f
16182/* Output an assembler pseudo-op to write an ASCII string of N characters
16183 starting at P to FILE.
16184
16185 On the RS/6000, we have to do this using the .byte operation and
16186 write out special characters outside the quoted string.
16187 Also, the assembler is broken; very long strings are truncated,
a4f6c312 16188 so we must artificially break them up early. */
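/* Example (illustrative): output_ascii (file, "OK\n", 3) produces

	.byte "OK"
	.byte 10

   printable characters are collected into quoted .byte strings, runs of
   non-printable bytes are written as comma-separated decimal values, and
   a fresh string is started after every 512 characters.  */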
9878760c
RK
16189
16190void
a2369ed3 16191output_ascii (FILE *file, const char *p, int n)
9878760c
RK
16192{
16193 char c;
16194 int i, count_string;
d330fd93
KG
16195 const char *for_string = "\t.byte \"";
16196 const char *for_decimal = "\t.byte ";
16197 const char *to_close = NULL;
9878760c
RK
16198
16199 count_string = 0;
16200 for (i = 0; i < n; i++)
16201 {
16202 c = *p++;
16203 if (c >= ' ' && c < 0177)
16204 {
16205 if (for_string)
16206 fputs (for_string, file);
16207 putc (c, file);
16208
16209 /* Write two quotes to get one. */
16210 if (c == '"')
16211 {
16212 putc (c, file);
16213 ++count_string;
16214 }
16215
16216 for_string = NULL;
16217 for_decimal = "\"\n\t.byte ";
16218 to_close = "\"\n";
16219 ++count_string;
16220
16221 if (count_string >= 512)
16222 {
16223 fputs (to_close, file);
16224
16225 for_string = "\t.byte \"";
16226 for_decimal = "\t.byte ";
16227 to_close = NULL;
16228 count_string = 0;
16229 }
16230 }
16231 else
16232 {
16233 if (for_decimal)
16234 fputs (for_decimal, file);
16235 fprintf (file, "%d", c);
16236
16237 for_string = "\n\t.byte \"";
16238 for_decimal = ", ";
16239 to_close = "\n";
16240 count_string = 0;
16241 }
16242 }
16243
16244 /* Now close the string if we have written one. Then end the line. */
16245 if (to_close)
9ebbca7d 16246 fputs (to_close, file);
9878760c
RK
16247}
16248\f
16249/* Generate a unique section name for FILENAME for a section type
16250 represented by SECTION_DESC. Output goes into BUF.
16251
16252 SECTION_DESC can be any string, as long as it is different for each
16253 possible section type.
16254
16255 We name the section in the same manner as xlc. The name begins with an
16256 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
16257 names) with the last period replaced by the string SECTION_DESC. If
16258 FILENAME does not contain a period, SECTION_DESC is appended to the end of
16259 the name. */
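/* For example (with a hypothetical SECTION_DESC value): FILENAME
   "src/foo.c" with SECTION_DESC "_rw" yields "_foo_rw"; characters that
   are neither alphanumeric nor the final period are simply dropped.  */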
9878760c
RK
16260
16261void
f676971a 16262rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 16263 const char *section_desc)
9878760c 16264{
9ebbca7d 16265 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
16266 char *p;
16267 int len;
9878760c
RK
16268
16269 after_last_slash = filename;
16270 for (q = filename; *q; q++)
11e5fe42
RK
16271 {
16272 if (*q == '/')
16273 after_last_slash = q + 1;
16274 else if (*q == '.')
16275 last_period = q;
16276 }
9878760c 16277
11e5fe42 16278 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 16279 *buf = (char *) xmalloc (len);
9878760c
RK
16280
16281 p = *buf;
16282 *p++ = '_';
16283
16284 for (q = after_last_slash; *q; q++)
16285 {
11e5fe42 16286 if (q == last_period)
c4ad648e 16287 {
9878760c
RK
16288 strcpy (p, section_desc);
16289 p += strlen (section_desc);
e3981aab 16290 break;
c4ad648e 16291 }
9878760c 16292
e9a780ec 16293 else if (ISALNUM (*q))
c4ad648e 16294 *p++ = *q;
9878760c
RK
16295 }
16296
11e5fe42 16297 if (last_period == 0)
9878760c
RK
16298 strcpy (p, section_desc);
16299 else
16300 *p = '\0';
16301}
e165f3f0 16302\f
a4f6c312 16303/* Emit profile function. */
411707f4 16304
411707f4 16305void
a2369ed3 16306output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16307{
858081ad
AH
16308 /* Non-standard profiling for kernels, which just saves LR then calls
16309 _mcount without worrying about arg saves. The idea is to change
16310 the function prologue as little as possible as it isn't easy to
16311 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16312 if (TARGET_PROFILE_KERNEL)
16313 return;
16314
8480e480
CC
16315 if (DEFAULT_ABI == ABI_AIX)
16316 {
9739c90c
JJ
16317#ifndef NO_PROFILE_COUNTERS
16318# define NO_PROFILE_COUNTERS 0
16319#endif
f676971a 16320 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
16321 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16322 else
16323 {
16324 char buf[30];
16325 const char *label_name;
16326 rtx fun;
411707f4 16327
9739c90c
JJ
16328 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16329 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16330 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 16331
9739c90c
JJ
16332 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16333 fun, Pmode);
16334 }
8480e480 16335 }
ee890fe2
SS
16336 else if (DEFAULT_ABI == ABI_DARWIN)
16337 {
d5fa86ba 16338 const char *mcount_name = RS6000_MCOUNT;
ee890fe2
SS
16339 int caller_addr_regno = LINK_REGISTER_REGNUM;
16340
16341 /* Be conservative and always set this, at least for now. */
16342 current_function_uses_pic_offset_table = 1;
16343
16344#if TARGET_MACHO
16345 /* For PIC code, set up a stub and collect the caller's address
16346 from r0, which is where the prologue puts it. */
11abc112
MM
16347 if (MACHOPIC_INDIRECT
16348 && current_function_uses_pic_offset_table)
16349 caller_addr_regno = 0;
ee890fe2
SS
16350#endif
16351 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16352 0, VOIDmode, 1,
16353 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16354 }
411707f4
CC
16355}
16356
a4f6c312 16357/* Write function profiler code. */
e165f3f0
RK
16358
16359void
a2369ed3 16360output_function_profiler (FILE *file, int labelno)
e165f3f0 16361{
3daf36a4 16362 char buf[100];
e165f3f0 16363
38c1f2d7 16364 switch (DEFAULT_ABI)
3daf36a4 16365 {
38c1f2d7 16366 default:
37409796 16367 gcc_unreachable ();
38c1f2d7
MM
16368
16369 case ABI_V4:
09eeeacb
AM
16370 if (!TARGET_32BIT)
16371 {
d4ee4d25 16372 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
16373 return;
16374 }
ffcfcb5f 16375 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 16376 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
16377 if (NO_PROFILE_COUNTERS)
16378 {
16379 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16380 reg_names[0], reg_names[1]);
16381 }
16382 else if (TARGET_SECURE_PLT && flag_pic)
16383 {
16384 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16385 reg_names[0], reg_names[1]);
16386 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16387 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16388 reg_names[12], reg_names[12]);
16389 assemble_name (file, buf);
16390 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16391 assemble_name (file, buf);
16392 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16393 }
16394 else if (flag_pic == 1)
38c1f2d7 16395 {
dfdfa60f 16396 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
16397 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16398 reg_names[0], reg_names[1]);
17167fd8 16399 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 16400 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 16401 assemble_name (file, buf);
17167fd8 16402 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 16403 }
9ebbca7d 16404 else if (flag_pic > 1)
38c1f2d7 16405 {
71625f3d
AM
16406 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16407 reg_names[0], reg_names[1]);
9ebbca7d 16408 /* Now, we need to get the address of the label. */
71625f3d 16409 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 16410 assemble_name (file, buf);
9ebbca7d
GK
16411 fputs ("-.\n1:", file);
16412 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 16413 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
16414 reg_names[0], reg_names[11]);
16415 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16416 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 16417 }
38c1f2d7
MM
16418 else
16419 {
17167fd8 16420 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 16421 assemble_name (file, buf);
dfdfa60f 16422 fputs ("@ha\n", file);
71625f3d
AM
16423 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16424 reg_names[0], reg_names[1]);
a260abc9 16425 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 16426 assemble_name (file, buf);
17167fd8 16427 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
16428 }
16429
50d440bc 16430 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
16431 fprintf (file, "\tbl %s%s\n",
16432 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
16433 break;
16434
16435 case ABI_AIX:
ee890fe2 16436 case ABI_DARWIN:
ffcfcb5f
AM
16437 if (!TARGET_PROFILE_KERNEL)
16438 {
a3c9585f 16439 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
16440 }
16441 else
16442 {
37409796 16443 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
16444
16445 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16446 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16447
6de9cd9a 16448 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
16449 {
16450 asm_fprintf (file, "\tstd %s,24(%s)\n",
16451 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16452 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16453 asm_fprintf (file, "\tld %s,24(%s)\n",
16454 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16455 }
16456 else
16457 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16458 }
38c1f2d7
MM
16459 break;
16460 }
e165f3f0 16461}
a251ffd0 16462
b54cf83a 16463\f
b54cf83a
DE
16464/* Power4 load update and store update instructions are cracked into a
16465 load or store and an integer insn which are executed in the same cycle.
16466 Branches have their own dispatch slot which does not count against the
16467 GCC issue rate, but it changes the program flow so there are no other
16468 instructions to issue in this cycle. */
16469
16470static int
f676971a
EC
16471rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16472 int verbose ATTRIBUTE_UNUSED,
a2369ed3 16473 rtx insn, int more)
b54cf83a
DE
16474{
16475 if (GET_CODE (PATTERN (insn)) == USE
16476 || GET_CODE (PATTERN (insn)) == CLOBBER)
16477 return more;
16478
ec507f2d 16479 if (rs6000_sched_groups)
b54cf83a 16480 {
cbe26ab8 16481 if (is_microcoded_insn (insn))
c4ad648e 16482 return 0;
cbe26ab8 16483 else if (is_cracked_insn (insn))
c4ad648e 16484 return more > 2 ? more - 2 : 0;
b54cf83a 16485 }
165b263e
DE
16486
16487 return more - 1;
b54cf83a
DE
16488}
16489
a251ffd0
TG
16490/* Adjust the cost of a scheduling dependency. Return the new cost of
16491 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16492
c237e94a 16493static int
0a4f0294 16494rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0
TG
16495{
16496 if (! recog_memoized (insn))
16497 return 0;
16498
16499 if (REG_NOTE_KIND (link) != 0)
16500 return 0;
16501
16502 if (REG_NOTE_KIND (link) == 0)
16503 {
ed947a96
DJ
16504 /* Data dependency; DEP_INSN writes a register that INSN reads
16505 some cycles later. */
c9dbf840
DE
16506
16507 /* Separate a load from a narrower, dependent store. */
16508 if (rs6000_sched_groups
16509 && GET_CODE (PATTERN (insn)) == SET
16510 && GET_CODE (PATTERN (dep_insn)) == SET
16511 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16512 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16513 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16514 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16515 return cost + 14;
16516
ed947a96
DJ
16517 switch (get_attr_type (insn))
16518 {
16519 case TYPE_JMPREG:
309323c2 16520 /* Tell the first scheduling pass about the latency between
ed947a96
DJ
16521 a mtctr and bctr (and mtlr and br/blr). The first
16522 scheduling pass will not know about this latency since
16523 the mtctr instruction, which has the latency associated
16524 to it, will be generated by reload. */
309323c2 16525 return TARGET_POWER ? 5 : 4;
ed947a96
DJ
16526 case TYPE_BRANCH:
16527 /* Leave some extra cycles between a compare and its
16528 dependent branch, to inhibit expensive mispredicts. */
309323c2
DE
16529 if ((rs6000_cpu_attr == CPU_PPC603
16530 || rs6000_cpu_attr == CPU_PPC604
16531 || rs6000_cpu_attr == CPU_PPC604E
16532 || rs6000_cpu_attr == CPU_PPC620
16533 || rs6000_cpu_attr == CPU_PPC630
16534 || rs6000_cpu_attr == CPU_PPC750
16535 || rs6000_cpu_attr == CPU_PPC7400
16536 || rs6000_cpu_attr == CPU_PPC7450
ec507f2d
DE
16537 || rs6000_cpu_attr == CPU_POWER4
16538 || rs6000_cpu_attr == CPU_POWER5)
ed947a96
DJ
16539 && recog_memoized (dep_insn)
16540 && (INSN_CODE (dep_insn) >= 0)
b54cf83a
DE
16541 && (get_attr_type (dep_insn) == TYPE_CMP
16542 || get_attr_type (dep_insn) == TYPE_COMPARE
ed947a96 16543 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9259f3b0
DE
16544 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16545 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
ed947a96 16546 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
b54cf83a
DE
16547 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16548 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
ed947a96
DJ
16549 return cost + 2;
16550 default:
16551 break;
16552 }
a251ffd0
TG
16553 /* Fall out to return default cost. */
16554 }
16555
16556 return cost;
16557}
b6c9286a 16558
cbe26ab8 16559/* The function returns true if INSN is microcoded.
839a4992 16560 Return false otherwise. */
cbe26ab8
DN
16561
16562static bool
16563is_microcoded_insn (rtx insn)
16564{
16565 if (!insn || !INSN_P (insn)
16566 || GET_CODE (PATTERN (insn)) == USE
16567 || GET_CODE (PATTERN (insn)) == CLOBBER)
16568 return false;
16569
ec507f2d 16570 if (rs6000_sched_groups)
cbe26ab8
DN
16571 {
16572 enum attr_type type = get_attr_type (insn);
16573 if (type == TYPE_LOAD_EXT_U
16574 || type == TYPE_LOAD_EXT_UX
16575 || type == TYPE_LOAD_UX
16576 || type == TYPE_STORE_UX
16577 || type == TYPE_MFCR)
c4ad648e 16578 return true;
cbe26ab8
DN
16579 }
16580
16581 return false;
16582}
16583
5c425df5 16584/* The function returns a nonzero value if INSN can be scheduled only
cbe26ab8
DN
16585 as the first insn in a dispatch group ("dispatch-slot restricted").
16586 In this case, the returned value indicates how many dispatch slots
16587 the insn occupies (at the beginning of the group).
79ae11c4
DN
16588 Return 0 otherwise. */
16589
cbe26ab8 16590static int
79ae11c4
DN
16591is_dispatch_slot_restricted (rtx insn)
16592{
16593 enum attr_type type;
16594
ec507f2d 16595 if (!rs6000_sched_groups)
79ae11c4
DN
16596 return 0;
16597
16598 if (!insn
16599 || insn == NULL_RTX
16600 || GET_CODE (insn) == NOTE
16601 || GET_CODE (PATTERN (insn)) == USE
16602 || GET_CODE (PATTERN (insn)) == CLOBBER)
16603 return 0;
16604
16605 type = get_attr_type (insn);
16606
ec507f2d
DE
16607 switch (type)
16608 {
16609 case TYPE_MFCR:
16610 case TYPE_MFCRF:
16611 case TYPE_MTCR:
16612 case TYPE_DELAYED_CR:
16613 case TYPE_CR_LOGICAL:
16614 case TYPE_MTJMPR:
16615 case TYPE_MFJMPR:
16616 return 1;
16617 case TYPE_IDIV:
16618 case TYPE_LDIV:
16619 return 2;
b52110d4
DE
16620 case TYPE_LOAD_L:
16621 case TYPE_STORE_C:
16622 case TYPE_ISYNC:
16623 case TYPE_SYNC:
16624 return 4;
ec507f2d
DE
16625 default:
16626 if (rs6000_cpu == PROCESSOR_POWER5
16627 && is_cracked_insn (insn))
16628 return 2;
16629 return 0;
16630 }
79ae11c4
DN
16631}
16632
cbe26ab8
DN
16633/* The function returns true if INSN is cracked into 2 instructions
16634 by the processor (and therefore occupies 2 issue slots). */
16635
16636static bool
16637is_cracked_insn (rtx insn)
16638{
16639 if (!insn || !INSN_P (insn)
16640 || GET_CODE (PATTERN (insn)) == USE
16641 || GET_CODE (PATTERN (insn)) == CLOBBER)
16642 return false;
16643
ec507f2d 16644 if (rs6000_sched_groups)
cbe26ab8
DN
16645 {
16646 enum attr_type type = get_attr_type (insn);
16647 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
16648 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16649 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16650 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16651 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16652 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16653 || type == TYPE_IDIV || type == TYPE_LDIV
16654 || type == TYPE_INSERT_WORD)
16655 return true;
cbe26ab8
DN
16656 }
16657
16658 return false;
16659}
16660
16661/* The function returns true if INSN can be issued only from
a3c9585f 16662 the branch slot. */
cbe26ab8
DN
16663
16664static bool
16665is_branch_slot_insn (rtx insn)
16666{
16667 if (!insn || !INSN_P (insn)
16668 || GET_CODE (PATTERN (insn)) == USE
16669 || GET_CODE (PATTERN (insn)) == CLOBBER)
16670 return false;
16671
ec507f2d 16672 if (rs6000_sched_groups)
cbe26ab8
DN
16673 {
16674 enum attr_type type = get_attr_type (insn);
16675 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 16676 return true;
cbe26ab8
DN
16677 return false;
16678 }
16679
16680 return false;
16681}
79ae11c4 16682
a4f6c312 16683/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
16684 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16685 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
16686 define this macro if you do not need to adjust the scheduling
16687 priorities of insns. */
bef84347 16688
c237e94a 16689static int
a2369ed3 16690rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 16691{
a4f6c312
SS
16692 /* On machines (like the 750) which have asymmetric integer units,
16693 where one integer unit can do multiply and divides and the other
16694 can't, reduce the priority of multiply/divide so it is scheduled
16695 before other integer operations. */
bef84347
VM
16696
16697#if 0
2c3c49de 16698 if (! INSN_P (insn))
bef84347
VM
16699 return priority;
16700
16701 if (GET_CODE (PATTERN (insn)) == USE)
16702 return priority;
16703
16704 switch (rs6000_cpu_attr) {
16705 case CPU_PPC750:
16706 switch (get_attr_type (insn))
16707 {
16708 default:
16709 break;
16710
16711 case TYPE_IMUL:
16712 case TYPE_IDIV:
3cb999d8
DE
16713 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16714 priority, priority);
bef84347
VM
16715 if (priority >= 0 && priority < 0x01000000)
16716 priority >>= 3;
16717 break;
16718 }
16719 }
16720#endif
16721
79ae11c4
DN
16722 if (is_dispatch_slot_restricted (insn)
16723 && reload_completed
f676971a 16724 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
16725 && rs6000_sched_restricted_insns_priority)
16726 {
16727
c4ad648e
AM
16728 /* Prioritize insns that can be dispatched only in the first
16729 dispatch slot. */
79ae11c4 16730 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
16731 /* Attach highest priority to insn. This means that in
16732 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 16733 precede 'priority' (critical path) considerations. */
f676971a 16734 return current_sched_info->sched_max_insns_priority;
79ae11c4 16735 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 16736 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
16737 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16738 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
16739 return (priority + 1);
16740 }
79ae11c4 16741
bef84347
VM
16742 return priority;
16743}
16744
a4f6c312
SS
16745/* Return how many instructions the machine can issue per cycle. */
16746
c237e94a 16747static int
863d938c 16748rs6000_issue_rate (void)
b6c9286a 16749{
3317bab1
DE
16750 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16751 if (!reload_completed)
16752 return 1;
16753
b6c9286a 16754 switch (rs6000_cpu_attr) {
3cb999d8
DE
16755 case CPU_RIOS1: /* ? */
16756 case CPU_RS64A:
16757 case CPU_PPC601: /* ? */
ed947a96 16758 case CPU_PPC7450:
3cb999d8 16759 return 3;
b54cf83a 16760 case CPU_PPC440:
b6c9286a 16761 case CPU_PPC603:
bef84347 16762 case CPU_PPC750:
ed947a96 16763 case CPU_PPC7400:
be12c2b0 16764 case CPU_PPC8540:
f676971a 16765 return 2;
3cb999d8 16766 case CPU_RIOS2:
b6c9286a 16767 case CPU_PPC604:
19684119 16768 case CPU_PPC604E:
b6c9286a 16769 case CPU_PPC620:
3cb999d8 16770 case CPU_PPC630:
b6c9286a 16771 return 4;
cbe26ab8 16772 case CPU_POWER4:
ec507f2d 16773 case CPU_POWER5:
cbe26ab8 16774 return 5;
b6c9286a
MM
16775 default:
16776 return 1;
16777 }
16778}
16779
be12c2b0
VM
16780/* Return how many instructions to look ahead for better insn
16781 scheduling. */
16782
16783static int
863d938c 16784rs6000_use_sched_lookahead (void)
be12c2b0
VM
16785{
16786 if (rs6000_cpu_attr == CPU_PPC8540)
16787 return 4;
16788 return 0;
16789}
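
   The scheduler callbacks above (rs6000_variable_issue, rs6000_adjust_cost,
   rs6000_adjust_priority, rs6000_issue_rate and rs6000_use_sched_lookahead)
   are exported through the target vector.  A paraphrase of how they are
   installed, shown here only for orientation; the actual initializer list
   earlier in this file is longer and also wires up the group-bundling hooks
   that follow:

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef  TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef  TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead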
16790
569fa502
DN
16791/* Determine if PAT refers to memory. */
16792
16793static bool
16794is_mem_ref (rtx pat)
16795{
16796 const char * fmt;
16797 int i, j;
16798 bool ret = false;
16799
16800 if (GET_CODE (pat) == MEM)
16801 return true;
16802
16803 /* Recursively process the pattern. */
16804 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16805
16806 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16807 {
16808 if (fmt[i] == 'e')
16809 ret |= is_mem_ref (XEXP (pat, i));
16810 else if (fmt[i] == 'E')
16811 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16812 ret |= is_mem_ref (XVECEXP (pat, i, j));
16813 }
16814
16815 return ret;
16816}
16817
16818/* Determine if PAT is a PATTERN of a load insn. */
f676971a 16819
569fa502
DN
16820static bool
16821is_load_insn1 (rtx pat)
16822{
16823 if (!pat || pat == NULL_RTX)
16824 return false;
16825
16826 if (GET_CODE (pat) == SET)
16827 return is_mem_ref (SET_SRC (pat));
16828
16829 if (GET_CODE (pat) == PARALLEL)
16830 {
16831 int i;
16832
16833 for (i = 0; i < XVECLEN (pat, 0); i++)
16834 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16835 return true;
16836 }
16837
16838 return false;
16839}
16840
16841/* Determine if INSN loads from memory. */
16842
16843static bool
16844is_load_insn (rtx insn)
16845{
16846 if (!insn || !INSN_P (insn))
16847 return false;
16848
16849 if (GET_CODE (insn) == CALL_INSN)
16850 return false;
16851
16852 return is_load_insn1 (PATTERN (insn));
16853}
16854
16855/* Determine if PAT is a PATTERN of a store insn. */
16856
16857static bool
16858is_store_insn1 (rtx pat)
16859{
16860 if (!pat || pat == NULL_RTX)
16861 return false;
16862
16863 if (GET_CODE (pat) == SET)
16864 return is_mem_ref (SET_DEST (pat));
16865
16866 if (GET_CODE (pat) == PARALLEL)
16867 {
16868 int i;
16869
16870 for (i = 0; i < XVECLEN (pat, 0); i++)
16871 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16872 return true;
16873 }
16874
16875 return false;
16876}
16877
16878/* Determine if INSN stores to memory. */
16879
16880static bool
16881is_store_insn (rtx insn)
16882{
16883 if (!insn || !INSN_P (insn))
16884 return false;
16885
16886 return is_store_insn1 (PATTERN (insn));
16887}
16888
16889/* Returns whether the dependence between INSN and NEXT is considered
16890 costly by the given target. */
16891
16892static bool
c4ad648e
AM
16893rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16894 int distance)
f676971a 16895{
aabcd309 16896 /* If the flag is not enabled - no dependence is considered costly;
f676971a 16897 allow all dependent insns in the same group.
569fa502
DN
16898 This is the most aggressive option. */
16899 if (rs6000_sched_costly_dep == no_dep_costly)
16900 return false;
16901
f676971a 16902 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
16903 do not allow dependent instructions in the same group.
16904 This is the most conservative option. */
16905 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 16906 return true;
569fa502 16907
f676971a
EC
16908 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16909 && is_load_insn (next)
569fa502
DN
16910 && is_store_insn (insn))
16911 /* Prevent load after store in the same group. */
16912 return true;
16913
16914 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 16915 && is_load_insn (next)
569fa502
DN
16916 && is_store_insn (insn)
16917 && (!link || (int) REG_NOTE_KIND (link) == 0))
c4ad648e
AM
16918 /* Prevent load after store in the same group if it is a true
16919 dependence. */
569fa502 16920 return true;
f676971a
EC
16921
16922 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
16923 and will not be scheduled in the same group. */
16924 if (rs6000_sched_costly_dep <= max_dep_latency
16925 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16926 return true;
16927
16928 return false;
16929}
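
   As a standalone sketch (not GCC code, illustrative values only) of the
   numeric '-msched-costly-dep=N' case handled by the final test above; the
   symbolic settings are covered by the explicit checks earlier in the
   function:

#include <stdbool.h>
#include <stdio.h>

/* Model only the "cost - distance >= N" threshold test.  */
static bool
costly_by_latency (int threshold, int cost, int distance)
{
  return (cost - distance) >= threshold;
}

int
main (void)
{
  /* With -msched-costly-dep=3, a dependence of cost 4 at distance 0 is
     treated as costly, while one of cost 2 is not.  */
  printf ("%d\n", costly_by_latency (3, 4, 0));  /* prints 1 */
  printf ("%d\n", costly_by_latency (3, 2, 0));  /* prints 0 */
  return 0;
}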
16930
f676971a 16931/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
16932 skipping any "non-active" insns - insns that will not actually occupy
16933 an issue slot. Return NULL_RTX if such an insn is not found. */
16934
16935static rtx
16936get_next_active_insn (rtx insn, rtx tail)
16937{
f489aff8 16938 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
16939 return NULL_RTX;
16940
f489aff8 16941 while (1)
cbe26ab8 16942 {
f489aff8
AM
16943 insn = NEXT_INSN (insn);
16944 if (insn == NULL_RTX || insn == tail)
16945 return NULL_RTX;
cbe26ab8 16946
f489aff8
AM
16947 if (CALL_P (insn)
16948 || JUMP_P (insn)
16949 || (NONJUMP_INSN_P (insn)
16950 && GET_CODE (PATTERN (insn)) != USE
16951 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 16952 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
16953 break;
16954 }
16955 return insn;
cbe26ab8
DN
16956}
16957
839a4992 16958/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
16959 of group WHICH_GROUP.
16960
16961 If WHICH_GROUP == current_group, this function will return true if INSN
16962 causes the termination of the current group (i.e, the dispatch group to
16963 which INSN belongs). This means that INSN will be the last insn in the
16964 group it belongs to.
16965
16966 If WHICH_GROUP == previous_group, this function will return true if INSN
16967 causes the termination of the previous group (i.e., the dispatch group that
16968 precedes the group to which INSN belongs). This means that INSN will be
16969 the first insn in the group it belongs to. */
16970
16971static bool
16972insn_terminates_group_p (rtx insn, enum group_termination which_group)
16973{
16974 enum attr_type type;
16975
16976 if (! insn)
16977 return false;
569fa502 16978
cbe26ab8
DN
16979 type = get_attr_type (insn);
16980
16981 if (is_microcoded_insn (insn))
16982 return true;
16983
16984 if (which_group == current_group)
16985 {
16986 if (is_branch_slot_insn (insn))
c4ad648e 16987 return true;
cbe26ab8
DN
16988 return false;
16989 }
16990 else if (which_group == previous_group)
16991 {
16992 if (is_dispatch_slot_restricted (insn))
c4ad648e 16993 return true;
cbe26ab8
DN
16994 return false;
16995 }
16996
16997 return false;
16998}
16999
839a4992 17000/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
17001 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
17002
17003static bool
17004is_costly_group (rtx *group_insns, rtx next_insn)
17005{
17006 int i;
17007 rtx link;
17008 int cost;
17009 int issue_rate = rs6000_issue_rate ();
17010
17011 for (i = 0; i < issue_rate; i++)
17012 {
17013 rtx insn = group_insns[i];
17014 if (!insn)
c4ad648e 17015 continue;
cbe26ab8 17016 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
c4ad648e
AM
17017 {
17018 rtx next = XEXP (link, 0);
17019 if (next == next_insn)
17020 {
17021 cost = insn_cost (insn, link, next_insn);
17022 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
17023 return true;
17024 }
17025 }
cbe26ab8
DN
17026 }
17027
17028 return false;
17029}
17030
f676971a 17031/* Utility function used by redefine_groups.
cbe26ab8
DN
17032 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
17033 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
17034 to keep it "far" (in a separate group) from GROUP_INSNS, following
17035 one of the following schemes, depending on the value of the flag
17036 -minsert_sched_nops = X:
17037 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 17038 in order to force NEXT_INSN into a separate group.
f676971a
EC
17039 (2) X < sched_finish_regroup_exact: insert exactly X nops.
17040 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
17041 insertion (has a group just ended, how many vacant issue slots remain in the
17042 last group, and how many dispatch groups were encountered so far). */
17043
f676971a 17044static int
c4ad648e
AM
17045force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
17046 rtx next_insn, bool *group_end, int can_issue_more,
17047 int *group_count)
cbe26ab8
DN
17048{
17049 rtx nop;
17050 bool force;
17051 int issue_rate = rs6000_issue_rate ();
17052 bool end = *group_end;
17053 int i;
17054
17055 if (next_insn == NULL_RTX)
17056 return can_issue_more;
17057
17058 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
17059 return can_issue_more;
17060
17061 force = is_costly_group (group_insns, next_insn);
17062 if (!force)
17063 return can_issue_more;
17064
17065 if (sched_verbose > 6)
17066 fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 17067 *group_count, can_issue_more);
cbe26ab8
DN
17068
17069 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
17070 {
17071 if (*group_end)
c4ad648e 17072 can_issue_more = 0;
cbe26ab8
DN
17073
17074 /* Since only a branch can be issued in the last issue_slot, it is
17075 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
17076 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
17077 in this case the last nop will start a new group and the branch
17078 will be forced to the new group. */
cbe26ab8 17079 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 17080 can_issue_more--;
cbe26ab8
DN
17081
17082 while (can_issue_more > 0)
c4ad648e 17083 {
9390387d 17084 nop = gen_nop ();
c4ad648e
AM
17085 emit_insn_before (nop, next_insn);
17086 can_issue_more--;
17087 }
cbe26ab8
DN
17088
17089 *group_end = true;
17090 return 0;
f676971a 17091 }
cbe26ab8
DN
17092
17093 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
17094 {
17095 int n_nops = rs6000_sched_insert_nops;
17096
f676971a 17097 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 17098 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 17099 if (can_issue_more == 0)
c4ad648e 17100 can_issue_more = issue_rate;
cbe26ab8
DN
17101 can_issue_more--;
17102 if (can_issue_more == 0)
c4ad648e
AM
17103 {
17104 can_issue_more = issue_rate - 1;
17105 (*group_count)++;
17106 end = true;
17107 for (i = 0; i < issue_rate; i++)
17108 {
17109 group_insns[i] = 0;
17110 }
17111 }
cbe26ab8
DN
17112
17113 while (n_nops > 0)
c4ad648e
AM
17114 {
17115 nop = gen_nop ();
17116 emit_insn_before (nop, next_insn);
17117 if (can_issue_more == issue_rate - 1) /* new group begins */
17118 end = false;
17119 can_issue_more--;
17120 if (can_issue_more == 0)
17121 {
17122 can_issue_more = issue_rate - 1;
17123 (*group_count)++;
17124 end = true;
17125 for (i = 0; i < issue_rate; i++)
17126 {
17127 group_insns[i] = 0;
17128 }
17129 }
17130 n_nops--;
17131 }
cbe26ab8
DN
17132
17133 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 17134 can_issue_more++;
cbe26ab8 17135
c4ad648e
AM
17136 /* Is next_insn going to start a new group? */
17137 *group_end
17138 = (end
cbe26ab8
DN
17139 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
17140 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
17141 || (can_issue_more < issue_rate &&
c4ad648e 17142 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 17143 if (*group_end && end)
c4ad648e 17144 (*group_count)--;
cbe26ab8
DN
17145
17146 if (sched_verbose > 6)
c4ad648e
AM
17147 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
17148 *group_count, can_issue_more);
f676971a
EC
17149 return can_issue_more;
17150 }
cbe26ab8
DN
17151
17152 return can_issue_more;
17153}
17154
17155/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 17156 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
17157 form in practice. It tries to achieve this synchronization by forcing the
17158 estimated processor grouping on the compiler (as opposed to the function
17159 'pad_groups' which tries to force the scheduler's grouping on the processor).
17160
17161 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
17162 examines the (estimated) dispatch groups that will be formed by the processor
17163 dispatcher. It marks these group boundaries to reflect the estimated
17164 processor grouping, overriding the grouping that the scheduler had marked.
17165 Depending on the value of the flag '-minsert-sched-nops' this function can
17166 force certain insns into separate groups or force a certain distance between
17167 them by inserting nops, for example, if there exists a "costly dependence"
17168 between the insns.
17169
17170 The function estimates the group boundaries that the processor will form as
0fa2e4df 17171 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
17172 each insn. A subsequent insn will start a new group if one of the following
17173 4 cases applies:
17174 - no more vacant issue slots remain in the current dispatch group.
17175 - only the last issue slot, which is the branch slot, is vacant, but the next
17176 insn is not a branch.
17177 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
17178 which means that a cracked insn (which occupies two issue slots) can't be
17179 issued in this group.
f676971a 17180 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
17181 start a new group. */
17182
17183static int
17184redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
17185{
17186 rtx insn, next_insn;
17187 int issue_rate;
17188 int can_issue_more;
17189 int slot, i;
17190 bool group_end;
17191 int group_count = 0;
17192 rtx *group_insns;
17193
17194 /* Initialize. */
17195 issue_rate = rs6000_issue_rate ();
17196 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 17197 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
17198 {
17199 group_insns[i] = 0;
17200 }
17201 can_issue_more = issue_rate;
17202 slot = 0;
17203 insn = get_next_active_insn (prev_head_insn, tail);
17204 group_end = false;
17205
17206 while (insn != NULL_RTX)
17207 {
17208 slot = (issue_rate - can_issue_more);
17209 group_insns[slot] = insn;
17210 can_issue_more =
c4ad648e 17211 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 17212 if (insn_terminates_group_p (insn, current_group))
c4ad648e 17213 can_issue_more = 0;
cbe26ab8
DN
17214
17215 next_insn = get_next_active_insn (insn, tail);
17216 if (next_insn == NULL_RTX)
c4ad648e 17217 return group_count + 1;
cbe26ab8 17218
c4ad648e
AM
17219 /* Is next_insn going to start a new group? */
17220 group_end
17221 = (can_issue_more == 0
17222 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
17223 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
17224 || (can_issue_more < issue_rate &&
17225 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 17226
f676971a 17227 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
17228 next_insn, &group_end, can_issue_more,
17229 &group_count);
cbe26ab8
DN
17230
17231 if (group_end)
c4ad648e
AM
17232 {
17233 group_count++;
17234 can_issue_more = 0;
17235 for (i = 0; i < issue_rate; i++)
17236 {
17237 group_insns[i] = 0;
17238 }
17239 }
cbe26ab8
DN
17240
17241 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 17242 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 17243 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 17244 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
17245
17246 insn = next_insn;
17247 if (can_issue_more == 0)
c4ad648e
AM
17248 can_issue_more = issue_rate;
17249 } /* while */
cbe26ab8
DN
17250
17251 return group_count;
17252}
17253
17254/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
17255 dispatch group boundaries that the scheduler had marked. Pad with nops
17256 any dispatch groups which have vacant issue slots, in order to force the
17257 scheduler's grouping on the processor dispatcher. The function
17258 returns the number of dispatch groups found. */
17259
17260static int
17261pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
17262{
17263 rtx insn, next_insn;
17264 rtx nop;
17265 int issue_rate;
17266 int can_issue_more;
17267 int group_end;
17268 int group_count = 0;
17269
17270 /* Initialize issue_rate. */
17271 issue_rate = rs6000_issue_rate ();
17272 can_issue_more = issue_rate;
17273
17274 insn = get_next_active_insn (prev_head_insn, tail);
17275 next_insn = get_next_active_insn (insn, tail);
17276
17277 while (insn != NULL_RTX)
17278 {
17279 can_issue_more =
17280 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17281
17282 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
17283
17284 if (next_insn == NULL_RTX)
c4ad648e 17285 break;
cbe26ab8
DN
17286
17287 if (group_end)
c4ad648e
AM
17288 {
17289 /* If the scheduler had marked group termination at this location
17290 (between insn and next_insn), and neither insn nor next_insn will
17291 force group termination, pad the group with nops to force group
17292 termination. */
17293 if (can_issue_more
17294 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17295 && !insn_terminates_group_p (insn, current_group)
17296 && !insn_terminates_group_p (next_insn, previous_group))
17297 {
9390387d 17298 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
17299 can_issue_more--;
17300
17301 while (can_issue_more)
17302 {
17303 nop = gen_nop ();
17304 emit_insn_before (nop, next_insn);
17305 can_issue_more--;
17306 }
17307 }
17308
17309 can_issue_more = issue_rate;
17310 group_count++;
17311 }
cbe26ab8
DN
17312
17313 insn = next_insn;
17314 next_insn = get_next_active_insn (insn, tail);
17315 }
17316
17317 return group_count;
17318}
17319
17320/* The following function is called at the end of scheduling BB.
17321 After reload, it inserts nops to enforce the insn group bundling. */
17322
17323static void
38f391a5 17324rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
17325{
17326 int n_groups;
17327
17328 if (sched_verbose)
17329 fprintf (dump, "=== Finishing schedule.\n");
17330
ec507f2d 17331 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
17332 {
17333 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 17334 return;
cbe26ab8
DN
17335
17336 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
17337 n_groups = pad_groups (dump, sched_verbose,
17338 current_sched_info->prev_head,
17339 current_sched_info->next_tail);
cbe26ab8 17340 else
c4ad648e
AM
17341 n_groups = redefine_groups (dump, sched_verbose,
17342 current_sched_info->prev_head,
17343 current_sched_info->next_tail);
cbe26ab8
DN
17344
17345 if (sched_verbose >= 6)
17346 {
17347 fprintf (dump, "ngroups = %d\n", n_groups);
17348 print_rtl (dump, current_sched_info->prev_head);
17349 fprintf (dump, "Done finish_sched\n");
17350 }
17351 }
17352}
b6c9286a 17353\f
b6c9286a
MM
17354/* Length in units of the trampoline for entering a nested function. */
17355
17356int
863d938c 17357rs6000_trampoline_size (void)
b6c9286a
MM
17358{
17359 int ret = 0;
17360
17361 switch (DEFAULT_ABI)
17362 {
17363 default:
37409796 17364 gcc_unreachable ();
b6c9286a
MM
17365
17366 case ABI_AIX:
8f802bfb 17367 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
17368 break;
17369
4dabc42d 17370 case ABI_DARWIN:
b6c9286a 17371 case ABI_V4:
03a7e1a5 17372 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 17373 break;
b6c9286a
MM
17374 }
17375
17376 return ret;
17377}
17378
17379/* Emit RTL insns to initialize the variable parts of a trampoline.
17380 FNADDR is an RTX for the address of the function's pure code.
17381 CXT is an RTX for the static chain value for the function. */
17382
17383void
a2369ed3 17384rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 17385{
8bd04c56 17386 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 17387 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
17388
17389 switch (DEFAULT_ABI)
17390 {
17391 default:
37409796 17392 gcc_unreachable ();
b6c9286a 17393
8bd04c56 17394/* Macros to shorten the code expansions below. */
9613eaff 17395#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 17396#define MEM_PLUS(addr,offset) \
9613eaff 17397 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 17398
b6c9286a
MM
17399 /* Under AIX, just build the 3-word function descriptor.  */
17400 case ABI_AIX:
8bd04c56 17401 {
9613eaff
SH
17402 rtx fn_reg = gen_reg_rtx (Pmode);
17403 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 17404 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 17405 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
17406 emit_move_insn (MEM_DEREF (addr), fn_reg);
17407 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17408 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17409 }
b6c9286a
MM
17410 break;
17411
4dabc42d
TC
17412 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17413 case ABI_DARWIN:
b6c9286a 17414 case ABI_V4:
9613eaff 17415 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 17416 FALSE, VOIDmode, 4,
9613eaff 17417 addr, Pmode,
eaf1bcf1 17418 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
17419 fnaddr, Pmode,
17420 ctx_reg, Pmode);
b6c9286a 17421 break;
b6c9286a
MM
17422 }
17423
17424 return;
17425}
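
   For context, a minimal GNU C sketch (hypothetical function names) of what
   triggers this machinery: taking the address of a nested function makes the
   compiler build a stack trampoline, sized by rs6000_trampoline_size () and
   filled in by rs6000_initialize_trampoline ():

/* Passing &inner to another function forces a trampoline on the stack.  */
static int
apply (int (*fn) (int), int x)
{
  return fn (x);
}

int
outer (int base)
{
  int inner (int v) { return v + base; }       /* GNU C nested function */
  return apply (inner, 42);
}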
7509c759
MM
17426
17427\f
91d231cb 17428/* Table of valid machine attributes. */
a4f6c312 17429
91d231cb 17430const struct attribute_spec rs6000_attribute_table[] =
7509c759 17431{
91d231cb 17432 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 17433 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
17434 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17435 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
17436 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
17437 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
17438#ifdef SUBTARGET_ATTRIBUTE_TABLE
17439 SUBTARGET_ATTRIBUTE_TABLE,
17440#endif
a5c76ee6 17441 { NULL, 0, 0, false, false, false, NULL }
91d231cb 17442};
7509c759 17443
8bb418a3
ZL
17444/* Handle the "altivec" attribute. The attribute may have
17445 arguments as follows:
f676971a 17446
8bb418a3
ZL
17447 __attribute__((altivec(vector__)))
17448 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17449 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17450
17451 and may appear more than once (e.g., 'vector bool char') in a
17452 given declaration. */
17453
17454static tree
f90ac3f0
UP
17455rs6000_handle_altivec_attribute (tree *node,
17456 tree name ATTRIBUTE_UNUSED,
17457 tree args,
8bb418a3
ZL
17458 int flags ATTRIBUTE_UNUSED,
17459 bool *no_add_attrs)
17460{
17461 tree type = *node, result = NULL_TREE;
17462 enum machine_mode mode;
17463 int unsigned_p;
17464 char altivec_type
17465 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17466 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17467 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 17468 : '?');
8bb418a3
ZL
17469
17470 while (POINTER_TYPE_P (type)
17471 || TREE_CODE (type) == FUNCTION_TYPE
17472 || TREE_CODE (type) == METHOD_TYPE
17473 || TREE_CODE (type) == ARRAY_TYPE)
17474 type = TREE_TYPE (type);
17475
17476 mode = TYPE_MODE (type);
17477
f90ac3f0
UP
17478 /* Check for invalid AltiVec type qualifiers. */
17479 if (type == long_unsigned_type_node || type == long_integer_type_node)
17480 {
17481 if (TARGET_64BIT)
17482 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17483 else if (rs6000_warn_altivec_long)
d4ee4d25 17484 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
17485 }
17486 else if (type == long_long_unsigned_type_node
17487 || type == long_long_integer_type_node)
17488 error ("use of %<long long%> in AltiVec types is invalid");
17489 else if (type == double_type_node)
17490 error ("use of %<double%> in AltiVec types is invalid");
17491 else if (type == long_double_type_node)
17492 error ("use of %<long double%> in AltiVec types is invalid");
17493 else if (type == boolean_type_node)
17494 error ("use of boolean types in AltiVec types is invalid");
17495 else if (TREE_CODE (type) == COMPLEX_TYPE)
17496 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
17497 else if (DECIMAL_FLOAT_MODE_P (mode))
17498 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
17499
17500 switch (altivec_type)
17501 {
17502 case 'v':
8df83eae 17503 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
17504 switch (mode)
17505 {
c4ad648e
AM
17506 case SImode:
17507 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17508 break;
17509 case HImode:
17510 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17511 break;
17512 case QImode:
17513 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17514 break;
17515 case SFmode: result = V4SF_type_node; break;
17516 /* If the user says 'vector int bool', we may be handed the 'bool'
17517 attribute _before_ the 'vector' attribute, and so select the
17518 proper type in the 'b' case below. */
17519 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17520 result = type;
17521 default: break;
8bb418a3
ZL
17522 }
17523 break;
17524 case 'b':
17525 switch (mode)
17526 {
c4ad648e
AM
17527 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17528 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17529 case QImode: case V16QImode: result = bool_V16QI_type_node;
17530 default: break;
8bb418a3
ZL
17531 }
17532 break;
17533 case 'p':
17534 switch (mode)
17535 {
c4ad648e
AM
17536 case V8HImode: result = pixel_V8HI_type_node;
17537 default: break;
8bb418a3
ZL
17538 }
17539 default: break;
17540 }
17541
7958a2a6
FJ
17542 if (result && result != type && TYPE_READONLY (type))
17543 result = build_qualified_type (result, TYPE_QUAL_CONST);
17544
8bb418a3
ZL
17545 *no_add_attrs = true; /* No need to hang on to the attribute. */
17546
f90ac3f0 17547 if (result)
8bb418a3
ZL
17548 *node = reconstruct_complex_type (*node, result);
17549
17550 return NULL_TREE;
17551}
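
   A usage sketch (hypothetical typedef names): hand-written spellings of the
   attribute handled above; in practice they are produced by the
   context-sensitive "vector" keyword rather than written out explicitly.

typedef int           my_v4si  __attribute__ ((altivec (vector__)));  /* like vector signed int    */
typedef float         my_v4sf  __attribute__ ((altivec (vector__)));  /* like vector float         */
typedef unsigned char my_v16qu __attribute__ ((altivec (vector__)));  /* like vector unsigned char */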
17552
f18eca82
ZL
17553/* AltiVec defines four built-in scalar types that serve as vector
17554 elements; we must teach the compiler how to mangle them. */
17555
17556static const char *
17557rs6000_mangle_fundamental_type (tree type)
17558{
17559 if (type == bool_char_type_node) return "U6__boolc";
17560 if (type == bool_short_type_node) return "U6__bools";
17561 if (type == pixel_type_node) return "u7__pixel";
17562 if (type == bool_int_type_node) return "U6__booli";
17563
337bde91
DE
17564 /* Mangle IBM extended float long double as `g' (__float128) on
17565 powerpc*-linux where long-double-64 previously was the default. */
17566 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
17567 && TARGET_ELF
17568 && TARGET_LONG_DOUBLE_128
17569 && !TARGET_IEEEQUAD)
17570 return "g";
17571
f18eca82
ZL
17572 /* For all other types, use normal C++ mangling. */
17573 return NULL;
17574}
17575
a5c76ee6
ZW
17576/* Handle a "longcall" or "shortcall" attribute; arguments as in
17577 struct attribute_spec.handler. */
a4f6c312 17578
91d231cb 17579static tree
f676971a
EC
17580rs6000_handle_longcall_attribute (tree *node, tree name,
17581 tree args ATTRIBUTE_UNUSED,
17582 int flags ATTRIBUTE_UNUSED,
a2369ed3 17583 bool *no_add_attrs)
91d231cb
JM
17584{
17585 if (TREE_CODE (*node) != FUNCTION_TYPE
17586 && TREE_CODE (*node) != FIELD_DECL
17587 && TREE_CODE (*node) != TYPE_DECL)
17588 {
5c498b10 17589 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
17590 IDENTIFIER_POINTER (name));
17591 *no_add_attrs = true;
17592 }
6a4cee5f 17593
91d231cb 17594 return NULL_TREE;
7509c759
MM
17595}
17596
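
   Usage sketch (hypothetical function names): the attributes apply to
   function types and override the -mlongcall default for individual
   declarations.

extern void far_away (void) __attribute__ ((longcall));   /* always call indirectly (via CTR)   */
extern void close_by (void) __attribute__ ((shortcall));  /* always use a direct "bl", even with
                                                              -mlongcall in effect               */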
a5c76ee6
ZW
17597/* Set longcall attributes on all functions declared when
17598 rs6000_default_long_calls is true. */
17599static void
a2369ed3 17600rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
17601{
17602 if (rs6000_default_long_calls
17603 && (TREE_CODE (type) == FUNCTION_TYPE
17604 || TREE_CODE (type) == METHOD_TYPE))
17605 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17606 NULL_TREE,
17607 TYPE_ATTRIBUTES (type));
16d6f994
EC
17608
17609#if TARGET_MACHO
17610 darwin_set_default_type_attributes (type);
17611#endif
a5c76ee6
ZW
17612}
17613
3cb999d8
DE
17614/* Return a reference suitable for calling a function with the
17615 longcall attribute. */
a4f6c312 17616
9390387d 17617rtx
a2369ed3 17618rs6000_longcall_ref (rtx call_ref)
6a4cee5f 17619{
d330fd93 17620 const char *call_name;
6a4cee5f
MM
17621 tree node;
17622
17623 if (GET_CODE (call_ref) != SYMBOL_REF)
17624 return call_ref;
17625
17626 /* System V adds '.' to the internal name, so skip them. */
17627 call_name = XSTR (call_ref, 0);
17628 if (*call_name == '.')
17629 {
17630 while (*call_name == '.')
17631 call_name++;
17632
17633 node = get_identifier (call_name);
39403d82 17634 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
17635 }
17636
17637 return force_reg (Pmode, call_ref);
17638}
7509c759 17639\f
77ccdfed
EC
17640#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
17641#define TARGET_USE_MS_BITFIELD_LAYOUT 0
17642#endif
17643
17644/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
17645 struct attribute_spec.handler. */
17646static tree
17647rs6000_handle_struct_attribute (tree *node, tree name,
17648 tree args ATTRIBUTE_UNUSED,
17649 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
17650{
17651 tree *type = NULL;
17652 if (DECL_P (*node))
17653 {
17654 if (TREE_CODE (*node) == TYPE_DECL)
17655 type = &TREE_TYPE (*node);
17656 }
17657 else
17658 type = node;
17659
17660 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
17661 || TREE_CODE (*type) == UNION_TYPE)))
17662 {
17663 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
17664 *no_add_attrs = true;
17665 }
17666
17667 else if ((is_attribute_p ("ms_struct", name)
17668 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
17669 || ((is_attribute_p ("gcc_struct", name)
17670 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
17671 {
17672 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
17673 IDENTIFIER_POINTER (name));
17674 *no_add_attrs = true;
17675 }
17676
17677 return NULL_TREE;
17678}
17679
17680static bool
17681rs6000_ms_bitfield_layout_p (tree record_type)
17682{
17683 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
17684 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
17685 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
17686}
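
   Usage sketch (hypothetical struct names): either attribute can be applied
   per record to override the default bitfield layout chosen above.

struct ms_layout
{
  unsigned char kind : 4;
  unsigned int  len  : 24;
} __attribute__ ((ms_struct));     /* MS-compatible bitfield packing  */

struct gnu_layout
{
  unsigned char kind : 4;
  unsigned int  len  : 24;
} __attribute__ ((gcc_struct));    /* default GCC bitfield packing    */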
17687\f
b64a1b53
RH
17688#ifdef USING_ELFOS_H
17689
d6b5193b 17690/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 17691
d6b5193b
RS
17692static void
17693rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
17694{
17695 if (DEFAULT_ABI == ABI_AIX
17696 && TARGET_MINIMAL_TOC
17697 && !TARGET_RELOCATABLE)
17698 {
17699 if (!toc_initialized)
17700 {
17701 toc_initialized = 1;
17702 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
17703 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
17704 fprintf (asm_out_file, "\t.tc ");
17705 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
17706 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17707 fprintf (asm_out_file, "\n");
17708
17709 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17710 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17711 fprintf (asm_out_file, " = .+32768\n");
17712 }
17713 else
17714 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17715 }
17716 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
17717 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
17718 else
17719 {
17720 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17721 if (!toc_initialized)
17722 {
17723 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
17724 fprintf (asm_out_file, " = .+32768\n");
17725 toc_initialized = 1;
17726 }
17727 }
17728}
17729
17730/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 17731
b64a1b53 17732static void
d6b5193b
RS
17733rs6000_elf_asm_init_sections (void)
17734{
17735 toc_section
17736 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
17737
17738 sdata2_section
17739 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
17740 SDATA2_SECTION_ASM_OP);
17741}
17742
17743/* Implement TARGET_SELECT_RTX_SECTION. */
17744
17745static section *
f676971a 17746rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 17747 unsigned HOST_WIDE_INT align)
7509c759 17748{
a9098fd0 17749 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 17750 return toc_section;
7509c759 17751 else
d6b5193b 17752 return default_elf_select_rtx_section (mode, x, align);
7509c759
MM
17753}
17754
d6b5193b 17755/* Implement TARGET_ASM_SELECT_SECTION for ELF targets. */
7509c759 17756
d6b5193b 17757static section *
f676971a 17758rs6000_elf_select_section (tree decl, int reloc,
a2369ed3 17759 unsigned HOST_WIDE_INT align)
7509c759 17760{
f1384257
AM
17761 /* Pretend that we're always building for a shared library when
17762 ABI_AIX, because otherwise we end up with dynamic relocations
17763 in read-only sections. This happens for function pointers,
17764 references to vtables in typeinfo, and probably other cases. */
d6b5193b
RS
17765 return default_elf_select_section_1 (decl, reloc, align,
17766 flag_pic || DEFAULT_ABI == ABI_AIX);
63019373
GK
17767}
17768
17769/* A C statement to build up a unique section name, expressed as a
17770 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17771 RELOC indicates whether the initial value of EXP requires
17772 link-time relocations. If you do not define this macro, GCC will use
17773 the symbol name prefixed by `.' as the section name. Note - this
f5143c46 17774 macro can now be called for uninitialized data items as well as
4912a07c 17775 initialized data and functions. */
63019373 17776
ae46c4e0 17777static void
a2369ed3 17778rs6000_elf_unique_section (tree decl, int reloc)
63019373 17779{
f1384257
AM
17780 /* As above, pretend that we're always building for a shared library
17781 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
0e5dbd9b
DE
17782 default_unique_section_1 (decl, reloc,
17783 flag_pic || DEFAULT_ABI == ABI_AIX);
7509c759 17784}
d9407988 17785\f
d1908feb
JJ
17786/* For a SYMBOL_REF, set generic flags and then perform some
17787 target-specific processing.
17788
d1908feb
JJ
17789 When the AIX ABI is requested on a non-AIX system, replace the
17790 function name with the real name (with a leading .) rather than the
17791 function descriptor name. This saves a lot of overriding code to
17792 read the prefixes. */
d9407988 17793
fb49053f 17794static void
a2369ed3 17795rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 17796{
d1908feb 17797 default_encode_section_info (decl, rtl, first);
b2003250 17798
d1908feb
JJ
17799 if (first
17800 && TREE_CODE (decl) == FUNCTION_DECL
17801 && !TARGET_AIX
17802 && DEFAULT_ABI == ABI_AIX)
d9407988 17803 {
c6a2438a 17804 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
17805 size_t len = strlen (XSTR (sym_ref, 0));
17806 char *str = alloca (len + 2);
17807 str[0] = '.';
17808 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17809 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 17810 }
d9407988
MM
17811}
17812
c1b7d95a 17813bool
a2369ed3 17814rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
17815{
17816 if (rs6000_sdata == SDATA_NONE)
17817 return false;
17818
7482ad25
AF
17819 /* We want to merge strings, so we never consider them small data. */
17820 if (TREE_CODE (decl) == STRING_CST)
17821 return false;
17822
17823 /* Functions are never in the small data area. */
17824 if (TREE_CODE (decl) == FUNCTION_DECL)
17825 return false;
17826
0e5dbd9b
DE
17827 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17828 {
17829 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17830 if (strcmp (section, ".sdata") == 0
17831 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
17832 || strcmp (section, ".sbss") == 0
17833 || strcmp (section, ".sbss2") == 0
17834 || strcmp (section, ".PPC.EMB.sdata0") == 0
17835 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
17836 return true;
17837 }
17838 else
17839 {
17840 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17841
17842 if (size > 0
307b599c 17843 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
17844 /* If it's not public, and we're not going to reference it there,
17845 there's no need to put it in the small data section. */
0e5dbd9b
DE
17846 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17847 return true;
17848 }
17849
17850 return false;
17851}
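
   Illustration (hypothetical declarations; actual placement depends on the
   -msdata setting and the -G size threshold):

int counter;                  /* small scalar -> small data (.sdata/.sbss)          */
static int table[256];        /* exceeds the -G threshold -> ordinary .bss          */
const char *greeting = "hi";  /* the pointer may be small data; the string constant
                                 never is, since STRING_CSTs are excluded above     */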
17852
b91da81f 17853#endif /* USING_ELFOS_H */
aacd3885
RS
17854\f
17855/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 17856
aacd3885
RS
17857static bool
17858rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
17859{
17860 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
17861}
a6c2a102 17862\f
000034eb 17863/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
17864 ADDR can be effectively incremented by incrementing REG.
17865
17866 r0 is special and we must not select it as an address
17867 register by this routine since our caller will try to
17868 increment the returned register via an "la" instruction. */
000034eb 17869
9390387d 17870rtx
a2369ed3 17871find_addr_reg (rtx addr)
000034eb
DE
17872{
17873 while (GET_CODE (addr) == PLUS)
17874 {
02441cd6
JL
17875 if (GET_CODE (XEXP (addr, 0)) == REG
17876 && REGNO (XEXP (addr, 0)) != 0)
000034eb 17877 addr = XEXP (addr, 0);
02441cd6
JL
17878 else if (GET_CODE (XEXP (addr, 1)) == REG
17879 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
17880 addr = XEXP (addr, 1);
17881 else if (CONSTANT_P (XEXP (addr, 0)))
17882 addr = XEXP (addr, 1);
17883 else if (CONSTANT_P (XEXP (addr, 1)))
17884 addr = XEXP (addr, 0);
17885 else
37409796 17886 gcc_unreachable ();
000034eb 17887 }
37409796
NS
17888 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17889 return addr;
000034eb
DE
17890}
17891
a6c2a102 17892void
a2369ed3 17893rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
17894{
17895 fatal_insn ("bad address", op);
17896}
c8023011 17897
ee890fe2
SS
17898#if TARGET_MACHO
17899
efdba735 17900static tree branch_island_list = 0;
ee890fe2 17901
efdba735
SH
17902/* Remember to generate a branch island for far calls to the given
17903 function. */
ee890fe2 17904
f676971a 17905static void
c4ad648e
AM
17906add_compiler_branch_island (tree label_name, tree function_name,
17907 int line_number)
ee890fe2 17908{
efdba735 17909 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 17910 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
17911 TREE_CHAIN (branch_island) = branch_island_list;
17912 branch_island_list = branch_island;
ee890fe2
SS
17913}
17914
efdba735
SH
17915#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17916#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17917#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17918 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 17919
efdba735
SH
17920/* Generate far-jump branch islands for everything on the
17921 branch_island_list. Invoked immediately after the last instruction
17922 of the epilogue has been emitted; the branch-islands must be
17923 appended to, and contiguous with, the function body. Mach-O stubs
17924 are generated in machopic_output_stub(). */
ee890fe2 17925
efdba735
SH
17926static void
17927macho_branch_islands (void)
17928{
17929 char tmp_buf[512];
17930 tree branch_island;
17931
17932 for (branch_island = branch_island_list;
17933 branch_island;
17934 branch_island = TREE_CHAIN (branch_island))
17935 {
17936 const char *label =
17937 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17938 const char *name =
11abc112 17939 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
17940 char name_buf[512];
17941 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17942 if (name[0] == '*' || name[0] == '&')
17943 strcpy (name_buf, name+1);
17944 else
17945 {
17946 name_buf[0] = '_';
17947 strcpy (name_buf+1, name);
17948 }
17949 strcpy (tmp_buf, "\n");
17950 strcat (tmp_buf, label);
ee890fe2 17951#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17952 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17953 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17954#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
17955 if (flag_pic)
17956 {
17957 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17958 strcat (tmp_buf, label);
17959 strcat (tmp_buf, "_pic\n");
17960 strcat (tmp_buf, label);
17961 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 17962
efdba735
SH
17963 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17964 strcat (tmp_buf, name_buf);
17965 strcat (tmp_buf, " - ");
17966 strcat (tmp_buf, label);
17967 strcat (tmp_buf, "_pic)\n");
f676971a 17968
efdba735 17969 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 17970
efdba735
SH
17971 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17972 strcat (tmp_buf, name_buf);
17973 strcat (tmp_buf, " - ");
17974 strcat (tmp_buf, label);
17975 strcat (tmp_buf, "_pic)\n");
f676971a 17976
efdba735
SH
17977 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17978 }
17979 else
17980 {
17981 strcat (tmp_buf, ":\nlis r12,hi16(");
17982 strcat (tmp_buf, name_buf);
17983 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17984 strcat (tmp_buf, name_buf);
17985 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17986 }
17987 output_asm_insn (tmp_buf, 0);
ee890fe2 17988#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17989 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17990 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17991#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 17992 }
ee890fe2 17993
efdba735 17994 branch_island_list = 0;
ee890fe2
SS
17995}
17996
17997/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
17998 already there or not. */
17999
efdba735 18000static int
a2369ed3 18001no_previous_def (tree function_name)
ee890fe2 18002{
efdba735
SH
18003 tree branch_island;
18004 for (branch_island = branch_island_list;
18005 branch_island;
18006 branch_island = TREE_CHAIN (branch_island))
18007 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
18008 return 0;
18009 return 1;
18010}
18011
18012/* GET_PREV_LABEL gets the label name from the previous definition of
18013 the function. */
18014
efdba735 18015static tree
a2369ed3 18016get_prev_label (tree function_name)
ee890fe2 18017{
efdba735
SH
18018 tree branch_island;
18019 for (branch_island = branch_island_list;
18020 branch_island;
18021 branch_island = TREE_CHAIN (branch_island))
18022 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
18023 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
18024 return 0;
18025}
18026
18027/* INSN is either a function call or a millicode call. It may have an
f676971a 18028 unconditional jump in its delay slot.
ee890fe2
SS
18029
18030 CALL_DEST is the routine we are calling. */
18031
18032char *
c4ad648e
AM
18033output_call (rtx insn, rtx *operands, int dest_operand_number,
18034 int cookie_operand_number)
ee890fe2
SS
18035{
18036 static char buf[256];
efdba735
SH
18037 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
18038 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
18039 {
18040 tree labelname;
efdba735 18041 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 18042
ee890fe2
SS
18043 if (no_previous_def (funname))
18044 {
308c142a 18045 int line_number = 0;
ee890fe2
SS
18046 rtx label_rtx = gen_label_rtx ();
18047 char *label_buf, temp_buf[256];
18048 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
18049 CODE_LABEL_NUMBER (label_rtx));
18050 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
18051 labelname = get_identifier (label_buf);
18052 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
18053 if (insn)
18054 line_number = NOTE_LINE_NUMBER (insn);
efdba735 18055 add_compiler_branch_island (labelname, funname, line_number);
ee890fe2
SS
18056 }
18057 else
18058 labelname = get_prev_label (funname);
18059
efdba735
SH
18060 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
18061 instruction will reach 'foo', otherwise link as 'bl L42'".
18062 "L42" should be a 'branch island', that will do a far jump to
18063 'foo'. Branch islands are generated in
18064 macho_branch_islands(). */
ee890fe2 18065 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 18066 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
18067 }
18068 else
efdba735
SH
18069 sprintf (buf, "bl %%z%d", dest_operand_number);
18070 return buf;
ee890fe2
SS
18071}
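
   For orientation, a hypothetical caller that reaches the "jbsr" path above:
   with -mlongcall (or the longcall attribute) on Darwin, the call carries the
   CALL_LONG cookie, output_call emits "jbsr _far_away, L42", and the branch
   island L42 is emitted later by macho_branch_islands ().

extern void far_away (void) __attribute__ ((longcall));  /* hypothetical callee */

void
caller (void)
{
  far_away ();   /* emitted as "jbsr _far_away, L42" plus a branch island */
}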
18072
ee890fe2
SS
18073/* Generate PIC and indirect symbol stubs. */
18074
18075void
a2369ed3 18076machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
18077{
18078 unsigned int length;
a4f6c312
SS
18079 char *symbol_name, *lazy_ptr_name;
18080 char *local_label_0;
ee890fe2
SS
18081 static int label = 0;
18082
df56a27f 18083 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 18084 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 18085
ee890fe2 18086
ee890fe2
SS
18087 length = strlen (symb);
18088 symbol_name = alloca (length + 32);
18089 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
18090
18091 lazy_ptr_name = alloca (length + 32);
18092 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
18093
ee890fe2 18094 if (flag_pic == 2)
56c779bc 18095 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 18096 else
56c779bc 18097 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
18098
18099 if (flag_pic == 2)
18100 {
d974312d
DJ
18101 fprintf (file, "\t.align 5\n");
18102
18103 fprintf (file, "%s:\n", stub);
18104 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
18105
876455fa 18106 label++;
89da1f32 18107 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 18108 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 18109
ee890fe2
SS
18110 fprintf (file, "\tmflr r0\n");
18111 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
18112 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
18113 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
18114 lazy_ptr_name, local_label_0);
18115 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
18116 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
18117 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
18118 lazy_ptr_name, local_label_0);
18119 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
18120 fprintf (file, "\tbctr\n");
18121 }
18122 else
d974312d
DJ
18123 {
18124 fprintf (file, "\t.align 4\n");
18125
18126 fprintf (file, "%s:\n", stub);
18127 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
18128
18129 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
18130 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
18131 (TARGET_64BIT ? "ldu" : "lwzu"),
18132 lazy_ptr_name);
d974312d
DJ
18133 fprintf (file, "\tmtctr r12\n");
18134 fprintf (file, "\tbctr\n");
18135 }
f676971a 18136
56c779bc 18137 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
18138 fprintf (file, "%s:\n", lazy_ptr_name);
18139 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
18140 fprintf (file, "%sdyld_stub_binding_helper\n",
18141 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
18142}
18143
18144/* Legitimize PIC addresses. If the address is already
18145 position-independent, we return ORIG. Newly generated
18146 position-independent addresses go into a reg. This is REG if non
18147 zero, otherwise we allocate register(s) as necessary. */
18148
4fbbe694 18149#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
18150
18151rtx
f676971a 18152rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 18153 rtx reg)
ee890fe2
SS
18154{
18155 rtx base, offset;
18156
18157 if (reg == NULL && ! reload_in_progress && ! reload_completed)
18158 reg = gen_reg_rtx (Pmode);
18159
18160 if (GET_CODE (orig) == CONST)
18161 {
37409796
NS
18162 rtx reg_temp;
18163
ee890fe2
SS
18164 if (GET_CODE (XEXP (orig, 0)) == PLUS
18165 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
18166 return orig;
18167
37409796 18168 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 18169
37409796
NS
18170 /* Use a different reg for the intermediate value, as
18171 it will be marked UNCHANGING. */
18172 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
18173 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
18174 Pmode, reg_temp);
18175 offset =
18176 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
18177 Pmode, reg);
bb8df8a6 18178
ee890fe2
SS
18179 if (GET_CODE (offset) == CONST_INT)
18180 {
18181 if (SMALL_INT (offset))
ed8908e7 18182 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
18183 else if (! reload_in_progress && ! reload_completed)
18184 offset = force_reg (Pmode, offset);
18185 else
c859cda6
DJ
18186 {
18187 rtx mem = force_const_mem (Pmode, orig);
18188 return machopic_legitimize_pic_address (mem, Pmode, reg);
18189 }
ee890fe2 18190 }
f1c25d3b 18191 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
18192 }
18193
18194 /* Fall back on generic machopic code. */
18195 return machopic_legitimize_pic_address (orig, mode, reg);
18196}
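
/* Standalone sketch of the SMALL_INT test used above (illustrative only;
   `fits_si16' is not a GCC function).  Adding 0x8000 maps the signed
   16-bit range [-0x8000, 0x7fff] onto [0, 0xffff], so one unsigned
   comparison decides whether the constant can stay as an immediate
   addend in the legitimized address.  */

#include <stdio.h>

static int
fits_si16 (long long x)
{
  return ((unsigned long long) x + 0x8000) < 0x10000;
}

int
main (void)
{
  printf ("%d %d %d %d\n",
	  fits_si16 (-0x8000),	/* 1 */
	  fits_si16 (0x7fff),	/* 1 */
	  fits_si16 (0x8000),	/* 0 */
	  fits_si16 (-0x8001));	/* 0 */
  return 0;
}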
18197
c4e18b1c
GK
18198/* Output a .machine directive for the Darwin assembler, and call
18199 the generic start_file routine. */
18200
18201static void
18202rs6000_darwin_file_start (void)
18203{
94ff898d 18204 static const struct
c4e18b1c
GK
18205 {
18206 const char *arg;
18207 const char *name;
18208 int if_set;
18209 } mapping[] = {
55dbfb48 18210 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
18211 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
18212 { "power4", "ppc970", 0 },
18213 { "G5", "ppc970", 0 },
18214 { "7450", "ppc7450", 0 },
18215 { "7400", "ppc7400", MASK_ALTIVEC },
18216 { "G4", "ppc7400", 0 },
18217 { "750", "ppc750", 0 },
18218 { "740", "ppc750", 0 },
18219 { "G3", "ppc750", 0 },
18220 { "604e", "ppc604e", 0 },
18221 { "604", "ppc604", 0 },
18222 { "603e", "ppc603", 0 },
18223 { "603", "ppc603", 0 },
18224 { "601", "ppc601", 0 },
18225 { NULL, "ppc", 0 } };
18226 const char *cpu_id = "";
18227 size_t i;
94ff898d 18228
9390387d 18229 rs6000_file_start ();
192d0f89 18230 darwin_file_start ();
c4e18b1c
GK
18231
18232 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
18233 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
18234 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
18235 && rs6000_select[i].string[0] != '\0')
18236 cpu_id = rs6000_select[i].string;
18237
18238 /* Look through the mapping array. Pick the first name that either
18239 matches the argument, has a bit set in IF_SET that is also set
18240 in the target flags, or has a NULL name. */
18241
18242 i = 0;
18243 while (mapping[i].arg != NULL
18244 && strcmp (mapping[i].arg, cpu_id) != 0
18245 && (mapping[i].if_set & target_flags) == 0)
18246 i++;
18247
18248 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
18249}
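
/* Standalone sketch of the first-match lookup above, with a reduced,
   illustrative table and made-up names (`cpu_map', `pick_machine'); the
   real table also keys on target mask bits via IF_SET, which this sketch
   leaves at zero.  The loop stops at the first row whose ARG matches, or
   at the NULL sentinel row, which supplies the ".machine" fallback.  */

#include <stdio.h>
#include <string.h>

struct cpu_map { const char *arg; const char *name; int if_set; };

static const char *
pick_machine (const struct cpu_map *map, const char *cpu_id, int flags)
{
  size_t i = 0;
  while (map[i].arg != NULL
	 && strcmp (map[i].arg, cpu_id) != 0
	 && (map[i].if_set & flags) == 0)
    i++;
  return map[i].name;
}

int
main (void)
{
  static const struct cpu_map map[] = {
    { "7400", "ppc7400", 0 },
    { "G4",   "ppc7400", 0 },
    { NULL,   "ppc",     0 } };

  printf (".machine %s\n", pick_machine (map, "G4", 0));	/* ppc7400 */
  printf (".machine %s\n", pick_machine (map, "unknown", 0));	/* ppc */
  return 0;
}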
18250
ee890fe2 18251#endif /* TARGET_MACHO */
7c262518
RH
18252
18253#if TARGET_ELF
18254static unsigned int
a2369ed3 18255rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
7c262518 18256{
1ff8f81a
AM
18257 return default_section_type_flags_1 (decl, name, reloc,
18258 flag_pic || DEFAULT_ABI == ABI_AIX);
7c262518 18259}
d9f6800d
RH
18260
18261/* Record an element in the table of global constructors. SYMBOL is
18262 a SYMBOL_REF of the function to be called; PRIORITY is a number
18263 between 0 and MAX_INIT_PRIORITY.
18264
18265 This differs from default_named_section_asm_out_constructor in
18266 that we have special handling for -mrelocatable. */
18267
18268static void
a2369ed3 18269rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
18270{
18271 const char *section = ".ctors";
18272 char buf[16];
18273
18274 if (priority != DEFAULT_INIT_PRIORITY)
18275 {
18276 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
18277 /* Invert the numbering so the linker puts us in the proper
18278 order; constructors are run from right to left, and the
18279 linker sorts in increasing order. */
18280 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
18281 section = buf;
18282 }
18283
d6b5193b 18284 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 18285 assemble_align (POINTER_SIZE);
d9f6800d
RH
18286
18287 if (TARGET_RELOCATABLE)
18288 {
18289 fputs ("\t.long (", asm_out_file);
18290 output_addr_const (asm_out_file, symbol);
18291 fputs (")@fixup\n", asm_out_file);
18292 }
18293 else
c8af3574 18294 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
18295}
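
/* Standalone sketch of the section-name inversion above.  It assumes
   MAX_INIT_PRIORITY is 65535 (the usual GCC value; an assumption here,
   spelled SKETCH_MAX_INIT_PRIORITY to make that explicit).  A priority
   of 100 yields ".ctors.65435", so the linker's increasing sort plus the
   right-to-left execution of .ctors gives the intended run order.  */

#include <stdio.h>

#define SKETCH_MAX_INIT_PRIORITY 65535

int
main (void)
{
  char buf[16];
  int priority = 100;

  sprintf (buf, ".ctors.%.5u",
	   (unsigned) (SKETCH_MAX_INIT_PRIORITY - priority));
  printf ("%s\n", buf);		/* prints ".ctors.65435" */
  return 0;
}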
18296
18297static void
a2369ed3 18298rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
18299{
18300 const char *section = ".dtors";
18301 char buf[16];
18302
18303 if (priority != DEFAULT_INIT_PRIORITY)
18304 {
18305 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
18306 /* Invert the numbering so the linker puts us in the proper
18307 order; constructors are run from right to left, and the
18308 linker sorts in increasing order. */
18309 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
18310 section = buf;
18311 }
18312
d6b5193b 18313 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 18314 assemble_align (POINTER_SIZE);
d9f6800d
RH
18315
18316 if (TARGET_RELOCATABLE)
18317 {
18318 fputs ("\t.long (", asm_out_file);
18319 output_addr_const (asm_out_file, symbol);
18320 fputs (")@fixup\n", asm_out_file);
18321 }
18322 else
c8af3574 18323 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 18324}
9739c90c
JJ
18325
18326void
a2369ed3 18327rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
18328{
18329 if (TARGET_64BIT)
18330 {
18331 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
18332 ASM_OUTPUT_LABEL (file, name);
18333 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
18334 rs6000_output_function_entry (file, name);
18335 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
18336 if (DOT_SYMBOLS)
9739c90c 18337 {
85b776df 18338 fputs ("\t.size\t", file);
9739c90c 18339 assemble_name (file, name);
85b776df
AM
18340 fputs (",24\n\t.type\t.", file);
18341 assemble_name (file, name);
18342 fputs (",@function\n", file);
18343 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
18344 {
18345 fputs ("\t.globl\t.", file);
18346 assemble_name (file, name);
18347 putc ('\n', file);
18348 }
9739c90c 18349 }
85b776df
AM
18350 else
18351 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 18352 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
18353 rs6000_output_function_entry (file, name);
18354 fputs (":\n", file);
9739c90c
JJ
18355 return;
18356 }
18357
18358 if (TARGET_RELOCATABLE
7f970b70 18359 && !TARGET_SECURE_PLT
9739c90c 18360 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 18361 && uses_TOC ())
9739c90c
JJ
18362 {
18363 char buf[256];
18364
18365 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
18366
18367 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
18368 fprintf (file, "\t.long ");
18369 assemble_name (file, buf);
18370 putc ('-', file);
18371 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
18372 assemble_name (file, buf);
18373 putc ('\n', file);
18374 }
18375
18376 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
18377 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
18378
18379 if (DEFAULT_ABI == ABI_AIX)
18380 {
18381 const char *desc_name, *orig_name;
18382
18383 orig_name = (*targetm.strip_name_encoding) (name);
18384 desc_name = orig_name;
18385 while (*desc_name == '.')
18386 desc_name++;
18387
18388 if (TREE_PUBLIC (decl))
18389 fprintf (file, "\t.globl %s\n", desc_name);
18390
18391 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18392 fprintf (file, "%s:\n", desc_name);
18393 fprintf (file, "\t.long %s\n", orig_name);
18394 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18395 if (DEFAULT_ABI == ABI_AIX)
18396 fputs ("\t.long 0\n", file);
18397 fprintf (file, "\t.previous\n");
18398 }
18399 ASM_OUTPUT_LABEL (file, name);
18400}
1334b570
AM
18401
18402static void
18403rs6000_elf_end_indicate_exec_stack (void)
18404{
18405 if (TARGET_32BIT)
18406 file_end_indicate_exec_stack ();
18407}
7c262518
RH
18408#endif
18409
cbaaba19 18410#if TARGET_XCOFF
0d5817b2
DE
18411static void
18412rs6000_xcoff_asm_output_anchor (rtx symbol)
18413{
18414 char buffer[100];
18415
18416 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
18417 SYMBOL_REF_BLOCK_OFFSET (symbol));
18418 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
18419}
18420
7c262518 18421static void
a2369ed3 18422rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
18423{
18424 fputs (GLOBAL_ASM_OP, stream);
18425 RS6000_OUTPUT_BASENAME (stream, name);
18426 putc ('\n', stream);
18427}
18428
d6b5193b
RS
18429/* A get_unnamed_section callback, used for read-only sections.  DIRECTIVE
18430   points to the section string variable.  */
18431
18432static void
18433rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
18434{
18435 fprintf (asm_out_file, "\t.csect %s[RO],3\n",
18436 *(const char *const *) directive);
18437}
18438
18439/* Likewise for read-write sections. */
18440
18441static void
18442rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
18443{
18444 fprintf (asm_out_file, "\t.csect %s[RW],3\n",
18445 *(const char *const *) directive);
18446}
18447
18448/* A get_unnamed_section callback, used for switching to toc_section. */
18449
18450static void
18451rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
18452{
18453 if (TARGET_MINIMAL_TOC)
18454 {
18455 /* toc_section is always selected at least once from
18456 rs6000_xcoff_file_start, so this is guaranteed to
18457 always be defined once and only once in each file. */
18458 if (!toc_initialized)
18459 {
18460 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
18461 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
18462 toc_initialized = 1;
18463 }
18464 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
18465 (TARGET_32BIT ? "" : ",3"));
18466 }
18467 else
18468 fputs ("\t.toc\n", asm_out_file);
18469}
18470
18471/* Implement TARGET_ASM_INIT_SECTIONS. */
18472
18473static void
18474rs6000_xcoff_asm_init_sections (void)
18475{
18476 read_only_data_section
18477 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
18478 &xcoff_read_only_section_name);
18479
18480 private_data_section
18481 = get_unnamed_section (SECTION_WRITE,
18482 rs6000_xcoff_output_readwrite_section_asm_op,
18483 &xcoff_private_data_section_name);
18484
18485 read_only_private_data_section
18486 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
18487 &xcoff_private_data_section_name);
18488
18489 toc_section
18490 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
18491
18492 readonly_data_section = read_only_data_section;
18493 exception_section = data_section;
18494}
18495
b275d088 18496static void
c18a5b6c
MM
18497rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18498 tree decl ATTRIBUTE_UNUSED)
7c262518 18499{
0e5dbd9b
DE
18500 int smclass;
18501 static const char * const suffix[3] = { "PR", "RO", "RW" };
18502
18503 if (flags & SECTION_CODE)
18504 smclass = 0;
18505 else if (flags & SECTION_WRITE)
18506 smclass = 2;
18507 else
18508 smclass = 1;
18509
5b5198f7 18510 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 18511 (flags & SECTION_CODE) ? "." : "",
5b5198f7 18512 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 18513}
ae46c4e0 18514
d6b5193b 18515static section *
f676971a 18516rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 18517 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 18518{
5add3202 18519 if (decl_readonly_section_1 (decl, reloc, 1))
ae46c4e0 18520 {
0e5dbd9b 18521 if (TREE_PUBLIC (decl))
d6b5193b 18522 return read_only_data_section;
ae46c4e0 18523 else
d6b5193b 18524 return read_only_private_data_section;
ae46c4e0
RH
18525 }
18526 else
18527 {
0e5dbd9b 18528 if (TREE_PUBLIC (decl))
d6b5193b 18529 return data_section;
ae46c4e0 18530 else
d6b5193b 18531 return private_data_section;
ae46c4e0
RH
18532 }
18533}
18534
18535static void
a2369ed3 18536rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
18537{
18538 const char *name;
ae46c4e0 18539
5b5198f7
DE
18540 /* Use select_section for private and uninitialized data. */
18541 if (!TREE_PUBLIC (decl)
18542 || DECL_COMMON (decl)
0e5dbd9b
DE
18543 || DECL_INITIAL (decl) == NULL_TREE
18544 || DECL_INITIAL (decl) == error_mark_node
18545 || (flag_zero_initialized_in_bss
18546 && initializer_zerop (DECL_INITIAL (decl))))
18547 return;
18548
18549 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18550 name = (*targetm.strip_name_encoding) (name);
18551 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 18552}
b64a1b53 18553
fb49053f
RH
18554/* Select section for constant in constant pool.
18555
18556 On RS/6000, all constants are in the private read-only data area.
18557 However, if this is being placed in the TOC it must be output as a
18558 toc entry. */
18559
d6b5193b 18560static section *
f676971a 18561rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 18562 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
18563{
18564 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 18565 return toc_section;
b64a1b53 18566 else
d6b5193b 18567 return read_only_private_data_section;
b64a1b53 18568}
772c5265
RH
18569
18570/* Remove any trailing [DS] or the like from the symbol name. */
18571
18572static const char *
a2369ed3 18573rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
18574{
18575 size_t len;
18576 if (*name == '*')
18577 name++;
18578 len = strlen (name);
18579 if (name[len - 1] == ']')
18580 return ggc_alloc_string (name, len - 4);
18581 else
18582 return name;
18583}
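
/* Standalone sketch of the stripping above (illustrative only;
   `strip_sketch' is not a GCC function).  A leading '*' is skipped and a
   trailing four-character storage-mapping-class suffix such as "[DS]" or
   "[RW]" is dropped, so "*foo[DS]" becomes "foo".  */

#include <stdio.h>
#include <string.h>

static void
strip_sketch (const char *name, char *out)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  if (len >= 4 && name[len - 1] == ']')
    len -= 4;
  memcpy (out, name, len);
  out[len] = '\0';
}

int
main (void)
{
  char buf[32];

  strip_sketch ("*foo[DS]", buf);
  printf ("%s\n", buf);		/* prints "foo" */
  return 0;
}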
18584
5add3202
DE
18585/* Section attributes. AIX is always PIC. */
18586
18587static unsigned int
a2369ed3 18588rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 18589{
5b5198f7
DE
18590 unsigned int align;
18591 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18592
18593 /* Align to at least UNIT size. */
18594 if (flags & SECTION_CODE)
18595 align = MIN_UNITS_PER_WORD;
18596 else
18597 /* Increase alignment of large objects if not already stricter. */
18598 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18599 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18600 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18601
18602 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 18603}
a5fe455b 18604
1bc7c5b6
ZW
18605/* Output at beginning of assembler file.
18606
18607 Initialize the section names for the RS/6000 at this point.
18608
18609 Specify filename, including full path, to assembler.
18610
18611 We want to go into the TOC section so at least one .toc will be emitted.
18612 Also, in order to output proper .bs/.es pairs, we need at least one static
18613 [RW] section emitted.
18614
18615 Finally, declare mcount when profiling to make the assembler happy. */
18616
18617static void
863d938c 18618rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
18619{
18620 rs6000_gen_section_name (&xcoff_bss_section_name,
18621 main_input_filename, ".bss_");
18622 rs6000_gen_section_name (&xcoff_private_data_section_name,
18623 main_input_filename, ".rw_");
18624 rs6000_gen_section_name (&xcoff_read_only_section_name,
18625 main_input_filename, ".ro_");
18626
18627 fputs ("\t.file\t", asm_out_file);
18628 output_quoted_string (asm_out_file, main_input_filename);
18629 fputc ('\n', asm_out_file);
1bc7c5b6 18630 if (write_symbols != NO_DEBUG)
d6b5193b
RS
18631 switch_to_section (private_data_section);
18632 switch_to_section (text_section);
1bc7c5b6
ZW
18633 if (profile_flag)
18634 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18635 rs6000_file_start ();
18636}
18637
a5fe455b
ZW
18638/* Output at end of assembler file.
18639 On the RS/6000, referencing data should automatically pull in text. */
18640
18641static void
863d938c 18642rs6000_xcoff_file_end (void)
a5fe455b 18643{
d6b5193b 18644 switch_to_section (text_section);
a5fe455b 18645 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 18646 switch_to_section (data_section);
a5fe455b
ZW
18647 fputs (TARGET_32BIT
18648 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18649 asm_out_file);
18650}
f1384257 18651#endif /* TARGET_XCOFF */
0e5dbd9b 18652
3c50106f
RH
18653/* Compute a (partial) cost for rtx X. Return true if the complete
18654 cost has been computed, and false if subexpressions should be
18655 scanned. In either case, *TOTAL contains the cost result. */
18656
18657static bool
1494c534 18658rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 18659{
f0517163
RS
18660 enum machine_mode mode = GET_MODE (x);
18661
3c50106f
RH
18662 switch (code)
18663 {
30a555d9 18664 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 18665 case CONST_INT:
066cd967
DE
18666 if (((outer_code == SET
18667 || outer_code == PLUS
18668 || outer_code == MINUS)
279bb624
DE
18669 && (satisfies_constraint_I (x)
18670 || satisfies_constraint_L (x)))
066cd967 18671 || (outer_code == AND
279bb624
DE
18672 && (satisfies_constraint_K (x)
18673 || (mode == SImode
18674 ? satisfies_constraint_L (x)
18675 : satisfies_constraint_J (x))
1990cd79
AM
18676 || mask_operand (x, mode)
18677 || (mode == DImode
18678 && mask64_operand (x, DImode))))
22e54023 18679 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
18680 && (satisfies_constraint_K (x)
18681 || (mode == SImode
18682 ? satisfies_constraint_L (x)
18683 : satisfies_constraint_J (x))))
066cd967
DE
18684 || outer_code == ASHIFT
18685 || outer_code == ASHIFTRT
18686 || outer_code == LSHIFTRT
18687 || outer_code == ROTATE
18688 || outer_code == ROTATERT
d5861a7a 18689 || outer_code == ZERO_EXTRACT
066cd967 18690 || (outer_code == MULT
279bb624 18691 && satisfies_constraint_I (x))
22e54023
DE
18692 || ((outer_code == DIV || outer_code == UDIV
18693 || outer_code == MOD || outer_code == UMOD)
18694 && exact_log2 (INTVAL (x)) >= 0)
066cd967 18695 || (outer_code == COMPARE
279bb624
DE
18696 && (satisfies_constraint_I (x)
18697 || satisfies_constraint_K (x)))
22e54023 18698 || (outer_code == EQ
279bb624
DE
18699 && (satisfies_constraint_I (x)
18700 || satisfies_constraint_K (x)
18701 || (mode == SImode
18702 ? satisfies_constraint_L (x)
18703 : satisfies_constraint_J (x))))
22e54023 18704 || (outer_code == GTU
279bb624 18705 && satisfies_constraint_I (x))
22e54023 18706 || (outer_code == LTU
279bb624 18707 && satisfies_constraint_P (x)))
066cd967
DE
18708 {
18709 *total = 0;
18710 return true;
18711 }
18712 else if ((outer_code == PLUS
4ae234b0 18713 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 18714 || (outer_code == MINUS
4ae234b0 18715 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
18716 || ((outer_code == SET
18717 || outer_code == IOR
18718 || outer_code == XOR)
18719 && (INTVAL (x)
18720 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18721 {
18722 *total = COSTS_N_INSNS (1);
18723 return true;
18724 }
18725 /* FALLTHRU */
18726
18727 case CONST_DOUBLE:
f6fe3a22 18728 if (mode == DImode && code == CONST_DOUBLE)
066cd967 18729 {
f6fe3a22
DE
18730 if ((outer_code == IOR || outer_code == XOR)
18731 && CONST_DOUBLE_HIGH (x) == 0
18732 && (CONST_DOUBLE_LOW (x)
18733 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
18734 {
18735 *total = 0;
18736 return true;
18737 }
18738 else if ((outer_code == AND && and64_2_operand (x, DImode))
18739 || ((outer_code == SET
18740 || outer_code == IOR
18741 || outer_code == XOR)
18742 && CONST_DOUBLE_HIGH (x) == 0))
18743 {
18744 *total = COSTS_N_INSNS (1);
18745 return true;
18746 }
066cd967
DE
18747 }
18748 /* FALLTHRU */
18749
3c50106f 18750 case CONST:
066cd967 18751 case HIGH:
3c50106f 18752 case SYMBOL_REF:
066cd967
DE
18753 case MEM:
18754 /* When optimizing for size, MEM should be slightly more expensive
18755	 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 18756 L1 cache latency is about two instructions. */
066cd967 18757 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
18758 return true;
18759
30a555d9
DE
18760 case LABEL_REF:
18761 *total = 0;
18762 return true;
18763
3c50106f 18764 case PLUS:
f0517163 18765 if (mode == DFmode)
066cd967
DE
18766 {
18767 if (GET_CODE (XEXP (x, 0)) == MULT)
18768 {
18769 /* FNMA accounted in outer NEG. */
18770 if (outer_code == NEG)
18771 *total = rs6000_cost->dmul - rs6000_cost->fp;
18772 else
18773 *total = rs6000_cost->dmul;
18774 }
18775 else
18776 *total = rs6000_cost->fp;
18777 }
f0517163 18778 else if (mode == SFmode)
066cd967
DE
18779 {
18780 /* FNMA accounted in outer NEG. */
18781 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18782 *total = 0;
18783 else
18784 *total = rs6000_cost->fp;
18785 }
f0517163 18786 else
066cd967
DE
18787 *total = COSTS_N_INSNS (1);
18788 return false;
3c50106f 18789
52190329 18790 case MINUS:
f0517163 18791 if (mode == DFmode)
066cd967
DE
18792 {
18793 if (GET_CODE (XEXP (x, 0)) == MULT)
18794 {
18795 /* FNMA accounted in outer NEG. */
18796 if (outer_code == NEG)
18797 *total = 0;
18798 else
18799 *total = rs6000_cost->dmul;
18800 }
18801 else
18802 *total = rs6000_cost->fp;
18803 }
f0517163 18804 else if (mode == SFmode)
066cd967
DE
18805 {
18806 /* FNMA accounted in outer NEG. */
18807 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18808 *total = 0;
18809 else
18810 *total = rs6000_cost->fp;
18811 }
f0517163 18812 else
c4ad648e 18813 *total = COSTS_N_INSNS (1);
066cd967 18814 return false;
3c50106f
RH
18815
18816 case MULT:
c9dbf840 18817 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 18818 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 18819 {
8b897cfa
RS
18820 if (INTVAL (XEXP (x, 1)) >= -256
18821 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 18822 *total = rs6000_cost->mulsi_const9;
8b897cfa 18823 else
06a67bdd 18824 *total = rs6000_cost->mulsi_const;
3c50106f 18825 }
066cd967
DE
18826 /* FMA accounted in outer PLUS/MINUS. */
18827 else if ((mode == DFmode || mode == SFmode)
18828 && (outer_code == PLUS || outer_code == MINUS))
18829 *total = 0;
f0517163 18830 else if (mode == DFmode)
06a67bdd 18831 *total = rs6000_cost->dmul;
f0517163 18832 else if (mode == SFmode)
06a67bdd 18833 *total = rs6000_cost->fp;
f0517163 18834 else if (mode == DImode)
06a67bdd 18835 *total = rs6000_cost->muldi;
8b897cfa 18836 else
06a67bdd 18837 *total = rs6000_cost->mulsi;
066cd967 18838 return false;
3c50106f
RH
18839
18840 case DIV:
18841 case MOD:
f0517163
RS
18842 if (FLOAT_MODE_P (mode))
18843 {
06a67bdd
RS
18844 *total = mode == DFmode ? rs6000_cost->ddiv
18845 : rs6000_cost->sdiv;
066cd967 18846 return false;
f0517163 18847 }
5efb1046 18848 /* FALLTHRU */
3c50106f
RH
18849
18850 case UDIV:
18851 case UMOD:
627b6fe2
DJ
18852 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18853 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18854 {
18855 if (code == DIV || code == MOD)
18856 /* Shift, addze */
18857 *total = COSTS_N_INSNS (2);
18858 else
18859 /* Shift */
18860 *total = COSTS_N_INSNS (1);
18861 }
c4ad648e 18862 else
627b6fe2
DJ
18863 {
18864 if (GET_MODE (XEXP (x, 1)) == DImode)
18865 *total = rs6000_cost->divdi;
18866 else
18867 *total = rs6000_cost->divsi;
18868 }
18869 /* Add in shift and subtract for MOD. */
18870 if (code == MOD || code == UMOD)
18871 *total += COSTS_N_INSNS (2);
066cd967 18872 return false;
3c50106f
RH
18873
18874 case FFS:
18875 *total = COSTS_N_INSNS (4);
066cd967 18876 return false;
3c50106f 18877
06a67bdd 18878 case NOT:
066cd967
DE
18879 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18880 {
18881 *total = 0;
18882 return false;
18883 }
18884 /* FALLTHRU */
18885
18886 case AND:
18887 case IOR:
18888 case XOR:
d5861a7a
DE
18889 case ZERO_EXTRACT:
18890 *total = COSTS_N_INSNS (1);
18891 return false;
18892
066cd967
DE
18893 case ASHIFT:
18894 case ASHIFTRT:
18895 case LSHIFTRT:
18896 case ROTATE:
18897 case ROTATERT:
d5861a7a 18898 /* Handle mul_highpart. */
066cd967
DE
18899 if (outer_code == TRUNCATE
18900 && GET_CODE (XEXP (x, 0)) == MULT)
18901 {
18902 if (mode == DImode)
18903 *total = rs6000_cost->muldi;
18904 else
18905 *total = rs6000_cost->mulsi;
18906 return true;
18907 }
d5861a7a
DE
18908 else if (outer_code == AND)
18909 *total = 0;
18910 else
18911 *total = COSTS_N_INSNS (1);
18912 return false;
18913
18914 case SIGN_EXTEND:
18915 case ZERO_EXTEND:
18916 if (GET_CODE (XEXP (x, 0)) == MEM)
18917 *total = 0;
18918 else
18919 *total = COSTS_N_INSNS (1);
066cd967 18920 return false;
06a67bdd 18921
066cd967
DE
18922 case COMPARE:
18923 case NEG:
18924 case ABS:
18925 if (!FLOAT_MODE_P (mode))
18926 {
18927 *total = COSTS_N_INSNS (1);
18928 return false;
18929 }
18930 /* FALLTHRU */
18931
18932 case FLOAT:
18933 case UNSIGNED_FLOAT:
18934 case FIX:
18935 case UNSIGNED_FIX:
06a67bdd
RS
18936 case FLOAT_TRUNCATE:
18937 *total = rs6000_cost->fp;
066cd967 18938 return false;
06a67bdd 18939
a2af5043
DJ
18940 case FLOAT_EXTEND:
18941 if (mode == DFmode)
18942 *total = 0;
18943 else
18944 *total = rs6000_cost->fp;
18945 return false;
18946
06a67bdd
RS
18947 case UNSPEC:
18948 switch (XINT (x, 1))
18949 {
18950 case UNSPEC_FRSP:
18951 *total = rs6000_cost->fp;
18952 return true;
18953
18954 default:
18955 break;
18956 }
18957 break;
18958
18959 case CALL:
18960 case IF_THEN_ELSE:
18961 if (optimize_size)
18962 {
18963 *total = COSTS_N_INSNS (1);
18964 return true;
18965 }
066cd967
DE
18966 else if (FLOAT_MODE_P (mode)
18967 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18968 {
18969 *total = rs6000_cost->fp;
18970 return false;
18971 }
06a67bdd
RS
18972 break;
18973
c0600ecd
DE
18974 case EQ:
18975 case GTU:
18976 case LTU:
22e54023
DE
18977 /* Carry bit requires mode == Pmode.
18978 NEG or PLUS already counted so only add one. */
18979 if (mode == Pmode
18980 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 18981 {
22e54023
DE
18982 *total = COSTS_N_INSNS (1);
18983 return true;
18984 }
18985 if (outer_code == SET)
18986 {
18987 if (XEXP (x, 1) == const0_rtx)
c0600ecd 18988 {
22e54023 18989 *total = COSTS_N_INSNS (2);
c0600ecd 18990 return true;
c0600ecd 18991 }
22e54023
DE
18992 else if (mode == Pmode)
18993 {
18994 *total = COSTS_N_INSNS (3);
18995 return false;
18996 }
18997 }
18998 /* FALLTHRU */
18999
19000 case GT:
19001 case LT:
19002 case UNORDERED:
19003 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
19004 {
19005 *total = COSTS_N_INSNS (2);
19006 return true;
c0600ecd 19007 }
22e54023
DE
19008 /* CC COMPARE. */
19009 if (outer_code == COMPARE)
19010 {
19011 *total = 0;
19012 return true;
19013 }
19014 break;
c0600ecd 19015
3c50106f 19016 default:
06a67bdd 19017 break;
3c50106f 19018 }
06a67bdd
RS
19019
19020 return false;
3c50106f
RH
19021}
19022
34bb030a
DE
19023/* A C expression returning the cost of moving data from a register of class
19024 CLASS1 to one of CLASS2. */
19025
19026int
f676971a 19027rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 19028 enum reg_class from, enum reg_class to)
34bb030a
DE
19029{
19030 /* Moves from/to GENERAL_REGS. */
19031 if (reg_classes_intersect_p (to, GENERAL_REGS)
19032 || reg_classes_intersect_p (from, GENERAL_REGS))
19033 {
19034 if (! reg_classes_intersect_p (to, GENERAL_REGS))
19035 from = to;
19036
19037 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
19038 return (rs6000_memory_move_cost (mode, from, 0)
19039 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
19040
c4ad648e
AM
19041 /* It's more expensive to move CR_REGS than CR0_REGS because of the
19042 shift. */
34bb030a
DE
19043 else if (from == CR_REGS)
19044 return 4;
19045
19046 else
c4ad648e 19047 /* A move will cost one instruction per GPR moved. */
c8b622ff 19048 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
19049 }
19050
c4ad648e 19051 /* Moving between two similar registers is just one instruction. */
34bb030a
DE
19052 else if (reg_classes_intersect_p (to, from))
19053 return mode == TFmode ? 4 : 2;
19054
c4ad648e 19055 /* Everything else has to go through GENERAL_REGS. */
34bb030a 19056 else
f676971a 19057 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
19058 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
19059}
19060
19061/* A C expression returning the cost of moving data of MODE from a register to
19062 or from memory. */
19063
19064int
f676971a 19065rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 19066 int in ATTRIBUTE_UNUSED)
34bb030a
DE
19067{
19068 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 19069 return 4 * hard_regno_nregs[0][mode];
34bb030a 19070 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 19071 return 4 * hard_regno_nregs[32][mode];
34bb030a 19072 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 19073 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
19074 else
19075 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
19076}
19077
ef765ea9
DE
19078/* Newton-Raphson approximation of single-precision floating point divide n/d.
19079 Assumes no trapping math and finite arguments. */
19080
19081void
19082rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
19083{
19084 rtx x0, e0, e1, y1, u0, v0, one;
19085
19086 x0 = gen_reg_rtx (SFmode);
19087 e0 = gen_reg_rtx (SFmode);
19088 e1 = gen_reg_rtx (SFmode);
19089 y1 = gen_reg_rtx (SFmode);
19090 u0 = gen_reg_rtx (SFmode);
19091 v0 = gen_reg_rtx (SFmode);
19092 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
19093
19094 /* x0 = 1./d estimate */
19095 emit_insn (gen_rtx_SET (VOIDmode, x0,
19096 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
19097 UNSPEC_FRES)));
19098 /* e0 = 1. - d * x0 */
19099 emit_insn (gen_rtx_SET (VOIDmode, e0,
19100 gen_rtx_MINUS (SFmode, one,
19101 gen_rtx_MULT (SFmode, d, x0))));
19102 /* e1 = e0 + e0 * e0 */
19103 emit_insn (gen_rtx_SET (VOIDmode, e1,
19104 gen_rtx_PLUS (SFmode,
19105 gen_rtx_MULT (SFmode, e0, e0), e0)));
19106 /* y1 = x0 + e1 * x0 */
19107 emit_insn (gen_rtx_SET (VOIDmode, y1,
19108 gen_rtx_PLUS (SFmode,
19109 gen_rtx_MULT (SFmode, e1, x0), x0)));
19110 /* u0 = n * y1 */
19111 emit_insn (gen_rtx_SET (VOIDmode, u0,
19112 gen_rtx_MULT (SFmode, n, y1)));
19113 /* v0 = n - d * u0 */
19114 emit_insn (gen_rtx_SET (VOIDmode, v0,
19115 gen_rtx_MINUS (SFmode, n,
19116 gen_rtx_MULT (SFmode, d, u0))));
19117 /* res = u0 + v0 * y1 */
19118 emit_insn (gen_rtx_SET (VOIDmode, res,
19119 gen_rtx_PLUS (SFmode,
19120 gen_rtx_MULT (SFmode, v0, y1), u0)));
19121}
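
/* Standalone sketch of the refinement sequence emitted above, in plain C
   (illustrative only; the compiler emits this as RTL and seeds it with
   the fres reciprocal-estimate instruction, which is much more accurate
   than the crude power-of-two seed used here).  The double-precision
   variant below follows the same scheme with extra refinement steps.  */

#include <math.h>
#include <stdio.h>

static float
swdiv_sketch (float n, float d)
{
  int ex;
  float x0, e0, e1, y1, u0, v0;

  frexpf (d, &ex);		/* crude seed: x0 = 1 / 2^ex ~= 1/d */
  x0 = ldexpf (1.0f, -ex);
  e0 = 1.0f - d * x0;		/* residual of the estimate */
  e1 = e0 + e0 * e0;		/* two terms of the series for 1/(1-e0) - 1 */
  y1 = x0 + e1 * x0;		/* refined reciprocal */
  u0 = n * y1;			/* first quotient approximation */
  v0 = n - d * u0;		/* remainder */
  return u0 + v0 * y1;		/* corrected quotient */
}

int
main (void)
{
  float n = 1.0f, d = 3.0f;

  printf ("approx %f  exact %f\n", swdiv_sketch (n, d), n / d);
  return 0;
}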
19122
19123/* Newton-Raphson approximation of double-precision floating point divide n/d.
19124 Assumes no trapping math and finite arguments. */
19125
19126void
19127rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
19128{
19129 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
19130
19131 x0 = gen_reg_rtx (DFmode);
19132 e0 = gen_reg_rtx (DFmode);
19133 e1 = gen_reg_rtx (DFmode);
19134 e2 = gen_reg_rtx (DFmode);
19135 y1 = gen_reg_rtx (DFmode);
19136 y2 = gen_reg_rtx (DFmode);
19137 y3 = gen_reg_rtx (DFmode);
19138 u0 = gen_reg_rtx (DFmode);
19139 v0 = gen_reg_rtx (DFmode);
19140 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
19141
19142 /* x0 = 1./d estimate */
19143 emit_insn (gen_rtx_SET (VOIDmode, x0,
19144 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
19145 UNSPEC_FRES)));
19146 /* e0 = 1. - d * x0 */
19147 emit_insn (gen_rtx_SET (VOIDmode, e0,
19148 gen_rtx_MINUS (DFmode, one,
19149					 gen_rtx_MULT (DFmode, d, x0))));
19150 /* y1 = x0 + e0 * x0 */
19151 emit_insn (gen_rtx_SET (VOIDmode, y1,
19152 gen_rtx_PLUS (DFmode,
19153 gen_rtx_MULT (DFmode, e0, x0), x0)));
19154 /* e1 = e0 * e0 */
19155 emit_insn (gen_rtx_SET (VOIDmode, e1,
19156 gen_rtx_MULT (DFmode, e0, e0)));
19157 /* y2 = y1 + e1 * y1 */
19158 emit_insn (gen_rtx_SET (VOIDmode, y2,
19159 gen_rtx_PLUS (DFmode,
19160 gen_rtx_MULT (DFmode, e1, y1), y1)));
19161 /* e2 = e1 * e1 */
19162 emit_insn (gen_rtx_SET (VOIDmode, e2,
19163 gen_rtx_MULT (DFmode, e1, e1)));
19164 /* y3 = y2 + e2 * y2 */
19165 emit_insn (gen_rtx_SET (VOIDmode, y3,
19166 gen_rtx_PLUS (DFmode,
19167 gen_rtx_MULT (DFmode, e2, y2), y2)));
19168 /* u0 = n * y3 */
19169 emit_insn (gen_rtx_SET (VOIDmode, u0,
19170 gen_rtx_MULT (DFmode, n, y3)));
19171 /* v0 = n - d * u0 */
19172 emit_insn (gen_rtx_SET (VOIDmode, v0,
19173 gen_rtx_MINUS (DFmode, n,
19174 gen_rtx_MULT (DFmode, d, u0))));
19175 /* res = u0 + v0 * y3 */
19176 emit_insn (gen_rtx_SET (VOIDmode, res,
19177 gen_rtx_PLUS (DFmode,
19178 gen_rtx_MULT (DFmode, v0, y3), u0)));
19179}
19180
ded9bf77
AH
19181/* Return an RTX representing where to find the function value of a
19182 function returning MODE. */
19183static rtx
19184rs6000_complex_function_value (enum machine_mode mode)
19185{
19186 unsigned int regno;
19187 rtx r1, r2;
19188 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 19189 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 19190
18f63bfa
AH
19191 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
19192 regno = FP_ARG_RETURN;
354ed18f
AH
19193 else
19194 {
18f63bfa 19195 regno = GP_ARG_RETURN;
ded9bf77 19196
18f63bfa
AH
19197 /* 32-bit is OK since it'll go in r3/r4. */
19198 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
19199 return gen_rtx_REG (mode, regno);
19200 }
19201
18f63bfa
AH
19202 if (inner_bytes >= 8)
19203 return gen_rtx_REG (mode, regno);
19204
ded9bf77
AH
19205 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
19206 const0_rtx);
19207 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 19208 GEN_INT (inner_bytes));
ded9bf77
AH
19209 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
19210}
19211
a6ebc39a
AH
19212/* Define how to find the value returned by a function.
19213 VALTYPE is the data type of the value (as a tree).
19214 If the precise function being called is known, FUNC is its FUNCTION_DECL;
19215 otherwise, FUNC is 0.
19216
19217 On the SPE, both FPs and vectors are returned in r3.
19218
19219 On RS/6000 an integer value is in r3 and a floating-point value is in
19220 fp1, unless -msoft-float. */
19221
19222rtx
19223rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
19224{
19225 enum machine_mode mode;
2a8fa26c 19226 unsigned int regno;
a6ebc39a 19227
594a51fe
SS
19228 /* Special handling for structs in darwin64. */
19229 if (rs6000_darwin64_abi
19230 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
19231 && TREE_CODE (valtype) == RECORD_TYPE
19232 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
19233 {
19234 CUMULATIVE_ARGS valcum;
19235 rtx valret;
19236
0b5383eb 19237 valcum.words = 0;
594a51fe
SS
19238 valcum.fregno = FP_ARG_MIN_REG;
19239 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
19240 /* Do a trial code generation as if this were going to be passed as
19241 an argument; if any part goes in memory, we return NULL. */
19242 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
19243 if (valret)
19244 return valret;
19245 /* Otherwise fall through to standard ABI rules. */
19246 }
19247
0e67400a
FJ
19248 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
19249 {
19250      /* A long long return value needs to be split in the 32-bit -mpowerpc64 ABI.  */
19251 return gen_rtx_PARALLEL (DImode,
19252 gen_rtvec (2,
19253 gen_rtx_EXPR_LIST (VOIDmode,
19254 gen_rtx_REG (SImode, GP_ARG_RETURN),
19255 const0_rtx),
19256 gen_rtx_EXPR_LIST (VOIDmode,
19257 gen_rtx_REG (SImode,
19258 GP_ARG_RETURN + 1),
19259 GEN_INT (4))));
19260 }
0f086e42
FJ
19261 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
19262 {
19263 return gen_rtx_PARALLEL (DCmode,
19264 gen_rtvec (4,
19265 gen_rtx_EXPR_LIST (VOIDmode,
19266 gen_rtx_REG (SImode, GP_ARG_RETURN),
19267 const0_rtx),
19268 gen_rtx_EXPR_LIST (VOIDmode,
19269 gen_rtx_REG (SImode,
19270 GP_ARG_RETURN + 1),
19271 GEN_INT (4)),
19272 gen_rtx_EXPR_LIST (VOIDmode,
19273 gen_rtx_REG (SImode,
19274 GP_ARG_RETURN + 2),
19275 GEN_INT (8)),
19276 gen_rtx_EXPR_LIST (VOIDmode,
19277 gen_rtx_REG (SImode,
19278 GP_ARG_RETURN + 3),
19279 GEN_INT (12))));
19280 }
602ea4d3 19281
a6ebc39a
AH
19282 if ((INTEGRAL_TYPE_P (valtype)
19283 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
19284 || POINTER_TYPE_P (valtype))
b78d48dd 19285 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a
AH
19286 else
19287 mode = TYPE_MODE (valtype);
19288
00b79d54
BE
19289 if (DECIMAL_FLOAT_MODE_P (mode))
19290 regno = GP_ARG_RETURN;
19291 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 19292 regno = FP_ARG_RETURN;
ded9bf77 19293 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 19294 && targetm.calls.split_complex_arg)
ded9bf77 19295 return rs6000_complex_function_value (mode);
44688022 19296 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 19297 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 19298 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 19299 regno = ALTIVEC_ARG_RETURN;
18f63bfa
AH
19300 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
19301 && (mode == DFmode || mode == DCmode))
19302 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
19303 else
19304 regno = GP_ARG_RETURN;
19305
19306 return gen_rtx_REG (mode, regno);
19307}
19308
ded9bf77
AH
19309/* Define how to find the value returned by a library function
19310 assuming the value has mode MODE. */
19311rtx
19312rs6000_libcall_value (enum machine_mode mode)
19313{
19314 unsigned int regno;
19315
2e6c9641
FJ
19316 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
19317 {
19318      /* A long long return value needs to be split in the 32-bit -mpowerpc64 ABI.  */
19319 return gen_rtx_PARALLEL (DImode,
19320 gen_rtvec (2,
19321 gen_rtx_EXPR_LIST (VOIDmode,
19322 gen_rtx_REG (SImode, GP_ARG_RETURN),
19323 const0_rtx),
19324 gen_rtx_EXPR_LIST (VOIDmode,
19325 gen_rtx_REG (SImode,
19326 GP_ARG_RETURN + 1),
19327 GEN_INT (4))));
19328 }
19329
00b79d54
BE
19330 if (DECIMAL_FLOAT_MODE_P (mode))
19331 regno = GP_ARG_RETURN;
19332 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
19333 && TARGET_HARD_FLOAT && TARGET_FPRS)
19334 regno = FP_ARG_RETURN;
44688022
AM
19335 else if (ALTIVEC_VECTOR_MODE (mode)
19336 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 19337 regno = ALTIVEC_ARG_RETURN;
42ba5130 19338 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 19339 return rs6000_complex_function_value (mode);
18f63bfa
AH
19340 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
19341 && (mode == DFmode || mode == DCmode))
19342 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
19343 else
19344 regno = GP_ARG_RETURN;
19345
19346 return gen_rtx_REG (mode, regno);
19347}
19348
d1d0c603
JJ
19349/* Define the offset between two registers, FROM to be eliminated and its
19350 replacement TO, at the start of a routine. */
19351HOST_WIDE_INT
19352rs6000_initial_elimination_offset (int from, int to)
19353{
19354 rs6000_stack_t *info = rs6000_stack_info ();
19355 HOST_WIDE_INT offset;
19356
7d5175e1 19357 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 19358 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
19359 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
19360 {
19361 offset = info->push_p ? 0 : -info->total_size;
19362 if (FRAME_GROWS_DOWNWARD)
5b667039 19363 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
19364 }
19365 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
19366 offset = FRAME_GROWS_DOWNWARD
5b667039 19367 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
19368 : 0;
19369 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
19370 offset = info->total_size;
19371 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
19372 offset = info->push_p ? info->total_size : 0;
19373 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
19374 offset = 0;
19375 else
37409796 19376 gcc_unreachable ();
d1d0c603
JJ
19377
19378 return offset;
19379}
19380
58646b77 19381/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 19382
c8e4f0e9 19383static bool
58646b77 19384rs6000_is_opaque_type (tree type)
62e1dfcf 19385{
58646b77 19386 return (type == opaque_V2SI_type_node
2abe3e28 19387 || type == opaque_V2SF_type_node
58646b77
PB
19388 || type == opaque_p_V2SI_type_node
19389 || type == opaque_V4SI_type_node);
62e1dfcf
NC
19390}
19391
96714395 19392static rtx
a2369ed3 19393rs6000_dwarf_register_span (rtx reg)
96714395
AH
19394{
19395 unsigned regno;
19396
4d4cbc0e
AH
19397 if (TARGET_SPE
19398 && (SPE_VECTOR_MODE (GET_MODE (reg))
19399 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
19400 ;
19401 else
96714395
AH
19402 return NULL_RTX;
19403
19404 regno = REGNO (reg);
19405
19406 /* The duality of the SPE register size wreaks all kinds of havoc.
19407 This is a way of distinguishing r0 in 32-bits from r0 in
19408 64-bits. */
19409 return
19410 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
19411 BYTES_BIG_ENDIAN
19412 ? gen_rtvec (2,
19413 gen_rtx_REG (SImode, regno + 1200),
19414 gen_rtx_REG (SImode, regno))
19415 : gen_rtvec (2,
19416 gen_rtx_REG (SImode, regno),
19417 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
19418}
19419
93c9d1ba
AM
19420/* Map internal gcc register numbers to DWARF2 register numbers. */
19421
19422unsigned int
19423rs6000_dbx_register_number (unsigned int regno)
19424{
19425 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
19426 return regno;
19427 if (regno == MQ_REGNO)
19428 return 100;
19429 if (regno == LINK_REGISTER_REGNUM)
19430 return 108;
19431 if (regno == COUNT_REGISTER_REGNUM)
19432 return 109;
19433 if (CR_REGNO_P (regno))
19434 return regno - CR0_REGNO + 86;
19435 if (regno == XER_REGNO)
19436 return 101;
19437 if (ALTIVEC_REGNO_P (regno))
19438 return regno - FIRST_ALTIVEC_REGNO + 1124;
19439 if (regno == VRSAVE_REGNO)
19440 return 356;
19441 if (regno == VSCR_REGNO)
19442 return 67;
19443 if (regno == SPE_ACC_REGNO)
19444 return 99;
19445 if (regno == SPEFSCR_REGNO)
19446 return 612;
19447 /* SPE high reg number. We get these values of regno from
19448 rs6000_dwarf_register_span. */
37409796
NS
19449 gcc_assert (regno >= 1200 && regno < 1232);
19450 return regno;
93c9d1ba
AM
19451}
19452
93f90be6 19453/* target hook eh_return_filter_mode */
f676971a 19454static enum machine_mode
93f90be6
FJ
19455rs6000_eh_return_filter_mode (void)
19456{
19457 return TARGET_32BIT ? SImode : word_mode;
19458}
19459
00b79d54
BE
19460/* Target hook for scalar_mode_supported_p. */
19461static bool
19462rs6000_scalar_mode_supported_p (enum machine_mode mode)
19463{
19464 if (DECIMAL_FLOAT_MODE_P (mode))
19465 return true;
19466 else
19467 return default_scalar_mode_supported_p (mode);
19468}
19469
f676971a
EC
19470/* Target hook for vector_mode_supported_p. */
19471static bool
19472rs6000_vector_mode_supported_p (enum machine_mode mode)
19473{
19474
19475 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19476 return true;
19477
19478 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19479 return true;
19480
19481 else
19482 return false;
19483}
19484
bb8df8a6
EC
19485/* Target hook for invalid_arg_for_unprototyped_fn. */
19486static const char *
4d3e6fae
FJ
19487invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19488{
19489 return (!rs6000_darwin64_abi
19490 && typelist == 0
19491 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19492 && (funcdecl == NULL_TREE
19493 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19494 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19495 ? N_("AltiVec argument passed to unprototyped function")
19496 : NULL;
19497}
19498
3aebbe5f
JJ
19499/* For TARGET_SECURE_PLT 32-bit PIC code we can save the PIC register
19500   setup by calling the hidden __stack_chk_fail_local function instead
19501   of __stack_chk_fail directly.  Otherwise it is better to call
19502   __stack_chk_fail directly.  */
19503
19504static tree
19505rs6000_stack_protect_fail (void)
19506{
19507 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19508 ? default_hidden_stack_protect_fail ()
19509 : default_external_stack_protect_fail ();
19510}
19511
17211ab5 19512#include "gt-rs6000.h"