/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;       /* first callee saved GP register used */
  int first_fp_reg_save;       /* first callee saved FP register used */
  int first_altivec_reg_save;  /* first callee saved AltiVec register used */
  int lr_save_p;               /* true if the link reg needs to be saved */
  int cr_save_p;               /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;    /* mask of vec registers to save */
  int push_p;                  /* true if we need to allocate stack space */
  int calls_p;                 /* true if the function makes any calls */
  int world_save_p;            /* true if we're saving *everything*:
                                  r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;         /* which ABI to use */
  int gp_save_offset;          /* offset to save GP regs from initial SP */
  int fp_save_offset;          /* offset to save FP regs from initial SP */
  int altivec_save_offset;     /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;          /* offset to save LR from initial SP */
  int cr_save_offset;          /* offset to save CR from initial SP */
  int vrsave_save_offset;      /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;      /* offset to save spe 64-bit gprs */
  int varargs_save_offset;     /* offset to save the varargs registers */
  int ehrd_offset;             /* offset to EH return data */
  int reg_size;                /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;     /* variable save area size */
  int parm_size;               /* outgoing parameter size */
  int save_size;               /* save area size */
  int fixed_size;              /* fixed size of stack frame */
  int gp_size;                 /* size of saved GP registers */
  int fp_size;                 /* size of saved FP registers */
  int altivec_size;            /* size of saved AltiVec registers */
  int cr_size;                 /* size to hold CR if not in save_size */
  int vrsave_size;             /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;    /* size of altivec alignment padding if
                                  not in save_size */
  int spe_gp_size;             /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;        /* size of SPE alignment padding */
  HOST_WIDE_INT total_size;    /* total bytes allocated for stack */
  int spe_64bit_regs_used;     /* nonzero if 64-bit SPE registers are used */
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,           tune    arch */
  { (const char *)0,    "--with-cpu=",  1,      1 },
  { (const char *)0,    "-mcpu=",       1,      1 },
  { (const char *)0,    "-mtune=",      1,      0 },
};

static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;  /* debug stack applications */
int rs6000_debug_arg;    /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;   /* True if -maix-struct-ret was used.  */
  bool alignment;        /* True if -malign- was used.  */
  bool abi;              /* True if -mabi=spe/nospe was used.  */
  bool spe;              /* True if -mspe= was used.  */
  bool float_gprs;       /* True if -mfloat-gprs= was used.  */
  bool isel;             /* True if -misel was used.  */
  bool long_double;      /* True if -mlong-double- was used.  */
  bool ieee;             /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;      /* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;      /* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  6,                    /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  8,                    /* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,
  64,                   /* l1 cache */
  2048,                 /* l2 cache */
  16,                   /* prefetch streams */
};


static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
                             int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static bool rs6000_reg_live_or_pic_offset_p (int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
                                               unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
                                             unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx, rtx);
static bool adjacent_mem_locations (rtx, rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx, enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
                                      enum rs6000_builtins,
                                      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
                                             const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
                                                      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
                                                        tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
                                              HOST_WIDE_INT,
                                              rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
                                                const_tree, HOST_WIDE_INT,
                                                rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree,
                                    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
                             enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
                                       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr", "ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",  "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",  "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",  "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",  "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",   "lr",  "ctr",   "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
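/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001.  */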

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
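/* (The minimum is spelled "-0x7fffffff - 1" because the literal 0x80000000
   does not fit in a signed int, so "-0x80000000" would negate an unsigned
   constant instead of yielding the intended signed value.)  */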
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
        && (mode != TDmode || (regno % 2) == 0)
        && mode != SDmode
        && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
           && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

#if TARGET_MACHO
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */

static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
        {
          if (flag_pic)
            warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
          flag_pic = 0;
        }
      else if (flag_pic == 1)
        {
          flag_pic = 2;
        }
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targeting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    {
      target_flags |= MASK_ALTIVEC;
    }
}
#endif

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;            /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;           /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"405fp", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"440", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"440fp", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"cell", PROCESSOR_CELL,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"power5+", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
         {"power6", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND | MASK_CMPB | MASK_DFP },
         {"power6x", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */
1451
1452 enum {
1453 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1454 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1455 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1456 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1457 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1458 };
0d1fbc8c
AH
1459
1460 rs6000_init_hard_regno_mode_ok ();
1461
c4ad648e 1462 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1463#ifdef OS_MISSING_POWERPC64
1464 if (OS_MISSING_POWERPC64)
1465 set_masks &= ~MASK_POWERPC64;
1466#endif
1467#ifdef OS_MISSING_ALTIVEC
1468 if (OS_MISSING_ALTIVEC)
1469 set_masks &= ~MASK_ALTIVEC;
1470#endif
1471
768875a8
AM
 1472 /* Don't let the processor default override flags the user has set explicitly. */
1473 set_masks &= ~target_flags_explicit;
957211c3 1474
a4f6c312 1475 /* Identify the processor type. */
8e3f41e7 1476 rs6000_select[0].string = default_cpu;
3cb999d8 1477 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1478
b6a1cbae 1479 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1480 {
8e3f41e7
MM
1481 ptr = &rs6000_select[i];
1482 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1483 {
8e3f41e7
MM
1484 for (j = 0; j < ptt_size; j++)
1485 if (! strcmp (ptr->string, processor_target_table[j].name))
1486 {
1487 if (ptr->set_tune_p)
1488 rs6000_cpu = processor_target_table[j].processor;
1489
1490 if (ptr->set_arch_p)
1491 {
66188a7e
GK
1492 target_flags &= ~set_masks;
1493 target_flags |= (processor_target_table[j].target_enable
1494 & set_masks);
8e3f41e7
MM
1495 }
1496 break;
1497 }
1498
4406229e 1499 if (j == ptt_size)
8e3f41e7 1500 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1501 }
1502 }
8a61d227 1503
993f19a8 1504 if (TARGET_E500)
a3170dc6
AH
1505 rs6000_isel = 1;
1506
dff9f1b6
DE
1507 /* If we are optimizing big endian systems for space, use the load/store
1508 multiple and string instructions. */
ef792183 1509 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1510 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1511
a4f6c312
SS
 1512 /* Don't allow -mmultiple or -mstring on little endian systems
 1513 unless the cpu is a 750, because the hardware doesn't support the
 1514 instructions used in little endian mode, and they cause an alignment
 1515 trap. The 750 does not cause an alignment trap (except when the
 1516 target is unaligned). */
bef84347 1517
b21fb038 1518 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1519 {
1520 if (TARGET_MULTIPLE)
1521 {
1522 target_flags &= ~MASK_MULTIPLE;
b21fb038 1523 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1524 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1525 }
1526
1527 if (TARGET_STRING)
1528 {
1529 target_flags &= ~MASK_STRING;
b21fb038 1530 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1531 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1532 }
1533 }
3933e0e1 1534
38c1f2d7
MM
1535 /* Set debug flags */
1536 if (rs6000_debug_name)
1537 {
bfc79d3b 1538 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1539 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1540 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1541 rs6000_debug_stack = 1;
bfc79d3b 1542 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1543 rs6000_debug_arg = 1;
1544 else
c725bd79 1545 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1546 }
1547
57ac7be9
AM
1548 if (rs6000_traceback_name)
1549 {
1550 if (! strncmp (rs6000_traceback_name, "full", 4))
1551 rs6000_traceback = traceback_full;
1552 else if (! strncmp (rs6000_traceback_name, "part", 4))
1553 rs6000_traceback = traceback_part;
1554 else if (! strncmp (rs6000_traceback_name, "no", 2))
1555 rs6000_traceback = traceback_none;
1556 else
9e637a26 1557 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1558 rs6000_traceback_name);
1559 }
1560
78f5898b
AH
1561 if (!rs6000_explicit_options.long_double)
1562 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1563
602ea4d3 1564#ifndef POWERPC_LINUX
d3603e8c 1565 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1566 rs6000_ieeequad = 1;
1567#endif
1568
6d0ef01e
HP
1569 /* Set Altivec ABI as default for powerpc64 linux. */
1570 if (TARGET_ELF && TARGET_64BIT)
1571 {
1572 rs6000_altivec_abi = 1;
78f5898b 1573 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1574 }
1575
594a51fe
SS
1576 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1577 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1578 {
1579 rs6000_darwin64_abi = 1;
9c7956fd 1580#if TARGET_MACHO
6ac49599 1581 darwin_one_byte_bool = 1;
9c7956fd 1582#endif
d9168963
SS
1583 /* Default to natural alignment, for better performance. */
1584 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1585 }
1586
194c524a
DE
 1587 /* Place FP constants in the constant pool instead of the TOC
 1588 if section anchors are enabled. */
1589 if (flag_section_anchors)
1590 TARGET_NO_FP_IN_TOC = 1;
1591
c4501e62
JJ
1592 /* Handle -mtls-size option. */
1593 rs6000_parse_tls_size_option ();
1594
a7ae18e2
AH
1595#ifdef SUBTARGET_OVERRIDE_OPTIONS
1596 SUBTARGET_OVERRIDE_OPTIONS;
1597#endif
1598#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1599 SUBSUBTARGET_OVERRIDE_OPTIONS;
1600#endif
4d4cbc0e
AH
1601#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1602 SUB3TARGET_OVERRIDE_OPTIONS;
1603#endif
a7ae18e2 1604
5da702b1
AH
1605 if (TARGET_E500)
1606 {
1607 /* The e500 does not have string instructions, and we set
1608 MASK_STRING above when optimizing for size. */
1609 if ((target_flags & MASK_STRING) != 0)
1610 target_flags = target_flags & ~MASK_STRING;
1611 }
1612 else if (rs6000_select[1].string != NULL)
1613 {
1614 /* For the powerpc-eabispe configuration, we set all these by
1615 default, so let's unset them if we manually set another
1616 CPU that is not the E500. */
78f5898b 1617 if (!rs6000_explicit_options.abi)
5da702b1 1618 rs6000_spe_abi = 0;
78f5898b 1619 if (!rs6000_explicit_options.spe)
5da702b1 1620 rs6000_spe = 0;
78f5898b 1621 if (!rs6000_explicit_options.float_gprs)
5da702b1 1622 rs6000_float_gprs = 0;
78f5898b 1623 if (!rs6000_explicit_options.isel)
5da702b1
AH
1624 rs6000_isel = 0;
1625 }
b5044283 1626
eca0d5e8
JM
1627 /* Detect invalid option combinations with E500. */
1628 CHECK_E500_OPTIONS;
1629
ec507f2d 1630 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1631 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1632 && rs6000_cpu != PROCESSOR_POWER6
1633 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1634 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1635 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1636 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1637 || rs6000_cpu == PROCESSOR_POWER5
1638 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1639
ec507f2d
DE
1640 rs6000_sched_restricted_insns_priority
1641 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1642
569fa502 1643 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1644 rs6000_sched_costly_dep
1645 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1646
569fa502
DN
1647 if (rs6000_sched_costly_dep_str)
1648 {
f676971a 1649 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1650 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1651 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1652 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1653 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1654 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1655 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1656 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1657 else
c4ad648e 1658 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1659 }
1660
1661 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1662 rs6000_sched_insert_nops
1663 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1664
cbe26ab8
DN
1665 if (rs6000_sched_insert_nops_str)
1666 {
1667 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1668 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1669 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1670 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1671 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1672 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1673 else
c4ad648e 1674 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1675 }
1676
c81bebd7 1677#ifdef TARGET_REGNAMES
a4f6c312
SS
1678 /* If the user desires alternate register names, copy in the
1679 alternate names now. */
c81bebd7 1680 if (TARGET_REGNAMES)
4e135bdd 1681 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1682#endif
1683
df01da37 1684 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1685 If -maix-struct-return or -msvr4-struct-return was explicitly
1686 used, don't override with the ABI default. */
df01da37
DE
1687 if (!rs6000_explicit_options.aix_struct_ret)
1688 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1689
602ea4d3 1690 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1691 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1692
f676971a 1693 if (TARGET_TOC)
9ebbca7d 1694 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1695
301d03af
RS
1696 /* We can only guarantee the availability of DI pseudo-ops when
1697 assembling for 64-bit targets. */
ae6c1efd 1698 if (!TARGET_64BIT)
301d03af
RS
1699 {
1700 targetm.asm_out.aligned_op.di = NULL;
1701 targetm.asm_out.unaligned_op.di = NULL;
1702 }
1703
1494c534
DE
1704 /* Set branch target alignment, if not optimizing for size. */
1705 if (!optimize_size)
1706 {
d296e02e
AP
 1707 /* Cell wants to be aligned to 8 bytes for dual issue. */
1708 if (rs6000_cpu == PROCESSOR_CELL)
1709 {
1710 if (align_functions <= 0)
1711 align_functions = 8;
1712 if (align_jumps <= 0)
1713 align_jumps = 8;
1714 if (align_loops <= 0)
1715 align_loops = 8;
1716 }
44cd321e 1717 if (rs6000_align_branch_targets)
1494c534
DE
1718 {
1719 if (align_functions <= 0)
1720 align_functions = 16;
1721 if (align_jumps <= 0)
1722 align_jumps = 16;
1723 if (align_loops <= 0)
1724 align_loops = 16;
1725 }
1726 if (align_jumps_max_skip <= 0)
1727 align_jumps_max_skip = 15;
1728 if (align_loops_max_skip <= 0)
1729 align_loops_max_skip = 15;
1730 }
2792d578 1731
71f123ca
FS
1732 /* Arrange to save and restore machine status around nested functions. */
1733 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1734
1735 /* We should always be splitting complex arguments, but we can't break
1736 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1737 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1738 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1739
1740 /* Initialize rs6000_cost with the appropriate target costs. */
1741 if (optimize_size)
1742 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1743 else
1744 switch (rs6000_cpu)
1745 {
1746 case PROCESSOR_RIOS1:
1747 rs6000_cost = &rios1_cost;
1748 break;
1749
1750 case PROCESSOR_RIOS2:
1751 rs6000_cost = &rios2_cost;
1752 break;
1753
1754 case PROCESSOR_RS64A:
1755 rs6000_cost = &rs64a_cost;
1756 break;
1757
1758 case PROCESSOR_MPCCORE:
1759 rs6000_cost = &mpccore_cost;
1760 break;
1761
1762 case PROCESSOR_PPC403:
1763 rs6000_cost = &ppc403_cost;
1764 break;
1765
1766 case PROCESSOR_PPC405:
1767 rs6000_cost = &ppc405_cost;
1768 break;
1769
1770 case PROCESSOR_PPC440:
1771 rs6000_cost = &ppc440_cost;
1772 break;
1773
1774 case PROCESSOR_PPC601:
1775 rs6000_cost = &ppc601_cost;
1776 break;
1777
1778 case PROCESSOR_PPC603:
1779 rs6000_cost = &ppc603_cost;
1780 break;
1781
1782 case PROCESSOR_PPC604:
1783 rs6000_cost = &ppc604_cost;
1784 break;
1785
1786 case PROCESSOR_PPC604e:
1787 rs6000_cost = &ppc604e_cost;
1788 break;
1789
1790 case PROCESSOR_PPC620:
8b897cfa
RS
1791 rs6000_cost = &ppc620_cost;
1792 break;
1793
f0517163
RS
1794 case PROCESSOR_PPC630:
1795 rs6000_cost = &ppc630_cost;
1796 break;
1797
982afe02 1798 case PROCESSOR_CELL:
d296e02e
AP
1799 rs6000_cost = &ppccell_cost;
1800 break;
1801
8b897cfa
RS
1802 case PROCESSOR_PPC750:
1803 case PROCESSOR_PPC7400:
1804 rs6000_cost = &ppc750_cost;
1805 break;
1806
1807 case PROCESSOR_PPC7450:
1808 rs6000_cost = &ppc7450_cost;
1809 break;
1810
1811 case PROCESSOR_PPC8540:
1812 rs6000_cost = &ppc8540_cost;
1813 break;
1814
1815 case PROCESSOR_POWER4:
1816 case PROCESSOR_POWER5:
1817 rs6000_cost = &power4_cost;
1818 break;
1819
44cd321e
PS
1820 case PROCESSOR_POWER6:
1821 rs6000_cost = &power6_cost;
1822 break;
1823
8b897cfa 1824 default:
37409796 1825 gcc_unreachable ();
8b897cfa 1826 }
0b11da67
DE
1827
1828 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1829 set_param_value ("simultaneous-prefetches",
1830 rs6000_cost->simultaneous_prefetches);
1831 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1832 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1833 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1834 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1835 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1836 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
5248c961 1837}
5accd822 1838
7ccf35ed
DN
1839/* Implement targetm.vectorize.builtin_mask_for_load. */
1840static tree
1841rs6000_builtin_mask_for_load (void)
1842{
1843 if (TARGET_ALTIVEC)
1844 return altivec_builtin_mask_for_load;
1845 else
1846 return 0;
1847}
1848
f57d17f1
TM
1849/* Implement targetm.vectorize.builtin_conversion. */
1850static tree
1851rs6000_builtin_conversion (enum tree_code code, tree type)
1852{
1853 if (!TARGET_ALTIVEC)
1854 return NULL_TREE;
982afe02 1855
f57d17f1
TM
1856 switch (code)
1857 {
1858 case FLOAT_EXPR:
1859 switch (TYPE_MODE (type))
1860 {
1861 case V4SImode:
982afe02 1862 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1863 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1864 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1865 default:
1866 return NULL_TREE;
1867 }
1868 default:
1869 return NULL_TREE;
1870 }
1871}
1872
89d67cca
DN
1873/* Implement targetm.vectorize.builtin_mul_widen_even. */
1874static tree
1875rs6000_builtin_mul_widen_even (tree type)
1876{
1877 if (!TARGET_ALTIVEC)
1878 return NULL_TREE;
1879
1880 switch (TYPE_MODE (type))
1881 {
1882 case V8HImode:
982afe02 1883 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1884 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1885 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1886
1887 case V16QImode:
1888 return TYPE_UNSIGNED (type) ?
1889 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1890 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1891 default:
1892 return NULL_TREE;
1893 }
1894}
1895
1896/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1897static tree
1898rs6000_builtin_mul_widen_odd (tree type)
1899{
1900 if (!TARGET_ALTIVEC)
1901 return NULL_TREE;
1902
1903 switch (TYPE_MODE (type))
1904 {
1905 case V8HImode:
1906 return TYPE_UNSIGNED (type) ?
1907 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1908 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1909
1910 case V16QImode:
1911 return TYPE_UNSIGNED (type) ?
1912 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1913 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1914 default:
1915 return NULL_TREE;
1916 }
1917}
1918
5b900a4c
DN
1919
 1920/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1921 after applying N iterations. This routine does not determine
 1922 how many iterations are required to reach the desired alignment. */
1923
1924static bool
3101faab 1925rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1926{
1927 if (is_packed)
1928 return false;
1929
1930 if (TARGET_32BIT)
1931 {
1932 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1933 return true;
1934
1935 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1936 return true;
1937
1938 return false;
1939 }
1940 else
1941 {
1942 if (TARGET_MACHO)
1943 return false;
1944
 1945 /* Assume that all other types are naturally aligned. CHECKME! */
1946 return true;
1947 }
1948}
1949
5da702b1
AH
1950/* Handle generic options of the form -mfoo=yes/no.
1951 NAME is the option name.
1952 VALUE is the option value.
 1953 FLAG is a pointer to the flag in which to store 1 or 0, depending on
 1954 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1955static void
5da702b1 1956rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1957{
5da702b1 1958 if (value == 0)
993f19a8 1959 return;
5da702b1
AH
1960 else if (!strcmp (value, "yes"))
1961 *flag = 1;
1962 else if (!strcmp (value, "no"))
1963 *flag = 0;
08b57fb3 1964 else
5da702b1 1965 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1966}
1967
c4501e62
JJ
1968/* Validate and record the size specified with the -mtls-size option. */
1969
1970static void
863d938c 1971rs6000_parse_tls_size_option (void)
c4501e62
JJ
1972{
1973 if (rs6000_tls_size_string == 0)
1974 return;
1975 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1976 rs6000_tls_size = 16;
1977 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1978 rs6000_tls_size = 32;
1979 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1980 rs6000_tls_size = 64;
1981 else
9e637a26 1982 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1983}
1984
5accd822 1985void
a2369ed3 1986optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1987{
2e3f0db6
DJ
1988 if (DEFAULT_ABI == ABI_DARWIN)
1989 /* The Darwin libraries never set errno, so we might as well
1990 avoid calling them when that's the only reason we would. */
1991 flag_errno_math = 0;
59d6560b
DE
1992
1993 /* Double growth factor to counter reduced min jump length. */
1994 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1995
1996 /* Enable section anchors by default.
1997 Skip section anchors for Objective C and Objective C++
1998 until front-ends fixed. */
23f99493 1999 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2000 flag_section_anchors = 1;
5accd822 2001}
78f5898b
AH
2002
2003/* Implement TARGET_HANDLE_OPTION. */
2004
2005static bool
2006rs6000_handle_option (size_t code, const char *arg, int value)
2007{
2008 switch (code)
2009 {
2010 case OPT_mno_power:
2011 target_flags &= ~(MASK_POWER | MASK_POWER2
2012 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2013 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2014 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2015 break;
2016 case OPT_mno_powerpc:
2017 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2018 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2019 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2020 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2021 break;
2022 case OPT_mfull_toc:
d2894ab5
DE
2023 target_flags &= ~MASK_MINIMAL_TOC;
2024 TARGET_NO_FP_IN_TOC = 0;
2025 TARGET_NO_SUM_IN_TOC = 0;
2026 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2027#ifdef TARGET_USES_SYSV4_OPT
2028 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2029 just the same as -mminimal-toc. */
2030 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2031 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2032#endif
2033 break;
2034
2035#ifdef TARGET_USES_SYSV4_OPT
2036 case OPT_mtoc:
2037 /* Make -mtoc behave like -mminimal-toc. */
2038 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2039 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2040 break;
2041#endif
2042
2043#ifdef TARGET_USES_AIX64_OPT
2044 case OPT_maix64:
2045#else
2046 case OPT_m64:
2047#endif
2c9c9afd
AM
2048 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2049 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2050 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2051 break;
2052
2053#ifdef TARGET_USES_AIX64_OPT
2054 case OPT_maix32:
2055#else
2056 case OPT_m32:
2057#endif
2058 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2059 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2060 break;
2061
2062 case OPT_minsert_sched_nops_:
2063 rs6000_sched_insert_nops_str = arg;
2064 break;
2065
2066 case OPT_mminimal_toc:
2067 if (value == 1)
2068 {
d2894ab5
DE
2069 TARGET_NO_FP_IN_TOC = 0;
2070 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2071 }
2072 break;
2073
2074 case OPT_mpower:
2075 if (value == 1)
c2dba4ab
AH
2076 {
2077 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2078 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2079 }
78f5898b
AH
2080 break;
2081
2082 case OPT_mpower2:
2083 if (value == 1)
c2dba4ab
AH
2084 {
2085 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2086 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2087 }
78f5898b
AH
2088 break;
2089
2090 case OPT_mpowerpc_gpopt:
2091 case OPT_mpowerpc_gfxopt:
2092 if (value == 1)
c2dba4ab
AH
2093 {
2094 target_flags |= MASK_POWERPC;
2095 target_flags_explicit |= MASK_POWERPC;
2096 }
78f5898b
AH
2097 break;
2098
df01da37
DE
2099 case OPT_maix_struct_return:
2100 case OPT_msvr4_struct_return:
2101 rs6000_explicit_options.aix_struct_ret = true;
2102 break;
2103
78f5898b
AH
2104 case OPT_mvrsave_:
2105 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2106 break;
78f5898b
AH
2107
2108 case OPT_misel_:
2109 rs6000_explicit_options.isel = true;
2110 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2111 break;
2112
2113 case OPT_mspe_:
2114 rs6000_explicit_options.spe = true;
2115 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2116 break;
2117
2118 case OPT_mdebug_:
2119 rs6000_debug_name = arg;
2120 break;
2121
2122#ifdef TARGET_USES_SYSV4_OPT
2123 case OPT_mcall_:
2124 rs6000_abi_name = arg;
2125 break;
2126
2127 case OPT_msdata_:
2128 rs6000_sdata_name = arg;
2129 break;
2130
2131 case OPT_mtls_size_:
2132 rs6000_tls_size_string = arg;
2133 break;
2134
2135 case OPT_mrelocatable:
2136 if (value == 1)
c2dba4ab 2137 {
e0bf274f
AM
2138 target_flags |= MASK_MINIMAL_TOC;
2139 target_flags_explicit |= MASK_MINIMAL_TOC;
2140 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2141 }
78f5898b
AH
2142 break;
2143
2144 case OPT_mrelocatable_lib:
2145 if (value == 1)
c2dba4ab 2146 {
e0bf274f
AM
2147 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2148 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2149 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2150 }
78f5898b 2151 else
c2dba4ab
AH
2152 {
2153 target_flags &= ~MASK_RELOCATABLE;
2154 target_flags_explicit |= MASK_RELOCATABLE;
2155 }
78f5898b
AH
2156 break;
2157#endif
2158
2159 case OPT_mabi_:
78f5898b
AH
2160 if (!strcmp (arg, "altivec"))
2161 {
d3603e8c 2162 rs6000_explicit_options.abi = true;
78f5898b
AH
2163 rs6000_altivec_abi = 1;
2164 rs6000_spe_abi = 0;
2165 }
2166 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2167 {
2168 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2169 the default for rs6000_spe_abi to be chosen later. */
2170 rs6000_altivec_abi = 0;
2171 }
78f5898b
AH
2172 else if (! strcmp (arg, "spe"))
2173 {
d3603e8c 2174 rs6000_explicit_options.abi = true;
78f5898b
AH
2175 rs6000_spe_abi = 1;
2176 rs6000_altivec_abi = 0;
2177 if (!TARGET_SPE_ABI)
2178 error ("not configured for ABI: '%s'", arg);
2179 }
2180 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2181 {
2182 rs6000_explicit_options.abi = true;
2183 rs6000_spe_abi = 0;
2184 }
78f5898b
AH
2185
2186 /* These are here for testing during development only, do not
2187 document in the manual please. */
2188 else if (! strcmp (arg, "d64"))
2189 {
2190 rs6000_darwin64_abi = 1;
2191 warning (0, "Using darwin64 ABI");
2192 }
2193 else if (! strcmp (arg, "d32"))
2194 {
2195 rs6000_darwin64_abi = 0;
2196 warning (0, "Using old darwin ABI");
2197 }
2198
602ea4d3
JJ
2199 else if (! strcmp (arg, "ibmlongdouble"))
2200 {
d3603e8c 2201 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2202 rs6000_ieeequad = 0;
2203 warning (0, "Using IBM extended precision long double");
2204 }
2205 else if (! strcmp (arg, "ieeelongdouble"))
2206 {
d3603e8c 2207 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2208 rs6000_ieeequad = 1;
2209 warning (0, "Using IEEE extended precision long double");
2210 }
2211
78f5898b
AH
2212 else
2213 {
2214 error ("unknown ABI specified: '%s'", arg);
2215 return false;
2216 }
2217 break;
2218
2219 case OPT_mcpu_:
2220 rs6000_select[1].string = arg;
2221 break;
2222
2223 case OPT_mtune_:
2224 rs6000_select[2].string = arg;
2225 break;
2226
2227 case OPT_mtraceback_:
2228 rs6000_traceback_name = arg;
2229 break;
2230
2231 case OPT_mfloat_gprs_:
2232 rs6000_explicit_options.float_gprs = true;
2233 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2234 rs6000_float_gprs = 1;
2235 else if (! strcmp (arg, "double"))
2236 rs6000_float_gprs = 2;
2237 else if (! strcmp (arg, "no"))
2238 rs6000_float_gprs = 0;
2239 else
2240 {
2241 error ("invalid option for -mfloat-gprs: '%s'", arg);
2242 return false;
2243 }
2244 break;
2245
2246 case OPT_mlong_double_:
2247 rs6000_explicit_options.long_double = true;
2248 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2249 if (value != 64 && value != 128)
2250 {
2251 error ("Unknown switch -mlong-double-%s", arg);
2252 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2253 return false;
2254 }
2255 else
2256 rs6000_long_double_type_size = value;
2257 break;
2258
2259 case OPT_msched_costly_dep_:
2260 rs6000_sched_costly_dep_str = arg;
2261 break;
2262
2263 case OPT_malign_:
2264 rs6000_explicit_options.alignment = true;
2265 if (! strcmp (arg, "power"))
2266 {
2267 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2268 some C library functions, so warn about it. The flag may be
2269 useful for performance studies from time to time though, so
2270 don't disable it entirely. */
2271 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2272 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2273 " it is incompatible with the installed C and C++ libraries");
2274 rs6000_alignment_flags = MASK_ALIGN_POWER;
2275 }
2276 else if (! strcmp (arg, "natural"))
2277 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2278 else
2279 {
2280 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2281 return false;
2282 }
2283 break;
2284 }
2285 return true;
2286}
3cfa4909
MM
2287\f
2288/* Do anything needed at the start of the asm file. */
2289
1bc7c5b6 2290static void
863d938c 2291rs6000_file_start (void)
3cfa4909 2292{
c4d38ccb 2293 size_t i;
3cfa4909 2294 char buffer[80];
d330fd93 2295 const char *start = buffer;
3cfa4909 2296 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2297 const char *default_cpu = TARGET_CPU_DEFAULT;
2298 FILE *file = asm_out_file;
2299
2300 default_file_start ();
2301
2302#ifdef TARGET_BI_ARCH
2303 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2304 default_cpu = 0;
2305#endif
3cfa4909
MM
2306
2307 if (flag_verbose_asm)
2308 {
2309 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2310 rs6000_select[0].string = default_cpu;
2311
b6a1cbae 2312 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2313 {
2314 ptr = &rs6000_select[i];
2315 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2316 {
2317 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2318 start = "";
2319 }
2320 }
2321
9c6b4ed9 2322 if (PPC405_ERRATUM77)
b0bfee6e 2323 {
9c6b4ed9 2324 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2325 start = "";
2326 }
b0bfee6e 2327
b91da81f 2328#ifdef USING_ELFOS_H
3cfa4909
MM
2329 switch (rs6000_sdata)
2330 {
2331 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2332 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2333 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2334 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2335 }
2336
2337 if (rs6000_sdata && g_switch_value)
2338 {
307b599c
MK
2339 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2340 g_switch_value);
3cfa4909
MM
2341 start = "";
2342 }
2343#endif
2344
2345 if (*start == '\0')
949ea356 2346 putc ('\n', file);
3cfa4909 2347 }
b723e82f 2348
e51917ae
JM
2349#ifdef HAVE_AS_GNU_ATTRIBUTE
2350 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2351 {
2352 fprintf (file, "\t.gnu_attribute 4, %d\n",
2353 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2354 fprintf (file, "\t.gnu_attribute 8, %d\n",
2355 (TARGET_ALTIVEC_ABI ? 2
2356 : TARGET_SPE_ABI ? 3
2357 : 1));
2358 }
e51917ae
JM
2359#endif
2360
b723e82f
JJ
2361 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2362 {
d6b5193b
RS
2363 switch_to_section (toc_section);
2364 switch_to_section (text_section);
b723e82f 2365 }
3cfa4909 2366}
c4e18b1c 2367
5248c961 2368\f
a0ab749a 2369/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2370
2371int
863d938c 2372direct_return (void)
9878760c 2373{
4697a36c
MM
2374 if (reload_completed)
2375 {
2376 rs6000_stack_t *info = rs6000_stack_info ();
2377
2378 if (info->first_gp_reg_save == 32
2379 && info->first_fp_reg_save == 64
00b960c7 2380 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2381 && ! info->lr_save_p
2382 && ! info->cr_save_p
00b960c7 2383 && info->vrsave_mask == 0
c81fc13e 2384 && ! info->push_p)
4697a36c
MM
2385 return 1;
2386 }
2387
2388 return 0;
9878760c
RK
2389}
2390
4e74d8ec
MM
2391/* Return the number of instructions it takes to form a constant in an
2392 integer register. */
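/* Illustrative cases of the checks below: 0x7fff can be loaded with a
   single addi, 0x12340000 with a single addis, while a value such as
   0x12345678 needs two instructions (e.g. addis followed by ori). */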
2393
48d72335 2394int
a2369ed3 2395num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2396{
2397 /* signed constant loadable with {cal|addi} */
547b216d 2398 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2399 return 1;
2400
4e74d8ec 2401 /* constant loadable with {cau|addis} */
547b216d
DE
2402 else if ((value & 0xffff) == 0
2403 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2404 return 1;
2405
5f59ecb7 2406#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2407 else if (TARGET_POWERPC64)
4e74d8ec 2408 {
a65c591c
DE
2409 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2410 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2411
a65c591c 2412 if (high == 0 || high == -1)
4e74d8ec
MM
2413 return 2;
2414
a65c591c 2415 high >>= 1;
4e74d8ec 2416
a65c591c 2417 if (low == 0)
4e74d8ec 2418 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2419 else
2420 return (num_insns_constant_wide (high)
e396202a 2421 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2422 }
2423#endif
2424
2425 else
2426 return 2;
2427}
2428
2429int
a2369ed3 2430num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2431{
37409796 2432 HOST_WIDE_INT low, high;
bb8df8a6 2433
37409796 2434 switch (GET_CODE (op))
0d30d435 2435 {
37409796 2436 case CONST_INT:
0d30d435 2437#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2438 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2439 && mask64_operand (op, mode))
c4ad648e 2440 return 2;
0d30d435
DE
2441 else
2442#endif
2443 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2444
37409796
NS
2445 case CONST_DOUBLE:
2446 if (mode == SFmode)
2447 {
2448 long l;
2449 REAL_VALUE_TYPE rv;
bb8df8a6 2450
37409796
NS
2451 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2452 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2453 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2454 }
a260abc9 2455
37409796
NS
2456 if (mode == VOIDmode || mode == DImode)
2457 {
2458 high = CONST_DOUBLE_HIGH (op);
2459 low = CONST_DOUBLE_LOW (op);
2460 }
2461 else
2462 {
2463 long l[2];
2464 REAL_VALUE_TYPE rv;
bb8df8a6 2465
37409796 2466 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2467 if (DECIMAL_FLOAT_MODE_P (mode))
2468 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2469 else
2470 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2471 high = l[WORDS_BIG_ENDIAN == 0];
2472 low = l[WORDS_BIG_ENDIAN != 0];
2473 }
47ad8c61 2474
37409796
NS
2475 if (TARGET_32BIT)
2476 return (num_insns_constant_wide (low)
2477 + num_insns_constant_wide (high));
2478 else
2479 {
2480 if ((high == 0 && low >= 0)
2481 || (high == -1 && low < 0))
2482 return num_insns_constant_wide (low);
bb8df8a6 2483
1990cd79 2484 else if (mask64_operand (op, mode))
37409796 2485 return 2;
bb8df8a6 2486
37409796
NS
2487 else if (low == 0)
2488 return num_insns_constant_wide (high) + 1;
bb8df8a6 2489
37409796
NS
2490 else
2491 return (num_insns_constant_wide (high)
2492 + num_insns_constant_wide (low) + 1);
2493 }
bb8df8a6 2494
37409796
NS
2495 default:
2496 gcc_unreachable ();
4e74d8ec 2497 }
4e74d8ec
MM
2498}
2499
0972012c
RS
2500/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2501 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2502 corresponding element of the vector, but for V4SFmode and V2SFmode,
2503 the corresponding "float" is interpreted as an SImode integer. */
2504
2505static HOST_WIDE_INT
2506const_vector_elt_as_int (rtx op, unsigned int elt)
2507{
2508 rtx tmp = CONST_VECTOR_ELT (op, elt);
2509 if (GET_MODE (op) == V4SFmode
2510 || GET_MODE (op) == V2SFmode)
2511 tmp = gen_lowpart (SImode, tmp);
2512 return INTVAL (tmp);
2513}
452a7d36 2514
77ccdfed 2515/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2516 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2517 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2518 all items are set to the same value and contain COPIES replicas of the
 2519 vsplt's operand; if STEP > 1, one in every STEP elements is set to the vsplt's
2520 operand and the others are set to the value of the operand's msb. */
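/* Two examples derived from the rules above: the V8HImode constant
   { 0, 5, 0, 5, 0, 5, 0, 5 } is accepted for a vspltisw 5 with
   STEP == 2 (the in-between halfwords hold the operand's sign bits),
   and the V8HImode constant with every element 0x0101 is accepted
   for a vspltisb 1 with COPIES == 2. */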
2521
2522static bool
2523vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2524{
66180ff3
PB
2525 enum machine_mode mode = GET_MODE (op);
2526 enum machine_mode inner = GET_MODE_INNER (mode);
2527
2528 unsigned i;
2529 unsigned nunits = GET_MODE_NUNITS (mode);
2530 unsigned bitsize = GET_MODE_BITSIZE (inner);
2531 unsigned mask = GET_MODE_MASK (inner);
2532
0972012c 2533 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2534 HOST_WIDE_INT splat_val = val;
2535 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2536
2537 /* Construct the value to be splatted, if possible. If not, return 0. */
2538 for (i = 2; i <= copies; i *= 2)
452a7d36 2539 {
66180ff3
PB
2540 HOST_WIDE_INT small_val;
2541 bitsize /= 2;
2542 small_val = splat_val >> bitsize;
2543 mask >>= bitsize;
2544 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2545 return false;
2546 splat_val = small_val;
2547 }
c4ad648e 2548
66180ff3
PB
2549 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2550 if (EASY_VECTOR_15 (splat_val))
2551 ;
2552
2553 /* Also check if we can splat, and then add the result to itself. Do so if
 2554 the value is positive, or if the splat instruction is using OP's mode;
2555 for splat_val < 0, the splat and the add should use the same mode. */
2556 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2557 && (splat_val >= 0 || (step == 1 && copies == 1)))
2558 ;
2559
2560 else
2561 return false;
2562
2563 /* Check if VAL is present in every STEP-th element, and the
2564 other elements are filled with its most significant bit. */
2565 for (i = 0; i < nunits - 1; ++i)
2566 {
2567 HOST_WIDE_INT desired_val;
2568 if (((i + 1) & (step - 1)) == 0)
2569 desired_val = val;
2570 else
2571 desired_val = msb_val;
2572
0972012c 2573 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2574 return false;
452a7d36 2575 }
66180ff3
PB
2576
2577 return true;
452a7d36
HP
2578}
2579
69ef87e2 2580
77ccdfed 2581/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2582 with a vspltisb, vspltish or vspltisw. */
2583
2584bool
2585easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2586{
66180ff3 2587 unsigned step, copies;
d744e06e 2588
66180ff3
PB
2589 if (mode == VOIDmode)
2590 mode = GET_MODE (op);
2591 else if (mode != GET_MODE (op))
2592 return false;
d744e06e 2593
66180ff3
PB
2594 /* Start with a vspltisw. */
2595 step = GET_MODE_NUNITS (mode) / 4;
2596 copies = 1;
2597
2598 if (vspltis_constant (op, step, copies))
2599 return true;
2600
2601 /* Then try with a vspltish. */
2602 if (step == 1)
2603 copies <<= 1;
2604 else
2605 step >>= 1;
2606
2607 if (vspltis_constant (op, step, copies))
2608 return true;
2609
2610 /* And finally a vspltisb. */
2611 if (step == 1)
2612 copies <<= 1;
2613 else
2614 step >>= 1;
2615
2616 if (vspltis_constant (op, step, copies))
2617 return true;
2618
2619 return false;
d744e06e
AH
2620}
2621
66180ff3
PB
2622/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2623 result is OP. Abort if it is not possible. */
d744e06e 2624
f676971a 2625rtx
66180ff3 2626gen_easy_altivec_constant (rtx op)
452a7d36 2627{
66180ff3
PB
2628 enum machine_mode mode = GET_MODE (op);
2629 int nunits = GET_MODE_NUNITS (mode);
2630 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2631 unsigned step = nunits / 4;
2632 unsigned copies = 1;
2633
2634 /* Start with a vspltisw. */
2635 if (vspltis_constant (op, step, copies))
2636 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2637
2638 /* Then try with a vspltish. */
2639 if (step == 1)
2640 copies <<= 1;
2641 else
2642 step >>= 1;
2643
2644 if (vspltis_constant (op, step, copies))
2645 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2646
2647 /* And finally a vspltisb. */
2648 if (step == 1)
2649 copies <<= 1;
2650 else
2651 step >>= 1;
2652
2653 if (vspltis_constant (op, step, copies))
2654 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2655
2656 gcc_unreachable ();
d744e06e
AH
2657}
2658
2659const char *
a2369ed3 2660output_vec_const_move (rtx *operands)
d744e06e
AH
2661{
2662 int cst, cst2;
2663 enum machine_mode mode;
2664 rtx dest, vec;
2665
2666 dest = operands[0];
2667 vec = operands[1];
d744e06e 2668 mode = GET_MODE (dest);
69ef87e2 2669
d744e06e
AH
2670 if (TARGET_ALTIVEC)
2671 {
66180ff3 2672 rtx splat_vec;
d744e06e
AH
2673 if (zero_constant (vec, mode))
2674 return "vxor %0,%0,%0";
37409796 2675
66180ff3
PB
2676 splat_vec = gen_easy_altivec_constant (vec);
2677 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2678 operands[1] = XEXP (splat_vec, 0);
2679 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2680 return "#";
bb8df8a6 2681
66180ff3 2682 switch (GET_MODE (splat_vec))
98ef3137 2683 {
37409796 2684 case V4SImode:
66180ff3 2685 return "vspltisw %0,%1";
c4ad648e 2686
37409796 2687 case V8HImode:
66180ff3 2688 return "vspltish %0,%1";
c4ad648e 2689
37409796 2690 case V16QImode:
66180ff3 2691 return "vspltisb %0,%1";
bb8df8a6 2692
37409796
NS
2693 default:
2694 gcc_unreachable ();
98ef3137 2695 }
69ef87e2
AH
2696 }
2697
37409796 2698 gcc_assert (TARGET_SPE);
bb8df8a6 2699
37409796
NS
2700 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2701 pattern of V1DI, V4HI, and V2SF.
2702
2703 FIXME: We should probably return # and add post reload
2704 splitters for these, but this way is so easy ;-). */
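  /* For instance, a V2SImode constant with both elements equal to 5
     comes out below as "li %0,5\n\tevmergelo %0,%0,%0"; if the second
     element were 7 instead, a trailing "li %0,7" would be appended. */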
e20dcbef
PB
2705 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2706 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2707 operands[1] = CONST_VECTOR_ELT (vec, 0);
2708 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2709 if (cst == cst2)
2710 return "li %0,%1\n\tevmergelo %0,%0,%0";
2711 else
2712 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2713}
2714
7a4eca66
DE
2715/* Initialize vector TARGET to VALS. */
2716
2717void
2718rs6000_expand_vector_init (rtx target, rtx vals)
2719{
2720 enum machine_mode mode = GET_MODE (target);
2721 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2722 int n_elts = GET_MODE_NUNITS (mode);
2723 int n_var = 0, one_var = -1;
2724 bool all_same = true, all_const_zero = true;
2725 rtx x, mem;
2726 int i;
2727
2728 for (i = 0; i < n_elts; ++i)
2729 {
2730 x = XVECEXP (vals, 0, i);
2731 if (!CONSTANT_P (x))
2732 ++n_var, one_var = i;
2733 else if (x != CONST0_RTX (inner_mode))
2734 all_const_zero = false;
2735
2736 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2737 all_same = false;
2738 }
2739
2740 if (n_var == 0)
2741 {
2742 if (mode != V4SFmode && all_const_zero)
2743 {
2744 /* Zero register. */
2745 emit_insn (gen_rtx_SET (VOIDmode, target,
2746 gen_rtx_XOR (mode, target, target)));
2747 return;
2748 }
66180ff3 2749 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2750 {
2751 /* Splat immediate. */
66180ff3 2752 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2753 return;
2754 }
2755 else if (all_same)
2756 ; /* Splat vector element. */
2757 else
2758 {
2759 /* Load from constant pool. */
2760 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2761 return;
2762 }
2763 }
2764
2765 /* Store value to stack temp. Load vector element. Splat. */
2766 if (all_same)
2767 {
2768 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2769 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2770 XVECEXP (vals, 0, 0));
2771 x = gen_rtx_UNSPEC (VOIDmode,
2772 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2773 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2774 gen_rtvec (2,
2775 gen_rtx_SET (VOIDmode,
2776 target, mem),
2777 x)));
2778 x = gen_rtx_VEC_SELECT (inner_mode, target,
2779 gen_rtx_PARALLEL (VOIDmode,
2780 gen_rtvec (1, const0_rtx)));
2781 emit_insn (gen_rtx_SET (VOIDmode, target,
2782 gen_rtx_VEC_DUPLICATE (mode, x)));
2783 return;
2784 }
2785
2786 /* One field is non-constant. Load constant then overwrite
2787 varying field. */
2788 if (n_var == 1)
2789 {
2790 rtx copy = copy_rtx (vals);
2791
57b51d4d 2792 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2793 varying element. */
2794 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2795 rs6000_expand_vector_init (target, copy);
2796
2797 /* Insert variable. */
2798 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2799 return;
2800 }
2801
2802 /* Construct the vector in memory one field at a time
2803 and load the whole vector. */
2804 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2805 for (i = 0; i < n_elts; i++)
2806 emit_move_insn (adjust_address_nv (mem, inner_mode,
2807 i * GET_MODE_SIZE (inner_mode)),
2808 XVECEXP (vals, 0, i));
2809 emit_move_insn (target, mem);
2810}
2811
2812/* Set field ELT of TARGET to VAL. */
2813
2814void
2815rs6000_expand_vector_set (rtx target, rtx val, int elt)
2816{
2817 enum machine_mode mode = GET_MODE (target);
2818 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2819 rtx reg = gen_reg_rtx (mode);
2820 rtx mask, mem, x;
2821 int width = GET_MODE_SIZE (inner_mode);
2822 int i;
2823
2824 /* Load single variable value. */
2825 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2826 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2827 x = gen_rtx_UNSPEC (VOIDmode,
2828 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2829 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2830 gen_rtvec (2,
2831 gen_rtx_SET (VOIDmode,
2832 reg, mem),
2833 x)));
2834
2835 /* Linear sequence. */
2836 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2837 for (i = 0; i < 16; ++i)
2838 XVECEXP (mask, 0, i) = GEN_INT (i);
2839
2840 /* Set permute mask to insert element into target. */
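  /* E.g. for V4SImode and ELT == 2 (width 4), bytes 8..11 of the mask
     become 0x10..0x13, giving the selector { 0,...,7, 16,17,18,19,
     12,...,15 }: the vperm then takes the newly loaded value for the
     third word and keeps the remaining words of TARGET unchanged. */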
2841 for (i = 0; i < width; ++i)
2842 XVECEXP (mask, 0, elt*width + i)
2843 = GEN_INT (i + 0x10);
2844 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2845 x = gen_rtx_UNSPEC (mode,
2846 gen_rtvec (3, target, reg,
2847 force_reg (V16QImode, x)),
2848 UNSPEC_VPERM);
2849 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2850}
2851
2852/* Extract field ELT from VEC into TARGET. */
2853
2854void
2855rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2856{
2857 enum machine_mode mode = GET_MODE (vec);
2858 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2859 rtx mem, x;
2860
2861 /* Allocate mode-sized buffer. */
2862 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2863
2864 /* Add offset to field within buffer matching vector element. */
2865 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2866
2867 /* Store single field into mode-sized buffer. */
2868 x = gen_rtx_UNSPEC (VOIDmode,
2869 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2870 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2871 gen_rtvec (2,
2872 gen_rtx_SET (VOIDmode,
2873 mem, vec),
2874 x)));
2875 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2876}
2877
0ba1b2ff
AM
2878/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2879 implement ANDing by the mask IN. */
2880void
a2369ed3 2881build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2882{
2883#if HOST_BITS_PER_WIDE_INT >= 64
2884 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2885 int shift;
2886
37409796 2887 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2888
2889 c = INTVAL (in);
2890 if (c & 1)
2891 {
2892 /* Assume c initially something like 0x00fff000000fffff. The idea
2893 is to rotate the word so that the middle ^^^^^^ group of zeros
2894 is at the MS end and can be cleared with an rldicl mask. We then
2895 rotate back and clear off the MS ^^ group of zeros with a
2896 second rldicl. */
2897 c = ~c; /* c == 0xff000ffffff00000 */
2898 lsb = c & -c; /* lsb == 0x0000000000100000 */
2899 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2900 c = ~c; /* c == 0x00fff000000fffff */
2901 c &= -lsb; /* c == 0x00fff00000000000 */
2902 lsb = c & -c; /* lsb == 0x0000100000000000 */
2903 c = ~c; /* c == 0xff000fffffffffff */
2904 c &= -lsb; /* c == 0xff00000000000000 */
2905 shift = 0;
2906 while ((lsb >>= 1) != 0)
2907 shift++; /* shift == 44 on exit from loop */
2908 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2909 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2910 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2911 }
2912 else
0ba1b2ff
AM
2913 {
2914 /* Assume c initially something like 0xff000f0000000000. The idea
2915 is to rotate the word so that the ^^^ middle group of zeros
2916 is at the LS end and can be cleared with an rldicr mask. We then
2917 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2918 a second rldicr. */
2919 lsb = c & -c; /* lsb == 0x0000010000000000 */
2920 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2921 c = ~c; /* c == 0x00fff0ffffffffff */
2922 c &= -lsb; /* c == 0x00fff00000000000 */
2923 lsb = c & -c; /* lsb == 0x0000100000000000 */
2924 c = ~c; /* c == 0xff000fffffffffff */
2925 c &= -lsb; /* c == 0xff00000000000000 */
2926 shift = 0;
2927 while ((lsb >>= 1) != 0)
2928 shift++; /* shift == 44 on exit from loop */
2929 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2930 m1 >>= shift; /* m1 == 0x0000000000000fff */
2931 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2932 }
2933
2934 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2935 masks will be all 1's. We are guaranteed more than one transition. */
2936 out[0] = GEN_INT (64 - shift);
2937 out[1] = GEN_INT (m1);
2938 out[2] = GEN_INT (shift);
2939 out[3] = GEN_INT (m2);
2940#else
045572c7
GK
2941 (void)in;
2942 (void)out;
37409796 2943 gcc_unreachable ();
0ba1b2ff 2944#endif
a260abc9
DE
2945}
2946
54b695e7 2947/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2948
2949bool
54b695e7
AH
2950invalid_e500_subreg (rtx op, enum machine_mode mode)
2951{
61c76239
JM
2952 if (TARGET_E500_DOUBLE)
2953 {
17caeff2
JM
2954 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
2955 subreg:TI and reg:TF. */
61c76239 2956 if (GET_CODE (op) == SUBREG
17caeff2 2957 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 2958 && REG_P (SUBREG_REG (op))
17caeff2
JM
2959 && (GET_MODE (SUBREG_REG (op)) == DFmode
2960 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
2961 return true;
2962
17caeff2
JM
2963 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
2964 reg:TI. */
61c76239 2965 if (GET_CODE (op) == SUBREG
17caeff2 2966 && (mode == DFmode || mode == TFmode)
61c76239 2967 && REG_P (SUBREG_REG (op))
17caeff2
JM
2968 && (GET_MODE (SUBREG_REG (op)) == DImode
2969 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
2970 return true;
2971 }
54b695e7 2972
61c76239
JM
2973 if (TARGET_SPE
2974 && GET_CODE (op) == SUBREG
2975 && mode == SImode
54b695e7 2976 && REG_P (SUBREG_REG (op))
14502dad 2977 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
2978 return true;
2979
2980 return false;
2981}
2982
58182de3 2983/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
 2984 field is an FP double, while the FP fields themselves remain word aligned. */
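/* For example, struct { double d; int i; } is rounded up to 64-bit
   alignment because its first field is a double, whereas
   struct { int i; double d; } is left at its normal alignment. */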
2985
19d66194 2986unsigned int
fa5b0972
AM
2987rs6000_special_round_type_align (tree type, unsigned int computed,
2988 unsigned int specified)
95727fb8 2989{
fa5b0972 2990 unsigned int align = MAX (computed, specified);
95727fb8 2991 tree field = TYPE_FIELDS (type);
95727fb8 2992
bb8df8a6 2993 /* Skip all non-field decls. */
85962ac8 2994 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2995 field = TREE_CHAIN (field);
2996
fa5b0972
AM
2997 if (field != NULL && field != type)
2998 {
2999 type = TREE_TYPE (field);
3000 while (TREE_CODE (type) == ARRAY_TYPE)
3001 type = TREE_TYPE (type);
3002
3003 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3004 align = MAX (align, 64);
3005 }
95727fb8 3006
fa5b0972 3007 return align;
95727fb8
AP
3008}
3009
58182de3
GK
3010/* Darwin increases record alignment to the natural alignment of
3011 the first field. */
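/* For example, struct { double d; char c; } is raised to 8-byte
   alignment, struct { char c; double d; } is not affected (only the
   first field matters), and packed records are never rounded up. */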
3012
3013unsigned int
3014darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3015 unsigned int specified)
3016{
3017 unsigned int align = MAX (computed, specified);
3018
3019 if (TYPE_PACKED (type))
3020 return align;
3021
3022 /* Find the first field, looking down into aggregates. */
3023 do {
3024 tree field = TYPE_FIELDS (type);
 3025 /* Skip all non-field decls. */
3026 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3027 field = TREE_CHAIN (field);
3028 if (! field)
3029 break;
3030 type = TREE_TYPE (field);
3031 while (TREE_CODE (type) == ARRAY_TYPE)
3032 type = TREE_TYPE (type);
3033 } while (AGGREGATE_TYPE_P (type));
3034
3035 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3036 align = MAX (align, TYPE_ALIGN (type));
3037
3038 return align;
3039}
3040
a4f6c312 3041/* Return 1 for an operand in small memory on V.4/eabi. */
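/* Illustrative cases accepted below: a plain SYMBOL_REF marked as
   small data (SYMBOL_REF_SMALL_P), or a CONST of the form
   (plus (symbol_ref ...) (const_int 8)), provided the constant
   summand is non-negative and no larger than g_switch_value (-G). */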
7509c759
MM
3042
3043int
f676971a 3044small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3045 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3046{
38c1f2d7 3047#if TARGET_ELF
5f59ecb7 3048 rtx sym_ref;
7509c759 3049
d9407988 3050 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3051 return 0;
a54d04b7 3052
f607bc57 3053 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3054 return 0;
3055
88228c4b
MM
3056 if (GET_CODE (op) == SYMBOL_REF)
3057 sym_ref = op;
3058
3059 else if (GET_CODE (op) != CONST
3060 || GET_CODE (XEXP (op, 0)) != PLUS
3061 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3062 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3063 return 0;
3064
88228c4b 3065 else
dbf55e53
MM
3066 {
3067 rtx sum = XEXP (op, 0);
3068 HOST_WIDE_INT summand;
3069
3070 /* We have to be careful here, because it is the referenced address
c4ad648e 3071 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3072 summand = INTVAL (XEXP (sum, 1));
307b599c 3073 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3074 return 0;
dbf55e53
MM
3075
3076 sym_ref = XEXP (sum, 0);
3077 }
88228c4b 3078
20bfcd69 3079 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3080#else
3081 return 0;
3082#endif
7509c759 3083}
46c07df8 3084
3a1f863f 3085/* Return true if either operand is a general purpose register. */
46c07df8 3086
3a1f863f
DE
3087bool
3088gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3089{
3a1f863f
DE
3090 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3091 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3092}
3093
9ebbca7d 3094\f
4d588c14
RH
3095/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3096
f676971a
EC
3097static int
3098constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3099{
9390387d 3100 switch (GET_CODE (op))
9ebbca7d
GK
3101 {
3102 case SYMBOL_REF:
c4501e62
JJ
3103 if (RS6000_SYMBOL_REF_TLS_P (op))
3104 return 0;
3105 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3106 {
3107 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3108 {
3109 *have_sym = 1;
3110 return 1;
3111 }
3112 else
3113 return 0;
3114 }
3115 else if (! strcmp (XSTR (op, 0), toc_label_name))
3116 {
3117 *have_toc = 1;
3118 return 1;
3119 }
3120 else
3121 return 0;
9ebbca7d
GK
3122 case PLUS:
3123 case MINUS:
c1f11548
DE
3124 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3125 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3126 case CONST:
a4f6c312 3127 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3128 case CONST_INT:
a4f6c312 3129 return 1;
9ebbca7d 3130 default:
a4f6c312 3131 return 0;
9ebbca7d
GK
3132 }
3133}
3134
4d588c14 3135static bool
a2369ed3 3136constant_pool_expr_p (rtx op)
9ebbca7d
GK
3137{
3138 int have_sym = 0;
3139 int have_toc = 0;
3140 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3141}
3142
48d72335 3143bool
a2369ed3 3144toc_relative_expr_p (rtx op)
9ebbca7d 3145{
4d588c14
RH
3146 int have_sym = 0;
3147 int have_toc = 0;
3148 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3149}
3150
4d588c14 3151bool
a2369ed3 3152legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3153{
3154 return (TARGET_TOC
3155 && GET_CODE (x) == PLUS
3156 && GET_CODE (XEXP (x, 0)) == REG
3157 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3158 && constant_pool_expr_p (XEXP (x, 1)));
3159}
3160
d04b6e6e
EB
3161static bool
3162legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3163{
3164 return (DEFAULT_ABI == ABI_V4
3165 && !flag_pic && !TARGET_TOC
3166 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3167 && small_data_operand (x, mode));
3168}
3169
60cdabab
DE
3170/* SPE offset addressing is limited to 5-bits worth of double words. */
3171#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
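/* That is, (x & ~0xf8) == 0 accepts exactly the offsets 0, 8, 16, ..., 248:
   a non-negative multiple of 8 whose double-word index fits in 5 bits.  */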
3172
76d2b81d
DJ
3173bool
3174rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3175{
3176 unsigned HOST_WIDE_INT offset, extra;
3177
3178 if (GET_CODE (x) != PLUS)
3179 return false;
3180 if (GET_CODE (XEXP (x, 0)) != REG)
3181 return false;
3182 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3183 return false;
60cdabab
DE
3184 if (legitimate_constant_pool_address_p (x))
3185 return true;
4d588c14
RH
3186 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3187 return false;
3188
3189 offset = INTVAL (XEXP (x, 1));
3190 extra = 0;
3191 switch (mode)
3192 {
3193 case V16QImode:
3194 case V8HImode:
3195 case V4SFmode:
3196 case V4SImode:
7a4eca66
DE
3197 /* AltiVec vector modes. Only reg+reg addressing is valid and
3198 constant offset zero should not occur due to canonicalization.
3199 Allow any offset when not strict before reload. */
3200 return !strict;
4d588c14
RH
3201
3202 case V4HImode:
3203 case V2SImode:
3204 case V1DImode:
3205 case V2SFmode:
3206 /* SPE vector modes. */
3207 return SPE_CONST_OFFSET_OK (offset);
3208
3209 case DFmode:
7393f7f8 3210 case DDmode:
4d4cbc0e
AH
3211 if (TARGET_E500_DOUBLE)
3212 return SPE_CONST_OFFSET_OK (offset);
3213
4d588c14 3214 case DImode:
54b695e7
AH
3215 /* On e500v2, we may have:
3216
3217 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3218
3219 Which gets addressed with evldd instructions. */
3220 if (TARGET_E500_DOUBLE)
3221 return SPE_CONST_OFFSET_OK (offset);
3222
7393f7f8 3223 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3224 extra = 4;
3225 else if (offset & 3)
3226 return false;
3227 break;
3228
3229 case TFmode:
17caeff2
JM
3230 if (TARGET_E500_DOUBLE)
3231 return (SPE_CONST_OFFSET_OK (offset)
3232 && SPE_CONST_OFFSET_OK (offset + 8));
3233
4d588c14 3234 case TImode:
7393f7f8
BE
3235 case TDmode:
3236 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3237 extra = 12;
3238 else if (offset & 3)
3239 return false;
3240 else
3241 extra = 8;
3242 break;
3243
3244 default:
3245 break;
3246 }
3247
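  /* Bias the offset by 0x8000 so that the legal signed 16-bit displacement
     range [-0x8000, 0x7fff] maps onto [0, 0xffff] and both bounds can be
     checked with one unsigned comparison.  EXTRA accounts for the last word
     of a multi-word access: e.g. a DFmode value at offset 0x7ffc is rejected
     because its second word would sit at 0x8000, outside the 16-bit range.  */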
b1917422
AM
3248 offset += 0x8000;
3249 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3250}
3251
6fb5fa3c 3252bool
a2369ed3 3253legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3254{
3255 rtx op0, op1;
3256
3257 if (GET_CODE (x) != PLUS)
3258 return false;
850e8d3d 3259
4d588c14
RH
3260 op0 = XEXP (x, 0);
3261 op1 = XEXP (x, 1);
3262
bf00cc0f 3263 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3264 replaced with proper base and index regs. */
3265 if (!strict
3266 && reload_in_progress
3267 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3268 && REG_P (op1))
3269 return true;
3270
3271 return (REG_P (op0) && REG_P (op1)
3272 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3273 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3274 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3275 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3276}
3277
48d72335 3278inline bool
a2369ed3 3279legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3280{
3281 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3282}
3283
48d72335 3284bool
4c81e946
FJ
3285macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3286{
c4ad648e 3287 if (!TARGET_MACHO || !flag_pic
9390387d 3288 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3289 return false;
3290 x = XEXP (x, 0);
4c81e946
FJ
3291
3292 if (GET_CODE (x) != LO_SUM)
3293 return false;
3294 if (GET_CODE (XEXP (x, 0)) != REG)
3295 return false;
3296 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3297 return false;
3298 x = XEXP (x, 1);
3299
3300 return CONSTANT_P (x);
3301}
3302
4d588c14 3303static bool
a2369ed3 3304legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3305{
3306 if (GET_CODE (x) != LO_SUM)
3307 return false;
3308 if (GET_CODE (XEXP (x, 0)) != REG)
3309 return false;
3310 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3311 return false;
54b695e7 3312 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3313 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3314 || mode == DImode))
f82f556d 3315 return false;
4d588c14
RH
3316 x = XEXP (x, 1);
3317
8622e235 3318 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3319 {
a29077da 3320 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3321 return false;
3322 if (TARGET_TOC)
3323 return false;
3324 if (GET_MODE_NUNITS (mode) != 1)
3325 return false;
5e5f01b9 3326 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3327 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3328 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3329 return false;
3330
3331 return CONSTANT_P (x);
3332 }
3333
3334 return false;
3335}
3336
3337
9ebbca7d
GK
3338/* Try machine-dependent ways of modifying an illegitimate address
3339 to be legitimate. If we find one, return the new, valid address.
3340 This is used from only one place: `memory_address' in explow.c.
3341
a4f6c312
SS
3342 OLDX is the address as it was before break_out_memory_refs was
3343 called. In some cases it is useful to look at this to decide what
3344 needs to be done.
9ebbca7d 3345
a4f6c312 3346 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3347
a4f6c312
SS
3348 It is always safe for this function to do nothing. It exists to
3349 recognize opportunities to optimize the output.
9ebbca7d
GK
3350
3351 On RS/6000, first check for the sum of a register with a constant
3352 integer that is out of range. If so, generate code to add the
3353 constant with the low-order 16 bits masked to the register and force
3354 this result into another register (this can be done with `cau').
3355 Then generate an address of REG+(CONST&0xffff), allowing for the
3356 possibility of bit 16 being a one.
3357
3358 Then check for the sum of a register and something not constant, try to
3359 load the other things into a register and return the sum. */
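/* Illustrative example of the first transformation: for (plus reg 0x12348)
   the constant is split as low = 0x2348 (the low 16 bits, sign-extended) and
   high = 0x10000; reg + 0x10000 is forced into a new register (typically a
   single addis) and the returned address is (plus new_reg 0x2348).  An
   offset such as 0x18000 splits as high = 0x20000, low = -0x8000.  */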
4d588c14 3360
9ebbca7d 3361rtx
a2369ed3
DJ
3362rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3363 enum machine_mode mode)
0ac081f6 3364{
c4501e62
JJ
3365 if (GET_CODE (x) == SYMBOL_REF)
3366 {
3367 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3368 if (model != 0)
3369 return rs6000_legitimize_tls_address (x, model);
3370 }
3371
f676971a 3372 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3373 && GET_CODE (XEXP (x, 0)) == REG
3374 && GET_CODE (XEXP (x, 1)) == CONST_INT
3375 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 3376 {
9ebbca7d
GK
3377 HOST_WIDE_INT high_int, low_int;
3378 rtx sum;
a65c591c
DE
3379 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3380 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3381 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3382 GEN_INT (high_int)), 0);
3383 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3384 }
f676971a 3385 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3386 && GET_CODE (XEXP (x, 0)) == REG
3387 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3388 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3389 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3390 || TARGET_POWERPC64
7393f7f8
BE
3391 || (((mode != DImode && mode != DFmode && mode != DDmode)
3392 || TARGET_E500_DOUBLE)
3393 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3394 && (TARGET_POWERPC64 || mode != DImode)
3395 && mode != TImode)
3396 {
3397 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3398 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3399 }
0ac081f6
AH
3400 else if (ALTIVEC_VECTOR_MODE (mode))
3401 {
3402 rtx reg;
3403
3404 /* Make sure both operands are registers. */
3405 if (GET_CODE (x) == PLUS)
9f85ed45 3406 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3407 force_reg (Pmode, XEXP (x, 1)));
3408
3409 reg = force_reg (Pmode, x);
3410 return reg;
3411 }
4d4cbc0e 3412 else if (SPE_VECTOR_MODE (mode)
17caeff2 3413 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3414 || mode == DDmode || mode == TDmode
54b695e7 3415 || mode == DImode)))
a3170dc6 3416 {
54b695e7
AH
3417 if (mode == DImode)
3418 return NULL_RTX;
a3170dc6
AH
3419 /* We accept [reg + reg] and [reg + OFFSET]. */
3420
3421 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3422 {
3423 rtx op1 = XEXP (x, 0);
3424 rtx op2 = XEXP (x, 1);
a3170dc6 3425
c4ad648e 3426 op1 = force_reg (Pmode, op1);
a3170dc6 3427
c4ad648e
AM
3428 if (GET_CODE (op2) != REG
3429 && (GET_CODE (op2) != CONST_INT
3430 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3431 op2 = force_reg (Pmode, op2);
a3170dc6 3432
c4ad648e
AM
3433 return gen_rtx_PLUS (Pmode, op1, op2);
3434 }
a3170dc6
AH
3435
3436 return force_reg (Pmode, x);
3437 }
f1384257
AM
3438 else if (TARGET_ELF
3439 && TARGET_32BIT
3440 && TARGET_NO_TOC
3441 && ! flag_pic
9ebbca7d 3442 && GET_CODE (x) != CONST_INT
f676971a 3443 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3444 && CONSTANT_P (x)
6ac7bf2c
GK
3445 && GET_MODE_NUNITS (mode) == 1
3446 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3447 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3448 {
3449 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3450 emit_insn (gen_elf_high (reg, x));
3451 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3452 }
ee890fe2
SS
3453 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3454 && ! flag_pic
ab82a49f
AP
3455#if TARGET_MACHO
3456 && ! MACHO_DYNAMIC_NO_PIC_P
3457#endif
ee890fe2 3458 && GET_CODE (x) != CONST_INT
f676971a 3459 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3460 && CONSTANT_P (x)
f82f556d 3461 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3462 && mode != DImode
ee890fe2
SS
3463 && mode != TImode)
3464 {
3465 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3466 emit_insn (gen_macho_high (reg, x));
3467 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3468 }
f676971a 3469 else if (TARGET_TOC
4d588c14 3470 && constant_pool_expr_p (x)
a9098fd0 3471 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3472 {
3473 return create_TOC_reference (x);
3474 }
3475 else
3476 return NULL_RTX;
3477}
258bfae2 3478
fdbe66f2 3479/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3480 We need to emit DTP-relative relocations. */
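/* For a 4-byte entry this writes, e.g., "\t.long\tsym@dtprel+0x8000", and
   the target's double-int directive (typically .quad) for 8 bytes; "sym"
   here is just a placeholder name.  */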
3481
fdbe66f2 3482static void
c973d557
JJ
3483rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3484{
3485 switch (size)
3486 {
3487 case 4:
3488 fputs ("\t.long\t", file);
3489 break;
3490 case 8:
3491 fputs (DOUBLE_INT_ASM_OP, file);
3492 break;
3493 default:
37409796 3494 gcc_unreachable ();
c973d557
JJ
3495 }
3496 output_addr_const (file, x);
3497 fputs ("@dtprel+0x8000", file);
3498}
3499
c4501e62
JJ
3500/* Construct the SYMBOL_REF for the tls_get_addr function. */
3501
3502static GTY(()) rtx rs6000_tls_symbol;
3503static rtx
863d938c 3504rs6000_tls_get_addr (void)
c4501e62
JJ
3505{
3506 if (!rs6000_tls_symbol)
3507 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3508
3509 return rs6000_tls_symbol;
3510}
3511
3512/* Construct the SYMBOL_REF for TLS GOT references. */
3513
3514static GTY(()) rtx rs6000_got_symbol;
3515static rtx
863d938c 3516rs6000_got_sym (void)
c4501e62
JJ
3517{
3518 if (!rs6000_got_symbol)
3519 {
3520 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3521 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3522 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3523 }
c4501e62
JJ
3524
3525 return rs6000_got_symbol;
3526}
3527
3528/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3529 this (thread-local) address. */
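/* Rough sketch of the simplest case handled below: for local-exec with
   -mtls-size=16 this is a single tprel addition off the thread pointer
   (r13 on 64-bit, r2 on 32-bit), roughly "addi dest,13,sym@tprel"; the
   other models go through the GOT/TOC and, for global/local dynamic,
   a call to __tls_get_addr.  */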
3530
3531static rtx
a2369ed3 3532rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3533{
3534 rtx dest, insn;
3535
3536 dest = gen_reg_rtx (Pmode);
3537 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3538 {
3539 rtx tlsreg;
3540
3541 if (TARGET_64BIT)
3542 {
3543 tlsreg = gen_rtx_REG (Pmode, 13);
3544 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3545 }
3546 else
3547 {
3548 tlsreg = gen_rtx_REG (Pmode, 2);
3549 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3550 }
3551 emit_insn (insn);
3552 }
3553 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3554 {
3555 rtx tlsreg, tmp;
3556
3557 tmp = gen_reg_rtx (Pmode);
3558 if (TARGET_64BIT)
3559 {
3560 tlsreg = gen_rtx_REG (Pmode, 13);
3561 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3562 }
3563 else
3564 {
3565 tlsreg = gen_rtx_REG (Pmode, 2);
3566 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3567 }
3568 emit_insn (insn);
3569 if (TARGET_64BIT)
3570 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3571 else
3572 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3573 emit_insn (insn);
3574 }
3575 else
3576 {
3577 rtx r3, got, tga, tmp1, tmp2, eqv;
3578
4fed8f8f
AM
3579 /* We currently use relocations like @got@tlsgd for tls, which
3580 means the linker will handle allocation of tls entries, placing
3581 them in the .got section. So use a pointer to the .got section,
3582 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3583 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3584 if (TARGET_64BIT)
972f427b 3585 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3586 else
3587 {
3588 if (flag_pic == 1)
3589 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3590 else
3591 {
3592 rtx gsym = rs6000_got_sym ();
3593 got = gen_reg_rtx (Pmode);
3594 if (flag_pic == 0)
3595 rs6000_emit_move (got, gsym, Pmode);
3596 else
3597 {
e65a3857 3598 rtx tmp3, mem;
c4501e62
JJ
3599 rtx first, last;
3600
c4501e62
JJ
3601 tmp1 = gen_reg_rtx (Pmode);
3602 tmp2 = gen_reg_rtx (Pmode);
3603 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3604 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3605
e65a3857
DE
3606 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3607 emit_move_insn (tmp1,
1de43f85 3608 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3609 emit_move_insn (tmp2, mem);
3610 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3611 last = emit_move_insn (got, tmp3);
bd94cb6e 3612 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3613 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3614 }
3615 }
3616 }
3617
3618 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3619 {
3620 r3 = gen_rtx_REG (Pmode, 3);
3621 if (TARGET_64BIT)
3622 insn = gen_tls_gd_64 (r3, got, addr);
3623 else
3624 insn = gen_tls_gd_32 (r3, got, addr);
3625 start_sequence ();
3626 emit_insn (insn);
3627 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3628 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3629 insn = emit_call_insn (insn);
3630 CONST_OR_PURE_CALL_P (insn) = 1;
3631 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3632 insn = get_insns ();
3633 end_sequence ();
3634 emit_libcall_block (insn, dest, r3, addr);
3635 }
3636 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3637 {
3638 r3 = gen_rtx_REG (Pmode, 3);
3639 if (TARGET_64BIT)
3640 insn = gen_tls_ld_64 (r3, got);
3641 else
3642 insn = gen_tls_ld_32 (r3, got);
3643 start_sequence ();
3644 emit_insn (insn);
3645 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3646 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3647 insn = emit_call_insn (insn);
3648 CONST_OR_PURE_CALL_P (insn) = 1;
3649 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3650 insn = get_insns ();
3651 end_sequence ();
3652 tmp1 = gen_reg_rtx (Pmode);
3653 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3654 UNSPEC_TLSLD);
3655 emit_libcall_block (insn, tmp1, r3, eqv);
3656 if (rs6000_tls_size == 16)
3657 {
3658 if (TARGET_64BIT)
3659 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3660 else
3661 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3662 }
3663 else if (rs6000_tls_size == 32)
3664 {
3665 tmp2 = gen_reg_rtx (Pmode);
3666 if (TARGET_64BIT)
3667 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3668 else
3669 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3670 emit_insn (insn);
3671 if (TARGET_64BIT)
3672 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3673 else
3674 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3675 }
3676 else
3677 {
3678 tmp2 = gen_reg_rtx (Pmode);
3679 if (TARGET_64BIT)
3680 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3681 else
3682 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3683 emit_insn (insn);
3684 insn = gen_rtx_SET (Pmode, dest,
3685 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3686 }
3687 emit_insn (insn);
3688 }
3689 else
3690 {
a7b376ee 3691 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3692 tmp2 = gen_reg_rtx (Pmode);
3693 if (TARGET_64BIT)
3694 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3695 else
3696 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3697 emit_insn (insn);
3698 if (TARGET_64BIT)
3699 insn = gen_tls_tls_64 (dest, tmp2, addr);
3700 else
3701 insn = gen_tls_tls_32 (dest, tmp2, addr);
3702 emit_insn (insn);
3703 }
3704 }
3705
3706 return dest;
3707}
3708
c4501e62
JJ
3709/* Return 1 if X contains a thread-local symbol. */
3710
3711bool
a2369ed3 3712rs6000_tls_referenced_p (rtx x)
c4501e62 3713{
cd413cab
AP
3714 if (! TARGET_HAVE_TLS)
3715 return false;
3716
c4501e62
JJ
3717 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3718}
3719
3720/* Return 1 if *X is a thread-local symbol. This is the same as
3721 rs6000_tls_symbol_ref except for the type of the unused argument. */
3722
9390387d 3723static int
a2369ed3 3724rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3725{
3726 return RS6000_SYMBOL_REF_TLS_P (*x);
3727}
3728
24ea750e
DJ
3729/* The convention appears to be to define this wherever it is used.
3730 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3731 is now used here. */
3732#ifndef REG_MODE_OK_FOR_BASE_P
3733#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3734#endif
3735
3736/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3737 replace the input X, or the original X if no replacement is called for.
3738 The output parameter *WIN is 1 if the calling macro should goto WIN,
3739 0 if it should not.
3740
3741 For RS/6000, we wish to handle large displacements off a base
3742 register by splitting the addend across an addiu/addis and the mem insn.
3743 This cuts number of extra insns needed from 3 to 1.
3744
3745 On Darwin, we use this to generate code for floating point constants.
3746 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3747 The Darwin code is inside #if TARGET_MACHO because only then is
3748 machopic_function_base_name() defined. */
3749rtx
f676971a 3750rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3751 int opnum, int type,
3752 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3753{
f676971a 3754 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3755 if (GET_CODE (x) == PLUS
3756 && GET_CODE (XEXP (x, 0)) == PLUS
3757 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3758 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3759 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3760 {
3761 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3762 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3763 opnum, (enum reload_type)type);
24ea750e
DJ
3764 *win = 1;
3765 return x;
3766 }
3deb2758 3767
24ea750e
DJ
3768#if TARGET_MACHO
3769 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3770 && GET_CODE (x) == LO_SUM
3771 && GET_CODE (XEXP (x, 0)) == PLUS
3772 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3773 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3774 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3775 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3776 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3777 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3778 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3779 {
3780 /* Result of previous invocation of this function on Darwin
6f317ef3 3781 floating point constant. */
24ea750e 3782 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3783 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3784 opnum, (enum reload_type)type);
24ea750e
DJ
3785 *win = 1;
3786 return x;
3787 }
3788#endif
4937d02d
DE
3789
3790  /* Force a non-word-aligned ld/std offset into the base register by
3791     wrapping the address in an offset of 0.  */
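  /* E.g. (mem:DI (plus (reg) (const_int 6))) cannot use a DS-form ld/std,
     whose displacement must have its low two bits clear; rewriting it as
     (plus (plus (reg) (const_int 6)) (const_int 0)) makes reload move
     reg+6 into a base register and leaves a displacement of 0.  */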
3792 if (GET_CODE (x) == PLUS
3793 && GET_CODE (XEXP (x, 0)) == REG
3794 && REGNO (XEXP (x, 0)) < 32
3795 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3796 && GET_CODE (XEXP (x, 1)) == CONST_INT
3797 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3798 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3799 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3800 && TARGET_POWERPC64)
3801 {
3802 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3803 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3804 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3805 opnum, (enum reload_type) type);
3806 *win = 1;
3807 return x;
3808 }
3809
24ea750e
DJ
3810 if (GET_CODE (x) == PLUS
3811 && GET_CODE (XEXP (x, 0)) == REG
3812 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3813 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3814 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3815 && !SPE_VECTOR_MODE (mode)
17caeff2 3816 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3817 || mode == DImode))
78c875e8 3818 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3819 {
3820 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3821 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3822 HOST_WIDE_INT high
c4ad648e 3823 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3824
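      /* E.g. val = 0x12348 splits into low = 0x2348 and high = 0x10000: the
	 high part is reloaded into a base register while the low part stays
	 as the displacement of the mem.  When val cannot be written as
	 high + low within 32 bits (possible when HOST_WIDE_INT is 64 bits),
	 the check below catches it and we give up.  */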
3825 /* Check for 32-bit overflow. */
3826 if (high + low != val)
c4ad648e 3827 {
24ea750e
DJ
3828 *win = 0;
3829 return x;
3830 }
3831
3832 /* Reload the high part into a base reg; leave the low part
c4ad648e 3833 in the mem directly. */
24ea750e
DJ
3834
3835 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3836 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3837 GEN_INT (high)),
3838 GEN_INT (low));
24ea750e
DJ
3839
3840 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3841 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3842 opnum, (enum reload_type)type);
24ea750e
DJ
3843 *win = 1;
3844 return x;
3845 }
4937d02d 3846
24ea750e 3847 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3848 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3849 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3850#if TARGET_MACHO
3851 && DEFAULT_ABI == ABI_DARWIN
a29077da 3852 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3853#else
3854 && DEFAULT_ABI == ABI_V4
3855 && !flag_pic
3856#endif
7393f7f8 3857 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
3858 The same goes for DImode without 64-bit gprs and DFmode
3859 without fprs. */
0d8c1c97 3860 && mode != TFmode
7393f7f8 3861 && mode != TDmode
7b5d92b2
AM
3862 && (mode != DImode || TARGET_POWERPC64)
3863 && (mode != DFmode || TARGET_POWERPC64
3864 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3865 {
8308679f 3866#if TARGET_MACHO
a29077da
GK
3867 if (flag_pic)
3868 {
3869 rtx offset = gen_rtx_CONST (Pmode,
3870 gen_rtx_MINUS (Pmode, x,
11abc112 3871 machopic_function_base_sym ()));
a29077da
GK
3872 x = gen_rtx_LO_SUM (GET_MODE (x),
3873 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3874 gen_rtx_HIGH (Pmode, offset)), offset);
3875 }
3876 else
8308679f 3877#endif
a29077da 3878 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3879 gen_rtx_HIGH (Pmode, x), x);
a29077da 3880
24ea750e 3881 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3882 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3883 opnum, (enum reload_type)type);
24ea750e
DJ
3884 *win = 1;
3885 return x;
3886 }
4937d02d 3887
dec1f3aa
DE
3888 /* Reload an offset address wrapped by an AND that represents the
3889 masking of the lower bits. Strip the outer AND and let reload
3890 convert the offset address into an indirect address. */
3891 if (TARGET_ALTIVEC
3892 && ALTIVEC_VECTOR_MODE (mode)
3893 && GET_CODE (x) == AND
3894 && GET_CODE (XEXP (x, 0)) == PLUS
3895 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3896 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3897 && GET_CODE (XEXP (x, 1)) == CONST_INT
3898 && INTVAL (XEXP (x, 1)) == -16)
3899 {
3900 x = XEXP (x, 0);
3901 *win = 1;
3902 return x;
3903 }
3904
24ea750e 3905 if (TARGET_TOC
4d588c14 3906 && constant_pool_expr_p (x)
c1f11548 3907 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3908 {
194c524a 3909 x = create_TOC_reference (x);
24ea750e
DJ
3910 *win = 1;
3911 return x;
3912 }
3913 *win = 0;
3914 return x;
f676971a 3915}
24ea750e 3916
258bfae2
FS
3917/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3918 that is a valid memory address for an instruction.
3919 The MODE argument is the machine mode for the MEM expression
3920 that wants to use this address.
3921
3922   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3923 refers to a constant pool entry of an address (or the sum of it
3924 plus a constant), a short (16-bit signed) constant plus a register,
3925 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3926 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3927   we must ensure that both words are addressable, or on PowerPC64 that
3928   the offset is word aligned.
3929
3930 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
3931 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
3932 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
3933 during assembly output. */
3934int
a2369ed3 3935rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3936{
850e8d3d
DN
3937 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3938 if (TARGET_ALTIVEC
3939 && ALTIVEC_VECTOR_MODE (mode)
3940 && GET_CODE (x) == AND
3941 && GET_CODE (XEXP (x, 1)) == CONST_INT
3942 && INTVAL (XEXP (x, 1)) == -16)
3943 x = XEXP (x, 0);
3944
c4501e62
JJ
3945 if (RS6000_SYMBOL_REF_TLS_P (x))
3946 return 0;
4d588c14 3947 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3948 return 1;
3949 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3950 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3951 && !SPE_VECTOR_MODE (mode)
429ec7dc 3952 && mode != TFmode
7393f7f8 3953 && mode != TDmode
54b695e7 3954 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3955 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3956 || mode == DImode))
258bfae2 3957 && TARGET_UPDATE
4d588c14 3958 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3959 return 1;
d04b6e6e 3960 if (legitimate_small_data_p (mode, x))
258bfae2 3961 return 1;
4d588c14 3962 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3963 return 1;
3964 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3965 if (! reg_ok_strict
3966 && GET_CODE (x) == PLUS
3967 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3968 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3969 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3970 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3971 return 1;
76d2b81d 3972 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3973 return 1;
3974 if (mode != TImode
76d2b81d 3975 && mode != TFmode
7393f7f8 3976 && mode != TDmode
a3170dc6
AH
3977 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3978 || TARGET_POWERPC64
4d4cbc0e 3979 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3980 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3981 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3982 return 1;
6fb5fa3c
DB
3983 if (GET_CODE (x) == PRE_MODIFY
3984 && mode != TImode
3985 && mode != TFmode
3986 && mode != TDmode
3987 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3988 || TARGET_POWERPC64
3989 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3990 && (TARGET_POWERPC64 || mode != DImode)
3991 && !ALTIVEC_VECTOR_MODE (mode)
3992 && !SPE_VECTOR_MODE (mode)
3993 /* Restrict addressing for DI because of our SUBREG hackery. */
3994 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3995 && TARGET_UPDATE
3996 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
3997 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
3998 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
3999 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4000 return 1;
4d588c14 4001 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4002 return 1;
4003 return 0;
4004}
4d588c14
RH
4005
4006/* Go to LABEL if ADDR (a legitimate address expression)
4007 has an effect that depends on the machine mode it is used for.
4008
4009 On the RS/6000 this is true of all integral offsets (since AltiVec
4010   modes don't allow them) and of pre-increment or decrement addresses.
4011
4012 ??? Except that due to conceptual problems in offsettable_address_p
4013 we can't really report the problems of integral offsets. So leave
f676971a 4014 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4015 sub-words of a TFmode operand, which is what we had before. */
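/* E.g. a displacement of 0x7ff0 is fine (0x7ff0 + 12 + 0x8000 < 0x10000),
   but 0x7ff8 is mode dependent, since the last word of a 16-byte TFmode
   access would fall outside the signed 16-bit displacement range.  */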
4016
4017bool
a2369ed3 4018rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4019{
4020 switch (GET_CODE (addr))
4021 {
4022 case PLUS:
4023 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4024 {
4025 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4026 return val + 12 + 0x8000 >= 0x10000;
4027 }
4028 break;
4029
4030 case LO_SUM:
4031 return true;
4032
6fb5fa3c
DB
4033 case PRE_INC:
4034 case PRE_DEC:
4035 case PRE_MODIFY:
4036 return TARGET_UPDATE;
4d588c14
RH
4037
4038 default:
4039 break;
4040 }
4041
4042 return false;
4043}
d8ecbcdb 4044
d04b6e6e
EB
4045/* More elaborate version of recog's offsettable_memref_p predicate
4046 that works around the ??? note of rs6000_mode_dependent_address.
4047 In particular it accepts
4048
4049 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4050
4051   in 32-bit mode, which the recog predicate rejects.  */
4052
4053bool
4054rs6000_offsettable_memref_p (rtx op)
4055{
4056 if (!MEM_P (op))
4057 return false;
4058
4059 /* First mimic offsettable_memref_p. */
4060 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4061 return true;
4062
4063 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4064 the latter predicate knows nothing about the mode of the memory
4065 reference and, therefore, assumes that it is the largest supported
4066 mode (TFmode). As a consequence, legitimate offsettable memory
4067 references are rejected. rs6000_legitimate_offset_address_p contains
4068 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4069 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4070}
4071
d8ecbcdb
AH
4072/* Return number of consecutive hard regs needed starting at reg REGNO
4073 to hold something of mode MODE.
4074 This is ordinarily the length in words of a value of mode MODE
4075 but can be less for certain modes in special long registers.
4076
4077 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4078 scalar instructions. The upper 32 bits are only available to the
4079 SIMD instructions.
4080
4081 POWER and PowerPC GPRs hold 32 bits worth;
4082   PowerPC64 GPRs and FPRs hold 64 bits worth.  */
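/* For example, a DFmode value needs two 32-bit GPRs but only one 64-bit FPR,
   and a V4SImode vector occupies a single AltiVec register (illustrative).  */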
4083
4084int
4085rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4086{
4087 if (FP_REGNO_P (regno))
4088 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4089
4090 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4091 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4092
4093 if (ALTIVEC_REGNO_P (regno))
4094 return
4095 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4096
8521c414
JM
4097 /* The value returned for SCmode in the E500 double case is 2 for
4098 ABI compatibility; storing an SCmode value in a single register
4099 would require function_arg and rs6000_spe_function_arg to handle
4100 SCmode so as to pass the value correctly in a pair of
4101 registers. */
4102 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4103 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4104
d8ecbcdb
AH
4105 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4106}
2aa4498c
AH
4107
4108/* Change register usage conditional on target flags. */
4109void
4110rs6000_conditional_register_usage (void)
4111{
4112 int i;
4113
4114 /* Set MQ register fixed (already call_used) if not POWER
4115 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4116 be allocated. */
4117 if (! TARGET_POWER)
4118 fixed_regs[64] = 1;
4119
7c9ac5c0 4120 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4121 if (TARGET_64BIT)
4122 fixed_regs[13] = call_used_regs[13]
4123 = call_really_used_regs[13] = 1;
4124
4125 /* Conditionally disable FPRs. */
4126 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4127 for (i = 32; i < 64; i++)
4128 fixed_regs[i] = call_used_regs[i]
c4ad648e 4129 = call_really_used_regs[i] = 1;
2aa4498c 4130
7c9ac5c0
PH
4131 /* The TOC register is not killed across calls in a way that is
4132 visible to the compiler. */
4133 if (DEFAULT_ABI == ABI_AIX)
4134 call_really_used_regs[2] = 0;
4135
2aa4498c
AH
4136 if (DEFAULT_ABI == ABI_V4
4137 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4138 && flag_pic == 2)
4139 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4140
4141 if (DEFAULT_ABI == ABI_V4
4142 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4143 && flag_pic == 1)
4144 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4145 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4146 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4147
4148 if (DEFAULT_ABI == ABI_DARWIN
4149 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4150 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4151 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4152 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4153
b4db40bf
JJ
4154 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4155 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4156 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4157
2aa4498c
AH
4158 if (TARGET_ALTIVEC)
4159 global_regs[VSCR_REGNO] = 1;
4160
4161 if (TARGET_SPE)
4162 {
4163 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4164 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4165 registers in prologues and epilogues. We no longer use r14
4166 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4167 pool for link-compatibility with older versions of GCC. Once
4168 "old" code has died out, we can return r14 to the allocation
4169 pool. */
4170 fixed_regs[14]
4171 = call_used_regs[14]
4172 = call_really_used_regs[14] = 1;
2aa4498c
AH
4173 }
4174
4175 if (! TARGET_ALTIVEC)
4176 {
4177 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4178 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4179 call_really_used_regs[VRSAVE_REGNO] = 1;
4180 }
4181
4182 if (TARGET_ALTIVEC_ABI)
4183 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4184 call_used_regs[i] = call_really_used_regs[i] = 1;
4185}
fb4d4348 4186\f
a4f6c312
SS
4187/* Try to output insns to set TARGET equal to the constant C if it can
4188 be done in less than N insns. Do all computations in MODE.
4189 Returns the place where the output has been placed if it can be
4190 done and the insns have been emitted. If it would take more than N
4191   insns, zero is returned and no insns are emitted.  */
2bfcf297
DB
4192
4193rtx
f676971a 4194rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4195 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4196{
af8cb5c5 4197 rtx result, insn, set;
2bfcf297
DB
4198 HOST_WIDE_INT c0, c1;
4199
37409796 4200 switch (mode)
2bfcf297 4201 {
37409796
NS
4202 case QImode:
4203 case HImode:
2bfcf297 4204 if (dest == NULL)
c4ad648e 4205 dest = gen_reg_rtx (mode);
2bfcf297
DB
4206 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4207 return dest;
bb8df8a6 4208
37409796 4209 case SImode:
b3a13419 4210 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4211
d448860e 4212 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4213 GEN_INT (INTVAL (source)
4214 & (~ (HOST_WIDE_INT) 0xffff))));
4215 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4216 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4217 GEN_INT (INTVAL (source) & 0xffff))));
4218 result = dest;
37409796
NS
4219 break;
4220
4221 case DImode:
4222 switch (GET_CODE (source))
af8cb5c5 4223 {
37409796 4224 case CONST_INT:
af8cb5c5
DE
4225 c0 = INTVAL (source);
4226 c1 = -(c0 < 0);
37409796 4227 break;
bb8df8a6 4228
37409796 4229 case CONST_DOUBLE:
2bfcf297 4230#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4231 c0 = CONST_DOUBLE_LOW (source);
4232 c1 = -(c0 < 0);
2bfcf297 4233#else
af8cb5c5
DE
4234 c0 = CONST_DOUBLE_LOW (source);
4235 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4236#endif
37409796
NS
4237 break;
4238
4239 default:
4240 gcc_unreachable ();
af8cb5c5 4241 }
af8cb5c5
DE
4242
4243 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4244 break;
4245
4246 default:
4247 gcc_unreachable ();
2bfcf297 4248 }
2bfcf297 4249
af8cb5c5
DE
4250 insn = get_last_insn ();
4251 set = single_set (insn);
4252 if (! CONSTANT_P (SET_SRC (set)))
4253 set_unique_reg_note (insn, REG_EQUAL, source);
4254
4255 return result;
2bfcf297
DB
4256}
4257
4258/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4259   fall back to a straightforward decomposition.  We do this to avoid
4260 exponential run times encountered when looking for longer sequences
4261 with rs6000_emit_set_const. */
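/* For instance, on 64-bit a constant such as 0x123456789abcdef0 is built
   piecewise from its four 16-bit chunks: load the top chunk shifted left 16
   (lis), OR in the next chunk (ori), shift the register left 32, then OR in
   the remaining two chunks (oris, ori) -- five insns in the general case
   (rough sketch of the code below).  */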
4262static rtx
a2369ed3 4263rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4264{
4265 if (!TARGET_POWERPC64)
4266 {
4267 rtx operand1, operand2;
4268
4269 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4270 DImode);
d448860e 4271 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4272 DImode);
4273 emit_move_insn (operand1, GEN_INT (c1));
4274 emit_move_insn (operand2, GEN_INT (c2));
4275 }
4276 else
4277 {
bc06712d 4278 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4279
bc06712d 4280 ud1 = c1 & 0xffff;
f921c9c9 4281 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4282#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4283 c2 = c1 >> 32;
2bfcf297 4284#endif
bc06712d 4285 ud3 = c2 & 0xffff;
f921c9c9 4286 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4287
f676971a 4288 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4289 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4290 {
bc06712d 4291 if (ud1 & 0x8000)
b78d48dd 4292 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4293 else
4294 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4295 }
2bfcf297 4296
f676971a 4297 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4298 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4299 {
bc06712d 4300 if (ud2 & 0x8000)
f676971a 4301 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4302 - 0x80000000));
252b88f7 4303 else
bc06712d
TR
4304 emit_move_insn (dest, GEN_INT (ud2 << 16));
4305 if (ud1 != 0)
d448860e
JH
4306 emit_move_insn (copy_rtx (dest),
4307 gen_rtx_IOR (DImode, copy_rtx (dest),
4308 GEN_INT (ud1)));
252b88f7 4309 }
f676971a 4310 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4311 || (ud4 == 0 && ! (ud3 & 0x8000)))
4312 {
4313 if (ud3 & 0x8000)
f676971a 4314 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4315 - 0x80000000));
4316 else
4317 emit_move_insn (dest, GEN_INT (ud3 << 16));
4318
4319 if (ud2 != 0)
d448860e
JH
4320 emit_move_insn (copy_rtx (dest),
4321 gen_rtx_IOR (DImode, copy_rtx (dest),
4322 GEN_INT (ud2)));
4323 emit_move_insn (copy_rtx (dest),
4324 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4325 GEN_INT (16)));
bc06712d 4326 if (ud1 != 0)
d448860e
JH
4327 emit_move_insn (copy_rtx (dest),
4328 gen_rtx_IOR (DImode, copy_rtx (dest),
4329 GEN_INT (ud1)));
bc06712d 4330 }
f676971a 4331 else
bc06712d
TR
4332 {
4333 if (ud4 & 0x8000)
f676971a 4334 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4335 - 0x80000000));
4336 else
4337 emit_move_insn (dest, GEN_INT (ud4 << 16));
4338
4339 if (ud3 != 0)
d448860e
JH
4340 emit_move_insn (copy_rtx (dest),
4341 gen_rtx_IOR (DImode, copy_rtx (dest),
4342 GEN_INT (ud3)));
2bfcf297 4343
d448860e
JH
4344 emit_move_insn (copy_rtx (dest),
4345 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4346 GEN_INT (32)));
bc06712d 4347 if (ud2 != 0)
d448860e
JH
4348 emit_move_insn (copy_rtx (dest),
4349 gen_rtx_IOR (DImode, copy_rtx (dest),
4350 GEN_INT (ud2 << 16)));
bc06712d 4351 if (ud1 != 0)
d448860e
JH
4352 emit_move_insn (copy_rtx (dest),
4353 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4354 }
4355 }
2bfcf297
DB
4356 return dest;
4357}
4358
76d2b81d 4359/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4360   in cases where they won't work (TImode, TFmode, TDmode).  */
76d2b81d
DJ
4361
4362static void
4363rs6000_eliminate_indexed_memrefs (rtx operands[2])
4364{
4365 if (GET_CODE (operands[0]) == MEM
4366 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4367 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4368 && ! reload_in_progress)
4369 operands[0]
4370 = replace_equiv_address (operands[0],
4371 copy_addr_to_reg (XEXP (operands[0], 0)));
4372
4373 if (GET_CODE (operands[1]) == MEM
4374 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4375 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4376 && ! reload_in_progress)
4377 operands[1]
4378 = replace_equiv_address (operands[1],
4379 copy_addr_to_reg (XEXP (operands[1], 0)));
4380}
4381
fb4d4348
GK
4382/* Emit a move from SOURCE to DEST in mode MODE. */
4383void
a2369ed3 4384rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4385{
4386 rtx operands[2];
4387 operands[0] = dest;
4388 operands[1] = source;
f676971a 4389
fb4d4348
GK
4390 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4391 if (GET_CODE (operands[1]) == CONST_DOUBLE
4392 && ! FLOAT_MODE_P (mode)
4393 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4394 {
4395 /* FIXME. This should never happen. */
4396 /* Since it seems that it does, do the safe thing and convert
4397 to a CONST_INT. */
2496c7bd 4398 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4399 }
37409796
NS
4400 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4401 || FLOAT_MODE_P (mode)
4402 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4403 || CONST_DOUBLE_LOW (operands[1]) < 0)
4404 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4405 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4406
c9e8cb32
DD
4407 /* Check if GCC is setting up a block move that will end up using FP
4408 registers as temporaries. We must make sure this is acceptable. */
4409 if (GET_CODE (operands[0]) == MEM
4410 && GET_CODE (operands[1]) == MEM
4411 && mode == DImode
41543739
GK
4412 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4413 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4414 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4415 ? 32 : MEM_ALIGN (operands[0])))
4416 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4417 ? 32
41543739
GK
4418 : MEM_ALIGN (operands[1]))))
4419 && ! MEM_VOLATILE_P (operands [0])
4420 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4421 {
41543739
GK
4422 emit_move_insn (adjust_address (operands[0], SImode, 0),
4423 adjust_address (operands[1], SImode, 0));
d448860e
JH
4424 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4425 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4426 return;
4427 }
630d42a0 4428
b3a13419 4429 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4430 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4431 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4432
a3170dc6
AH
4433 if (mode == SFmode && ! TARGET_POWERPC
4434 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4435 && GET_CODE (operands[0]) == MEM)
fb4d4348 4436 {
ffc14f31
GK
4437 int regnum;
4438
4439 if (reload_in_progress || reload_completed)
4440 regnum = true_regnum (operands[1]);
4441 else if (GET_CODE (operands[1]) == REG)
4442 regnum = REGNO (operands[1]);
4443 else
4444 regnum = -1;
f676971a 4445
fb4d4348
GK
4446 /* If operands[1] is a register, on POWER it may have
4447 double-precision data in it, so truncate it to single
4448 precision. */
4449 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4450 {
4451 rtx newreg;
b3a13419 4452 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4453 : gen_reg_rtx (mode));
fb4d4348
GK
4454 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4455 operands[1] = newreg;
4456 }
4457 }
4458
c4501e62
JJ
4459 /* Recognize the case where operand[1] is a reference to thread-local
4460 data and load its address to a register. */
84f52ebd 4461 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4462 {
84f52ebd
RH
4463 enum tls_model model;
4464 rtx tmp = operands[1];
4465 rtx addend = NULL;
4466
4467 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4468 {
4469 addend = XEXP (XEXP (tmp, 0), 1);
4470 tmp = XEXP (XEXP (tmp, 0), 0);
4471 }
4472
4473 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4474 model = SYMBOL_REF_TLS_MODEL (tmp);
4475 gcc_assert (model != 0);
4476
4477 tmp = rs6000_legitimize_tls_address (tmp, model);
4478 if (addend)
4479 {
4480 tmp = gen_rtx_PLUS (mode, tmp, addend);
4481 tmp = force_operand (tmp, operands[0]);
4482 }
4483 operands[1] = tmp;
c4501e62
JJ
4484 }
4485
8f4e6caf
RH
4486 /* Handle the case where reload calls us with an invalid address. */
4487 if (reload_in_progress && mode == Pmode
69ef87e2 4488 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4489 || ! nonimmediate_operand (operands[0], mode)))
4490 goto emit_set;
4491
a9baceb1
GK
4492 /* 128-bit constant floating-point values on Darwin should really be
4493 loaded as two parts. */
8521c414 4494 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4495 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4496 {
4497 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4498 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4499 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4500 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4501 simplify_gen_subreg (imode, operands[1], mode, 0),
4502 imode);
4503 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4504 GET_MODE_SIZE (imode)),
4505 simplify_gen_subreg (imode, operands[1], mode,
4506 GET_MODE_SIZE (imode)),
4507 imode);
a9baceb1
GK
4508 return;
4509 }
4510
fb4d4348
GK
4511 /* FIXME: In the long term, this switch statement should go away
4512 and be replaced by a sequence of tests based on things like
4513 mode == Pmode. */
4514 switch (mode)
4515 {
4516 case HImode:
4517 case QImode:
4518 if (CONSTANT_P (operands[1])
4519 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4520 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4521 break;
4522
06f4e019 4523 case TFmode:
7393f7f8 4524 case TDmode:
76d2b81d
DJ
4525 rs6000_eliminate_indexed_memrefs (operands);
4526 /* fall through */
4527
fb4d4348 4528 case DFmode:
7393f7f8 4529 case DDmode:
fb4d4348 4530 case SFmode:
f676971a 4531 if (CONSTANT_P (operands[1])
fb4d4348 4532 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4533 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4534 break;
f676971a 4535
0ac081f6
AH
4536 case V16QImode:
4537 case V8HImode:
4538 case V4SFmode:
4539 case V4SImode:
a3170dc6
AH
4540 case V4HImode:
4541 case V2SFmode:
4542 case V2SImode:
00a892b8 4543 case V1DImode:
69ef87e2 4544 if (CONSTANT_P (operands[1])
d744e06e 4545 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4546 operands[1] = force_const_mem (mode, operands[1]);
4547 break;
f676971a 4548
fb4d4348 4549 case SImode:
a9098fd0 4550 case DImode:
fb4d4348
GK
4551       /* Use the default pattern for the address of ELF small data.  */
4552 if (TARGET_ELF
a9098fd0 4553 && mode == Pmode
f607bc57 4554 && DEFAULT_ABI == ABI_V4
f676971a 4555 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4556 || GET_CODE (operands[1]) == CONST)
4557 && small_data_operand (operands[1], mode))
fb4d4348
GK
4558 {
4559 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4560 return;
4561 }
4562
f607bc57 4563 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4564 && mode == Pmode && mode == SImode
4565 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4566 {
4567 emit_insn (gen_movsi_got (operands[0], operands[1]));
4568 return;
4569 }
4570
ee890fe2 4571 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4572 && TARGET_NO_TOC
4573 && ! flag_pic
a9098fd0 4574 && mode == Pmode
fb4d4348
GK
4575 && CONSTANT_P (operands[1])
4576 && GET_CODE (operands[1]) != HIGH
4577 && GET_CODE (operands[1]) != CONST_INT)
4578 {
b3a13419
ILT
4579 rtx target = (!can_create_pseudo_p ()
4580 ? operands[0]
4581 : gen_reg_rtx (mode));
fb4d4348
GK
4582
4583 /* If this is a function address on -mcall-aixdesc,
4584 convert it to the address of the descriptor. */
4585 if (DEFAULT_ABI == ABI_AIX
4586 && GET_CODE (operands[1]) == SYMBOL_REF
4587 && XSTR (operands[1], 0)[0] == '.')
4588 {
4589 const char *name = XSTR (operands[1], 0);
4590 rtx new_ref;
4591 while (*name == '.')
4592 name++;
4593 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4594 CONSTANT_POOL_ADDRESS_P (new_ref)
4595 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4596 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4597 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4598 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4599 operands[1] = new_ref;
4600 }
7509c759 4601
ee890fe2
SS
4602 if (DEFAULT_ABI == ABI_DARWIN)
4603 {
ab82a49f
AP
4604#if TARGET_MACHO
4605 if (MACHO_DYNAMIC_NO_PIC_P)
4606 {
4607 /* Take care of any required data indirection. */
4608 operands[1] = rs6000_machopic_legitimize_pic_address (
4609 operands[1], mode, operands[0]);
4610 if (operands[0] != operands[1])
4611 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4612 operands[0], operands[1]));
ab82a49f
AP
4613 return;
4614 }
4615#endif
b8a55285
AP
4616 emit_insn (gen_macho_high (target, operands[1]));
4617 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4618 return;
4619 }
4620
fb4d4348
GK
4621 emit_insn (gen_elf_high (target, operands[1]));
4622 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4623 return;
4624 }
4625
a9098fd0
GK
4626 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4627 and we have put it in the TOC, we just need to make a TOC-relative
4628 reference to it. */
4629 if (TARGET_TOC
4630 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4631 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4632 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4633 get_pool_mode (operands[1])))
fb4d4348 4634 {
a9098fd0 4635 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4636 }
a9098fd0
GK
4637 else if (mode == Pmode
4638 && CONSTANT_P (operands[1])
38886f37
AO
4639 && ((GET_CODE (operands[1]) != CONST_INT
4640 && ! easy_fp_constant (operands[1], mode))
4641 || (GET_CODE (operands[1]) == CONST_INT
4642 && num_insns_constant (operands[1], mode) > 2)
4643 || (GET_CODE (operands[0]) == REG
4644 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4645 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4646 && ! legitimate_constant_pool_address_p (operands[1])
4647 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4648 {
4649 /* Emit a USE operation so that the constant isn't deleted if
4650 expensive optimizations are turned on because nobody
4651 references it. This should only be done for operands that
4652 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4653 This should not be done for operands that contain LABEL_REFs.
4654 For now, we just handle the obvious case. */
4655 if (GET_CODE (operands[1]) != LABEL_REF)
4656 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4657
c859cda6 4658#if TARGET_MACHO
ee890fe2 4659 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4660 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4661 {
ee890fe2
SS
4662 operands[1] =
4663 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4664 operands[0]);
4665 if (operands[0] != operands[1])
4666 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4667 return;
4668 }
c859cda6 4669#endif
ee890fe2 4670
fb4d4348
GK
4671 /* If we are to limit the number of things we put in the TOC and
4672 this is a symbol plus a constant we can add in one insn,
4673 just put the symbol in the TOC and add the constant. Don't do
4674 this if reload is in progress. */
4675 if (GET_CODE (operands[1]) == CONST
4676 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4677 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4678 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4679 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4680 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4681 && ! side_effects_p (operands[0]))
4682 {
a4f6c312
SS
4683 rtx sym =
4684 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4685 rtx other = XEXP (XEXP (operands[1], 0), 1);
4686
a9098fd0
GK
4687 sym = force_reg (mode, sym);
4688 if (mode == SImode)
4689 emit_insn (gen_addsi3 (operands[0], sym, other));
4690 else
4691 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4692 return;
4693 }
4694
a9098fd0 4695 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4696
f676971a 4697 if (TARGET_TOC
4d588c14 4698 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4699 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4700 get_pool_constant (XEXP (operands[1], 0)),
4701 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4702 {
ba4828e0 4703 operands[1]
542a8afa 4704 = gen_const_mem (mode,
c4ad648e 4705 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4706 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4707 }
fb4d4348
GK
4708 }
4709 break;
a9098fd0 4710
fb4d4348 4711 case TImode:
76d2b81d
DJ
4712 rs6000_eliminate_indexed_memrefs (operands);
4713
27dc0551
DE
4714 if (TARGET_POWER)
4715 {
4716 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4717 gen_rtvec (2,
4718 gen_rtx_SET (VOIDmode,
4719 operands[0], operands[1]),
4720 gen_rtx_CLOBBER (VOIDmode,
4721 gen_rtx_SCRATCH (SImode)))));
4722 return;
4723 }
fb4d4348
GK
4724 break;
4725
4726 default:
37409796 4727 gcc_unreachable ();
fb4d4348
GK
4728 }
4729
a9098fd0
GK
4730 /* Above, we may have called force_const_mem which may have returned
4731 an invalid address. If we can, fix this up; otherwise, reload will
4732 have to deal with it. */
8f4e6caf
RH
4733 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4734 operands[1] = validize_mem (operands[1]);
a9098fd0 4735
8f4e6caf 4736 emit_set:
fb4d4348
GK
4737 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4738}
4697a36c 4739\f
2858f73a
GK
4740/* Nonzero if we can use a floating-point register to pass this arg. */
4741#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4742 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4743 && (MODE) != SDmode \
2858f73a
GK
4744 && (CUM)->fregno <= FP_ARG_MAX_REG \
4745 && TARGET_HARD_FLOAT && TARGET_FPRS)
4746
4747/* Nonzero if we can use an AltiVec register to pass this arg. */
4748#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4749 (ALTIVEC_VECTOR_MODE (MODE) \
4750 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4751 && TARGET_ALTIVEC_ABI \
83953138 4752 && (NAMED))
2858f73a 4753
4754/* Return a nonzero value to say to return the function value in
4755 memory, just as large structures are always returned. TYPE will be
4756 the data type of the value, and FNTYPE will be the type of the
4757 function doing the returning, or @code{NULL} for libcalls.
4758
4759 The AIX ABI for the RS/6000 specifies that all structures are
4760 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4761 specifies that structures <= 8 bytes are returned in r3/r4, but a
4762 draft put them in memory, and GCC used to implement the draft
df01da37 4763 instead of the final standard. Therefore, aix_struct_return
4764 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4765 compatibility can change DRAFT_V4_STRUCT_RET to override the
4766 default, and -m switches get the final word. See
4767 rs6000_override_options for more details.
4768
4769 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4770 long double support is enabled. These values are returned in memory.
4771
4772 int_size_in_bytes returns -1 for variable size objects, which go in
4773 memory always. The cast to unsigned makes -1 > 8. */
4774
4775static bool
586de218 4776rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4777{
4778 /* In the darwin64 abi, try to use registers for larger structs
4779 if possible. */
0b5383eb 4780 if (rs6000_darwin64_abi
594a51fe 4781 && TREE_CODE (type) == RECORD_TYPE
4782 && int_size_in_bytes (type) > 0)
4783 {
4784 CUMULATIVE_ARGS valcum;
4785 rtx valret;
4786
4787 valcum.words = 0;
4788 valcum.fregno = FP_ARG_MIN_REG;
4789 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4790 /* Do a trial code generation as if this were going to be passed
4791 as an argument; if any part goes in memory, we return NULL. */
4792 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4793 if (valret)
4794 return false;
4795 /* Otherwise fall through to more conventional ABI rules. */
4796 }
594a51fe 4797
c6e8c921 4798 if (AGGREGATE_TYPE_P (type)
df01da37 4799 && (aix_struct_return
4800 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4801 return true;
b693336b 4802
4803 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4804 modes only exist for GCC vector types if -maltivec. */
4805 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4806 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4807 return false;
4808
4809 /* Return synthetic vectors in memory. */
4810 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4811 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
4812 {
4813 static bool warned_for_return_big_vectors = false;
4814 if (!warned_for_return_big_vectors)
4815 {
d4ee4d25 4816 warning (0, "GCC vector returned by reference: "
4817 "non-standard ABI extension with no compatibility guarantee");
4818 warned_for_return_big_vectors = true;
4819 }
4820 return true;
4821 }
4822
602ea4d3 4823 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4824 return true;
ad630bef 4825
4826 return false;
4827}
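/* Illustration of the rules above (not part of the ABI logic): under
   the AIX and Darwin ABIs even a struct holding a single int is
   returned in memory, while the final SVR4 ABI would return it in
   r3/r4 unless aix_struct_return is set.  A variable sized object has
   int_size_in_bytes () == -1, which the unsigned cast turns into a
   value larger than 8, so it is always returned in memory as well.  */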
4828
4829/* Initialize a variable CUM of type CUMULATIVE_ARGS
4830 for a call to a function whose data type is FNTYPE.
4831 For a library call, FNTYPE is 0.
4832
4833 For incoming args we set the number of arguments in the prototype large
1c20ae99 4834 so we never return a PARALLEL. */
4835
4836void
f676971a 4837init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4838 rtx libname ATTRIBUTE_UNUSED, int incoming,
4839 int libcall, int n_named_args)
4840{
4841 static CUMULATIVE_ARGS zero_cumulative;
4842
4843 *cum = zero_cumulative;
4844 cum->words = 0;
4845 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4846 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4847 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4848 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4849 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4850 cum->sysv_gregno = GP_ARG_MIN_REG;
4851 cum->stdarg = fntype
4852 && (TYPE_ARG_TYPES (fntype) != 0
4853 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4854 != void_type_node));
4697a36c 4855
4856 cum->nargs_prototype = 0;
4857 if (incoming || cum->prototype)
4858 cum->nargs_prototype = n_named_args;
4697a36c 4859
a5c76ee6 4860 /* Check for a longcall attribute. */
4861 if ((!fntype && rs6000_default_long_calls)
4862 || (fntype
4863 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4864 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4865 cum->call_cookie |= CALL_LONG;
6a4cee5f 4866
4867 if (TARGET_DEBUG_ARG)
4868 {
4869 fprintf (stderr, "\ninit_cumulative_args:");
4870 if (fntype)
4871 {
4872 tree ret_type = TREE_TYPE (fntype);
4873 fprintf (stderr, " ret code = %s,",
4874 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4875 }
4876
4877 if (cum->call_cookie & CALL_LONG)
4878 fprintf (stderr, " longcall,");
4879
4880 fprintf (stderr, " proto = %d, nargs = %d\n",
4881 cum->prototype, cum->nargs_prototype);
4882 }
f676971a 4883
4884 if (fntype
4885 && !TARGET_ALTIVEC
4886 && TARGET_ALTIVEC_ABI
4887 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4888 {
c85ce869 4889 error ("cannot return value in vector register because"
c4ad648e 4890 " altivec instructions are disabled, use -maltivec"
c85ce869 4891 " to enable them");
c4ad648e 4892 }
4893}
4894\f
4895/* Return true if TYPE must be passed on the stack and not in registers. */
4896
4897static bool
586de218 4898rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
4899{
4900 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4901 return must_pass_in_stack_var_size (mode, type);
4902 else
4903 return must_pass_in_stack_var_size_or_pad (mode, type);
4904}
4905
4906/* If defined, a C expression which determines whether, and in which
4907 direction, to pad out an argument with extra space. The value
4908 should be of type `enum direction': either `upward' to pad above
4909 the argument, `downward' to pad below, or `none' to inhibit
4910 padding.
4911
4912 For the AIX ABI structs are always stored left shifted in their
4913 argument slot. */
4914
9ebbca7d 4915enum direction
586de218 4916function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 4917{
4918#ifndef AGGREGATE_PADDING_FIXED
4919#define AGGREGATE_PADDING_FIXED 0
4920#endif
4921#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4922#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4923#endif
4924
4925 if (!AGGREGATE_PADDING_FIXED)
4926 {
4927 /* GCC used to pass structures of the same size as integer types as
4928 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4929 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4930 passed padded downward, except that -mstrict-align further
4931 muddied the water in that multi-component structures of 2 and 4
4932 bytes in size were passed padded upward.
4933
4934 The following arranges for best compatibility with previous
4935 versions of gcc, but removes the -mstrict-align dependency. */
4936 if (BYTES_BIG_ENDIAN)
4937 {
4938 HOST_WIDE_INT size = 0;
4939
4940 if (mode == BLKmode)
4941 {
4942 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4943 size = int_size_in_bytes (type);
4944 }
4945 else
4946 size = GET_MODE_SIZE (mode);
4947
4948 if (size == 1 || size == 2 || size == 4)
4949 return downward;
4950 }
4951 return upward;
4952 }
4953
4954 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4955 {
4956 if (type != 0 && AGGREGATE_TYPE_P (type))
4957 return upward;
4958 }
c229cba9 4959
4960 /* Fall back to the default. */
4961 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4962}
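/* Worked example of the big-endian rule above: a 2-byte struct has
   size 2 and so is padded downward, reproducing the old behaviour of
   passing it as if it were a 2-byte integer, while a 3-byte struct
   matches none of the special sizes and falls through to the default
   upward padding.  */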
4963
b6c9286a 4964/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4965 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4966 PARM_BOUNDARY is used for all arguments.
f676971a 4967
4968 V.4 wants long longs and doubles to be double word aligned. Just
4969 testing the mode size is a boneheaded way to do this as it means
4970 that other types such as complex int are also double word aligned.
4971 However, we're stuck with this because changing the ABI might break
4972 existing library interfaces.
4973
4974 Doubleword align SPE vectors.
4975 Quadword align Altivec vectors.
4976 Quadword align large synthetic vector types. */
4977
4978int
b693336b 4979function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4980{
4981 if (DEFAULT_ABI == ABI_V4
4982 && (GET_MODE_SIZE (mode) == 8
4983 || (TARGET_HARD_FLOAT
4984 && TARGET_FPRS
7393f7f8 4985 && (mode == TFmode || mode == TDmode))))
4ed78545 4986 return 64;
4987 else if (SPE_VECTOR_MODE (mode)
4988 || (type && TREE_CODE (type) == VECTOR_TYPE
4989 && int_size_in_bytes (type) >= 8
4990 && int_size_in_bytes (type) < 16))
e1f83b4d 4991 return 64;
4992 else if (ALTIVEC_VECTOR_MODE (mode)
4993 || (type && TREE_CODE (type) == VECTOR_TYPE
4994 && int_size_in_bytes (type) >= 16))
0ac081f6 4995 return 128;
4996 else if (rs6000_darwin64_abi && mode == BLKmode
4997 && type && TYPE_ALIGN (type) > 64)
4998 return 128;
9ebbca7d 4999 else
b6c9286a 5000 return PARM_BOUNDARY;
b6c9286a 5001}
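/* Illustrative results of the function above: under the V.4 ABI a
   DFmode double, or TFmode/TDmode with hard floats, gets a 64-bit
   boundary; an AltiVec vector mode, or a synthetic vector type of 16
   bytes or more, gets a 128-bit boundary; anything else falls back to
   PARM_BOUNDARY.  */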
c53bdcf5 5002
5003/* For a function parm of MODE and TYPE, return the starting word in
5004 the parameter area. NWORDS of the parameter area are already used. */
5005
5006static unsigned int
5007rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5008{
5009 unsigned int align;
5010 unsigned int parm_offset;
5011
5012 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5013 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5014 return nwords + (-(parm_offset + nwords) & align);
5015}
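/* Worked example of the arithmetic above, assuming a 32-bit target
   where PARM_BOUNDARY is 32: for a 16-byte aligned vector argument
   function_arg_boundary returns 128, so ALIGN is 128/32 - 1 = 3.
   With the AIX parameter save area offset of 6 words and NWORDS == 1,
   the result is 1 + (-(6 + 1) & 3) == 2, i.e. the argument starts at
   a word position that is 2 mod 4, matching the comment elsewhere
   about vectors landing at 2 mod 4 in 32-bit mode.  */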
5016
5017/* Compute the size (in words) of a function argument. */
5018
5019static unsigned long
5020rs6000_arg_size (enum machine_mode mode, tree type)
5021{
5022 unsigned long size;
5023
5024 if (mode != BLKmode)
5025 size = GET_MODE_SIZE (mode);
5026 else
5027 size = int_size_in_bytes (type);
5028
5029 if (TARGET_32BIT)
5030 return (size + 3) >> 2;
5031 else
5032 return (size + 7) >> 3;
5033}
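/* For example, a 10-byte BLKmode struct occupies (10 + 3) >> 2 == 3
   words on a 32-bit target and (10 + 7) >> 3 == 2 words on a 64-bit
   target; a DFmode double is 2 words and 1 word respectively.  */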
b6c9286a 5034\f
0b5383eb 5035/* Use this to flush pending int fields. */
5036
5037static void
5038rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5039 HOST_WIDE_INT bitpos)
594a51fe 5040{
5041 unsigned int startbit, endbit;
5042 int intregs, intoffset;
5043 enum machine_mode mode;
594a51fe 5044
5045 if (cum->intoffset == -1)
5046 return;
594a51fe 5047
5048 intoffset = cum->intoffset;
5049 cum->intoffset = -1;
5050
5051 if (intoffset % BITS_PER_WORD != 0)
5052 {
5053 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5054 MODE_INT, 0);
5055 if (mode == BLKmode)
594a51fe 5056 {
5057 /* We couldn't find an appropriate mode, which happens,
5058 e.g., in packed structs when there are 3 bytes to load.
5059 Move intoffset back to the beginning of the word in this
5060 case. */
5061 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5062 }
594a51fe 5063 }
5064
5065 startbit = intoffset & -BITS_PER_WORD;
5066 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5067 intregs = (endbit - startbit) / BITS_PER_WORD;
5068 cum->words += intregs;
5069}
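/* Rough worked example: with BITS_PER_WORD == 64, pending int fields
   starting at cum->intoffset == 8 and flushed at BITPOS == 96 give
   STARTBIT == 0 and ENDBIT == 128, so two words are charged to
   cum->words.  */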
5070
5071/* The darwin64 ABI calls for us to recurse down through structs,
5072 looking for elements passed in registers. Unfortunately, we have
5073 to track int register count here also because of misalignments
5074 in powerpc alignment mode. */
5075
5076static void
5077rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5078 tree type,
5079 HOST_WIDE_INT startbitpos)
5080{
5081 tree f;
5082
5083 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5084 if (TREE_CODE (f) == FIELD_DECL)
5085 {
5086 HOST_WIDE_INT bitpos = startbitpos;
5087 tree ftype = TREE_TYPE (f);
5088 enum machine_mode mode;
5089 if (ftype == error_mark_node)
5090 continue;
5091 mode = TYPE_MODE (ftype);
5092
5093 if (DECL_SIZE (f) != 0
5094 && host_integerp (bit_position (f), 1))
5095 bitpos += int_bit_position (f);
5096
5097 /* ??? FIXME: else assume zero offset. */
5098
5099 if (TREE_CODE (ftype) == RECORD_TYPE)
5100 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5101 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5102 {
5103 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5104 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5105 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5106 }
5107 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5108 {
5109 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5110 cum->vregno++;
5111 cum->words += 2;
5112 }
5113 else if (cum->intoffset == -1)
5114 cum->intoffset = bitpos;
5115 }
5116}
5117
5118/* Update the data in CUM to advance over an argument
5119 of mode MODE and data type TYPE.
5120 (TYPE is null for libcalls where that information may not be available.)
5121
5122 Note that for args passed by reference, function_arg will be called
5123 with MODE and TYPE set to that of the pointer to the arg, not the arg
5124 itself. */
5125
5126void
f676971a 5127function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5128 tree type, int named, int depth)
4697a36c 5129{
5130 int size;
5131
5132 /* Only tick off an argument if we're not recursing. */
5133 if (depth == 0)
5134 cum->nargs_prototype--;
4697a36c 5135
5136 if (TARGET_ALTIVEC_ABI
5137 && (ALTIVEC_VECTOR_MODE (mode)
5138 || (type && TREE_CODE (type) == VECTOR_TYPE
5139 && int_size_in_bytes (type) == 16)))
0ac081f6 5140 {
5141 bool stack = false;
5142
2858f73a 5143 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5144 {
5145 cum->vregno++;
5146 if (!TARGET_ALTIVEC)
c85ce869 5147 error ("cannot pass argument in vector register because"
6d0ef01e 5148 " altivec instructions are disabled, use -maltivec"
c85ce869 5149 " to enable them");
5150
5151 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5152 even if it is going to be passed in a vector register.
5153 Darwin does the same for variable-argument functions. */
5154 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5155 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5156 stack = true;
6d0ef01e 5157 }
5158 else
5159 stack = true;
5160
5161 if (stack)
c4ad648e 5162 {
a594a19c 5163 int align;
f676971a 5164
5165 /* Vector parameters must be 16-byte aligned. This places
5166 them at 2 mod 4 in terms of words in 32-bit mode, since
5167 the parameter save area starts at offset 24 from the
5168 stack. In 64-bit mode, they just have to start on an
5169 even word, since the parameter save area is 16-byte
5170 aligned. Space for GPRs is reserved even if the argument
5171 will be passed in memory. */
5172 if (TARGET_32BIT)
4ed78545 5173 align = (2 - cum->words) & 3;
5174 else
5175 align = cum->words & 1;
c53bdcf5 5176 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5177
5178 if (TARGET_DEBUG_ARG)
5179 {
f676971a 5180 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
5181 cum->words, align);
5182 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5183 cum->nargs_prototype, cum->prototype,
2858f73a 5184 GET_MODE_NAME (mode));
5185 }
5186 }
0ac081f6 5187 }
a4b0320c 5188 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
5189 && !cum->stdarg
5190 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5191 cum->sysv_gregno++;
5192
5193 else if (rs6000_darwin64_abi
5194 && mode == BLKmode
5195 && TREE_CODE (type) == RECORD_TYPE
5196 && (size = int_size_in_bytes (type)) > 0)
5197 {
5198 /* Variable sized types have size == -1 and are
5199 treated as if consisting entirely of ints.
5200 Pad to 16 byte boundary if needed. */
5201 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5202 && (cum->words % 2) != 0)
5203 cum->words++;
5204 /* For varargs, we can just go up by the size of the struct. */
5205 if (!named)
5206 cum->words += (size + 7) / 8;
5207 else
5208 {
5209 /* It is tempting to say int register count just goes up by
5210 sizeof(type)/8, but this is wrong in a case such as
5211 { int; double; int; } [powerpc alignment]. We have to
5212 grovel through the fields for these too. */
5213 cum->intoffset = 0;
5214 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5215 rs6000_darwin64_record_arg_advance_flush (cum,
5216 size * BITS_PER_UNIT);
5217 }
5218 }
f607bc57 5219 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5220 {
a3170dc6 5221 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5222 && (mode == SFmode || mode == DFmode
7393f7f8 5223 || mode == DDmode || mode == TDmode
602ea4d3 5224 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5225 {
5226 /* _Decimal128 must use an even/odd register pair. This assumes
5227 that the register number is odd when fregno is odd. */
5228 if (mode == TDmode && (cum->fregno % 2) == 1)
5229 cum->fregno++;
5230
5231 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5232 <= FP_ARG_V4_MAX_REG)
602ea4d3 5233 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5234 else
5235 {
602ea4d3 5236 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5237 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5238 cum->words += cum->words & 1;
c53bdcf5 5239 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5240 }
4697a36c 5241 }
5242 else
5243 {
b2d04ecf 5244 int n_words = rs6000_arg_size (mode, type);
5245 int gregno = cum->sysv_gregno;
5246
5247 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5248 (r7,r8) or (r9,r10). As does any other 2 word item such
5249 as complex int due to a historical mistake. */
5250 if (n_words == 2)
5251 gregno += (1 - gregno) & 1;
4cc833b7 5252
4ed78545 5253 /* Multi-reg args are not split between registers and stack. */
5254 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5255 {
5256 /* Long long and SPE vectors are aligned on the stack.
5257 So are other 2 word items such as complex int due to
5258 a historical mistake. */
5259 if (n_words == 2)
5260 cum->words += cum->words & 1;
5261 cum->words += n_words;
5262 }
4697a36c 5263
5264 /* Note: we continue to accumulate gregno even after we have started
5265 spilling to the stack; this is how expand_builtin_saveregs can
5266 tell that spilling has begun. */
5267 cum->sysv_gregno = gregno + n_words;
5268 }
4697a36c 5269
5270 if (TARGET_DEBUG_ARG)
5271 {
5272 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5273 cum->words, cum->fregno);
5274 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5275 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5276 fprintf (stderr, "mode = %4s, named = %d\n",
5277 GET_MODE_NAME (mode), named);
5278 }
5279 }
5280 else
4cc833b7 5281 {
b2d04ecf 5282 int n_words = rs6000_arg_size (mode, type);
5283 int start_words = cum->words;
5284 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5285
294bd182 5286 cum->words = align_words + n_words;
4697a36c 5287
ebb109ad 5288 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5289 && mode != SDmode
a3170dc6 5290 && TARGET_HARD_FLOAT && TARGET_FPRS)
5291 {
5292 /* _Decimal128 must be passed in an even/odd float register pair.
5293 This assumes that the register number is odd when fregno is
5294 odd. */
5295 if (mode == TDmode && (cum->fregno % 2) == 1)
5296 cum->fregno++;
5297 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5298 }
5299
5300 if (TARGET_DEBUG_ARG)
5301 {
5302 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5303 cum->words, cum->fregno);
5304 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5305 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5306 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5307 named, align_words - start_words, depth);
5308 }
5309 }
4697a36c 5310}
a6c9bed4 5311
5312static rtx
5313spe_build_register_parallel (enum machine_mode mode, int gregno)
5314{
17caeff2 5315 rtx r1, r3, r5, r7;
f82f556d 5316
37409796 5317 switch (mode)
f82f556d 5318 {
37409796 5319 case DFmode:
5320 r1 = gen_rtx_REG (DImode, gregno);
5321 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5322 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
5323
5324 case DCmode:
17caeff2 5325 case TFmode:
5326 r1 = gen_rtx_REG (DImode, gregno);
5327 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5328 r3 = gen_rtx_REG (DImode, gregno + 2);
5329 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5330 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5331
5332 case TCmode:
5333 r1 = gen_rtx_REG (DImode, gregno);
5334 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5335 r3 = gen_rtx_REG (DImode, gregno + 2);
5336 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5337 r5 = gen_rtx_REG (DImode, gregno + 4);
5338 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5339 r7 = gen_rtx_REG (DImode, gregno + 6);
5340 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5341 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5342
5343 default:
5344 gcc_unreachable ();
f82f556d 5345 }
f82f556d 5346}
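/* For instance, a DCmode argument starting at GREGNO == 5 is described
   as two DImode pieces, the first in (r5,r6) at offset 0 and the
   second in (r7,r8) at offset 8, which is why the code above steps
   GREGNO by two for each piece.  */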
b78d48dd 5347
f82f556d 5348/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5349static rtx
f676971a 5350rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5351 tree type)
a6c9bed4 5352{
5353 int gregno = cum->sysv_gregno;
5354
5355 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5356 are passed and returned in a pair of GPRs for ABI compatibility. */
5357 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5358 || mode == TFmode || mode == TCmode))
f82f556d 5359 {
5360 int n_words = rs6000_arg_size (mode, type);
5361
f82f556d 5362 /* Doubles go in an odd/even register pair (r5/r6, etc). */
5363 if (mode == DFmode)
5364 gregno += (1 - gregno) & 1;
f82f556d 5365
5366 /* Multi-reg args are not split between registers and stack. */
5367 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5368 return NULL_RTX;
5369
5370 return spe_build_register_parallel (mode, gregno);
5371 }
5372 if (cum->stdarg)
5373 {
c53bdcf5 5374 int n_words = rs6000_arg_size (mode, type);
5375
5376 /* SPE vectors are put in odd registers. */
5377 if (n_words == 2 && (gregno & 1) == 0)
5378 gregno += 1;
5379
5380 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5381 {
5382 rtx r1, r2;
5383 enum machine_mode m = SImode;
5384
5385 r1 = gen_rtx_REG (m, gregno);
5386 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5387 r2 = gen_rtx_REG (m, gregno + 1);
5388 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5389 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5390 }
5391 else
b78d48dd 5392 return NULL_RTX;
5393 }
5394 else
5395 {
f82f556d
AH
5396 if (gregno <= GP_ARG_MAX_REG)
5397 return gen_rtx_REG (mode, gregno);
a6c9bed4 5398 else
b78d48dd 5399 return NULL_RTX;
5400 }
5401}
5402
5403/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5404 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5405
0b5383eb 5406static void
bb8df8a6 5407rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5408 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5409{
5410 enum machine_mode mode;
5411 unsigned int regno;
5412 unsigned int startbit, endbit;
5413 int this_regno, intregs, intoffset;
5414 rtx reg;
594a51fe 5415
5416 if (cum->intoffset == -1)
5417 return;
5418
5419 intoffset = cum->intoffset;
5420 cum->intoffset = -1;
5421
5422 /* If this is the trailing part of a word, try to only load that
5423 much into the register. Otherwise load the whole register. Note
5424 that in the latter case we may pick up unwanted bits. It's not a
5425 problem at the moment but may wish to revisit. */
5426
5427 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5428 {
5429 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5430 MODE_INT, 0);
5431 if (mode == BLKmode)
5432 {
5433 /* We couldn't find an appropriate mode, which happens,
5434 e.g., in packed structs when there are 3 bytes to load.
5435 Move intoffset back to the beginning of the word in this
5436 case. */
5437 intoffset = intoffset & -BITS_PER_WORD;
5438 mode = word_mode;
5439 }
5440 }
5441 else
5442 mode = word_mode;
5443
5444 startbit = intoffset & -BITS_PER_WORD;
5445 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5446 intregs = (endbit - startbit) / BITS_PER_WORD;
5447 this_regno = cum->words + intoffset / BITS_PER_WORD;
5448
5449 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5450 cum->use_stack = 1;
bb8df8a6 5451
5452 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5453 if (intregs <= 0)
5454 return;
5455
5456 intoffset /= BITS_PER_UNIT;
5457 do
5458 {
5459 regno = GP_ARG_MIN_REG + this_regno;
5460 reg = gen_rtx_REG (mode, regno);
5461 rvec[(*k)++] =
5462 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5463
5464 this_regno += 1;
5465 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5466 mode = word_mode;
5467 intregs -= 1;
5468 }
5469 while (intregs > 0);
5470}
5471
5472/* Recursive workhorse for the following. */
5473
5474static void
586de218 5475rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
5476 HOST_WIDE_INT startbitpos, rtx rvec[],
5477 int *k)
5478{
5479 tree f;
5480
5481 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5482 if (TREE_CODE (f) == FIELD_DECL)
5483 {
5484 HOST_WIDE_INT bitpos = startbitpos;
5485 tree ftype = TREE_TYPE (f);
5486 enum machine_mode mode;
5487 if (ftype == error_mark_node)
5488 continue;
5489 mode = TYPE_MODE (ftype);
5490
5491 if (DECL_SIZE (f) != 0
5492 && host_integerp (bit_position (f), 1))
5493 bitpos += int_bit_position (f);
5494
5495 /* ??? FIXME: else assume zero offset. */
5496
5497 if (TREE_CODE (ftype) == RECORD_TYPE)
5498 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5499 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5500 {
0b5383eb
DJ
5501#if 0
5502 switch (mode)
594a51fe 5503 {
5504 case SCmode: mode = SFmode; break;
5505 case DCmode: mode = DFmode; break;
5506 case TCmode: mode = TFmode; break;
5507 default: break;
594a51fe 5508 }
5509#endif
5510 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5511 rvec[(*k)++]
bb8df8a6 5512 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5513 gen_rtx_REG (mode, cum->fregno++),
5514 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5515 if (mode == TFmode || mode == TDmode)
0b5383eb 5516 cum->fregno++;
594a51fe 5517 }
5518 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5519 {
5520 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5521 rvec[(*k)++]
5522 = gen_rtx_EXPR_LIST (VOIDmode,
5523 gen_rtx_REG (mode, cum->vregno++),
5524 GEN_INT (bitpos / BITS_PER_UNIT));
5525 }
5526 else if (cum->intoffset == -1)
5527 cum->intoffset = bitpos;
5528 }
5529}
594a51fe 5530
5531/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5532 the register(s) to be used for each field and subfield of a struct
5533 being passed by value, along with the offset of where the
5534 register's value may be found in the block. FP fields go in FP
5535 register, vector fields go in vector registers, and everything
bb8df8a6 5536 else goes in int registers, packed as in memory.
8ff40a74 5537
5538 This code is also used for function return values. RETVAL indicates
5539 whether this is the case.
8ff40a74 5540
a4d05547 5541 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5542 calling convention. */
594a51fe 5543
0b5383eb 5544static rtx
586de218 5545rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
5546 int named, bool retval)
5547{
5548 rtx rvec[FIRST_PSEUDO_REGISTER];
5549 int k = 1, kbase = 1;
5550 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5551 /* This is a copy; modifications are not visible to our caller. */
5552 CUMULATIVE_ARGS copy_cum = *orig_cum;
5553 CUMULATIVE_ARGS *cum = &copy_cum;
5554
5555 /* Pad to 16 byte boundary if needed. */
5556 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5557 && (cum->words % 2) != 0)
5558 cum->words++;
5559
5560 cum->intoffset = 0;
5561 cum->use_stack = 0;
5562 cum->named = named;
5563
5564 /* Put entries into rvec[] for individual FP and vector fields, and
5565 for the chunks of memory that go in int regs. Note we start at
5566 element 1; 0 is reserved for an indication of using memory, and
5567 may or may not be filled in below. */
5568 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5569 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5570
5571 /* If any part of the struct went on the stack put all of it there.
5572 This hack is because the generic code for
5573 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5574 parts of the struct are not at the beginning. */
5575 if (cum->use_stack)
5576 {
5577 if (retval)
5578 return NULL_RTX; /* doesn't go in registers at all */
5579 kbase = 0;
5580 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5581 }
5582 if (k > 1 || cum->use_stack)
5583 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
5584 else
5585 return NULL_RTX;
5586}
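/* As a rough illustration of the PARALLEL built above: for a named
   argument of type struct { double d; int i; } the recursion puts the
   double in the next FPR with offset 0, and the flush then assigns the
   word containing the int to a GPR with offset 8, so the caller knows
   where each register's value lives within the block.  */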
5587
5588/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5589
5590static rtx
ec6376ab 5591rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5592{
5593 int n_units;
5594 int i, k;
5595 rtx rvec[GP_ARG_NUM_REG + 1];
5596
5597 if (align_words >= GP_ARG_NUM_REG)
5598 return NULL_RTX;
5599
5600 n_units = rs6000_arg_size (mode, type);
5601
5602 /* Optimize the simple case where the arg fits in one gpr, except in
5603 the case of BLKmode due to assign_parms assuming that registers are
5604 BITS_PER_WORD wide. */
5605 if (n_units == 0
5606 || (n_units == 1 && mode != BLKmode))
5607 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5608
5609 k = 0;
5610 if (align_words + n_units > GP_ARG_NUM_REG)
5611 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5612 using a magic NULL_RTX component.
5613 This is not strictly correct. Only some of the arg belongs in
5614 memory, not all of it. However, the normal scheme using
5615 function_arg_partial_nregs can result in unusual subregs, eg.
5616 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5617 store the whole arg to memory is often more efficient than code
5618 to store pieces, and we know that space is available in the right
5619 place for the whole arg. */
5620 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5621
5622 i = 0;
5623 do
36a454e1 5624 {
5625 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5626 rtx off = GEN_INT (i++ * 4);
5627 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5628 }
5629 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5630
5631 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5632}
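/* A sketch of the case this handles: a DFmode argument whose first
   word falls at ALIGN_WORDS == 7 on a -m32 -mpowerpc64 target needs 2
   units but only one GPR (r10) is left, so the PARALLEL gets the
   NULL_RTX memory element followed by a single SImode piece in r10 at
   offset 0; the rest of the value lives in the parameter save area.  */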
5633
5634/* Determine where to put an argument to a function.
5635 Value is zero to push the argument on the stack,
5636 or a hard register in which to store the argument.
5637
5638 MODE is the argument's machine mode.
5639 TYPE is the data type of the argument (as a tree).
5640 This is null for libcalls where that information may
5641 not be available.
5642 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5643 the preceding args and about the function being called. It is
5644 not modified in this routine.
5645 NAMED is nonzero if this argument is a named parameter
5646 (otherwise it is an extra parameter matching an ellipsis).
5647
5648 On RS/6000 the first eight words of non-FP are normally in registers
5649 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5650 Under V.4, the first 8 FP args are in registers.
5651
5652 If this is floating-point and no prototype is specified, we use
5653 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5654 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5655 so we can pass the FP value just in one register. emit_library_function
5656 doesn't support PARALLEL anyway.
5657
5658 Note that for args passed by reference, function_arg will be called
5659 with MODE and TYPE set to that of the pointer to the arg, not the arg
5660 itself. */
4697a36c 5661
9390387d 5662rtx
f676971a 5663function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5664 tree type, int named)
4697a36c 5665{
4cc833b7 5666 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5667
5668 /* Return a marker to indicate whether CR1 needs to set or clear the
5669 bit that V.4 uses to say fp args were passed in registers.
5670 Assume that we don't need the marker for software floating point,
5671 or compiler generated library calls. */
5672 if (mode == VOIDmode)
5673 {
f607bc57 5674 if (abi == ABI_V4
b9599e46 5675 && (cum->call_cookie & CALL_LIBCALL) == 0
5676 && (cum->stdarg
5677 || (cum->nargs_prototype < 0
5678 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5679 {
5680 /* For the SPE, we need to crxor CR6 always. */
5681 if (TARGET_SPE_ABI)
5682 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5683 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5684 return GEN_INT (cum->call_cookie
5685 | ((cum->fregno == FP_ARG_MIN_REG)
5686 ? CALL_V4_SET_FP_ARGS
5687 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5688 }
4697a36c 5689
7509c759 5690 return GEN_INT (cum->call_cookie);
5691 }
5692
5693 if (rs6000_darwin64_abi && mode == BLKmode
5694 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5695 {
0b5383eb 5696 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
5697 if (rslt != NULL_RTX)
5698 return rslt;
5699 /* Else fall through to usual handling. */
5700 }
5701
2858f73a 5702 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5703 if (TARGET_64BIT && ! cum->prototype)
5704 {
5705 /* Vector parameters get passed in vector register
5706 and also in GPRs or memory, in absence of prototype. */
5707 int align_words;
5708 rtx slot;
5709 align_words = (cum->words + 1) & ~1;
5710
5711 if (align_words >= GP_ARG_NUM_REG)
5712 {
5713 slot = NULL_RTX;
5714 }
5715 else
5716 {
5717 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5718 }
5719 return gen_rtx_PARALLEL (mode,
5720 gen_rtvec (2,
5721 gen_rtx_EXPR_LIST (VOIDmode,
5722 slot, const0_rtx),
5723 gen_rtx_EXPR_LIST (VOIDmode,
5724 gen_rtx_REG (mode, cum->vregno),
5725 const0_rtx)));
5726 }
5727 else
5728 return gen_rtx_REG (mode, cum->vregno);
5729 else if (TARGET_ALTIVEC_ABI
5730 && (ALTIVEC_VECTOR_MODE (mode)
5731 || (type && TREE_CODE (type) == VECTOR_TYPE
5732 && int_size_in_bytes (type) == 16)))
0ac081f6 5733 {
2858f73a 5734 if (named || abi == ABI_V4)
a594a19c 5735 return NULL_RTX;
0ac081f6 5736 else
5737 {
5738 /* Vector parameters to varargs functions under AIX or Darwin
5739 get passed in memory and possibly also in GPRs. */
5740 int align, align_words, n_words;
5741 enum machine_mode part_mode;
5742
5743 /* Vector parameters must be 16-byte aligned. This places them at
5744 2 mod 4 in terms of words in 32-bit mode, since the parameter
5745 save area starts at offset 24 from the stack. In 64-bit mode,
5746 they just have to start on an even word, since the parameter
5747 save area is 16-byte aligned. */
5748 if (TARGET_32BIT)
4ed78545 5749 align = (2 - cum->words) & 3;
5750 else
5751 align = cum->words & 1;
5752 align_words = cum->words + align;
5753
5754 /* Out of registers? Memory, then. */
5755 if (align_words >= GP_ARG_NUM_REG)
5756 return NULL_RTX;
5757
5758 if (TARGET_32BIT && TARGET_POWERPC64)
5759 return rs6000_mixed_function_arg (mode, type, align_words);
5760
5761 /* The vector value goes in GPRs. Only the part of the
5762 value in GPRs is reported here. */
5763 part_mode = mode;
5764 n_words = rs6000_arg_size (mode, type);
5765 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5766 /* Fortunately, there are only two possibilities: the value
 5767 is either wholly in GPRs or half in GPRs and half not. */
5768 part_mode = DImode;
5769
5770 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5771 }
0ac081f6 5772 }
5773 else if (TARGET_SPE_ABI && TARGET_SPE
5774 && (SPE_VECTOR_MODE (mode)
18f63bfa 5775 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5776 || mode == DDmode
5777 || mode == DCmode
5778 || mode == TFmode
7393f7f8 5779 || mode == TDmode
17caeff2 5780 || mode == TCmode))))
a6c9bed4 5781 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5782
f607bc57 5783 else if (abi == ABI_V4)
4697a36c 5784 {
a3170dc6 5785 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5786 && (mode == SFmode || mode == DFmode
5787 || (mode == TFmode && !TARGET_IEEEQUAD)
5788 || mode == DDmode || mode == TDmode))
4cc833b7 5789 {
5790 /* _Decimal128 must use an even/odd register pair. This assumes
5791 that the register number is odd when fregno is odd. */
5792 if (mode == TDmode && (cum->fregno % 2) == 1)
5793 cum->fregno++;
5794
5795 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5796 <= FP_ARG_V4_MAX_REG)
5797 return gen_rtx_REG (mode, cum->fregno);
5798 else
b78d48dd 5799 return NULL_RTX;
5800 }
5801 else
5802 {
b2d04ecf 5803 int n_words = rs6000_arg_size (mode, type);
5804 int gregno = cum->sysv_gregno;
5805
5806 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5807 (r7,r8) or (r9,r10). As does any other 2 word item such
5808 as complex int due to a historical mistake. */
5809 if (n_words == 2)
5810 gregno += (1 - gregno) & 1;
4cc833b7 5811
4ed78545 5812 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5813 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5814 return NULL_RTX;
5815
5816 if (TARGET_32BIT && TARGET_POWERPC64)
5817 return rs6000_mixed_function_arg (mode, type,
5818 gregno - GP_ARG_MIN_REG);
5819 return gen_rtx_REG (mode, gregno);
4cc833b7 5820 }
4697a36c 5821 }
5822 else
5823 {
294bd182 5824 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5825
5826 /* _Decimal128 must be passed in an even/odd float register pair.
5827 This assumes that the register number is odd when fregno is odd. */
5828 if (mode == TDmode && (cum->fregno % 2) == 1)
5829 cum->fregno++;
5830
2858f73a 5831 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5832 {
5833 rtx rvec[GP_ARG_NUM_REG + 1];
5834 rtx r;
5835 int k;
5836 bool needs_psave;
5837 enum machine_mode fmode = mode;
5838 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5839
5840 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5841 {
5842 /* Currently, we only ever need one reg here because complex
5843 doubles are split. */
5844 gcc_assert (cum->fregno == FP_ARG_MAX_REG
5845 && (fmode == TFmode || fmode == TDmode));
ec6376ab 5846
5847 /* Long double or _Decimal128 split over regs and memory. */
5848 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 5849 }
5850
5851 /* Do we also need to pass this arg in the parameter save
5852 area? */
5853 needs_psave = (type
5854 && (cum->nargs_prototype <= 0
5855 || (DEFAULT_ABI == ABI_AIX
de17c25f 5856 && TARGET_XL_COMPAT
c53bdcf5
AM
5857 && align_words >= GP_ARG_NUM_REG)));
5858
5859 if (!needs_psave && mode == fmode)
ec6376ab 5860 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5861
ec6376ab 5862 k = 0;
5863 if (needs_psave)
5864 {
ec6376ab 5865 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5866 This piece must come first, before the fprs. */
5867 if (align_words < GP_ARG_NUM_REG)
5868 {
5869 unsigned long n_words = rs6000_arg_size (mode, type);
5870
5871 if (align_words + n_words > GP_ARG_NUM_REG
5872 || (TARGET_32BIT && TARGET_POWERPC64))
5873 {
5874 /* If this is partially on the stack, then we only
5875 include the portion actually in registers here. */
5876 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5877 rtx off;
5878 int i = 0;
5879 if (align_words + n_words > GP_ARG_NUM_REG)
5880 /* Not all of the arg fits in gprs. Say that it
5881 goes in memory too, using a magic NULL_RTX
5882 component. Also see comment in
5883 rs6000_mixed_function_arg for why the normal
5884 function_arg_partial_nregs scheme doesn't work
5885 in this case. */
5886 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5887 const0_rtx);
5888 do
5889 {
5890 r = gen_rtx_REG (rmode,
5891 GP_ARG_MIN_REG + align_words);
2e6c9641 5892 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
5893 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5894 }
5895 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5896 }
5897 else
5898 {
5899 /* The whole arg fits in gprs. */
5900 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5901 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5902 }
c53bdcf5 5903 }
5904 else
5905 /* It's entirely in memory. */
5906 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5907 }
5908
5909 /* Describe where this piece goes in the fprs. */
5910 r = gen_rtx_REG (fmode, cum->fregno);
5911 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5912
5913 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5914 }
5915 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5916 {
5917 if (TARGET_32BIT && TARGET_POWERPC64)
5918 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5919
5920 if (mode == BLKmode)
5921 mode = Pmode;
5922
5923 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5924 }
5925 else
5926 return NULL_RTX;
4697a36c 5927 }
5928}
5929\f
ec6376ab 5930/* For an arg passed partly in registers and partly in memory, this is
5931 the number of bytes passed in registers. For args passed entirely in
5932 registers or entirely in memory, zero. When an arg is described by a
5933 PARALLEL, perhaps using more than one register type, this function
5934 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5935
5936static int
5937rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5938 tree type, bool named)
4697a36c 5939{
c53bdcf5 5940 int ret = 0;
ec6376ab 5941 int align_words;
c53bdcf5 5942
f607bc57 5943 if (DEFAULT_ABI == ABI_V4)
4697a36c 5944 return 0;
4697a36c 5945
5946 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5947 && cum->nargs_prototype >= 0)
5948 return 0;
5949
5950 /* In this complicated case we just disable the partial_nregs code. */
5951 if (rs6000_darwin64_abi && mode == BLKmode
5952 && TREE_CODE (type) == RECORD_TYPE
5953 && int_size_in_bytes (type) > 0)
5954 return 0;
5955
294bd182 5956 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 5957
5958 if (USE_FP_FOR_ARG_P (cum, mode, type))
5959 {
5960 /* If we are passing this arg in the fixed parameter save area
5961 (gprs or memory) as well as fprs, then this function should
5962 return the number of partial bytes passed in the parameter
5963 save area rather than partial bytes passed in fprs. */
5964 if (type
5965 && (cum->nargs_prototype <= 0
5966 || (DEFAULT_ABI == ABI_AIX
5967 && TARGET_XL_COMPAT
5968 && align_words >= GP_ARG_NUM_REG)))
5969 return 0;
5970 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5971 > FP_ARG_MAX_REG + 1)
ac7e839c 5972 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5973 else if (cum->nargs_prototype >= 0)
5974 return 0;
5975 }
5976
5977 if (align_words < GP_ARG_NUM_REG
5978 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5979 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5980
c53bdcf5 5981 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5982 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5983
c53bdcf5 5984 return ret;
5985}
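/* Example of the computation above: a 5-word BLKmode argument whose
   first word falls at ALIGN_WORDS == 6 on a 32-bit AIX target has only
   two registers (r9, r10) left, so the function reports
   (8 - 6) * 4 == 8 bytes passed in registers.  */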
5986\f
5987/* A C expression that indicates when an argument must be passed by
5988 reference. If nonzero for an argument, a copy of that argument is
5989 made in memory and a pointer to the argument is passed instead of
5990 the argument itself. The pointer is passed in whatever way is
5991 appropriate for passing a pointer to that type.
5992
5993 Under V.4, aggregates and long double are passed by reference.
5994
5995 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5996 reference unless the AltiVec vector extension ABI is in force.
5997
5998 As an extension to all ABIs, variable sized types are passed by
5999 reference. */
4697a36c 6000
8cd5a4e0 6001static bool
f676971a 6002rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6003 enum machine_mode mode, const_tree type,
bada2eb8 6004 bool named ATTRIBUTE_UNUSED)
4697a36c 6005{
602ea4d3 6006 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
6007 {
6008 if (TARGET_DEBUG_ARG)
6009 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6010 return 1;
6011 }
6012
6013 if (!type)
6014 return 0;
4697a36c 6015
6016 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6017 {
6018 if (TARGET_DEBUG_ARG)
6019 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6020 return 1;
6021 }
6022
6023 if (int_size_in_bytes (type) < 0)
6024 {
6025 if (TARGET_DEBUG_ARG)
6026 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6027 return 1;
6028 }
6029
6030 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6031 modes only exist for GCC vector types if -maltivec. */
6032 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6033 {
6034 if (TARGET_DEBUG_ARG)
6035 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
6036 return 1;
6037 }
6038
6039 /* Pass synthetic vectors in memory. */
bada2eb8 6040 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6041 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
6042 {
6043 static bool warned_for_pass_big_vectors = false;
6044 if (TARGET_DEBUG_ARG)
6045 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6046 if (!warned_for_pass_big_vectors)
6047 {
d4ee4d25 6048 warning (0, "GCC vector passed by reference: "
6049 "non-standard ABI extension with no compatibility guarantee");
6050 warned_for_pass_big_vectors = true;
6051 }
6052 return 1;
6053 }
6054
b2d04ecf 6055 return 0;
4697a36c 6056}
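/* Putting the rules above together (illustrative only): under the V.4
   ABI any aggregate, and TFmode long double when TARGET_IEEEQUAD, is
   passed by reference; a variable sized type is passed by reference
   under every ABI; and an AltiVec vector is passed by reference on a
   32-bit target only when the AltiVec ABI is not in force.  */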
6057
6058static void
2d9db8eb 6059rs6000_move_block_from_reg (int regno, rtx x, int nregs)
6060{
6061 int i;
6062 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6063
6064 if (nregs == 0)
6065 return;
6066
c4ad648e 6067 for (i = 0; i < nregs; i++)
5985c7a6 6068 {
9390387d 6069 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6070 if (reload_completed)
6071 {
6072 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6073 tem = NULL_RTX;
6074 else
6075 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6076 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6077 }
6078 else
6079 tem = replace_equiv_address (tem, XEXP (tem, 0));
6080
37409796 6081 gcc_assert (tem);
6082
6083 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6084 }
6085}
6086\f
6087/* Perform any actions needed for a function that is receiving a
f676971a 6088 variable number of arguments.
6089
6090 CUM is as above.
6091
6092 MODE and TYPE are the mode and type of the current parameter.
6093
6094 PRETEND_SIZE is a variable that should be set to the amount of stack
6095 that must be pushed by the prolog to pretend that our caller pushed
6096 it.
6097
6098 Normally, this macro will push all remaining incoming registers on the
6099 stack and set PRETEND_SIZE to the length of the registers pushed. */
6100
c6e8c921 6101static void
f676971a 6102setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6103 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6104 int no_rtl)
4697a36c 6105{
6106 CUMULATIVE_ARGS next_cum;
6107 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6108 rtx save_area = NULL_RTX, mem;
6109 int first_reg_offset;
6110 alias_set_type set;
4697a36c 6111
f31bf321 6112 /* Skip the last named argument. */
d34c5b80 6113 next_cum = *cum;
594a51fe 6114 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6115
f607bc57 6116 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6117 {
6118 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6119
60e2d0ca 6120 if (! no_rtl)
6121 {
6122 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6123 HOST_WIDE_INT offset = 0;
6124
6125 /* Try to optimize the size of the varargs save area.
6126 The ABI requires that ap.reg_save_area is doubleword
6127 aligned, but we don't need to allocate space for all
6128 the bytes, only those to which we actually will save
6129 anything. */
6130 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6131 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6132 if (TARGET_HARD_FLOAT && TARGET_FPRS
6133 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6134 && cfun->va_list_fpr_size)
6135 {
6136 if (gpr_reg_num)
6137 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6138 * UNITS_PER_FP_WORD;
6139 if (cfun->va_list_fpr_size
6140 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6141 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6142 else
6143 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6144 * UNITS_PER_FP_WORD;
6145 }
6146 if (gpr_reg_num)
6147 {
6148 offset = -((first_reg_offset * reg_size) & ~7);
6149 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6150 {
6151 gpr_reg_num = cfun->va_list_gpr_size;
6152 if (reg_size == 4 && (first_reg_offset & 1))
6153 gpr_reg_num++;
6154 }
6155 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6156 }
6157 else if (fpr_size)
6158 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6159 * UNITS_PER_FP_WORD
6160 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6161
5b667039
JJ
6162 if (gpr_size + fpr_size)
6163 {
6164 rtx reg_save_area
6165 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6166 gcc_assert (GET_CODE (reg_save_area) == MEM);
6167 reg_save_area = XEXP (reg_save_area, 0);
6168 if (GET_CODE (reg_save_area) == PLUS)
6169 {
6170 gcc_assert (XEXP (reg_save_area, 0)
6171 == virtual_stack_vars_rtx);
6172 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6173 offset += INTVAL (XEXP (reg_save_area, 1));
6174 }
6175 else
6176 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6177 }
6178
6179 cfun->machine->varargs_save_offset = offset;
6180 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6181 }
4697a36c 6182 }
60e2d0ca 6183 else
4697a36c 6184 {
d34c5b80 6185 first_reg_offset = next_cum.words;
4cc833b7 6186 save_area = virtual_incoming_args_rtx;
4697a36c 6187
fe984136 6188 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6189 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6190 }
4697a36c 6191
dfafc897 6192 set = get_varargs_alias_set ();
6193 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6194 && cfun->va_list_gpr_size)
4cc833b7 6195 {
6196 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6197
6198 if (va_list_gpr_counter_field)
6199 {
6200 /* V4 va_list_gpr_size counts number of registers needed. */
6201 if (nregs > cfun->va_list_gpr_size)
6202 nregs = cfun->va_list_gpr_size;
6203 }
6204 else
6205 {
6206 /* char * va_list instead counts number of bytes needed. */
6207 if (nregs > cfun->va_list_gpr_size / reg_size)
6208 nregs = cfun->va_list_gpr_size / reg_size;
6209 }
6210
dfafc897 6211 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6212 plus_constant (save_area,
6213 first_reg_offset * reg_size));
6214 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6215 set_mem_alias_set (mem, set);
8ac61af7 6216 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6217
f676971a 6218 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6219 nregs);
6220 }
6221
4697a36c 6222 /* Save FP registers if needed. */
f607bc57 6223 if (DEFAULT_ABI == ABI_V4
6224 && TARGET_HARD_FLOAT && TARGET_FPRS
6225 && ! no_rtl
6226 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6227 && cfun->va_list_fpr_size)
4697a36c 6228 {
9d30f3c1 6229 int fregno = next_cum.fregno, nregs;
9ebbca7d 6230 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6231 rtx lab = gen_label_rtx ();
6232 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6233 * UNITS_PER_FP_WORD);
4697a36c 6234
6235 emit_jump_insn
6236 (gen_rtx_SET (VOIDmode,
6237 pc_rtx,
6238 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6239 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6240 const0_rtx),
39403d82 6241 gen_rtx_LABEL_REF (VOIDmode, lab),
6242 pc_rtx)));
6243
6244 for (nregs = 0;
6245 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6246 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6247 {
5496b36f 6248 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6249 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6250 set_mem_alias_set (mem, set);
94ff898d 6251 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6252 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6253 }
6254
6255 emit_label (lab);
4697a36c 6256 }
4697a36c 6257}
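/* For reference, the V.4 register save area laid out above places the
   eight GPRs first (GP_ARG_NUM_REG * reg_size bytes) followed by the
   FP registers, which is why the FPR store offset starts at
   GP_ARG_NUM_REG * reg_size plus (fregno - FP_ARG_MIN_REG) slots of
   UNITS_PER_FP_WORD each.  */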
4697a36c 6258
dfafc897 6259/* Create the va_list data type. */
2c4974b7 6260
6261static tree
6262rs6000_build_builtin_va_list (void)
dfafc897 6263{
64c2816f 6264 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6265
6266 /* For AIX, prefer 'char *' because that's what the system
6267 header files like. */
f607bc57 6268 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6269 return build_pointer_type (char_type_node);
dfafc897 6270
f1e639b1 6271 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6272 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6273
f676971a 6274 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6275 unsigned_char_type_node);
f676971a 6276 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6277 unsigned_char_type_node);
6278 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6279 every user file. */
6280 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6281 short_unsigned_type_node);
6282 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6283 ptr_type_node);
6284 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6285 ptr_type_node);
6286
6287 va_list_gpr_counter_field = f_gpr;
6288 va_list_fpr_counter_field = f_fpr;
6289
6290 DECL_FIELD_CONTEXT (f_gpr) = record;
6291 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6292 DECL_FIELD_CONTEXT (f_res) = record;
6293 DECL_FIELD_CONTEXT (f_ovf) = record;
6294 DECL_FIELD_CONTEXT (f_sav) = record;
6295
6296 TREE_CHAIN (record) = type_decl;
6297 TYPE_NAME (record) = type_decl;
6298 TYPE_FIELDS (record) = f_gpr;
6299 TREE_CHAIN (f_gpr) = f_fpr;
6300 TREE_CHAIN (f_fpr) = f_res;
6301 TREE_CHAIN (f_res) = f_ovf;
6302 TREE_CHAIN (f_ovf) = f_sav;
6303
6304 layout_type (record);
6305
6306 /* The correct type is an array type of one element. */
6307 return build_array_type (record, build_index_type (size_zero_node));
6308}
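/* For reference, the record built above corresponds roughly to the
   following C declaration.  This is only a sketch; the compiler constructs
   the type directly with the tree routines above:

       typedef struct __va_list_tag
       {
         unsigned char gpr;
         unsigned char fpr;
         unsigned short reserved;
         void *overflow_arg_area;
         void *reg_save_area;
       } __va_list_tag;

   and the va_list type itself is the one-element array of that record
   returned just above, so that va_list objects are passed by reference.  */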
6309
6310/* Implement va_start. */
6311
6312void
a2369ed3 6313rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6314{
dfafc897 6315 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6316 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6317 tree gpr, fpr, ovf, sav, t;
2c4974b7 6318
dfafc897 6319 /* Only SVR4 needs something special. */
f607bc57 6320 if (DEFAULT_ABI != ABI_V4)
dfafc897 6321 {
e5faf155 6322 std_expand_builtin_va_start (valist, nextarg);
6323 return;
6324 }
6325
973a648b 6326 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6327 f_fpr = TREE_CHAIN (f_gpr);
6328 f_res = TREE_CHAIN (f_fpr);
6329 f_ovf = TREE_CHAIN (f_res);
6330 f_sav = TREE_CHAIN (f_ovf);
6331
872a65b5 6332 valist = build_va_arg_indirect_ref (valist);
6333 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6334 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6335 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6336 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6337
6338 /* Count number of gp and fp argument registers used. */
4cc833b7 6339 words = current_function_args_info.words;
6340 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6341 GP_ARG_NUM_REG);
6342 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6343 FP_ARG_NUM_REG);
6344
6345 if (TARGET_DEBUG_ARG)
6346 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6347 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6348 words, n_gpr, n_fpr);
dfafc897 6349
6350 if (cfun->va_list_gpr_size)
6351 {
07beea0d 6352 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6353 build_int_cst (NULL_TREE, n_gpr));
6354 TREE_SIDE_EFFECTS (t) = 1;
6355 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6356 }
58c8adc1 6357
6358 if (cfun->va_list_fpr_size)
6359 {
07beea0d 6360 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6361 build_int_cst (NULL_TREE, n_fpr));
6362 TREE_SIDE_EFFECTS (t) = 1;
6363 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6364 }
6365
6366 /* Find the overflow area. */
6367 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6368 if (words != 0)
6369 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6370 size_int (words * UNITS_PER_WORD));
07beea0d 6371 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
6372 TREE_SIDE_EFFECTS (t) = 1;
6373 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6374
6375 /* If there were no va_arg invocations, don't set up the register
6376 save area. */
6377 if (!cfun->va_list_gpr_size
6378 && !cfun->va_list_fpr_size
6379 && n_gpr < GP_ARG_NUM_REG
6380 && n_fpr < FP_ARG_V4_MAX_REG)
6381 return;
6382
6383 /* Find the register save area. */
6384 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6385 if (cfun->machine->varargs_save_offset)
6386 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6387 size_int (cfun->machine->varargs_save_offset));
07beea0d 6388 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
6389 TREE_SIDE_EFFECTS (t) = 1;
6390 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6391}
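/* In outline, the expansion above initializes the V4 va_list roughly as if
   one had written the following, with "va" standing for the dereferenced
   va_list and the right-hand sides computed as above (a sketch only):

       va->gpr = n_gpr;
       va->fpr = n_fpr;
       va->overflow_arg_area = incoming_args + words * UNITS_PER_WORD;
       va->reg_save_area = frame + cfun->machine->varargs_save_offset;

   The gpr/fpr assignments are omitted when va_list_gpr_size or
   va_list_fpr_size indicates that no va_arg ever reads them.  */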
6392
6393/* Implement va_arg. */
6394
6395tree
6396rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6397{
6398 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6399 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6400 int size, rsize, n_reg, sav_ofs, sav_scale;
6401 tree lab_false, lab_over, addr;
6402 int align;
6403 tree ptrtype = build_pointer_type (type);
7393f7f8 6404 int regalign = 0;
cd3ce9b4 6405
6406 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6407 {
6408 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6409 return build_va_arg_indirect_ref (t);
6410 }
6411
6412 if (DEFAULT_ABI != ABI_V4)
6413 {
08b0dc1b 6414 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
6415 {
6416 tree elem_type = TREE_TYPE (type);
6417 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6418 int elem_size = GET_MODE_SIZE (elem_mode);
6419
6420 if (elem_size < UNITS_PER_WORD)
6421 {
23a60a04 6422 tree real_part, imag_part;
6423 tree post = NULL_TREE;
6424
6425 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6426 &post);
6427 /* Copy the value into a temporary, lest the formal temporary
6428 be reused out from under us. */
6429 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
6430 append_to_statement_list (post, pre_p);
6431
6432 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6433 post_p);
cd3ce9b4 6434
47a25a46 6435 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
6436 }
6437 }
6438
23a60a04 6439 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
6440 }
6441
6442 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6443 f_fpr = TREE_CHAIN (f_gpr);
6444 f_res = TREE_CHAIN (f_fpr);
6445 f_ovf = TREE_CHAIN (f_res);
6446 f_sav = TREE_CHAIN (f_ovf);
6447
872a65b5 6448 valist = build_va_arg_indirect_ref (valist);
6449 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6450 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6451 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6452 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6453
6454 size = int_size_in_bytes (type);
6455 rsize = (size + 3) / 4;
6456 align = 1;
6457
08b0dc1b 6458 if (TARGET_HARD_FLOAT && TARGET_FPRS
6459 && (TYPE_MODE (type) == SFmode
6460 || TYPE_MODE (type) == DFmode
6461 || TYPE_MODE (type) == TFmode
6462 || TYPE_MODE (type) == DDmode
6463 || TYPE_MODE (type) == TDmode))
6464 {
6465 /* FP args go in FP registers, if present. */
cd3ce9b4 6466 reg = fpr;
602ea4d3 6467 n_reg = (size + 7) / 8;
6468 sav_ofs = 8*4;
6469 sav_scale = 8;
602ea4d3 6470 if (TYPE_MODE (type) != SFmode)
6471 align = 8;
6472 }
6473 else
6474 {
6475 /* Otherwise into GP registers. */
6476 reg = gpr;
6477 n_reg = rsize;
6478 sav_ofs = 0;
6479 sav_scale = 4;
6480 if (n_reg == 2)
6481 align = 8;
6482 }
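/* To summarize the two cases above: FP values are fetched from the FP part
   of the register save area, which starts 8 * 4 bytes in (past the eight
   saved GP registers) and is indexed in 8-byte slots; everything else comes
   from the start of the save area in 4-byte GP slots, with two-register
   values aligned to a doubleword.  */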
6483
6484 /* Pull the value out of the saved registers.... */
6485
6486 lab_over = NULL;
6487 addr = create_tmp_var (ptr_type_node, "addr");
6488 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6489
6490 /* AltiVec vectors never go in registers when -mabi=altivec. */
6491 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6492 align = 16;
6493 else
6494 {
6495 lab_false = create_artificial_label ();
6496 lab_over = create_artificial_label ();
6497
6498 /* Long long and SPE vectors are aligned in the registers,
6499 as is any other two-GPR item such as complex int; the
6500 latter is due to a historical mistake. */
6501 u = reg;
602ea4d3 6502 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6503 {
7393f7f8 6504 regalign = 1;
cd3ce9b4 6505 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6506 build_int_cst (TREE_TYPE (reg), n_reg - 1));
6507 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6508 }
6509 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6510 reg number is 0 for f1, so we want to make it odd. */
6511 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6512 {
6513 regalign = 1;
6514 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6515 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6516 }
cd3ce9b4 6517
95674810 6518 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
6519 t = build2 (GE_EXPR, boolean_type_node, u, t);
6520 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6521 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6522 gimplify_and_add (t, pre_p);
6523
6524 t = sav;
6525 if (sav_ofs)
5be014d5 6526 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6527
6528 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6529 build_int_cst (TREE_TYPE (reg), n_reg));
6530 u = fold_convert (sizetype, u);
6531 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6532 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6533
07beea0d 6534 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
6535 gimplify_and_add (t, pre_p);
6536
6537 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6538 gimplify_and_add (t, pre_p);
6539
6540 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6541 append_to_statement_list (t, pre_p);
6542
7393f7f8 6543 if ((n_reg == 2 && !regalign) || n_reg > 2)
6544 {
6545 /* Ensure that we don't find any more args in regs.
7393f7f8 6546 Alignment has already taken care of the special cases. */
07beea0d 6547 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
6548 gimplify_and_add (t, pre_p);
6549 }
6550 }
6551
6552 /* ... otherwise out of the overflow area. */
6553
6554 /* Care for on-stack alignment if needed. */
6555 t = ovf;
6556 if (align != 1)
6557 {
6558 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6559 t = fold_convert (sizetype, t);
4a90aeeb 6560 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
6561 size_int (-align));
6562 t = fold_convert (TREE_TYPE (ovf), t);
6563 }
6564 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6565
07beea0d 6566 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
6567 gimplify_and_add (u, pre_p);
6568
5be014d5 6569 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6570 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
6571 gimplify_and_add (t, pre_p);
6572
6573 if (lab_over)
6574 {
6575 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6576 append_to_statement_list (t, pre_p);
6577 }
6578
6579 if (STRICT_ALIGNMENT
6580 && (TYPE_ALIGN (type)
6581 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6582 {
6583 /* The value (of type complex double, for example) may not be
6584 aligned in memory in the saved registers, so copy via a
6585 temporary. (This is the same code as used for SPARC.) */
6586 tree tmp = create_tmp_var (type, "va_arg_tmp");
6587 tree dest_addr = build_fold_addr_expr (tmp);
6588
6589 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6590 3, dest_addr, addr, size_int (rsize * 4));
6591
6592 gimplify_and_add (copy, pre_p);
6593 addr = dest_addr;
6594 }
6595
08b0dc1b 6596 addr = fold_convert (ptrtype, addr);
872a65b5 6597 return build_va_arg_indirect_ref (addr);
6598}
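/* Roughly, for the V4 ABI the gimplification above behaves like the
   following pseudo code, where "counter" is the gpr or fpr field selected
   above and 8 is the number of argument registers of that kind (a sketch
   that glosses over the special alignment cases):

       if (counter + n_reg <= 8)
         {
           addr = reg_save_area + sav_ofs + counter * sav_scale;
           counter += n_reg;
         }
       else
         {
           counter = 8;
           addr = align (overflow_arg_area, align);
           overflow_arg_area = addr + size;
         }
       fetch the value of TYPE from addr;

   AltiVec vectors (under -mabi=altivec) always take the overflow path, and
   misaligned types are additionally copied through a temporary as above.  */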
6599
6600/* Builtins. */
6601
6602static void
6603def_builtin (int mask, const char *name, tree type, int code)
6604{
96038623 6605 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
6606 {
6607 if (rs6000_builtin_decls[code])
6608 abort ();
6609
6610 rs6000_builtin_decls[code] =
6611 add_builtin_function (name, type, code, BUILT_IN_MD,
6612 NULL, NULL_TREE);
6613 }
6614}
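/* For illustration only, a typical registration looks like this, where
   v4sf_ftype_v4sf_v4sf is a made-up name standing for whatever
   FUNCTION_TYPE tree the caller has built:

       def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
                    v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   The call is a no-op unless the mask matches target_flags (or paired
   single float support is enabled), so only builtins for the selected
   target features are ever registered.  */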
0ac081f6 6615
6616/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6617
2212663f 6618static const struct builtin_description bdesc_3arg[] =
6619{
6620 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6621 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6622 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6623 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6624 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6625 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6626 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6627 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6628 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6629 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6630 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6631 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6632 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6633 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6634 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6635 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6636 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6637 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6638 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6639 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6640 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6641 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6642 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
6643
6644 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6645 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6646 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6647 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6648 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6649 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6650 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6651 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6652 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6653 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6654 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6655 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6656 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6657 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6658 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
6659
6660 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6661 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6662 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6663 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6664 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6665 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6666 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6667 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
24408032 6668};
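/* Each entry above pairs a target mask with the insn code that implements
   the builtin, the user-visible builtin name, and its rs6000_builtins
   enumerator.  The __builtin_vec_* entries carry CODE_FOR_nothing: they are
   generic names that do not correspond to a single machine pattern by
   themselves.  */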
2212663f 6669
6670/* DST operations: void foo (void *, const int, const char). */
6671
6672static const struct builtin_description bdesc_dst[] =
6673{
6674 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6675 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6676 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
6677 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6678
6679 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6680 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6681 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6682 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
6683};
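/* The dst, dstt, dstst and dststt builtins wrap the AltiVec data stream
   touch prefetch instructions.  As the comment above notes, they take an
   address plus two integer constants (a control word and a stream tag)
   rather than vector operands, so they get a table of their own instead of
   going through bdesc_2arg or bdesc_3arg.  */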
6684
2212663f 6685/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6686
a3170dc6 6687static struct builtin_description bdesc_2arg[] =
0ac081f6 6688{
6689 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6690 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6691 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6692 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
6693 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6694 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6695 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6696 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6697 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6698 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6699 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6700 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6701 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
6702 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6703 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6704 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6705 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6706 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6707 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
6708 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6709 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
6710 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6711 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6712 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6713 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6714 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6715 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6716 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6717 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6718 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6719 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6720 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6721 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6722 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
6723 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6724 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
6725 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6726 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
6727 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6728 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6729 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6730 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6731 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
6732 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6733 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6734 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6735 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6736 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6737 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
6738 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6739 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6740 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6741 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6742 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6743 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6744 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
6745 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6746 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6747 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6748 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6749 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6750 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6751 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6752 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6753 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6754 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
6755 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6756 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6757 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6758 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
6759 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6760 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6761 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6762 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6763 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6764 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6765 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6766 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6767 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6768 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6769 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6770 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6771 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
6772 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6773 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6774 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
6775 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6776 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6777 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6778 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6779 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6780 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
6781 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6782 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
6783 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6784 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6785 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6786 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
6787 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6788 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6789 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6790 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6791 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6792 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6793 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6794 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6795 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6796 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6797 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6798 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6799 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6800
6801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6880 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6881 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6882 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6883 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6884 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6885 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6886 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6887 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6888 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6889 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6890 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6891 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6892 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6893 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6894 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6895 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6896 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6897 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6898 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6899 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6900 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6901 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6902 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6903 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6904 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6905 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6906 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6907 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6908 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6909 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6910 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6911 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6912 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6913 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6914 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6915 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6916 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6917 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6918 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6919 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6920 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6921 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6922 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6923 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6924 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6925 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6926 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6927 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6928
6929 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
6930 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
6931 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
6932 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
6933 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
6934 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
6935 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
6936 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
6937 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
6938 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
6939
6940 /* Place-holder. Leave as first SPE builtin. */
6941 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6942 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6943 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6944 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6945 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6946 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6947 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6948 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6949 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6950 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6951 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6952 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6953 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6954 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6955 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6956 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6957 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6958 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6959 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6960 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6961 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6962 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6963 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6964 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6965 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6966 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6967 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6968 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6969 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6970 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6971 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6972 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6973 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6974 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6975 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6976 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6977 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6978 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6979 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6980 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6981 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6982 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6983 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6984 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6985 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6986 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6987 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6988 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6989 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6990 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6991 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6992 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6993 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6994 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6995 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6996 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6997 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6998 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6999 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7000 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7001 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7002 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7003 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7004 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7005 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7006 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7007 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7008 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7009 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7010 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7011 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7012 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7013 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7014 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
7015 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7016 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
7017 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7018 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7019 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7020 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7021 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7022 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7023 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7024 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7025 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7026 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7027 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7028 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7029 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7030 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7031 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7032 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7033 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7034 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7035 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7036 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7037 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7038 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7039 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7040 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7041 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7042 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7043 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7044 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7045 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7046 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7047 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7048 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7049 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7050
7051 /* SPE binary operations expecting a 5-bit unsigned literal. */
7052 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7053
7054 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7055 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7056 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7057 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7058 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7059 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7060 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7061 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7062 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7063 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7064 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7065 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7066 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7067 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7068 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7069 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7070 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7071 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7072 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7073 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7074 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7075 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7076 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7077 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7078 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7079 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7080
7081 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7082 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
7083};
7084
7085/* AltiVec predicates. */
7086
7087struct builtin_description_predicates
7088{
7089 const unsigned int mask;
7090 const enum insn_code icode;
7091 const char *opcode;
7092 const char *const name;
7093 const enum rs6000_builtins code;
7094};
7095
7096static const struct builtin_description_predicates bdesc_altivec_preds[] =
7097{
7098 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7101 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7102 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7104 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7105 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7106 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7107 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7108 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7109 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
7110 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7111
7112 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7113 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7114 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7115};
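/* The opcode strings above name the record forms of the AltiVec compare
   instructions (hence the trailing '.'), which set a result in CR6 that
   the *_p predicate builtins then test.  The last three entries are the
   generic predicate names and carry no instruction pattern of their own.  */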
24408032 7116
7117/* SPE predicates. */
7118static struct builtin_description bdesc_spe_predicates[] =
7119{
7120 /* Place-holder. Leave as first. */
7121 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7122 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7123 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7124 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7125 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7126 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7127 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7128 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7129 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7130 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7131 /* Place-holder. Leave as last. */
7132 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7133};
7134
7135/* SPE evsel predicates. */
7136static struct builtin_description bdesc_spe_evsel[] =
7137{
7138 /* Place-holder. Leave as first. */
7139 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7140 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7141 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7142 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7143 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7144 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7145 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7146 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7147 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7148 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7149 /* Place-holder. Leave as last. */
7150 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7151};
7152
96038623
DE
7153/* PAIRED predicates. */
7154static const struct builtin_description bdesc_paired_preds[] =
7155{
7156 /* Place-holder. Leave as first. */
7157 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7158 /* Place-holder. Leave as last. */
7159 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7160};
7161
b6d08ca1 7162/* ABS* operations. */
100c4561
AH
7163
7164static const struct builtin_description bdesc_abs[] =
7165{
7166 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7167 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7168 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7169 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7170 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7171 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7172 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7173};
7174
617e0e1d
DB
7175/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7176 foo (VECa). */
24408032 7177
a3170dc6 7178static struct builtin_description bdesc_1arg[] =
2212663f 7179{
617e0e1d
DB
7180 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7181 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7182 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7183 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7184 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7185 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7186 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7187 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7188 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7189 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7190 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7191 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7192 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7193 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7194 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7195 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7196 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7197
58646b77
PB
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7217
a3170dc6
AH
7218 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7219 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7220 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7221 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7222 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7223 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7224 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7225 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7226 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7227 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7228 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7229 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7230 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7231 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7232 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7233 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7234 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7235 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7236 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7237 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7238 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7239 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7240 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7241 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7242 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7243 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7244 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7245 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7246 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7247 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7248
7249 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7250 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7251
7252 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7253 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7254 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7255 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7256 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7257};
7258
7259static rtx
5039610b 7260rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7261{
7262 rtx pat;
5039610b 7263 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7264 rtx op0 = expand_normal (arg0);
2212663f
DB
7265 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7266 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7267
0559cc77
DE
7268 if (icode == CODE_FOR_nothing)
7269 /* Builtin not supported on this processor. */
7270 return 0;
7271
20e26713
AH
7272 /* If we got invalid arguments bail out before generating bad rtl. */
7273 if (arg0 == error_mark_node)
9a171fcd 7274 return const0_rtx;
20e26713 7275
0559cc77
DE
7276 if (icode == CODE_FOR_altivec_vspltisb
7277 || icode == CODE_FOR_altivec_vspltish
7278 || icode == CODE_FOR_altivec_vspltisw
7279 || icode == CODE_FOR_spe_evsplatfi
7280 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7281 {
7282 /* Only allow 5-bit *signed* literals. */
b44140e7 7283 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7284 || INTVAL (op0) > 15
7285 || INTVAL (op0) < -16)
b44140e7
AH
7286 {
7287 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7288 return const0_rtx;
b44140e7 7289 }
b44140e7
AH
7290 }
7291
c62f2db5 7292 if (target == 0
2212663f
DB
7293 || GET_MODE (target) != tmode
7294 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7295 target = gen_reg_rtx (tmode);
7296
7297 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7298 op0 = copy_to_mode_reg (mode0, op0);
7299
7300 pat = GEN_FCN (icode) (target, op0);
7301 if (! pat)
7302 return 0;
7303 emit_insn (pat);
0ac081f6 7304
2212663f
DB
7305 return target;
7306}
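 /* For illustration (a hypothetical call, not taken from this file): the
    splat-immediate check above only accepts literals in the range -16..15,
    so __builtin_altivec_vspltisb (5) expands normally, while
    __builtin_altivec_vspltisb (31) reports
    "argument 1 must be a 5-bit signed literal" and yields const0_rtx.  */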
ae4b4a02 7307
100c4561 7308static rtx
5039610b 7309altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7310{
7311 rtx pat, scratch1, scratch2;
5039610b 7312 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7313 rtx op0 = expand_normal (arg0);
100c4561
AH
7314 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7315 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7316
7317 /* If we have invalid arguments, bail out before generating bad rtl. */
7318 if (arg0 == error_mark_node)
9a171fcd 7319 return const0_rtx;
100c4561
AH
7320
7321 if (target == 0
7322 || GET_MODE (target) != tmode
7323 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7324 target = gen_reg_rtx (tmode);
7325
7326 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7327 op0 = copy_to_mode_reg (mode0, op0);
7328
7329 scratch1 = gen_reg_rtx (mode0);
7330 scratch2 = gen_reg_rtx (mode0);
7331
7332 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7333 if (! pat)
7334 return 0;
7335 emit_insn (pat);
7336
7337 return target;
7338}
7339
0ac081f6 7340static rtx
5039610b 7341rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7342{
7343 rtx pat;
5039610b
SL
7344 tree arg0 = CALL_EXPR_ARG (exp, 0);
7345 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7346 rtx op0 = expand_normal (arg0);
7347 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7348 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7349 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7350 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7351
0559cc77
DE
7352 if (icode == CODE_FOR_nothing)
7353 /* Builtin not supported on this processor. */
7354 return 0;
7355
20e26713
AH
7356 /* If we got invalid arguments bail out before generating bad rtl. */
7357 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7358 return const0_rtx;
20e26713 7359
0559cc77
DE
7360 if (icode == CODE_FOR_altivec_vcfux
7361 || icode == CODE_FOR_altivec_vcfsx
7362 || icode == CODE_FOR_altivec_vctsxs
7363 || icode == CODE_FOR_altivec_vctuxs
7364 || icode == CODE_FOR_altivec_vspltb
7365 || icode == CODE_FOR_altivec_vsplth
7366 || icode == CODE_FOR_altivec_vspltw
7367 || icode == CODE_FOR_spe_evaddiw
7368 || icode == CODE_FOR_spe_evldd
7369 || icode == CODE_FOR_spe_evldh
7370 || icode == CODE_FOR_spe_evldw
7371 || icode == CODE_FOR_spe_evlhhesplat
7372 || icode == CODE_FOR_spe_evlhhossplat
7373 || icode == CODE_FOR_spe_evlhhousplat
7374 || icode == CODE_FOR_spe_evlwhe
7375 || icode == CODE_FOR_spe_evlwhos
7376 || icode == CODE_FOR_spe_evlwhou
7377 || icode == CODE_FOR_spe_evlwhsplat
7378 || icode == CODE_FOR_spe_evlwwsplat
7379 || icode == CODE_FOR_spe_evrlwi
7380 || icode == CODE_FOR_spe_evslwi
7381 || icode == CODE_FOR_spe_evsrwis
f5119d10 7382 || icode == CODE_FOR_spe_evsubifw
0559cc77 7383 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7384 {
7385 /* Only allow 5-bit unsigned literals. */
8bb418a3 7386 STRIP_NOPS (arg1);
b44140e7
AH
7387 if (TREE_CODE (arg1) != INTEGER_CST
7388 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7389 {
7390 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7391 return const0_rtx;
b44140e7 7392 }
b44140e7
AH
7393 }
7394
c62f2db5 7395 if (target == 0
0ac081f6
AH
7396 || GET_MODE (target) != tmode
7397 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7398 target = gen_reg_rtx (tmode);
7399
7400 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7401 op0 = copy_to_mode_reg (mode0, op0);
7402 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7403 op1 = copy_to_mode_reg (mode1, op1);
7404
7405 pat = GEN_FCN (icode) (target, op0, op1);
7406 if (! pat)
7407 return 0;
7408 emit_insn (pat);
7409
7410 return target;
7411}
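 /* For illustration (hypothetical calls, not taken from this file): under
    the check above, __builtin_altivec_vspltw (v, 3) is accepted, while
    __builtin_altivec_vspltw (v, 32) reports
    "argument 2 must be a 5-bit unsigned literal" and yields const0_rtx.  */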
6525c0e7 7412
ae4b4a02 7413static rtx
f676971a 7414altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7415 tree exp, rtx target)
ae4b4a02
AH
7416{
7417 rtx pat, scratch;
5039610b
SL
7418 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7419 tree arg0 = CALL_EXPR_ARG (exp, 1);
7420 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7421 rtx op0 = expand_normal (arg0);
7422 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7423 enum machine_mode tmode = SImode;
7424 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7425 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7426 int cr6_form_int;
7427
7428 if (TREE_CODE (cr6_form) != INTEGER_CST)
7429 {
7430 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7431 return const0_rtx;
ae4b4a02
AH
7432 }
7433 else
7434 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7435
37409796 7436 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7437
7438 /* If we have invalid arguments, bail out before generating bad rtl. */
7439 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7440 return const0_rtx;
ae4b4a02
AH
7441
7442 if (target == 0
7443 || GET_MODE (target) != tmode
7444 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7445 target = gen_reg_rtx (tmode);
7446
7447 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7448 op0 = copy_to_mode_reg (mode0, op0);
7449 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7450 op1 = copy_to_mode_reg (mode1, op1);
7451
7452 scratch = gen_reg_rtx (mode0);
7453
7454 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7455 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7456 if (! pat)
7457 return 0;
7458 emit_insn (pat);
7459
7460 /* The vec_any* and vec_all* predicates use the same opcodes for two
7461 different operations, but the bits in CR6 will be different
7462 depending on what information we want. So we have to play tricks
7463 with CR6 to get the right bits out.
7464
7465 If you think this is disgusting, look at the specs for the
7466 AltiVec predicates. */
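  /* A sketch of the cr6_form mapping used below (the __CR6_* names are the
     usual altivec.h encoding, assumed here rather than defined in this
     file): 0 selects gen_cr6_test_for_zero, 1 its reverse, 2 selects
     gen_cr6_test_for_lt and 3 its reverse; altivec.h passes one of these
     four constants as the first argument of each vec_all_* / vec_any_*
     predicate so that a single compare instruction serves both forms.  */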
7467
c4ad648e
AM
7468 switch (cr6_form_int)
7469 {
7470 case 0:
7471 emit_insn (gen_cr6_test_for_zero (target));
7472 break;
7473 case 1:
7474 emit_insn (gen_cr6_test_for_zero_reverse (target));
7475 break;
7476 case 2:
7477 emit_insn (gen_cr6_test_for_lt (target));
7478 break;
7479 case 3:
7480 emit_insn (gen_cr6_test_for_lt_reverse (target));
7481 break;
7482 default:
7483 error ("argument 1 of __builtin_altivec_predicate is out of range");
7484 break;
7485 }
ae4b4a02
AH
7486
7487 return target;
7488}
7489
96038623
DE
7490static rtx
7491paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7492{
7493 rtx pat, addr;
7494 tree arg0 = CALL_EXPR_ARG (exp, 0);
7495 tree arg1 = CALL_EXPR_ARG (exp, 1);
7496 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7497 enum machine_mode mode0 = Pmode;
7498 enum machine_mode mode1 = Pmode;
7499 rtx op0 = expand_normal (arg0);
7500 rtx op1 = expand_normal (arg1);
7501
7502 if (icode == CODE_FOR_nothing)
7503 /* Builtin not supported on this processor. */
7504 return 0;
7505
7506 /* If we got invalid arguments bail out before generating bad rtl. */
7507 if (arg0 == error_mark_node || arg1 == error_mark_node)
7508 return const0_rtx;
7509
7510 if (target == 0
7511 || GET_MODE (target) != tmode
7512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7513 target = gen_reg_rtx (tmode);
7514
7515 op1 = copy_to_mode_reg (mode1, op1);
7516
7517 if (op0 == const0_rtx)
7518 {
7519 addr = gen_rtx_MEM (tmode, op1);
7520 }
7521 else
7522 {
7523 op0 = copy_to_mode_reg (mode0, op0);
7524 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7525 }
7526
7527 pat = GEN_FCN (icode) (target, addr);
7528
7529 if (! pat)
7530 return 0;
7531 emit_insn (pat);
7532
7533 return target;
7534}
7535
b4a62fa0 7536static rtx
5039610b 7537altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7538{
7539 rtx pat, addr;
5039610b
SL
7540 tree arg0 = CALL_EXPR_ARG (exp, 0);
7541 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7542 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7543 enum machine_mode mode0 = Pmode;
7544 enum machine_mode mode1 = Pmode;
84217346
MD
7545 rtx op0 = expand_normal (arg0);
7546 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7547
7548 if (icode == CODE_FOR_nothing)
7549 /* Builtin not supported on this processor. */
7550 return 0;
7551
7552 /* If we got invalid arguments bail out before generating bad rtl. */
7553 if (arg0 == error_mark_node || arg1 == error_mark_node)
7554 return const0_rtx;
7555
7556 if (target == 0
7557 || GET_MODE (target) != tmode
7558 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7559 target = gen_reg_rtx (tmode);
7560
f676971a 7561 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7562
7563 if (op0 == const0_rtx)
7564 {
7565 addr = gen_rtx_MEM (tmode, op1);
7566 }
7567 else
7568 {
7569 op0 = copy_to_mode_reg (mode0, op0);
7570 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7571 }
7572
7573 pat = GEN_FCN (icode) (target, addr);
7574
7575 if (! pat)
7576 return 0;
7577 emit_insn (pat);
7578
7579 return target;
7580}
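 /* A minimal sketch of the address formed above: for a two-operand load
    builtin such as __builtin_altivec_lvx (off, ptr), a zero offset gives
    (mem (reg ptr)) while a nonzero offset gives
    (mem (plus (reg off) (reg ptr))), so the lv* patterns always see a
    single memory operand.  */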
7581
61bea3b0 7582static rtx
5039610b 7583spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7584{
5039610b
SL
7585 tree arg0 = CALL_EXPR_ARG (exp, 0);
7586 tree arg1 = CALL_EXPR_ARG (exp, 1);
7587 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7588 rtx op0 = expand_normal (arg0);
7589 rtx op1 = expand_normal (arg1);
7590 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7591 rtx pat;
7592 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7593 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7594 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7595
 7596 /* Invalid arguments. Bail before doing anything stupid! */
7597 if (arg0 == error_mark_node
7598 || arg1 == error_mark_node
7599 || arg2 == error_mark_node)
7600 return const0_rtx;
7601
7602 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7603 op0 = copy_to_mode_reg (mode2, op0);
7604 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7605 op1 = copy_to_mode_reg (mode0, op1);
7606 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7607 op2 = copy_to_mode_reg (mode1, op2);
7608
7609 pat = GEN_FCN (icode) (op1, op2, op0);
7610 if (pat)
7611 emit_insn (pat);
7612 return NULL_RTX;
7613}
7614
96038623
DE
7615static rtx
7616paired_expand_stv_builtin (enum insn_code icode, tree exp)
7617{
7618 tree arg0 = CALL_EXPR_ARG (exp, 0);
7619 tree arg1 = CALL_EXPR_ARG (exp, 1);
7620 tree arg2 = CALL_EXPR_ARG (exp, 2);
7621 rtx op0 = expand_normal (arg0);
7622 rtx op1 = expand_normal (arg1);
7623 rtx op2 = expand_normal (arg2);
7624 rtx pat, addr;
7625 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7626 enum machine_mode mode1 = Pmode;
7627 enum machine_mode mode2 = Pmode;
7628
 7629 /* Invalid arguments. Bail before doing anything stupid! */
7630 if (arg0 == error_mark_node
7631 || arg1 == error_mark_node
7632 || arg2 == error_mark_node)
7633 return const0_rtx;
7634
7635 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7636 op0 = copy_to_mode_reg (tmode, op0);
7637
7638 op2 = copy_to_mode_reg (mode2, op2);
7639
7640 if (op1 == const0_rtx)
7641 {
7642 addr = gen_rtx_MEM (tmode, op2);
7643 }
7644 else
7645 {
7646 op1 = copy_to_mode_reg (mode1, op1);
7647 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7648 }
7649
7650 pat = GEN_FCN (icode) (addr, op0);
7651 if (pat)
7652 emit_insn (pat);
7653 return NULL_RTX;
7654}
7655
6525c0e7 7656static rtx
5039610b 7657altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7658{
5039610b
SL
7659 tree arg0 = CALL_EXPR_ARG (exp, 0);
7660 tree arg1 = CALL_EXPR_ARG (exp, 1);
7661 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7662 rtx op0 = expand_normal (arg0);
7663 rtx op1 = expand_normal (arg1);
7664 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7665 rtx pat, addr;
7666 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7667 enum machine_mode mode1 = Pmode;
7668 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7669
 7670 /* Invalid arguments. Bail before doing anything stupid! */
7671 if (arg0 == error_mark_node
7672 || arg1 == error_mark_node
7673 || arg2 == error_mark_node)
9a171fcd 7674 return const0_rtx;
6525c0e7 7675
b4a62fa0
SB
7676 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7677 op0 = copy_to_mode_reg (tmode, op0);
7678
f676971a 7679 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7680
7681 if (op1 == const0_rtx)
7682 {
7683 addr = gen_rtx_MEM (tmode, op2);
7684 }
7685 else
7686 {
7687 op1 = copy_to_mode_reg (mode1, op1);
7688 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7689 }
6525c0e7 7690
b4a62fa0 7691 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7692 if (pat)
7693 emit_insn (pat);
7694 return NULL_RTX;
7695}
7696
2212663f 7697static rtx
5039610b 7698rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7699{
7700 rtx pat;
5039610b
SL
7701 tree arg0 = CALL_EXPR_ARG (exp, 0);
7702 tree arg1 = CALL_EXPR_ARG (exp, 1);
7703 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7704 rtx op0 = expand_normal (arg0);
7705 rtx op1 = expand_normal (arg1);
7706 rtx op2 = expand_normal (arg2);
2212663f
DB
7707 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7708 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7709 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7710 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7711
774b5662
DE
7712 if (icode == CODE_FOR_nothing)
7713 /* Builtin not supported on this processor. */
7714 return 0;
7715
20e26713
AH
7716 /* If we got invalid arguments bail out before generating bad rtl. */
7717 if (arg0 == error_mark_node
7718 || arg1 == error_mark_node
7719 || arg2 == error_mark_node)
9a171fcd 7720 return const0_rtx;
20e26713 7721
aba5fb01
NS
7722 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7723 || icode == CODE_FOR_altivec_vsldoi_v4si
7724 || icode == CODE_FOR_altivec_vsldoi_v8hi
7725 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7726 {
7727 /* Only allow 4-bit unsigned literals. */
8bb418a3 7728 STRIP_NOPS (arg2);
b44140e7
AH
7729 if (TREE_CODE (arg2) != INTEGER_CST
7730 || TREE_INT_CST_LOW (arg2) & ~0xf)
7731 {
7732 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7733 return const0_rtx;
b44140e7 7734 }
b44140e7
AH
7735 }
7736
c62f2db5 7737 if (target == 0
2212663f
DB
7738 || GET_MODE (target) != tmode
7739 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7740 target = gen_reg_rtx (tmode);
7741
7742 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7743 op0 = copy_to_mode_reg (mode0, op0);
7744 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7745 op1 = copy_to_mode_reg (mode1, op1);
7746 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7747 op2 = copy_to_mode_reg (mode2, op2);
7748
7749 pat = GEN_FCN (icode) (target, op0, op1, op2);
7750 if (! pat)
7751 return 0;
7752 emit_insn (pat);
7753
7754 return target;
7755}
92898235 7756
3a9b8c7e 7757/* Expand the lvx builtins. */
0ac081f6 7758static rtx
a2369ed3 7759altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7760{
5039610b 7761 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7762 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7763 tree arg0;
7764 enum machine_mode tmode, mode0;
7c3abc73 7765 rtx pat, op0;
3a9b8c7e 7766 enum insn_code icode;
92898235 7767
0ac081f6
AH
7768 switch (fcode)
7769 {
f18c054f 7770 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7771 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7772 break;
f18c054f 7773 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7774 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7775 break;
7776 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7777 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7778 break;
7779 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7780 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7781 break;
7782 default:
7783 *expandedp = false;
7784 return NULL_RTX;
7785 }
0ac081f6 7786
3a9b8c7e 7787 *expandedp = true;
f18c054f 7788
5039610b 7789 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7790 op0 = expand_normal (arg0);
3a9b8c7e
AH
7791 tmode = insn_data[icode].operand[0].mode;
7792 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7793
3a9b8c7e
AH
7794 if (target == 0
7795 || GET_MODE (target) != tmode
7796 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7797 target = gen_reg_rtx (tmode);
24408032 7798
3a9b8c7e
AH
7799 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7800 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7801
3a9b8c7e
AH
7802 pat = GEN_FCN (icode) (target, op0);
7803 if (! pat)
7804 return 0;
7805 emit_insn (pat);
7806 return target;
7807}
f18c054f 7808
3a9b8c7e
AH
7809/* Expand the stvx builtins. */
7810static rtx
f676971a 7811altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7812 bool *expandedp)
3a9b8c7e 7813{
5039610b 7814 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7815 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7816 tree arg0, arg1;
7817 enum machine_mode mode0, mode1;
7c3abc73 7818 rtx pat, op0, op1;
3a9b8c7e 7819 enum insn_code icode;
f18c054f 7820
3a9b8c7e
AH
7821 switch (fcode)
7822 {
7823 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7824 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7825 break;
7826 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7827 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7828 break;
7829 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7830 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7831 break;
7832 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7833 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7834 break;
7835 default:
7836 *expandedp = false;
7837 return NULL_RTX;
7838 }
24408032 7839
5039610b
SL
7840 arg0 = CALL_EXPR_ARG (exp, 0);
7841 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7842 op0 = expand_normal (arg0);
7843 op1 = expand_normal (arg1);
3a9b8c7e
AH
7844 mode0 = insn_data[icode].operand[0].mode;
7845 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7846
3a9b8c7e
AH
7847 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7848 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7849 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7850 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7851
3a9b8c7e
AH
7852 pat = GEN_FCN (icode) (op0, op1);
7853 if (pat)
7854 emit_insn (pat);
f18c054f 7855
3a9b8c7e
AH
7856 *expandedp = true;
7857 return NULL_RTX;
7858}
f18c054f 7859
3a9b8c7e
AH
7860/* Expand the dst builtins. */
7861static rtx
f676971a 7862altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7863 bool *expandedp)
3a9b8c7e 7864{
5039610b 7865 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7866 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7867 tree arg0, arg1, arg2;
7868 enum machine_mode mode0, mode1, mode2;
7c3abc73 7869 rtx pat, op0, op1, op2;
586de218 7870 const struct builtin_description *d;
a3170dc6 7871 size_t i;
f18c054f 7872
3a9b8c7e 7873 *expandedp = false;
f18c054f 7874
3a9b8c7e 7875 /* Handle DST variants. */
586de218 7876 d = bdesc_dst;
3a9b8c7e
AH
7877 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7878 if (d->code == fcode)
7879 {
5039610b
SL
7880 arg0 = CALL_EXPR_ARG (exp, 0);
7881 arg1 = CALL_EXPR_ARG (exp, 1);
7882 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7883 op0 = expand_normal (arg0);
7884 op1 = expand_normal (arg1);
7885 op2 = expand_normal (arg2);
3a9b8c7e
AH
7886 mode0 = insn_data[d->icode].operand[0].mode;
7887 mode1 = insn_data[d->icode].operand[1].mode;
7888 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7889
3a9b8c7e
AH
7890 /* Invalid arguments, bail out before generating bad rtl. */
7891 if (arg0 == error_mark_node
7892 || arg1 == error_mark_node
7893 || arg2 == error_mark_node)
7894 return const0_rtx;
f18c054f 7895
86e7df90 7896 *expandedp = true;
8bb418a3 7897 STRIP_NOPS (arg2);
3a9b8c7e
AH
7898 if (TREE_CODE (arg2) != INTEGER_CST
7899 || TREE_INT_CST_LOW (arg2) & ~0x3)
7900 {
9e637a26 7901 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7902 return const0_rtx;
7903 }
f18c054f 7904
3a9b8c7e 7905 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7906 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7907 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7908 op1 = copy_to_mode_reg (mode1, op1);
24408032 7909
3a9b8c7e
AH
7910 pat = GEN_FCN (d->icode) (op0, op1, op2);
7911 if (pat != 0)
7912 emit_insn (pat);
f18c054f 7913
3a9b8c7e
AH
7914 return NULL_RTX;
7915 }
f18c054f 7916
3a9b8c7e
AH
7917 return NULL_RTX;
7918}
24408032 7919
7a4eca66
DE
7920/* Expand vec_init builtin. */
7921static rtx
5039610b 7922altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
7923{
7924 enum machine_mode tmode = TYPE_MODE (type);
7925 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7926 int i, n_elt = GET_MODE_NUNITS (tmode);
7927 rtvec v = rtvec_alloc (n_elt);
7928
7929 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 7930 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 7931
5039610b 7932 for (i = 0; i < n_elt; ++i)
7a4eca66 7933 {
5039610b 7934 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
7935 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7936 }
7937
7a4eca66
DE
7938 if (!target || !register_operand (target, tmode))
7939 target = gen_reg_rtx (tmode);
7940
7941 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7942 return target;
7943}
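 /* A minimal sketch of the expansion above: an N-element vec_init call
    must supply exactly N arguments (the gcc_assert enforces this); each is
    narrowed to the vector's inner mode and collected into a PARALLEL that
    rs6000_expand_vector_init turns into the actual vector construction.  */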
7944
7945/* Return the integer constant in ARG. Constrain it to be in the range
7946 of the subparts of VEC_TYPE; issue an error if not. */
7947
7948static int
7949get_element_number (tree vec_type, tree arg)
7950{
7951 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7952
7953 if (!host_integerp (arg, 1)
7954 || (elt = tree_low_cst (arg, 1), elt > max))
7955 {
7956 error ("selector must be an integer constant in the range 0..%wi", max);
7957 return 0;
7958 }
7959
7960 return elt;
7961}
7962
7963/* Expand vec_set builtin. */
7964static rtx
5039610b 7965altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
7966{
7967 enum machine_mode tmode, mode1;
7968 tree arg0, arg1, arg2;
7969 int elt;
7970 rtx op0, op1;
7971
5039610b
SL
7972 arg0 = CALL_EXPR_ARG (exp, 0);
7973 arg1 = CALL_EXPR_ARG (exp, 1);
7974 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
7975
7976 tmode = TYPE_MODE (TREE_TYPE (arg0));
7977 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7978 gcc_assert (VECTOR_MODE_P (tmode));
7979
7980 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7981 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7982 elt = get_element_number (TREE_TYPE (arg0), arg2);
7983
7984 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7985 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7986
7987 op0 = force_reg (tmode, op0);
7988 op1 = force_reg (mode1, op1);
7989
7990 rs6000_expand_vector_set (op0, op1, elt);
7991
7992 return op0;
7993}
7994
7995/* Expand vec_ext builtin. */
7996static rtx
5039610b 7997altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
7998{
7999 enum machine_mode tmode, mode0;
8000 tree arg0, arg1;
8001 int elt;
8002 rtx op0;
8003
5039610b
SL
8004 arg0 = CALL_EXPR_ARG (exp, 0);
8005 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8006
84217346 8007 op0 = expand_normal (arg0);
7a4eca66
DE
8008 elt = get_element_number (TREE_TYPE (arg0), arg1);
8009
8010 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8011 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8012 gcc_assert (VECTOR_MODE_P (mode0));
8013
8014 op0 = force_reg (mode0, op0);
8015
8016 if (optimize || !target || !register_operand (target, tmode))
8017 target = gen_reg_rtx (tmode);
8018
8019 rs6000_expand_vector_extract (target, op0, elt);
8020
8021 return target;
8022}
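 /* For illustration (a hypothetical call, not taken from this file): for a
    V4SI vector the selector must be 0..3, so __builtin_vec_ext_v4si (v, 2)
    extracts element 2, while a selector of 4 makes get_element_number
    report "selector must be an integer constant in the range 0..3".  */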
8023
3a9b8c7e
AH
8024/* Expand the builtin in EXP and store the result in TARGET. Store
8025 true in *EXPANDEDP if we found a builtin to expand. */
8026static rtx
a2369ed3 8027altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8028{
586de218
KG
8029 const struct builtin_description *d;
8030 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8031 size_t i;
8032 enum insn_code icode;
5039610b 8033 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8034 tree arg0;
8035 rtx op0, pat;
8036 enum machine_mode tmode, mode0;
3a9b8c7e 8037 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8038
58646b77
PB
8039 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8040 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8041 {
8042 *expandedp = true;
ea40ba9c 8043 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8044 return const0_rtx;
8045 }
8046
3a9b8c7e
AH
8047 target = altivec_expand_ld_builtin (exp, target, expandedp);
8048 if (*expandedp)
8049 return target;
0ac081f6 8050
3a9b8c7e
AH
8051 target = altivec_expand_st_builtin (exp, target, expandedp);
8052 if (*expandedp)
8053 return target;
8054
8055 target = altivec_expand_dst_builtin (exp, target, expandedp);
8056 if (*expandedp)
8057 return target;
8058
8059 *expandedp = true;
95385cbb 8060
3a9b8c7e
AH
8061 switch (fcode)
8062 {
6525c0e7 8063 case ALTIVEC_BUILTIN_STVX:
5039610b 8064 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8065 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8066 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8067 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8068 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8069 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8070 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8071 case ALTIVEC_BUILTIN_STVXL:
5039610b 8072 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8073
95385cbb
AH
8074 case ALTIVEC_BUILTIN_MFVSCR:
8075 icode = CODE_FOR_altivec_mfvscr;
8076 tmode = insn_data[icode].operand[0].mode;
8077
8078 if (target == 0
8079 || GET_MODE (target) != tmode
8080 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8081 target = gen_reg_rtx (tmode);
f676971a 8082
95385cbb 8083 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8084 if (! pat)
8085 return 0;
8086 emit_insn (pat);
95385cbb
AH
8087 return target;
8088
8089 case ALTIVEC_BUILTIN_MTVSCR:
8090 icode = CODE_FOR_altivec_mtvscr;
5039610b 8091 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8092 op0 = expand_normal (arg0);
95385cbb
AH
8093 mode0 = insn_data[icode].operand[0].mode;
8094
8095 /* If we got invalid arguments bail out before generating bad rtl. */
8096 if (arg0 == error_mark_node)
9a171fcd 8097 return const0_rtx;
95385cbb
AH
8098
8099 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8100 op0 = copy_to_mode_reg (mode0, op0);
8101
8102 pat = GEN_FCN (icode) (op0);
8103 if (pat)
8104 emit_insn (pat);
8105 return NULL_RTX;
3a9b8c7e 8106
95385cbb
AH
8107 case ALTIVEC_BUILTIN_DSSALL:
8108 emit_insn (gen_altivec_dssall ());
8109 return NULL_RTX;
8110
8111 case ALTIVEC_BUILTIN_DSS:
8112 icode = CODE_FOR_altivec_dss;
5039610b 8113 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8114 STRIP_NOPS (arg0);
84217346 8115 op0 = expand_normal (arg0);
95385cbb
AH
8116 mode0 = insn_data[icode].operand[0].mode;
8117
8118 /* If we got invalid arguments bail out before generating bad rtl. */
8119 if (arg0 == error_mark_node)
9a171fcd 8120 return const0_rtx;
95385cbb 8121
b44140e7
AH
8122 if (TREE_CODE (arg0) != INTEGER_CST
8123 || TREE_INT_CST_LOW (arg0) & ~0x3)
8124 {
8125 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8126 return const0_rtx;
b44140e7
AH
8127 }
8128
95385cbb
AH
8129 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8130 op0 = copy_to_mode_reg (mode0, op0);
8131
8132 emit_insn (gen_altivec_dss (op0));
0ac081f6 8133 return NULL_RTX;
7a4eca66
DE
8134
8135 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8136 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8137 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8138 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8139 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8140
8141 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8142 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8143 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8144 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8145 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8146
8147 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8148 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8149 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8150 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8151 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8152
8153 default:
8154 break;
8155 /* Fall through. */
0ac081f6 8156 }
24408032 8157
100c4561 8158 /* Expand abs* operations. */
586de218 8159 d = bdesc_abs;
ca7558fc 8160 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8161 if (d->code == fcode)
5039610b 8162 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8163
ae4b4a02 8164 /* Expand the AltiVec predicates. */
586de218 8165 dp = bdesc_altivec_preds;
ca7558fc 8166 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8167 if (dp->code == fcode)
c4ad648e 8168 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8169 exp, target);
ae4b4a02 8170
6525c0e7
AH
8171 /* LV* are funky. We initialized them differently. */
8172 switch (fcode)
8173 {
8174 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8175 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8176 exp, target);
6525c0e7 8177 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8178 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8179 exp, target);
6525c0e7 8180 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8181 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8182 exp, target);
6525c0e7 8183 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8184 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8185 exp, target);
6525c0e7 8186 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8187 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8188 exp, target);
6525c0e7 8189 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8190 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8191 exp, target);
6525c0e7 8192 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8193 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8194 exp, target);
6525c0e7
AH
8195 default:
8196 break;
8197 /* Fall through. */
8198 }
95385cbb 8199
92898235 8200 *expandedp = false;
0ac081f6
AH
8201 return NULL_RTX;
8202}
8203
96038623
DE
8204/* Expand the builtin in EXP and store the result in TARGET. Store
8205 true in *EXPANDEDP if we found a builtin to expand. */
8206static rtx
8207paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8208{
8209 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8210 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8211 struct builtin_description *d;
8212 size_t i;
8213
8214 *expandedp = true;
8215
8216 switch (fcode)
8217 {
8218 case PAIRED_BUILTIN_STX:
8219 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8220 case PAIRED_BUILTIN_LX:
8221 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8222 default:
8223 break;
8224 /* Fall through. */
8225 }
8226
8227 /* Expand the paired predicates. */
8228 d = (struct builtin_description *) bdesc_paired_preds;
8229 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8230 if (d->code == fcode)
8231 return paired_expand_predicate_builtin (d->icode, exp, target);
8232
8233 *expandedp = false;
8234 return NULL_RTX;
8235}
8236
a3170dc6
AH
8237/* Binops that need to be initialized manually, but can be expanded
8238 automagically by rs6000_expand_binop_builtin. */
8239static struct builtin_description bdesc_2arg_spe[] =
8240{
8241 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8242 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8243 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8244 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8245 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8246 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8247 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8248 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8249 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8250 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8251 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8252 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8253 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8254 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8255 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8256 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8257 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8258 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8259 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8260 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8261 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8262 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8263};
8264
8265/* Expand the builtin in EXP and store the result in TARGET. Store
8266 true in *EXPANDEDP if we found a builtin to expand.
8267
8268 This expands the SPE builtins that are not simple unary and binary
8269 operations. */
8270static rtx
a2369ed3 8271spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8272{
5039610b 8273 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8274 tree arg1, arg0;
8275 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8276 enum insn_code icode;
8277 enum machine_mode tmode, mode0;
8278 rtx pat, op0;
8279 struct builtin_description *d;
8280 size_t i;
8281
8282 *expandedp = true;
8283
8284 /* Syntax check for a 5-bit unsigned immediate. */
8285 switch (fcode)
8286 {
8287 case SPE_BUILTIN_EVSTDD:
8288 case SPE_BUILTIN_EVSTDH:
8289 case SPE_BUILTIN_EVSTDW:
8290 case SPE_BUILTIN_EVSTWHE:
8291 case SPE_BUILTIN_EVSTWHO:
8292 case SPE_BUILTIN_EVSTWWE:
8293 case SPE_BUILTIN_EVSTWWO:
5039610b 8294 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8295 if (TREE_CODE (arg1) != INTEGER_CST
8296 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8297 {
8298 error ("argument 2 must be a 5-bit unsigned literal");
8299 return const0_rtx;
8300 }
8301 break;
8302 default:
8303 break;
8304 }
8305
00332c9f
AH
8306 /* The evsplat*i instructions are not quite generic. */
8307 switch (fcode)
8308 {
8309 case SPE_BUILTIN_EVSPLATFI:
8310 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8311 exp, target);
00332c9f
AH
8312 case SPE_BUILTIN_EVSPLATI:
8313 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8314 exp, target);
00332c9f
AH
8315 default:
8316 break;
8317 }
8318
a3170dc6
AH
8319 d = (struct builtin_description *) bdesc_2arg_spe;
8320 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8321 if (d->code == fcode)
5039610b 8322 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8323
8324 d = (struct builtin_description *) bdesc_spe_predicates;
8325 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8326 if (d->code == fcode)
5039610b 8327 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8328
8329 d = (struct builtin_description *) bdesc_spe_evsel;
8330 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8331 if (d->code == fcode)
5039610b 8332 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8333
8334 switch (fcode)
8335 {
8336 case SPE_BUILTIN_EVSTDDX:
5039610b 8337 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8338 case SPE_BUILTIN_EVSTDHX:
5039610b 8339 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8340 case SPE_BUILTIN_EVSTDWX:
5039610b 8341 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8342 case SPE_BUILTIN_EVSTWHEX:
5039610b 8343 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8344 case SPE_BUILTIN_EVSTWHOX:
5039610b 8345 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8346 case SPE_BUILTIN_EVSTWWEX:
5039610b 8347 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8348 case SPE_BUILTIN_EVSTWWOX:
5039610b 8349 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8350 case SPE_BUILTIN_EVSTDD:
5039610b 8351 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8352 case SPE_BUILTIN_EVSTDH:
5039610b 8353 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8354 case SPE_BUILTIN_EVSTDW:
5039610b 8355 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8356 case SPE_BUILTIN_EVSTWHE:
5039610b 8357 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8358 case SPE_BUILTIN_EVSTWHO:
5039610b 8359 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8360 case SPE_BUILTIN_EVSTWWE:
5039610b 8361 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8362 case SPE_BUILTIN_EVSTWWO:
5039610b 8363 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8364 case SPE_BUILTIN_MFSPEFSCR:
8365 icode = CODE_FOR_spe_mfspefscr;
8366 tmode = insn_data[icode].operand[0].mode;
8367
8368 if (target == 0
8369 || GET_MODE (target) != tmode
8370 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8371 target = gen_reg_rtx (tmode);
f676971a 8372
a3170dc6
AH
8373 pat = GEN_FCN (icode) (target);
8374 if (! pat)
8375 return 0;
8376 emit_insn (pat);
8377 return target;
8378 case SPE_BUILTIN_MTSPEFSCR:
8379 icode = CODE_FOR_spe_mtspefscr;
5039610b 8380 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8381 op0 = expand_normal (arg0);
a3170dc6
AH
8382 mode0 = insn_data[icode].operand[0].mode;
8383
8384 if (arg0 == error_mark_node)
8385 return const0_rtx;
8386
8387 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8388 op0 = copy_to_mode_reg (mode0, op0);
8389
8390 pat = GEN_FCN (icode) (op0);
8391 if (pat)
8392 emit_insn (pat);
8393 return NULL_RTX;
8394 default:
8395 break;
8396 }
8397
8398 *expandedp = false;
8399 return NULL_RTX;
8400}
8401
96038623
DE
8402static rtx
8403paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8404{
8405 rtx pat, scratch, tmp;
8406 tree form = CALL_EXPR_ARG (exp, 0);
8407 tree arg0 = CALL_EXPR_ARG (exp, 1);
8408 tree arg1 = CALL_EXPR_ARG (exp, 2);
8409 rtx op0 = expand_normal (arg0);
8410 rtx op1 = expand_normal (arg1);
8411 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8412 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8413 int form_int;
8414 enum rtx_code code;
8415
8416 if (TREE_CODE (form) != INTEGER_CST)
8417 {
8418 error ("argument 1 of __builtin_paired_predicate must be a constant");
8419 return const0_rtx;
8420 }
8421 else
8422 form_int = TREE_INT_CST_LOW (form);
8423
8424 gcc_assert (mode0 == mode1);
8425
8426 if (arg0 == error_mark_node || arg1 == error_mark_node)
8427 return const0_rtx;
8428
8429 if (target == 0
8430 || GET_MODE (target) != SImode
8431 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8432 target = gen_reg_rtx (SImode);
8433 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8434 op0 = copy_to_mode_reg (mode0, op0);
8435 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8436 op1 = copy_to_mode_reg (mode1, op1);
8437
8438 scratch = gen_reg_rtx (CCFPmode);
8439
8440 pat = GEN_FCN (icode) (scratch, op0, op1);
8441 if (!pat)
8442 return const0_rtx;
8443
8444 emit_insn (pat);
8445
8446 switch (form_int)
8447 {
8448 /* LT bit. */
8449 case 0:
8450 code = LT;
8451 break;
8452 /* GT bit. */
8453 case 1:
8454 code = GT;
8455 break;
8456 /* EQ bit. */
8457 case 2:
8458 code = EQ;
8459 break;
8460 /* UN bit. */
8461 case 3:
8462 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8463 return target;
8464 default:
8465 error ("argument 1 of __builtin_paired_predicate is out of range");
8466 return const0_rtx;
8467 }
8468
8469 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8470 emit_move_insn (target, tmp);
8471 return target;
8472}
8473
a3170dc6 8474static rtx
5039610b 8475spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8476{
8477 rtx pat, scratch, tmp;
5039610b
SL
8478 tree form = CALL_EXPR_ARG (exp, 0);
8479 tree arg0 = CALL_EXPR_ARG (exp, 1);
8480 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8481 rtx op0 = expand_normal (arg0);
8482 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8483 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8484 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8485 int form_int;
8486 enum rtx_code code;
8487
8488 if (TREE_CODE (form) != INTEGER_CST)
8489 {
8490 error ("argument 1 of __builtin_spe_predicate must be a constant");
8491 return const0_rtx;
8492 }
8493 else
8494 form_int = TREE_INT_CST_LOW (form);
8495
37409796 8496 gcc_assert (mode0 == mode1);
a3170dc6
AH
8497
8498 if (arg0 == error_mark_node || arg1 == error_mark_node)
8499 return const0_rtx;
8500
8501 if (target == 0
8502 || GET_MODE (target) != SImode
8503 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8504 target = gen_reg_rtx (SImode);
8505
8506 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8507 op0 = copy_to_mode_reg (mode0, op0);
8508 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8509 op1 = copy_to_mode_reg (mode1, op1);
8510
8511 scratch = gen_reg_rtx (CCmode);
8512
8513 pat = GEN_FCN (icode) (scratch, op0, op1);
8514 if (! pat)
8515 return const0_rtx;
8516 emit_insn (pat);
8517
8518 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8519 _lower_. We use one compare, but look in different bits of the
8520 CR for each variant.
8521
8522 There are 2 elements in each SPE simd type (upper/lower). The CR
8523 bits are set as follows:
8524
8525 BIT0 | BIT 1 | BIT 2 | BIT 3
8526 U | L | (U | L) | (U & L)
8527
8528 So, for an "all" relationship, BIT 3 would be set.
8529 For an "any" relationship, BIT 2 would be set. Etc.
8530
8531 Following traditional nomenclature, these bits map to:
8532
8533 BIT0 | BIT 1 | BIT 2 | BIT 3
8534 LT | GT | EQ | OV
8535
8536 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
8537 */
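  /* A worked example under the scheme above: if evcmpgts compares {5, 1}
     against {3, 4}, one element comparison holds and the other does not,
     so BIT 2 (U | L) is set while BIT 3 (U & L) is clear; the "any" form
     of the predicate therefore returns nonzero and the "all" form returns
     zero.  */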
8538
8539 switch (form_int)
8540 {
8541 /* All variant. OV bit. */
8542 case 0:
8543 /* We need to get to the OV bit, which is the ORDERED bit. We
8544 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8545 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8546 So let's just use another pattern. */
8547 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8548 return target;
8549 /* Any variant. EQ bit. */
8550 case 1:
8551 code = EQ;
8552 break;
8553 /* Upper variant. LT bit. */
8554 case 2:
8555 code = LT;
8556 break;
8557 /* Lower variant. GT bit. */
8558 case 3:
8559 code = GT;
8560 break;
8561 default:
8562 error ("argument 1 of __builtin_spe_predicate is out of range");
8563 return const0_rtx;
8564 }
8565
8566 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8567 emit_move_insn (target, tmp);
8568
8569 return target;
8570}
8571
8572/* The evsel builtins look like this:
8573
8574 e = __builtin_spe_evsel_OP (a, b, c, d);
8575
8576 and work like this:
8577
8578 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8579 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8580*/
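/* For instance (hypothetical values): if a[upper] = 5, b[upper] = 3,
   a[lower] = 1 and b[lower] = 4, then
   e = __builtin_spe_evsel_gts (a, b, c, d) gives e[upper] = c[upper]
   (since 5 > 3 holds) and e[lower] = d[lower] (since 1 > 4 does not).  */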
8581
8582static rtx
5039610b 8583spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8584{
8585 rtx pat, scratch;
5039610b
SL
8586 tree arg0 = CALL_EXPR_ARG (exp, 0);
8587 tree arg1 = CALL_EXPR_ARG (exp, 1);
8588 tree arg2 = CALL_EXPR_ARG (exp, 2);
8589 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8590 rtx op0 = expand_normal (arg0);
8591 rtx op1 = expand_normal (arg1);
8592 rtx op2 = expand_normal (arg2);
8593 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8594 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8595 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8596
37409796 8597 gcc_assert (mode0 == mode1);
a3170dc6
AH
8598
8599 if (arg0 == error_mark_node || arg1 == error_mark_node
8600 || arg2 == error_mark_node || arg3 == error_mark_node)
8601 return const0_rtx;
8602
8603 if (target == 0
8604 || GET_MODE (target) != mode0
8605 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8606 target = gen_reg_rtx (mode0);
8607
8608 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8609 op0 = copy_to_mode_reg (mode0, op0);
8610 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8611 op1 = copy_to_mode_reg (mode0, op1);
8612 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8613 op2 = copy_to_mode_reg (mode0, op2);
8614 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8615 op3 = copy_to_mode_reg (mode0, op3);
8616
8617 /* Generate the compare. */
8618 scratch = gen_reg_rtx (CCmode);
8619 pat = GEN_FCN (icode) (scratch, op0, op1);
8620 if (! pat)
8621 return const0_rtx;
8622 emit_insn (pat);
8623
8624 if (mode0 == V2SImode)
8625 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8626 else
8627 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8628
8629 return target;
8630}
8631
0ac081f6
AH
8632/* Expand an expression EXP that calls a built-in function,
8633 with result going to TARGET if that's convenient
8634 (and in mode MODE if that's convenient).
8635 SUBTARGET may be used as the target for computing one of EXP's operands.
8636 IGNORE is nonzero if the value is to be ignored. */
8637
8638static rtx
a2369ed3 8639rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8640 enum machine_mode mode ATTRIBUTE_UNUSED,
8641 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8642{
5039610b 8643 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8644 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8645 const struct builtin_description *d;
92898235
AH
8646 size_t i;
8647 rtx ret;
8648 bool success;
f676971a 8649
7ccf35ed
DN
8650 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8651 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8652 {
8653 int icode = (int) CODE_FOR_altivec_lvsr;
8654 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8655 enum machine_mode mode = insn_data[icode].operand[1].mode;
8656 tree arg;
8657 rtx op, addr, pat;
8658
37409796 8659 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8660
5039610b 8661 arg = CALL_EXPR_ARG (exp, 0);
37409796 8662 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8663 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8664 addr = memory_address (mode, op);
8665 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8666 op = addr;
8667 else
8668 {
8669 /* For the load case we need to negate the address. */
8670 op = gen_reg_rtx (GET_MODE (addr));
8671 emit_insn (gen_rtx_SET (VOIDmode, op,
8672 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8673 }
7ccf35ed
DN
8674 op = gen_rtx_MEM (mode, op);
8675
8676 if (target == 0
8677 || GET_MODE (target) != tmode
8678 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8679 target = gen_reg_rtx (tmode);
8680
8681 /*pat = gen_altivec_lvsr (target, op);*/
8682 pat = GEN_FCN (icode) (target, op);
8683 if (!pat)
8684 return 0;
8685 emit_insn (pat);
8686
8687 return target;
8688 }
5039610b
SL
8689
8690 /* FIXME: There's got to be a nicer way to handle this case than
8691 constructing a new CALL_EXPR. */
f57d17f1
TM
8692 if (fcode == ALTIVEC_BUILTIN_VCFUX
8693 || fcode == ALTIVEC_BUILTIN_VCFSX)
8694 {
5039610b
SL
8695 if (call_expr_nargs (exp) == 1)
8696 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8697 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8698 }
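 /* In effect the block above treats a one-argument call as if a scale
    operand of 0 had been written, e.g. (hypothetical user-level view)

      __builtin_altivec_vcfsx (v)  ==  __builtin_altivec_vcfsx (v, 0)

    i.e. an int->float conversion with no 2**n scaling applied.  */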
7ccf35ed 8699
0ac081f6 8700 if (TARGET_ALTIVEC)
92898235
AH
8701 {
8702 ret = altivec_expand_builtin (exp, target, &success);
8703
a3170dc6
AH
8704 if (success)
8705 return ret;
8706 }
8707 if (TARGET_SPE)
8708 {
8709 ret = spe_expand_builtin (exp, target, &success);
8710
92898235
AH
8711 if (success)
8712 return ret;
8713 }
96038623
DE
8714 if (TARGET_PAIRED_FLOAT)
8715 {
8716 ret = paired_expand_builtin (exp, target, &success);
8717
8718 if (success)
8719 return ret;
8720 }
92898235 8721
96038623 8722 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 8723
37409796
NS
8724 /* Handle simple unary operations. */
8725 d = (struct builtin_description *) bdesc_1arg;
8726 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8727 if (d->code == fcode)
5039610b 8728 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8729
37409796
NS
8730 /* Handle simple binary operations. */
8731 d = (struct builtin_description *) bdesc_2arg;
8732 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8733 if (d->code == fcode)
5039610b 8734 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8735
37409796 8736 /* Handle simple ternary operations. */
586de218 8737 d = bdesc_3arg;
37409796
NS
8738 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8739 if (d->code == fcode)
5039610b 8740 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8741
37409796 8742 gcc_unreachable ();
0ac081f6
AH
8743}
8744
7c62e993
PB
8745static tree
8746build_opaque_vector_type (tree node, int nunits)
8747{
8748 node = copy_node (node);
8749 TYPE_MAIN_VARIANT (node) = node;
8750 return build_vector_type (node, nunits);
8751}
8752
0ac081f6 8753static void
863d938c 8754rs6000_init_builtins (void)
0ac081f6 8755{
4a5eab38
PB
8756 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8757 V2SF_type_node = build_vector_type (float_type_node, 2);
8758 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8759 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8760 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8761 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8762 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8763
8764 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8765 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8766 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8767
7c62e993
PB
8768 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8769 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8770 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8771 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8772
8bb418a3
ZL
8773 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8774 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8775 'vector unsigned short'. */
8776
8dd16ecc
NS
8777 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8778 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8779 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8780 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
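 /* Illustrative example (user-level code, not part of this file) of why
    the distinct copies matter: with them the front ends can tell

      vector unsigned int a, b;
      vector bool int     m = vec_cmpeq (a, b);

    apart from plain 'vector unsigned int', so the overloaded AltiVec
    intrinsics and C++ overload resolution see two different types even
    though both are four 32-bit unsigned lanes underneath.  */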
8bb418a3 8781
58646b77
PB
8782 long_integer_type_internal_node = long_integer_type_node;
8783 long_unsigned_type_internal_node = long_unsigned_type_node;
8784 intQI_type_internal_node = intQI_type_node;
8785 uintQI_type_internal_node = unsigned_intQI_type_node;
8786 intHI_type_internal_node = intHI_type_node;
8787 uintHI_type_internal_node = unsigned_intHI_type_node;
8788 intSI_type_internal_node = intSI_type_node;
8789 uintSI_type_internal_node = unsigned_intSI_type_node;
8790 float_type_internal_node = float_type_node;
8791 void_type_internal_node = void_type_node;
8792
8bb418a3
ZL
8793 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8794 get_identifier ("__bool char"),
8795 bool_char_type_node));
8796 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8797 get_identifier ("__bool short"),
8798 bool_short_type_node));
8799 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8800 get_identifier ("__bool int"),
8801 bool_int_type_node));
8802 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8803 get_identifier ("__pixel"),
8804 pixel_type_node));
8805
4a5eab38
PB
8806 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8807 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8808 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8809 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8810
8811 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8812 get_identifier ("__vector unsigned char"),
8813 unsigned_V16QI_type_node));
8814 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8815 get_identifier ("__vector signed char"),
8816 V16QI_type_node));
8817 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8818 get_identifier ("__vector __bool char"),
8819 bool_V16QI_type_node));
8820
8821 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8822 get_identifier ("__vector unsigned short"),
8823 unsigned_V8HI_type_node));
8824 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8825 get_identifier ("__vector signed short"),
8826 V8HI_type_node));
8827 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8828 get_identifier ("__vector __bool short"),
8829 bool_V8HI_type_node));
8830
8831 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8832 get_identifier ("__vector unsigned int"),
8833 unsigned_V4SI_type_node));
8834 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8835 get_identifier ("__vector signed int"),
8836 V4SI_type_node));
8837 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8838 get_identifier ("__vector __bool int"),
8839 bool_V4SI_type_node));
8840
8841 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8842 get_identifier ("__vector float"),
8843 V4SF_type_node));
8844 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8845 get_identifier ("__vector __pixel"),
8846 pixel_V8HI_type_node));
8847
96038623
DE
8848 if (TARGET_PAIRED_FLOAT)
8849 paired_init_builtins ();
a3170dc6 8850 if (TARGET_SPE)
3fdaa45a 8851 spe_init_builtins ();
0ac081f6
AH
8852 if (TARGET_ALTIVEC)
8853 altivec_init_builtins ();
96038623 8854 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 8855 rs6000_common_init_builtins ();
69ca3549
DE
8856
8857#if TARGET_XCOFF
8858 /* AIX libm provides clog as __clog. */
8859 if (built_in_decls [BUILT_IN_CLOG])
8860 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8861#endif
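 /* With that in place, a user call such as

      #include <complex.h>
      double _Complex z = clog (w);

    is emitted against the assembler name __clog, matching what the AIX
    math library actually exports.  (Illustrative user code.)  */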
0ac081f6
AH
8862}
8863
a3170dc6
AH
8864/* Search through a set of builtins and enable the mask bits.
8865 DESC is an array of builtins.
b6d08ca1 8866 SIZE is the total number of builtins.
8867 START is the builtin enum at which to start.
8868 END is the builtin enum at which to end. */
0ac081f6 8869static void
a2369ed3 8870enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8871 enum rs6000_builtins start,
a2369ed3 8872 enum rs6000_builtins end)
a3170dc6
AH
8873{
8874 int i;
8875
8876 for (i = 0; i < size; ++i)
8877 if (desc[i].code == start)
8878 break;
8879
8880 if (i == size)
8881 return;
8882
8883 for (; i < size; ++i)
8884 {
8885 /* Flip all the bits on. */
8886 desc[i].mask = target_flags;
8887 if (desc[i].code == end)
8888 break;
8889 }
8890}
8891
8892static void
863d938c 8893spe_init_builtins (void)
0ac081f6 8894{
a3170dc6
AH
8895 tree endlink = void_list_node;
8896 tree puint_type_node = build_pointer_type (unsigned_type_node);
8897 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 8898 struct builtin_description *d;
0ac081f6
AH
8899 size_t i;
8900
a3170dc6
AH
8901 tree v2si_ftype_4_v2si
8902 = build_function_type
3fdaa45a
AH
8903 (opaque_V2SI_type_node,
8904 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8905 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8906 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8907 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8908 endlink)))));
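 /* Written as a C prototype, the tree just built corresponds roughly to

      __ev64_opaque__ f (__ev64_opaque__, __ev64_opaque__,
                         __ev64_opaque__, __ev64_opaque__);

    which is the shape used by the four-operand evsel builtins registered
    further down.  (Explanatory sketch only.)  */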
8909
8910 tree v2sf_ftype_4_v2sf
8911 = build_function_type
3fdaa45a
AH
8912 (opaque_V2SF_type_node,
8913 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8914 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8915 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8916 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8917 endlink)))));
8918
8919 tree int_ftype_int_v2si_v2si
8920 = build_function_type
8921 (integer_type_node,
8922 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8923 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8924 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8925 endlink))));
8926
8927 tree int_ftype_int_v2sf_v2sf
8928 = build_function_type
8929 (integer_type_node,
8930 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8931 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8932 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8933 endlink))));
8934
8935 tree void_ftype_v2si_puint_int
8936 = build_function_type (void_type_node,
3fdaa45a 8937 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8938 tree_cons (NULL_TREE, puint_type_node,
8939 tree_cons (NULL_TREE,
8940 integer_type_node,
8941 endlink))));
8942
8943 tree void_ftype_v2si_puint_char
8944 = build_function_type (void_type_node,
3fdaa45a 8945 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8946 tree_cons (NULL_TREE, puint_type_node,
8947 tree_cons (NULL_TREE,
8948 char_type_node,
8949 endlink))));
8950
8951 tree void_ftype_v2si_pv2si_int
8952 = build_function_type (void_type_node,
3fdaa45a 8953 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8954 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8955 tree_cons (NULL_TREE,
8956 integer_type_node,
8957 endlink))));
8958
8959 tree void_ftype_v2si_pv2si_char
8960 = build_function_type (void_type_node,
3fdaa45a 8961 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8962 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8963 tree_cons (NULL_TREE,
8964 char_type_node,
8965 endlink))));
8966
8967 tree void_ftype_int
8968 = build_function_type (void_type_node,
8969 tree_cons (NULL_TREE, integer_type_node, endlink));
8970
8971 tree int_ftype_void
36e8d515 8972 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8973
8974 tree v2si_ftype_pv2si_int
3fdaa45a 8975 = build_function_type (opaque_V2SI_type_node,
6035d635 8976 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8977 tree_cons (NULL_TREE, integer_type_node,
8978 endlink)));
8979
8980 tree v2si_ftype_puint_int
3fdaa45a 8981 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8982 tree_cons (NULL_TREE, puint_type_node,
8983 tree_cons (NULL_TREE, integer_type_node,
8984 endlink)));
8985
8986 tree v2si_ftype_pushort_int
3fdaa45a 8987 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8988 tree_cons (NULL_TREE, pushort_type_node,
8989 tree_cons (NULL_TREE, integer_type_node,
8990 endlink)));
8991
00332c9f
AH
8992 tree v2si_ftype_signed_char
8993 = build_function_type (opaque_V2SI_type_node,
8994 tree_cons (NULL_TREE, signed_char_type_node,
8995 endlink));
8996
a3170dc6
AH
8997 /* The initialization of the simple binary and unary builtins is
8998 done in rs6000_common_init_builtins, but we have to enable the
8999 mask bits here manually because we have run out of `target_flags'
9000 bits. We really need to redesign this mask business. */
9001
9002 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9003 ARRAY_SIZE (bdesc_2arg),
9004 SPE_BUILTIN_EVADDW,
9005 SPE_BUILTIN_EVXOR);
9006 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9007 ARRAY_SIZE (bdesc_1arg),
9008 SPE_BUILTIN_EVABS,
9009 SPE_BUILTIN_EVSUBFUSIAAW);
9010 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9011 ARRAY_SIZE (bdesc_spe_predicates),
9012 SPE_BUILTIN_EVCMPEQ,
9013 SPE_BUILTIN_EVFSTSTLT);
9014 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9015 ARRAY_SIZE (bdesc_spe_evsel),
9016 SPE_BUILTIN_EVSEL_CMPGTS,
9017 SPE_BUILTIN_EVSEL_FSTSTEQ);
9018
36252949
AH
9019 (*lang_hooks.decls.pushdecl)
9020 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9021 opaque_V2SI_type_node));
9022
a3170dc6 9023 /* Initialize irregular SPE builtins. */
f676971a 9024
a3170dc6
AH
9025 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9026 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9027 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9028 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9029 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9030 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9031 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9032 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9033 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9034 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9035 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9036 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9037 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9038 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9039 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9040 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9041 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9042 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9043
9044 /* Loads. */
9045 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9046 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9047 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9048 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9049 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9050 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9051 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9052 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9053 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9054 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9055 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9056 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9057 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9058 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9059 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9060 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9061 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9062 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9063 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9064 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9065 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9066 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9067
9068 /* Predicates. */
9069 d = (struct builtin_description *) bdesc_spe_predicates;
9070 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9071 {
9072 tree type;
9073
9074 switch (insn_data[d->icode].operand[1].mode)
9075 {
9076 case V2SImode:
9077 type = int_ftype_int_v2si_v2si;
9078 break;
9079 case V2SFmode:
9080 type = int_ftype_int_v2sf_v2sf;
9081 break;
9082 default:
37409796 9083 gcc_unreachable ();
a3170dc6
AH
9084 }
9085
9086 def_builtin (d->mask, d->name, type, d->code);
9087 }
9088
9089 /* Evsel predicates. */
9090 d = (struct builtin_description *) bdesc_spe_evsel;
9091 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9092 {
9093 tree type;
9094
9095 switch (insn_data[d->icode].operand[1].mode)
9096 {
9097 case V2SImode:
9098 type = v2si_ftype_4_v2si;
9099 break;
9100 case V2SFmode:
9101 type = v2sf_ftype_4_v2sf;
9102 break;
9103 default:
37409796 9104 gcc_unreachable ();
a3170dc6
AH
9105 }
9106
9107 def_builtin (d->mask, d->name, type, d->code);
9108 }
9109}
9110
96038623
DE
9111static void
9112paired_init_builtins (void)
9113{
9114 struct builtin_description *d;
9115 size_t i;
9116 tree endlink = void_list_node;
9117
9118 tree int_ftype_int_v2sf_v2sf
9119 = build_function_type
9120 (integer_type_node,
9121 tree_cons (NULL_TREE, integer_type_node,
9122 tree_cons (NULL_TREE, V2SF_type_node,
9123 tree_cons (NULL_TREE, V2SF_type_node,
9124 endlink))));
9125 tree pcfloat_type_node =
9126 build_pointer_type (build_qualified_type
9127 (float_type_node, TYPE_QUAL_CONST));
9128
9129 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9130 long_integer_type_node,
9131 pcfloat_type_node,
9132 NULL_TREE);
9133 tree void_ftype_v2sf_long_pcfloat =
9134 build_function_type_list (void_type_node,
9135 V2SF_type_node,
9136 long_integer_type_node,
9137 pcfloat_type_node,
9138 NULL_TREE);
9139
9140
9141 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9142 PAIRED_BUILTIN_LX);
9143
9144
9145 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9146 PAIRED_BUILTIN_STX);
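 /* Usage sketch (hypothetical user code; the vector type is spelled with
    the generic GCC vector extension here, and the exact addressing
    semantics live in the paired-single patterns):

      typedef float v2sf __attribute__ ((vector_size (8)));
      const float *p;
      v2sf v = __builtin_paired_lx (0, p);     load the float pair at p
      __builtin_paired_stx (v, 0, p);          store it back

    matching the (long, const float *) and (v2sf, long, const float *)
    signatures built above.  */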
9147
9148 /* Predicates. */
9149 d = (struct builtin_description *) bdesc_paired_preds;
9150 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9151 {
9152 tree type;
9153
9154 switch (insn_data[d->icode].operand[1].mode)
9155 {
9156 case V2SFmode:
9157 type = int_ftype_int_v2sf_v2sf;
9158 break;
9159 default:
9160 gcc_unreachable ();
9161 }
9162
9163 def_builtin (d->mask, d->name, type, d->code);
9164 }
9165}
9166
a3170dc6 9167static void
863d938c 9168altivec_init_builtins (void)
a3170dc6 9169{
586de218
KG
9170 const struct builtin_description *d;
9171 const struct builtin_description_predicates *dp;
a3170dc6 9172 size_t i;
7a4eca66
DE
9173 tree ftype;
9174
a3170dc6
AH
9175 tree pfloat_type_node = build_pointer_type (float_type_node);
9176 tree pint_type_node = build_pointer_type (integer_type_node);
9177 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9178 tree pchar_type_node = build_pointer_type (char_type_node);
9179
9180 tree pvoid_type_node = build_pointer_type (void_type_node);
9181
0dbc3651
ZW
9182 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9183 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9184 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9185 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9186
9187 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9188
58646b77
PB
9189 tree int_ftype_opaque
9190 = build_function_type_list (integer_type_node,
9191 opaque_V4SI_type_node, NULL_TREE);
9192
9193 tree opaque_ftype_opaque_int
9194 = build_function_type_list (opaque_V4SI_type_node,
9195 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9196 tree opaque_ftype_opaque_opaque_int
9197 = build_function_type_list (opaque_V4SI_type_node,
9198 opaque_V4SI_type_node, opaque_V4SI_type_node,
9199 integer_type_node, NULL_TREE);
9200 tree int_ftype_int_opaque_opaque
9201 = build_function_type_list (integer_type_node,
9202 integer_type_node, opaque_V4SI_type_node,
9203 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9204 tree int_ftype_int_v4si_v4si
9205 = build_function_type_list (integer_type_node,
9206 integer_type_node, V4SI_type_node,
9207 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9208 tree v4sf_ftype_pcfloat
9209 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9210 tree void_ftype_pfloat_v4sf
b4de2f7d 9211 = build_function_type_list (void_type_node,
a3170dc6 9212 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9213 tree v4si_ftype_pcint
9214 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9215 tree void_ftype_pint_v4si
b4de2f7d
AH
9216 = build_function_type_list (void_type_node,
9217 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9218 tree v8hi_ftype_pcshort
9219 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9220 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9221 = build_function_type_list (void_type_node,
9222 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9223 tree v16qi_ftype_pcchar
9224 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9225 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9226 = build_function_type_list (void_type_node,
9227 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9228 tree void_ftype_v4si
b4de2f7d 9229 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9230 tree v8hi_ftype_void
9231 = build_function_type (V8HI_type_node, void_list_node);
9232 tree void_ftype_void
9233 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9234 tree void_ftype_int
9235 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9236
58646b77
PB
9237 tree opaque_ftype_long_pcvoid
9238 = build_function_type_list (opaque_V4SI_type_node,
9239 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9240 tree v16qi_ftype_long_pcvoid
a3170dc6 9241 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9242 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9243 tree v8hi_ftype_long_pcvoid
a3170dc6 9244 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9245 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9246 tree v4si_ftype_long_pcvoid
a3170dc6 9247 = build_function_type_list (V4SI_type_node,
b4a62fa0 9248 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9249
58646b77
PB
9250 tree void_ftype_opaque_long_pvoid
9251 = build_function_type_list (void_type_node,
9252 opaque_V4SI_type_node, long_integer_type_node,
9253 pvoid_type_node, NULL_TREE);
b4a62fa0 9254 tree void_ftype_v4si_long_pvoid
b4de2f7d 9255 = build_function_type_list (void_type_node,
b4a62fa0 9256 V4SI_type_node, long_integer_type_node,
b4de2f7d 9257 pvoid_type_node, NULL_TREE);
b4a62fa0 9258 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9259 = build_function_type_list (void_type_node,
b4a62fa0 9260 V16QI_type_node, long_integer_type_node,
b4de2f7d 9261 pvoid_type_node, NULL_TREE);
b4a62fa0 9262 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9263 = build_function_type_list (void_type_node,
b4a62fa0 9264 V8HI_type_node, long_integer_type_node,
b4de2f7d 9265 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9266 tree int_ftype_int_v8hi_v8hi
9267 = build_function_type_list (integer_type_node,
9268 integer_type_node, V8HI_type_node,
9269 V8HI_type_node, NULL_TREE);
9270 tree int_ftype_int_v16qi_v16qi
9271 = build_function_type_list (integer_type_node,
9272 integer_type_node, V16QI_type_node,
9273 V16QI_type_node, NULL_TREE);
9274 tree int_ftype_int_v4sf_v4sf
9275 = build_function_type_list (integer_type_node,
9276 integer_type_node, V4SF_type_node,
9277 V4SF_type_node, NULL_TREE);
9278 tree v4si_ftype_v4si
9279 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9280 tree v8hi_ftype_v8hi
9281 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9282 tree v16qi_ftype_v16qi
9283 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9284 tree v4sf_ftype_v4sf
9285 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9286 tree void_ftype_pcvoid_int_int
a3170dc6 9287 = build_function_type_list (void_type_node,
0dbc3651 9288 pcvoid_type_node, integer_type_node,
8bb418a3 9289 integer_type_node, NULL_TREE);
8bb418a3 9290
0dbc3651
ZW
9291 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9292 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9293 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9294 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9295 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9296 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9297 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9298 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9299 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9300 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9301 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9302 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9303 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9304 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9305 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9306 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9307 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9308 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9309 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9310 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9311 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9312 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9313 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9314 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9315 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9316 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9317 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9318 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9319 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9320 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9321 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9322 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9323 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9324 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9325 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9326 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9327 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9328 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9329 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9330 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9331 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9332 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9333 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9334 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9335 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9336 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9337
9338 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9339
9340 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9341 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9342 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9343 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9344 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9345 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9346 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9347 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9348 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9349 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
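 /* These __builtin_vec_* entries are the type-overloaded front doors that
    the generic intrinsics in <altivec.h> are mapped onto, e.g.
    (hypothetical user code; the exact mapping lives in altivec.h and the
    front-end resolver):

      #include <altivec.h>
      const float *pf;
      vector float v = vec_ld (0, pf);       resolved via __builtin_vec_ld
      vector float w = vec_sld (v, v, 4);    resolved via __builtin_vec_sld

    The resolver inspects the operand types and dispatches to the matching
    non-overloaded __builtin_altivec_* form.  */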
8bb418a3 9350
a3170dc6 9351 /* Add the DST variants. */
586de218 9352 d = bdesc_dst;
a3170dc6 9353 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9354 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9355
9356 /* Initialize the predicates. */
586de218 9357 dp = bdesc_altivec_preds;
a3170dc6
AH
9358 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9359 {
9360 enum machine_mode mode1;
9361 tree type;
58646b77
PB
9362 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9363 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9364
58646b77
PB
9365 if (is_overloaded)
9366 mode1 = VOIDmode;
9367 else
9368 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9369
9370 switch (mode1)
9371 {
58646b77
PB
9372 case VOIDmode:
9373 type = int_ftype_int_opaque_opaque;
9374 break;
a3170dc6
AH
9375 case V4SImode:
9376 type = int_ftype_int_v4si_v4si;
9377 break;
9378 case V8HImode:
9379 type = int_ftype_int_v8hi_v8hi;
9380 break;
9381 case V16QImode:
9382 type = int_ftype_int_v16qi_v16qi;
9383 break;
9384 case V4SFmode:
9385 type = int_ftype_int_v4sf_v4sf;
9386 break;
9387 default:
37409796 9388 gcc_unreachable ();
a3170dc6 9389 }
f676971a 9390
a3170dc6
AH
9391 def_builtin (dp->mask, dp->name, type, dp->code);
9392 }
9393
9394 /* Initialize the abs* operators. */
586de218 9395 d = bdesc_abs;
a3170dc6
AH
9396 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9397 {
9398 enum machine_mode mode0;
9399 tree type;
9400
9401 mode0 = insn_data[d->icode].operand[0].mode;
9402
9403 switch (mode0)
9404 {
9405 case V4SImode:
9406 type = v4si_ftype_v4si;
9407 break;
9408 case V8HImode:
9409 type = v8hi_ftype_v8hi;
9410 break;
9411 case V16QImode:
9412 type = v16qi_ftype_v16qi;
9413 break;
9414 case V4SFmode:
9415 type = v4sf_ftype_v4sf;
9416 break;
9417 default:
37409796 9418 gcc_unreachable ();
a3170dc6 9419 }
f676971a 9420
a3170dc6
AH
9421 def_builtin (d->mask, d->name, type, d->code);
9422 }
7ccf35ed 9423
13c62176
DN
9424 if (TARGET_ALTIVEC)
9425 {
9426 tree decl;
9427
9428 /* Initialize target builtin that implements
9429 targetm.vectorize.builtin_mask_for_load. */
9430
c79efc4d
RÁE
9431 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9432 v16qi_ftype_long_pcvoid,
9433 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9434 BUILT_IN_MD, NULL, NULL_TREE);
9435 TREE_READONLY (decl) = 1;
13c62176
DN
9436 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9437 altivec_builtin_mask_for_load = decl;
13c62176 9438 }
7a4eca66
DE
9439
9440 /* Access to the vec_init patterns. */
9441 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9442 integer_type_node, integer_type_node,
9443 integer_type_node, NULL_TREE);
9444 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9445 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9446
9447 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9448 short_integer_type_node,
9449 short_integer_type_node,
9450 short_integer_type_node,
9451 short_integer_type_node,
9452 short_integer_type_node,
9453 short_integer_type_node,
9454 short_integer_type_node, NULL_TREE);
9455 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9456 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9457
9458 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9459 char_type_node, char_type_node,
9460 char_type_node, char_type_node,
9461 char_type_node, char_type_node,
9462 char_type_node, char_type_node,
9463 char_type_node, char_type_node,
9464 char_type_node, char_type_node,
9465 char_type_node, char_type_node,
9466 char_type_node, NULL_TREE);
9467 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9468 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9469
9470 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9471 float_type_node, float_type_node,
9472 float_type_node, NULL_TREE);
9473 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9474 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
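 /* Usage sketch for the vec_init entry points (hypothetical user code;
    normally these are reached through vector initializers rather than
    called directly):

      vector signed int vi = __builtin_vec_init_v4si (1, 2, 3, 4);
      vector float      vf = __builtin_vec_init_v4sf (1.0f, 2.0f, 3.0f, 4.0f);

    The scalar arguments become the elements of the constructed vector.  */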
9475
9476 /* Access to the vec_set patterns. */
9477 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9478 intSI_type_node,
9479 integer_type_node, NULL_TREE);
9480 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9481 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9482
9483 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9484 intHI_type_node,
9485 integer_type_node, NULL_TREE);
9486 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9487 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9488
9489 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
9490 intQI_type_node,
9491 integer_type_node, NULL_TREE);
9492 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9493 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9494
9495 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9496 float_type_node,
9497 integer_type_node, NULL_TREE);
9498 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9499 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9500
9501 /* Access to the vec_extract patterns. */
9502 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9503 integer_type_node, NULL_TREE);
9504 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9505 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9506
9507 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9508 integer_type_node, NULL_TREE);
9509 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9510 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9511
9512 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9513 integer_type_node, NULL_TREE);
9514 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9515 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9516
9517 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9518 integer_type_node, NULL_TREE);
9519 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9520 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
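 /* And for element insertion/extraction (hypothetical user code; the last
    operand is the element number, whose lane ordering follows the
    target's vector numbering):

      vector signed int v = __builtin_vec_init_v4si (10, 20, 30, 40);
      v = __builtin_vec_set_v4si (v, 99, 2);      element 2 becomes 99
      int e = __builtin_vec_ext_v4si (v, 1);      reads element 1, i.e. 20  */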
a3170dc6
AH
9521}
9522
9523static void
863d938c 9524rs6000_common_init_builtins (void)
a3170dc6 9525{
586de218 9526 const struct builtin_description *d;
a3170dc6
AH
9527 size_t i;
9528
96038623
DE
9529 tree v2sf_ftype_v2sf_v2sf_v2sf
9530 = build_function_type_list (V2SF_type_node,
9531 V2SF_type_node, V2SF_type_node,
9532 V2SF_type_node, NULL_TREE);
9533
a3170dc6
AH
9534 tree v4sf_ftype_v4sf_v4sf_v16qi
9535 = build_function_type_list (V4SF_type_node,
9536 V4SF_type_node, V4SF_type_node,
9537 V16QI_type_node, NULL_TREE);
9538 tree v4si_ftype_v4si_v4si_v16qi
9539 = build_function_type_list (V4SI_type_node,
9540 V4SI_type_node, V4SI_type_node,
9541 V16QI_type_node, NULL_TREE);
9542 tree v8hi_ftype_v8hi_v8hi_v16qi
9543 = build_function_type_list (V8HI_type_node,
9544 V8HI_type_node, V8HI_type_node,
9545 V16QI_type_node, NULL_TREE);
9546 tree v16qi_ftype_v16qi_v16qi_v16qi
9547 = build_function_type_list (V16QI_type_node,
9548 V16QI_type_node, V16QI_type_node,
9549 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9550 tree v4si_ftype_int
9551 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9552 tree v8hi_ftype_int
9553 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9554 tree v16qi_ftype_int
9555 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9556 tree v8hi_ftype_v16qi
9557 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9558 tree v4sf_ftype_v4sf
9559 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9560
9561 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9562 = build_function_type_list (opaque_V2SI_type_node,
9563 opaque_V2SI_type_node,
9564 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9565
96038623 9566 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9567 = build_function_type_list (opaque_V2SF_type_node,
9568 opaque_V2SF_type_node,
9569 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9570
96038623
DE
9571 tree v2sf_ftype_v2sf_v2sf
9572 = build_function_type_list (V2SF_type_node,
9573 V2SF_type_node,
9574 V2SF_type_node, NULL_TREE);
9575
9576
a3170dc6 9577 tree v2si_ftype_int_int
2abe3e28 9578 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9579 integer_type_node, integer_type_node,
9580 NULL_TREE);
9581
58646b77
PB
9582 tree opaque_ftype_opaque
9583 = build_function_type_list (opaque_V4SI_type_node,
9584 opaque_V4SI_type_node, NULL_TREE);
9585
a3170dc6 9586 tree v2si_ftype_v2si
2abe3e28
AH
9587 = build_function_type_list (opaque_V2SI_type_node,
9588 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9589
96038623 9590 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9591 = build_function_type_list (opaque_V2SF_type_node,
9592 opaque_V2SF_type_node, NULL_TREE);
f676971a 9593
96038623
DE
9594 tree v2sf_ftype_v2sf
9595 = build_function_type_list (V2SF_type_node,
9596 V2SF_type_node, NULL_TREE);
9597
a3170dc6 9598 tree v2sf_ftype_v2si
2abe3e28
AH
9599 = build_function_type_list (opaque_V2SF_type_node,
9600 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9601
9602 tree v2si_ftype_v2sf
2abe3e28
AH
9603 = build_function_type_list (opaque_V2SI_type_node,
9604 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9605
9606 tree v2si_ftype_v2si_char
2abe3e28
AH
9607 = build_function_type_list (opaque_V2SI_type_node,
9608 opaque_V2SI_type_node,
9609 char_type_node, NULL_TREE);
a3170dc6
AH
9610
9611 tree v2si_ftype_int_char
2abe3e28 9612 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9613 integer_type_node, char_type_node, NULL_TREE);
9614
9615 tree v2si_ftype_char
2abe3e28
AH
9616 = build_function_type_list (opaque_V2SI_type_node,
9617 char_type_node, NULL_TREE);
a3170dc6
AH
9618
9619 tree int_ftype_int_int
9620 = build_function_type_list (integer_type_node,
9621 integer_type_node, integer_type_node,
9622 NULL_TREE);
95385cbb 9623
58646b77
PB
9624 tree opaque_ftype_opaque_opaque
9625 = build_function_type_list (opaque_V4SI_type_node,
9626 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9627 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9628 = build_function_type_list (V4SI_type_node,
9629 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9630 tree v4sf_ftype_v4si_int
b4de2f7d 9631 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9632 V4SI_type_node, integer_type_node, NULL_TREE);
9633 tree v4si_ftype_v4sf_int
b4de2f7d 9634 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9635 V4SF_type_node, integer_type_node, NULL_TREE);
9636 tree v4si_ftype_v4si_int
b4de2f7d 9637 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9638 V4SI_type_node, integer_type_node, NULL_TREE);
9639 tree v8hi_ftype_v8hi_int
b4de2f7d 9640 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9641 V8HI_type_node, integer_type_node, NULL_TREE);
9642 tree v16qi_ftype_v16qi_int
b4de2f7d 9643 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9644 V16QI_type_node, integer_type_node, NULL_TREE);
9645 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9646 = build_function_type_list (V16QI_type_node,
9647 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9648 integer_type_node, NULL_TREE);
9649 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9650 = build_function_type_list (V8HI_type_node,
9651 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9652 integer_type_node, NULL_TREE);
9653 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9654 = build_function_type_list (V4SI_type_node,
9655 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9656 integer_type_node, NULL_TREE);
9657 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9658 = build_function_type_list (V4SF_type_node,
9659 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9660 integer_type_node, NULL_TREE);
0ac081f6 9661 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9662 = build_function_type_list (V4SF_type_node,
9663 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9664 tree opaque_ftype_opaque_opaque_opaque
9665 = build_function_type_list (opaque_V4SI_type_node,
9666 opaque_V4SI_type_node, opaque_V4SI_type_node,
9667 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9668 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9669 = build_function_type_list (V4SF_type_node,
9670 V4SF_type_node, V4SF_type_node,
9671 V4SI_type_node, NULL_TREE);
2212663f 9672 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9673 = build_function_type_list (V4SF_type_node,
9674 V4SF_type_node, V4SF_type_node,
9675 V4SF_type_node, NULL_TREE);
f676971a 9676 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9677 = build_function_type_list (V4SI_type_node,
9678 V4SI_type_node, V4SI_type_node,
9679 V4SI_type_node, NULL_TREE);
0ac081f6 9680 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9681 = build_function_type_list (V8HI_type_node,
9682 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9683 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9684 = build_function_type_list (V8HI_type_node,
9685 V8HI_type_node, V8HI_type_node,
9686 V8HI_type_node, NULL_TREE);
c4ad648e 9687 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9688 = build_function_type_list (V4SI_type_node,
9689 V8HI_type_node, V8HI_type_node,
9690 V4SI_type_node, NULL_TREE);
c4ad648e 9691 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9692 = build_function_type_list (V4SI_type_node,
9693 V16QI_type_node, V16QI_type_node,
9694 V4SI_type_node, NULL_TREE);
0ac081f6 9695 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9696 = build_function_type_list (V16QI_type_node,
9697 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9698 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9699 = build_function_type_list (V4SI_type_node,
9700 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9701 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9702 = build_function_type_list (V8HI_type_node,
9703 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9704 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9705 = build_function_type_list (V4SI_type_node,
9706 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9707 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9708 = build_function_type_list (V8HI_type_node,
9709 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9710 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9711 = build_function_type_list (V16QI_type_node,
9712 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9713 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9714 = build_function_type_list (V4SI_type_node,
9715 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9716 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9717 = build_function_type_list (V4SI_type_node,
9718 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9719 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9720 = build_function_type_list (V4SI_type_node,
9721 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9722 tree v4si_ftype_v8hi
9723 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9724 tree int_ftype_v4si_v4si
9725 = build_function_type_list (integer_type_node,
9726 V4SI_type_node, V4SI_type_node, NULL_TREE);
9727 tree int_ftype_v4sf_v4sf
9728 = build_function_type_list (integer_type_node,
9729 V4SF_type_node, V4SF_type_node, NULL_TREE);
9730 tree int_ftype_v16qi_v16qi
9731 = build_function_type_list (integer_type_node,
9732 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9733 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9734 = build_function_type_list (integer_type_node,
9735 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9736
6f317ef3 9737 /* Add the simple ternary operators. */
586de218 9738 d = bdesc_3arg;
ca7558fc 9739 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9740 {
2212663f
DB
9741 enum machine_mode mode0, mode1, mode2, mode3;
9742 tree type;
58646b77
PB
9743 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9744 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9745
58646b77
PB
9746 if (is_overloaded)
9747 {
9748 mode0 = VOIDmode;
9749 mode1 = VOIDmode;
9750 mode2 = VOIDmode;
9751 mode3 = VOIDmode;
9752 }
9753 else
9754 {
9755 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9756 continue;
f676971a 9757
58646b77
PB
9758 mode0 = insn_data[d->icode].operand[0].mode;
9759 mode1 = insn_data[d->icode].operand[1].mode;
9760 mode2 = insn_data[d->icode].operand[2].mode;
9761 mode3 = insn_data[d->icode].operand[3].mode;
9762 }
bb8df8a6 9763
2212663f
DB
9764 /* When all four are of the same mode. */
9765 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9766 {
9767 switch (mode0)
9768 {
58646b77
PB
9769 case VOIDmode:
9770 type = opaque_ftype_opaque_opaque_opaque;
9771 break;
617e0e1d
DB
9772 case V4SImode:
9773 type = v4si_ftype_v4si_v4si_v4si;
9774 break;
2212663f
DB
9775 case V4SFmode:
9776 type = v4sf_ftype_v4sf_v4sf_v4sf;
9777 break;
9778 case V8HImode:
9779 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9780 break;
2212663f
DB
9781 case V16QImode:
9782 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9783 break;
96038623
DE
9784 case V2SFmode:
9785 type = v2sf_ftype_v2sf_v2sf_v2sf;
9786 break;
2212663f 9787 default:
37409796 9788 gcc_unreachable ();
2212663f
DB
9789 }
9790 }
9791 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 9792 {
2212663f
DB
9793 switch (mode0)
9794 {
9795 case V4SImode:
9796 type = v4si_ftype_v4si_v4si_v16qi;
9797 break;
9798 case V4SFmode:
9799 type = v4sf_ftype_v4sf_v4sf_v16qi;
9800 break;
9801 case V8HImode:
9802 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 9803 break;
2212663f
DB
9804 case V16QImode:
9805 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9806 break;
2212663f 9807 default:
37409796 9808 gcc_unreachable ();
2212663f
DB
9809 }
9810 }
f676971a 9811 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 9812 && mode3 == V4SImode)
24408032 9813 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 9814 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 9815 && mode3 == V4SImode)
24408032 9816 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 9817 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 9818 && mode3 == V4SImode)
24408032
AH
9819 type = v4sf_ftype_v4sf_v4sf_v4si;
9820
a7b376ee 9821 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
9822 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9823 && mode3 == QImode)
b9e4e5d1 9824 type = v16qi_ftype_v16qi_v16qi_int;
24408032 9825
a7b376ee 9826 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
9827 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9828 && mode3 == QImode)
b9e4e5d1 9829 type = v8hi_ftype_v8hi_v8hi_int;
24408032 9830
a7b376ee 9831 /* vint, vint, vint, 4-bit literal. */
24408032
AH
9832 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9833 && mode3 == QImode)
b9e4e5d1 9834 type = v4si_ftype_v4si_v4si_int;
24408032 9835
a7b376ee 9836 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
9837 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9838 && mode3 == QImode)
b9e4e5d1 9839 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9840
2212663f 9841 else
37409796 9842 gcc_unreachable ();
2212663f
DB
9843
9844 def_builtin (d->mask, d->name, type, d->code);
9845 }
9846
0ac081f6 9847 /* Add the simple binary operators. */
00b960c7 9848 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9849 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
9850 {
9851 enum machine_mode mode0, mode1, mode2;
9852 tree type;
58646b77
PB
9853 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9854 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9855
58646b77
PB
9856 if (is_overloaded)
9857 {
9858 mode0 = VOIDmode;
9859 mode1 = VOIDmode;
9860 mode2 = VOIDmode;
9861 }
9862 else
bb8df8a6 9863 {
58646b77
PB
9864 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9865 continue;
f676971a 9866
58646b77
PB
9867 mode0 = insn_data[d->icode].operand[0].mode;
9868 mode1 = insn_data[d->icode].operand[1].mode;
9869 mode2 = insn_data[d->icode].operand[2].mode;
9870 }
0ac081f6
AH
9871
9872 /* When all three operands are of the same mode. */
9873 if (mode0 == mode1 && mode1 == mode2)
9874 {
9875 switch (mode0)
9876 {
58646b77
PB
9877 case VOIDmode:
9878 type = opaque_ftype_opaque_opaque;
9879 break;
0ac081f6
AH
9880 case V4SFmode:
9881 type = v4sf_ftype_v4sf_v4sf;
9882 break;
9883 case V4SImode:
9884 type = v4si_ftype_v4si_v4si;
9885 break;
9886 case V16QImode:
9887 type = v16qi_ftype_v16qi_v16qi;
9888 break;
9889 case V8HImode:
9890 type = v8hi_ftype_v8hi_v8hi;
9891 break;
a3170dc6
AH
9892 case V2SImode:
9893 type = v2si_ftype_v2si_v2si;
9894 break;
96038623
DE
9895 case V2SFmode:
9896 if (TARGET_PAIRED_FLOAT)
9897 type = v2sf_ftype_v2sf_v2sf;
9898 else
9899 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
9900 break;
9901 case SImode:
9902 type = int_ftype_int_int;
9903 break;
0ac081f6 9904 default:
37409796 9905 gcc_unreachable ();
0ac081f6
AH
9906 }
9907 }
9908
9909 /* A few other combos we really don't want to do manually. */
9910
9911 /* vint, vfloat, vfloat. */
9912 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9913 type = v4si_ftype_v4sf_v4sf;
9914
9915 /* vshort, vchar, vchar. */
9916 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9917 type = v8hi_ftype_v16qi_v16qi;
9918
9919 /* vint, vshort, vshort. */
9920 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9921 type = v4si_ftype_v8hi_v8hi;
9922
9923 /* vshort, vint, vint. */
9924 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9925 type = v8hi_ftype_v4si_v4si;
9926
9927 /* vchar, vshort, vshort. */
9928 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9929 type = v16qi_ftype_v8hi_v8hi;
9930
9931 /* vint, vchar, vint. */
9932 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9933 type = v4si_ftype_v16qi_v4si;
9934
fa066a23
AH
9935 /* vint, vchar, vchar. */
9936 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9937 type = v4si_ftype_v16qi_v16qi;
9938
0ac081f6
AH
9939 /* vint, vshort, vint. */
9940 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9941 type = v4si_ftype_v8hi_v4si;
f676971a 9942
a7b376ee 9943 /* vint, vint, 5-bit literal. */
2212663f 9944 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9945 type = v4si_ftype_v4si_int;
f676971a 9946
a7b376ee 9947 /* vshort, vshort, 5-bit literal. */
2212663f 9948 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 9949 type = v8hi_ftype_v8hi_int;
f676971a 9950
a7b376ee 9951 /* vchar, vchar, 5-bit literal. */
2212663f 9952 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 9953 type = v16qi_ftype_v16qi_int;
0ac081f6 9954
a7b376ee 9955 /* vfloat, vint, 5-bit literal. */
617e0e1d 9956 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9957 type = v4sf_ftype_v4si_int;
f676971a 9958
a7b376ee 9959 /* vint, vfloat, 5-bit literal. */
617e0e1d 9960 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 9961 type = v4si_ftype_v4sf_int;
617e0e1d 9962
a3170dc6
AH
9963 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9964 type = v2si_ftype_int_int;
9965
9966 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9967 type = v2si_ftype_v2si_char;
9968
9969 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9970 type = v2si_ftype_int_char;
9971
37409796 9972 else
0ac081f6 9973 {
37409796
NS
9974 /* int, x, x. */
9975 gcc_assert (mode0 == SImode);
0ac081f6
AH
9976 switch (mode1)
9977 {
9978 case V4SImode:
9979 type = int_ftype_v4si_v4si;
9980 break;
9981 case V4SFmode:
9982 type = int_ftype_v4sf_v4sf;
9983 break;
9984 case V16QImode:
9985 type = int_ftype_v16qi_v16qi;
9986 break;
9987 case V8HImode:
9988 type = int_ftype_v8hi_v8hi;
9989 break;
9990 default:
37409796 9991 gcc_unreachable ();
0ac081f6
AH
9992 }
9993 }
9994
2212663f
DB
9995 def_builtin (d->mask, d->name, type, d->code);
9996 }
24408032 9997
2212663f
DB
9998 /* Add the simple unary operators. */
9999 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10000 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10001 {
10002 enum machine_mode mode0, mode1;
10003 tree type;
58646b77
PB
10004 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10005 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10006
10007 if (is_overloaded)
10008 {
10009 mode0 = VOIDmode;
10010 mode1 = VOIDmode;
10011 }
10012 else
10013 {
10014 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10015 continue;
bb8df8a6 10016
58646b77
PB
10017 mode0 = insn_data[d->icode].operand[0].mode;
10018 mode1 = insn_data[d->icode].operand[1].mode;
10019 }
2212663f
DB
10020
10021 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10022 type = v4si_ftype_int;
2212663f 10023 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10024 type = v8hi_ftype_int;
2212663f 10025 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10026 type = v16qi_ftype_int;
58646b77
PB
10027 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10028 type = opaque_ftype_opaque;
617e0e1d
DB
10029 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10030 type = v4sf_ftype_v4sf;
20e26713
AH
10031 else if (mode0 == V8HImode && mode1 == V16QImode)
10032 type = v8hi_ftype_v16qi;
10033 else if (mode0 == V4SImode && mode1 == V8HImode)
10034 type = v4si_ftype_v8hi;
a3170dc6
AH
10035 else if (mode0 == V2SImode && mode1 == V2SImode)
10036 type = v2si_ftype_v2si;
10037 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10038 {
10039 if (TARGET_PAIRED_FLOAT)
10040 type = v2sf_ftype_v2sf;
10041 else
10042 type = v2sf_ftype_v2sf_spe;
10043 }
a3170dc6
AH
10044 else if (mode0 == V2SFmode && mode1 == V2SImode)
10045 type = v2sf_ftype_v2si;
10046 else if (mode0 == V2SImode && mode1 == V2SFmode)
10047 type = v2si_ftype_v2sf;
10048 else if (mode0 == V2SImode && mode1 == QImode)
10049 type = v2si_ftype_char;
2212663f 10050 else
37409796 10051 gcc_unreachable ();
2212663f 10052
0ac081f6
AH
10053 def_builtin (d->mask, d->name, type, d->code);
10054 }
10055}
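The loops above derive each builtin's function type purely from the machine modes of the underlying insn's operands. A minimal stand-alone model of that lookup in plain C; the enum, table entries and helper below are invented for illustration and are not GCC's bdesc tables:

#include <stdio.h>

/* Invented miniature of a builtin description entry: the operand modes of
   the underlying insn are the only information needed to pick a type.  */
enum mode { M_V4SI, M_V4SF, M_V8HI, M_V16QI, M_SI };

struct desc { const char *name; enum mode m0, m1, m2; };

static const char *
type_for (enum mode m0, enum mode m1, enum mode m2)
{
  if (m0 == m1 && m1 == m2)            /* all operands in one mode */
    switch (m0)
      {
      case M_V4SI:  return "v4si (v4si, v4si)";
      case M_V4SF:  return "v4sf (v4sf, v4sf)";
      case M_V8HI:  return "v8hi (v8hi, v8hi)";
      case M_V16QI: return "v16qi (v16qi, v16qi)";
      default:      return "int (int, int)";
      }
  if (m0 == M_V4SI && m1 == M_V4SF && m2 == M_V4SF)
    return "v4si (v4sf, v4sf)";        /* e.g. a vector compare */
  return "int (x, x)";                 /* catch-all, like the final else */
}

int
main (void)
{
  static const struct desc bdesc[] = {
    { "two v16qi in, v16qi out", M_V16QI, M_V16QI, M_V16QI },
    { "two v4sf in, v4si out",   M_V4SI,  M_V4SF,  M_V4SF  },
  };
  unsigned int i;
  for (i = 0; i < sizeof bdesc / sizeof bdesc[0]; i++)
    printf ("%s -> %s\n", bdesc[i].name,
            type_for (bdesc[i].m0, bdesc[i].m1, bdesc[i].m2));
  return 0;
}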
10056
c15c90bb
ZW
10057static void
10058rs6000_init_libfuncs (void)
10059{
602ea4d3
JJ
10060 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10061 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10062 {
602ea4d3
JJ
10063 /* AIX library routines for float->int conversion. */
10064 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10065 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10066 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10067 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10068 }
c15c90bb 10069
602ea4d3 10070 if (!TARGET_IEEEQUAD)
98c41d98 10071 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10072 if (!TARGET_XL_COMPAT)
10073 {
10074 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10075 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10076 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10077 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10078
17caeff2 10079 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10080 {
10081 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10082 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10083 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10084 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10085 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10086 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10087 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10088
10089 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10090 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10091 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10092 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10093 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10094 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10095 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10096 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10097 }
b26941b4
JM
10098
10099 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10100 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10101 }
10102 else
10103 {
10104 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10105 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10106 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10107 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10108 }
c9034561 10109 else
c15c90bb 10110 {
c9034561 10111 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10112
10113 set_optab_libfunc (add_optab, TFmode, "_q_add");
10114 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10115 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10116 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10117 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10118 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10119 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10120
c9034561
ZW
10121 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10122 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10123 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10124 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10125 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10126 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10127
85363ca0
ZW
10128 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10129 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10130 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10131 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10132 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10133 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10134 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10135 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10136 }
10137}
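The practical effect of these set_optab_libfunc/set_conv_libfunc calls is that 128-bit long double operations with no machine instruction are emitted as calls to the named routines. An illustration in ordinary user C, assuming a configuration where TFmode is the 128-bit IBM long double and TARGET_XL_COMPAT is off; add_ld and ge_ld are invented sample functions, not part of GCC:

/* Illustration only: on such a configuration the addition below is emitted
   as a call to "__gcc_qadd" (per the set_optab_libfunc call above) rather
   than as inline code.  */
long double
add_ld (long double a, long double b)
{
  return a + b;          /* lowers to __gcc_qadd (a, b) */
}

/* Comparisons the hardware cannot do directly go through the
   corresponding libfuncs as well, e.g. "__gcc_qge" for >=.  */
int
ge_ld (long double a, long double b)
{
  return a >= b;         /* lowers to a call to __gcc_qge */
}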
fba73eb1
DE
10138
10139\f
10140/* Expand a block clear operation, and return 1 if successful. Return 0
10141 if we should let the compiler generate normal code.
10142
10143 operands[0] is the destination
10144 operands[1] is the length
57e84f18 10145 operands[3] is the alignment */
fba73eb1
DE
10146
10147int
10148expand_block_clear (rtx operands[])
10149{
10150 rtx orig_dest = operands[0];
10151 rtx bytes_rtx = operands[1];
57e84f18 10152 rtx align_rtx = operands[3];
5514620a
GK
10153 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10154 HOST_WIDE_INT align;
10155 HOST_WIDE_INT bytes;
fba73eb1
DE
10156 int offset;
10157 int clear_bytes;
5514620a 10158 int clear_step;
fba73eb1
DE
10159
10160 /* If this is not a fixed size clear, just call memset */
10161 if (! constp)
10162 return 0;
10163
37409796
NS
10164 /* This must be a fixed size alignment */
10165 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10166 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10167
10168 /* Anything to clear? */
10169 bytes = INTVAL (bytes_rtx);
10170 if (bytes <= 0)
10171 return 1;
10172
5514620a
GK
10173 /* Use the builtin memset after a point, to avoid huge code bloat.
10174 When optimize_size, avoid any significant code bloat; calling
10175 memset is about 4 instructions, so allow for one instruction to
10176 load zero and three to do clearing. */
10177 if (TARGET_ALTIVEC && align >= 128)
10178 clear_step = 16;
10179 else if (TARGET_POWERPC64 && align >= 32)
10180 clear_step = 8;
21d818ff
NF
10181 else if (TARGET_SPE && align >= 64)
10182 clear_step = 8;
5514620a
GK
10183 else
10184 clear_step = 4;
fba73eb1 10185
5514620a
GK
10186 if (optimize_size && bytes > 3 * clear_step)
10187 return 0;
10188 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10189 return 0;
10190
10191 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10192 {
fba73eb1
DE
10193 enum machine_mode mode = BLKmode;
10194 rtx dest;
f676971a 10195
5514620a
GK
10196 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10197 {
10198 clear_bytes = 16;
10199 mode = V4SImode;
10200 }
21d818ff
NF
10201 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10202 {
10203 clear_bytes = 8;
10204 mode = V2SImode;
10205 }
5514620a 10206 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10207 /* 64-bit loads and stores require word-aligned
10208 displacements. */
10209 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10210 {
10211 clear_bytes = 8;
10212 mode = DImode;
fba73eb1 10213 }
5514620a 10214 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10215 { /* clear 4 bytes */
10216 clear_bytes = 4;
10217 mode = SImode;
fba73eb1 10218 }
ec53fc93 10219 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10220 { /* clear 2 bytes */
10221 clear_bytes = 2;
10222 mode = HImode;
fba73eb1
DE
10223 }
10224 else /* clear 1 byte at a time */
10225 {
10226 clear_bytes = 1;
10227 mode = QImode;
fba73eb1 10228 }
f676971a 10229
fba73eb1 10230 dest = adjust_address (orig_dest, mode, offset);
f676971a 10231
5514620a 10232 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10233 }
10234
10235 return 1;
10236}
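The loop above always emits the widest zero store that the remaining size and alignment permit, then advances. A stand-alone sketch of that chunking decision in plain C (invented helper name; the STRICT_ALIGNMENT relaxation and the SPE case are left out):

#include <stdio.h>

/* Widest store, in bytes, the residual size and alignment (in bits) allow;
   mirrors the mode selection in expand_block_clear.  */
static int
clear_chunk (int bytes, int align, int use_altivec, int use_64bit)
{
  if (bytes >= 16 && use_altivec && align >= 128)
    return 16;                               /* V4SImode vector store */
  if (bytes >= 8 && use_64bit && align >= 64)
    return 8;                                /* DImode store */
  if (bytes >= 4 && align >= 32)
    return 4;                                /* SImode store */
  if (bytes >= 2 && align >= 16)
    return 2;                                /* HImode store */
  return 1;                                  /* QImode store */
}

int
main (void)
{
  int bytes = 25, align = 32, offset = 0;
  while (bytes > 0)
    {
      int n = clear_chunk (bytes, align, 0, 0);
      printf ("store %d zero byte(s) at offset %d\n", n, offset);
      offset += n;
      bytes -= n;
    }
  return 0;
}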
10237
35aff10b 10238\f
7e69e155
MM
10239/* Expand a block move operation, and return 1 if successful. Return 0
10240 if we should let the compiler generate normal code.
10241
10242 operands[0] is the destination
10243 operands[1] is the source
10244 operands[2] is the length
10245 operands[3] is the alignment */
10246
3933e0e1
MM
10247#define MAX_MOVE_REG 4
10248
7e69e155 10249int
a2369ed3 10250expand_block_move (rtx operands[])
7e69e155 10251{
b6c9286a
MM
10252 rtx orig_dest = operands[0];
10253 rtx orig_src = operands[1];
7e69e155 10254 rtx bytes_rtx = operands[2];
7e69e155 10255 rtx align_rtx = operands[3];
3933e0e1 10256 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10257 int align;
3933e0e1
MM
10258 int bytes;
10259 int offset;
7e69e155 10260 int move_bytes;
cabfd258
GK
10261 rtx stores[MAX_MOVE_REG];
10262 int num_reg = 0;
7e69e155 10263
3933e0e1 10264 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10265 if (! constp)
3933e0e1
MM
10266 return 0;
10267
37409796
NS
10268 /* This must be a fixed size alignment */
10269 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10270 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10271
7e69e155 10272 /* Anything to move? */
3933e0e1
MM
10273 bytes = INTVAL (bytes_rtx);
10274 if (bytes <= 0)
7e69e155
MM
10275 return 1;
10276
ea9982a8 10277 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10278 reg_parm_stack_space. */
ea9982a8 10279 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10280 return 0;
10281
cabfd258 10282 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10283 {
cabfd258 10284 union {
70128ad9 10285 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10286 rtx (*mov) (rtx, rtx);
cabfd258
GK
10287 } gen_func;
10288 enum machine_mode mode = BLKmode;
10289 rtx src, dest;
f676971a 10290
5514620a
GK
10291 /* Altivec first, since it will be faster than a string move
10292 when it applies, and usually not significantly larger. */
10293 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10294 {
10295 move_bytes = 16;
10296 mode = V4SImode;
10297 gen_func.mov = gen_movv4si;
10298 }
21d818ff
NF
10299 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10300 {
10301 move_bytes = 8;
10302 mode = V2SImode;
10303 gen_func.mov = gen_movv2si;
10304 }
5514620a 10305 else if (TARGET_STRING
cabfd258
GK
10306 && bytes > 24 /* move up to 32 bytes at a time */
10307 && ! fixed_regs[5]
10308 && ! fixed_regs[6]
10309 && ! fixed_regs[7]
10310 && ! fixed_regs[8]
10311 && ! fixed_regs[9]
10312 && ! fixed_regs[10]
10313 && ! fixed_regs[11]
10314 && ! fixed_regs[12])
7e69e155 10315 {
cabfd258 10316 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10317 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10318 }
10319 else if (TARGET_STRING
10320 && bytes > 16 /* move up to 24 bytes at a time */
10321 && ! fixed_regs[5]
10322 && ! fixed_regs[6]
10323 && ! fixed_regs[7]
10324 && ! fixed_regs[8]
10325 && ! fixed_regs[9]
10326 && ! fixed_regs[10])
10327 {
10328 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10329 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10330 }
10331 else if (TARGET_STRING
10332 && bytes > 8 /* move up to 16 bytes at a time */
10333 && ! fixed_regs[5]
10334 && ! fixed_regs[6]
10335 && ! fixed_regs[7]
10336 && ! fixed_regs[8])
10337 {
10338 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10339 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10340 }
10341 else if (bytes >= 8 && TARGET_POWERPC64
10342 /* 64-bit loads and stores require word-aligned
10343 displacements. */
fba73eb1 10344 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10345 {
10346 move_bytes = 8;
10347 mode = DImode;
10348 gen_func.mov = gen_movdi;
10349 }
10350 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10351 { /* move up to 8 bytes at a time */
10352 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10353 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10354 }
cd7d9ca4 10355 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10356 { /* move 4 bytes */
10357 move_bytes = 4;
10358 mode = SImode;
10359 gen_func.mov = gen_movsi;
10360 }
ec53fc93 10361 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10362 { /* move 2 bytes */
10363 move_bytes = 2;
10364 mode = HImode;
10365 gen_func.mov = gen_movhi;
10366 }
10367 else if (TARGET_STRING && bytes > 1)
10368 { /* move up to 4 bytes at a time */
10369 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10370 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10371 }
10372 else /* move 1 byte at a time */
10373 {
10374 move_bytes = 1;
10375 mode = QImode;
10376 gen_func.mov = gen_movqi;
10377 }
f676971a 10378
cabfd258
GK
10379 src = adjust_address (orig_src, mode, offset);
10380 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10381
10382 if (mode != BLKmode)
cabfd258
GK
10383 {
10384 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10385
cabfd258
GK
10386 emit_insn ((*gen_func.mov) (tmp_reg, src));
10387 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10388 }
3933e0e1 10389
cabfd258
GK
10390 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10391 {
10392 int i;
10393 for (i = 0; i < num_reg; i++)
10394 emit_insn (stores[i]);
10395 num_reg = 0;
10396 }
35aff10b 10397
cabfd258 10398 if (mode == BLKmode)
7e69e155 10399 {
70128ad9 10400 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10401 patterns require zero offset. */
10402 if (!REG_P (XEXP (src, 0)))
b6c9286a 10403 {
cabfd258
GK
10404 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10405 src = replace_equiv_address (src, src_reg);
b6c9286a 10406 }
cabfd258 10407 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10408
cabfd258 10409 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10410 {
cabfd258
GK
10411 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10412 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10413 }
cabfd258 10414 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10415
70128ad9 10416 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10417 GEN_INT (move_bytes & 31),
10418 align_rtx));
7e69e155 10419 }
7e69e155
MM
10420 }
10421
10422 return 1;
10423}
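One detail worth noting above is the stores[] buffer: up to MAX_MOVE_REG loads are emitted before their paired stores are flushed, so consecutive loads are not serialized behind each store. A rough model of that batching in plain C (illustration only, not GCC code):

#include <assert.h>
#include <stddef.h>

#define MAX_MOVE_REG 4

/* Copy NWORDS words, issuing a small batch of loads into temporaries
   before the matching stores, as expand_block_move does with stores[].  */
static void
copy_words (unsigned int *dst, const unsigned int *src, size_t nwords)
{
  unsigned int pending[MAX_MOVE_REG];
  size_t i = 0;

  while (i < nwords)
    {
      size_t batch = nwords - i < MAX_MOVE_REG ? nwords - i : MAX_MOVE_REG;
      size_t j;

      for (j = 0; j < batch; j++)      /* the "loads" */
        pending[j] = src[i + j];
      for (j = 0; j < batch; j++)      /* the deferred "stores" */
        dst[i + j] = pending[j];
      i += batch;
    }
}

int
main (void)
{
  unsigned int src[7] = { 1, 2, 3, 4, 5, 6, 7 }, dst[7] = { 0 };
  copy_words (dst, src, 7);
  assert (dst[0] == 1 && dst[6] == 7);
  return 0;
}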
10424
d62294f5 10425\f
9caa3eb2
DE
10426/* Return a string to perform a load_multiple operation.
10427 operands[0] is the vector.
10428 operands[1] is the source address.
10429 operands[2] is the first destination register. */
10430
10431const char *
a2369ed3 10432rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10433{
10434 /* We have to handle the case where the pseudo used to contain the address
10435 is assigned to one of the output registers. */
10436 int i, j;
10437 int words = XVECLEN (operands[0], 0);
10438 rtx xop[10];
10439
10440 if (XVECLEN (operands[0], 0) == 1)
10441 return "{l|lwz} %2,0(%1)";
10442
10443 for (i = 0; i < words; i++)
10444 if (refers_to_regno_p (REGNO (operands[2]) + i,
10445 REGNO (operands[2]) + i + 1, operands[1], 0))
10446 {
10447 if (i == words-1)
10448 {
10449 xop[0] = GEN_INT (4 * (words-1));
10450 xop[1] = operands[1];
10451 xop[2] = operands[2];
10452 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10453 return "";
10454 }
10455 else if (i == 0)
10456 {
10457 xop[0] = GEN_INT (4 * (words-1));
10458 xop[1] = operands[1];
10459 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10460 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10461 return "";
10462 }
10463 else
10464 {
10465 for (j = 0; j < words; j++)
10466 if (j != i)
10467 {
10468 xop[0] = GEN_INT (j * 4);
10469 xop[1] = operands[1];
10470 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10471 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10472 }
10473 xop[0] = GEN_INT (i * 4);
10474 xop[1] = operands[1];
10475 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10476 return "";
10477 }
10478 }
10479
10480 return "{lsi|lswi} %2,%1,%N0";
10481}
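The subtle case above is when the address register is itself one of the destination registers: the routine orders the loads so that register is overwritten last. A small descriptive model of that choice (plain C with invented strings; the real routine emits assembly):

#include <stdio.h>

/* Which load sequence rs6000_output_load_multiple would pick when the
   address register is destination word OVERLAP (-1 when there is no
   overlap).  */
static const char *
strategy (int words, int overlap)
{
  if (overlap < 0)
    return "one string load (lswi) of all the words";
  if (overlap == words - 1)
    return "lswi the first words-1 words, then lwz the last word (the address)";
  if (overlap == 0)
    return "addi the address by 4, lswi the remaining words, then lwz word 0 from -4";
  return "lwz each word separately, loading the overlapping word into the address last";
}

int
main (void)
{
  printf ("%s\n", strategy (4, -1));
  printf ("%s\n", strategy (4, 3));
  printf ("%s\n", strategy (4, 0));
  printf ("%s\n", strategy (4, 2));
  return 0;
}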
10482
9878760c 10483\f
a4f6c312
SS
10484/* A validation routine: say whether CODE, a condition code, and MODE
10485 match. The other alternatives either don't make sense or should
10486 never be generated. */
39a10a29 10487
48d72335 10488void
a2369ed3 10489validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10490{
37409796
NS
10491 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10492 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10493 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10494
10495 /* These don't make sense. */
37409796
NS
10496 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10497 || mode != CCUNSmode);
39a10a29 10498
37409796
NS
10499 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10500 || mode == CCUNSmode);
39a10a29 10501
37409796
NS
10502 gcc_assert (mode == CCFPmode
10503 || (code != ORDERED && code != UNORDERED
10504 && code != UNEQ && code != LTGT
10505 && code != UNGT && code != UNLT
10506 && code != UNGE && code != UNLE));
f676971a
EC
10507
10508 /* These should never be generated except for
bc9ec0e0 10509 flag_finite_math_only. */
37409796
NS
10510 gcc_assert (mode != CCFPmode
10511 || flag_finite_math_only
10512 || (code != LE && code != GE
10513 && code != UNEQ && code != LTGT
10514 && code != UNGT && code != UNLT));
39a10a29
GK
10515
10516 /* These are invalid; the information is not there. */
37409796 10517 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10518}
10519
9878760c
RK
10520\f
10521/* Return 1 if ANDOP is a mask with no bits set outside the mask needed
10522 to convert the result of a rotate insn into a left shift insn of
10523 SHIFTOP bits. Both are known to be SImode CONST_INTs. */
9878760c
RK
10524
10525int
a2369ed3 10526includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10527{
e2c953b6
DE
10528 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10529
10530 shift_mask <<= INTVAL (shiftop);
9878760c 10531
b1765bde 10532 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10533}
10534
10535/* Similar, but for right shift. */
10536
10537int
a2369ed3 10538includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10539{
a7653a2c 10540 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10541
10542 shift_mask >>= INTVAL (shiftop);
10543
b1765bde 10544 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10545}
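Both predicates reduce to one mask test: the AND may keep only bits that the corresponding plain shift would produce anyway, so the rotate-and-mask can be replaced by the shift. A stand-alone version of the left-shift case (plain C, not GCC code):

#include <assert.h>

/* Nonzero if (x rotated left by SHIFT) & ANDOP equals a plain left shift:
   ANDOP may keep no bits below bit SHIFT, since only those differ between
   the rotate and the shift.  Mirrors includes_lshift_p for 32-bit values;
   includes_rshift_p is the same idea mirrored.  */
static int
fits_lshift (unsigned int andop, int shift)
{
  unsigned int shift_mask = ~0u << shift;
  return (andop & ~shift_mask) == 0;
}

int
main (void)
{
  assert (fits_lshift (0xffffff00u, 8));   /* rotate+and acts like slwi 8 */
  assert (!fits_lshift (0xffffff01u, 8));  /* bit 0 survives, so it does not */
  return 0;
}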
10546
c5059423
AM
10547/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10548 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10549 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10550
10551int
a2369ed3 10552includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10553{
c5059423
AM
10554 if (GET_CODE (andop) == CONST_INT)
10555 {
02071907 10556 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10557
c5059423 10558 c = INTVAL (andop);
02071907 10559 if (c == 0 || c == ~0)
c5059423 10560 return 0;
e2c953b6 10561
02071907 10562 shift_mask = ~0;
c5059423
AM
10563 shift_mask <<= INTVAL (shiftop);
10564
b6d08ca1 10565 /* Find the least significant one bit. */
c5059423
AM
10566 lsb = c & -c;
10567
10568 /* It must coincide with the LSB of the shift mask. */
10569 if (-lsb != shift_mask)
10570 return 0;
e2c953b6 10571
c5059423
AM
10572 /* Invert to look for the next transition (if any). */
10573 c = ~c;
10574
10575 /* Remove the low group of ones (originally low group of zeros). */
10576 c &= -lsb;
10577
10578 /* Again find the lsb, and check we have all 1's above. */
10579 lsb = c & -c;
10580 return c == -lsb;
10581 }
10582 else if (GET_CODE (andop) == CONST_DOUBLE
10583 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10584 {
02071907
AM
10585 HOST_WIDE_INT low, high, lsb;
10586 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10587
10588 low = CONST_DOUBLE_LOW (andop);
10589 if (HOST_BITS_PER_WIDE_INT < 64)
10590 high = CONST_DOUBLE_HIGH (andop);
10591
10592 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10593 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10594 return 0;
10595
10596 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10597 {
02071907 10598 shift_mask_high = ~0;
c5059423
AM
10599 if (INTVAL (shiftop) > 32)
10600 shift_mask_high <<= INTVAL (shiftop) - 32;
10601
10602 lsb = high & -high;
10603
10604 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10605 return 0;
10606
10607 high = ~high;
10608 high &= -lsb;
10609
10610 lsb = high & -high;
10611 return high == -lsb;
10612 }
10613
02071907 10614 shift_mask_low = ~0;
c5059423
AM
10615 shift_mask_low <<= INTVAL (shiftop);
10616
10617 lsb = low & -low;
10618
10619 if (-lsb != shift_mask_low)
10620 return 0;
10621
10622 if (HOST_BITS_PER_WIDE_INT < 64)
10623 high = ~high;
10624 low = ~low;
10625 low &= -lsb;
10626
10627 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10628 {
10629 lsb = high & -high;
10630 return high == -lsb;
10631 }
10632
10633 lsb = low & -low;
10634 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10635 }
10636 else
10637 return 0;
10638}
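The CONST_INT arm above relies on the c & -c trick to isolate the lowest set bit and then verifies that the mask is one contiguous run of 1's starting exactly at bit SHIFTOP. A stand-alone version of that check over 64-bit unsigned values (plain C, not GCC code):

#include <assert.h>

typedef unsigned long long uhwi;

/* Nonzero if C is a single contiguous run of 1's whose lowest bit is
   exactly bit SHIFT, i.e. a mask rldic can create while shifting left by
   SHIFT.  Mirrors the CONST_INT arm of includes_rldic_lshift_p.  */
static int
rldic_mask_p (uhwi c, int shift)
{
  uhwi shift_mask = ~0ULL << shift;
  uhwi lsb;

  if (c == 0 || c == ~0ULL)
    return 0;
  lsb = c & -c;                 /* isolate the lowest set bit */
  if (-lsb != shift_mask)       /* the run must start exactly at bit SHIFT */
    return 0;
  c = ~c;                       /* now check the top end of the run */
  c &= -lsb;                    /* drop the low zeros already verified */
  lsb = c & -c;
  return c == -lsb;             /* mask had nothing but 0's above the run */
}

int
main (void)
{
  assert (rldic_mask_p (0x000000000000ff00ULL, 8));   /* 8 zeros, 8 ones */
  assert (!rldic_mask_p (0x000000000000ff00ULL, 4));  /* run starts too high */
  assert (!rldic_mask_p (0x000000000000f0f0ULL, 4));  /* two separate runs */
  return 0;
}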
e2c953b6 10639
c5059423
AM
10640/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10641 to perform a left shift. It must have SHIFTOP or more least
c1207243 10642 significant 0's, with the remainder of the word 1's. */
e2c953b6 10643
c5059423 10644int
a2369ed3 10645includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10646{
e2c953b6 10647 if (GET_CODE (andop) == CONST_INT)
c5059423 10648 {
02071907 10649 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10650
02071907 10651 shift_mask = ~0;
c5059423
AM
10652 shift_mask <<= INTVAL (shiftop);
10653 c = INTVAL (andop);
10654
c1207243 10655 /* Find the least significant one bit. */
c5059423
AM
10656 lsb = c & -c;
10657
10658 /* It must be covered by the shift mask.
a4f6c312 10659 This test also rejects c == 0. */
c5059423
AM
10660 if ((lsb & shift_mask) == 0)
10661 return 0;
10662
10663 /* Check we have all 1's above the transition, and reject all 1's. */
10664 return c == -lsb && lsb != 1;
10665 }
10666 else if (GET_CODE (andop) == CONST_DOUBLE
10667 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10668 {
02071907 10669 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10670
10671 low = CONST_DOUBLE_LOW (andop);
10672
10673 if (HOST_BITS_PER_WIDE_INT < 64)
10674 {
02071907 10675 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10676
10677 high = CONST_DOUBLE_HIGH (andop);
10678
10679 if (low == 0)
10680 {
02071907 10681 shift_mask_high = ~0;
c5059423
AM
10682 if (INTVAL (shiftop) > 32)
10683 shift_mask_high <<= INTVAL (shiftop) - 32;
10684
10685 lsb = high & -high;
10686
10687 if ((lsb & shift_mask_high) == 0)
10688 return 0;
10689
10690 return high == -lsb;
10691 }
10692 if (high != ~0)
10693 return 0;
10694 }
10695
02071907 10696 shift_mask_low = ~0;
c5059423
AM
10697 shift_mask_low <<= INTVAL (shiftop);
10698
10699 lsb = low & -low;
10700
10701 if ((lsb & shift_mask_low) == 0)
10702 return 0;
10703
10704 return low == -lsb && lsb != 1;
10705 }
e2c953b6 10706 else
c5059423 10707 return 0;
9878760c 10708}
35068b43 10709
11ac38b2
DE
10710/* Return 1 if the operands will generate valid arguments to an rlwimi
10711instruction for an insert with right shift in 64-bit mode. The mask may
10712not start on the first bit or stop on the last bit because the wrap-around
10713effects of the instruction do not correspond to the semantics of the RTL insn. */
10714
10715int
10716insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10717{
429ec7dc
DE
10718 if (INTVAL (startop) > 32
10719 && INTVAL (startop) < 64
10720 && INTVAL (sizeop) > 1
10721 && INTVAL (sizeop) + INTVAL (startop) < 64
10722 && INTVAL (shiftop) > 0
10723 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10724 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10725 return 1;
10726
10727 return 0;
10728}
10729
35068b43 10730/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10731 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10732
10733int
a2369ed3 10734registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10735{
10736 /* We might have been passed a SUBREG. */
f676971a 10737 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10738 return 0;
f676971a 10739
90f81f99
AP
10740 /* We might have been passed non floating point registers. */
10741 if (!FP_REGNO_P (REGNO (reg1))
10742 || !FP_REGNO_P (REGNO (reg2)))
10743 return 0;
35068b43
RK
10744
10745 return (REGNO (reg1) == REGNO (reg2) - 1);
10746}
10747
a4f6c312
SS
10748/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10749 addr1 and addr2 must be in consecutive memory locations
10750 (addr2 == addr1 + 8). */
35068b43
RK
10751
10752int
90f81f99 10753mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10754{
90f81f99 10755 rtx addr1, addr2;
bb8df8a6
EC
10756 unsigned int reg1, reg2;
10757 int offset1, offset2;
35068b43 10758
90f81f99
AP
10759 /* The mems cannot be volatile. */
10760 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10761 return 0;
f676971a 10762
90f81f99
AP
10763 addr1 = XEXP (mem1, 0);
10764 addr2 = XEXP (mem2, 0);
10765
35068b43
RK
10766 /* Extract an offset (if used) from the first addr. */
10767 if (GET_CODE (addr1) == PLUS)
10768 {
10769 /* If not a REG, return zero. */
10770 if (GET_CODE (XEXP (addr1, 0)) != REG)
10771 return 0;
10772 else
10773 {
c4ad648e 10774 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10775 /* The offset must be constant! */
10776 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10777 return 0;
10778 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10779 }
10780 }
10781 else if (GET_CODE (addr1) != REG)
10782 return 0;
10783 else
10784 {
10785 reg1 = REGNO (addr1);
10786 /* This was a simple (mem (reg)) expression. Offset is 0. */
10787 offset1 = 0;
10788 }
10789
bb8df8a6
EC
10790 /* And now for the second addr. */
10791 if (GET_CODE (addr2) == PLUS)
10792 {
10793 /* If not a REG, return zero. */
10794 if (GET_CODE (XEXP (addr2, 0)) != REG)
10795 return 0;
10796 else
10797 {
10798 reg2 = REGNO (XEXP (addr2, 0));
10799 /* The offset must be constant. */
10800 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
10801 return 0;
10802 offset2 = INTVAL (XEXP (addr2, 1));
10803 }
10804 }
10805 else if (GET_CODE (addr2) != REG)
35068b43 10806 return 0;
bb8df8a6
EC
10807 else
10808 {
10809 reg2 = REGNO (addr2);
10810 /* This was a simple (mem (reg)) expression. Offset is 0. */
10811 offset2 = 0;
10812 }
35068b43 10813
bb8df8a6
EC
10814 /* Both of these must have the same base register. */
10815 if (reg1 != reg2)
35068b43
RK
10816 return 0;
10817
10818 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 10819 if (offset2 != offset1 + 8)
35068b43
RK
10820 return 0;
10821
10822 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
10823 instructions. */
10824 return 1;
10825}
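All the peephole needs is that both addresses use the same base register and that the second offset is exactly the first plus 8. A trivial model of that test (plain C with an invented struct, not GCC code):

#include <assert.h>

struct addr { int base_reg; long offset; };

/* Nonzero if two reg+offset addresses qualify for an lfq/stfq pair.  */
static int
quad_peep_ok (struct addr a1, struct addr a2)
{
  return a1.base_reg == a2.base_reg && a2.offset == a1.offset + 8;
}

int
main (void)
{
  struct addr lo = { 31, 16 }, hi = { 31, 24 }, bad = { 30, 24 };
  assert (quad_peep_ok (lo, hi));
  assert (!quad_peep_ok (lo, bad));
  return 0;
}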
9878760c
RK
10826\f
10827/* Return the register class of a scratch register needed to copy IN into
10828 or out of a register in CLASS in MODE. If it can be done directly,
10829 NO_REGS is returned. */
10830
10831enum reg_class
3c4774e0
R
10832rs6000_secondary_reload_class (enum reg_class class,
10833 enum machine_mode mode ATTRIBUTE_UNUSED,
10834 rtx in)
9878760c 10835{
5accd822 10836 int regno;
9878760c 10837
ab82a49f
AP
10838 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10839#if TARGET_MACHO
c4ad648e 10840 && MACHOPIC_INDIRECT
ab82a49f 10841#endif
c4ad648e 10842 ))
46fad5b7
DJ
10843 {
10844 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
10845 other than BASE_REGS for TARGET_ELF. So indicate that a
10846 register from BASE_REGS is needed as an intermediate
10847 register.
f676971a 10848
46fad5b7
DJ
10849 On Darwin, pic addresses require a load from memory, which
10850 needs a base register. */
10851 if (class != BASE_REGS
c4ad648e
AM
10852 && (GET_CODE (in) == SYMBOL_REF
10853 || GET_CODE (in) == HIGH
10854 || GET_CODE (in) == LABEL_REF
10855 || GET_CODE (in) == CONST))
10856 return BASE_REGS;
46fad5b7 10857 }
e7b7998a 10858
5accd822
DE
10859 if (GET_CODE (in) == REG)
10860 {
10861 regno = REGNO (in);
10862 if (regno >= FIRST_PSEUDO_REGISTER)
10863 {
10864 regno = true_regnum (in);
10865 if (regno >= FIRST_PSEUDO_REGISTER)
10866 regno = -1;
10867 }
10868 }
10869 else if (GET_CODE (in) == SUBREG)
10870 {
10871 regno = true_regnum (in);
10872 if (regno >= FIRST_PSEUDO_REGISTER)
10873 regno = -1;
10874 }
10875 else
10876 regno = -1;
10877
9878760c
RK
10878 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10879 into anything. */
10880 if (class == GENERAL_REGS || class == BASE_REGS
10881 || (regno >= 0 && INT_REGNO_P (regno)))
10882 return NO_REGS;
10883
10884 /* Constants, memory, and FP registers can go into FP registers. */
10885 if ((regno == -1 || FP_REGNO_P (regno))
10886 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10887 return NO_REGS;
10888
0ac081f6
AH
10889 /* Memory, and AltiVec registers can go into AltiVec registers. */
10890 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10891 && class == ALTIVEC_REGS)
10892 return NO_REGS;
10893
9878760c
RK
10894 /* We can copy among the CR registers. */
10895 if ((class == CR_REGS || class == CR0_REGS)
10896 && regno >= 0 && CR_REGNO_P (regno))
10897 return NO_REGS;
10898
10899 /* Otherwise, we need GENERAL_REGS. */
10900 return GENERAL_REGS;
10901}
10902\f
10903/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 10904 know this is a valid comparison.
9878760c
RK
10905
10906 SCC_P is 1 if this is for an scc. That means that %D will have been
10907 used instead of %C, so the bits will be in different places.
10908
b4ac57ab 10909 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
10910
10911int
a2369ed3 10912ccr_bit (rtx op, int scc_p)
9878760c
RK
10913{
10914 enum rtx_code code = GET_CODE (op);
10915 enum machine_mode cc_mode;
10916 int cc_regnum;
10917 int base_bit;
9ebbca7d 10918 rtx reg;
9878760c 10919
ec8e098d 10920 if (!COMPARISON_P (op))
9878760c
RK
10921 return -1;
10922
9ebbca7d
GK
10923 reg = XEXP (op, 0);
10924
37409796 10925 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
10926
10927 cc_mode = GET_MODE (reg);
10928 cc_regnum = REGNO (reg);
10929 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 10930
39a10a29 10931 validate_condition_mode (code, cc_mode);
c5defebb 10932
b7053a3f
GK
10933 /* When generating a sCOND operation, only positive conditions are
10934 allowed. */
37409796
NS
10935 gcc_assert (!scc_p
10936 || code == EQ || code == GT || code == LT || code == UNORDERED
10937 || code == GTU || code == LTU);
f676971a 10938
9878760c
RK
10939 switch (code)
10940 {
10941 case NE:
10942 return scc_p ? base_bit + 3 : base_bit + 2;
10943 case EQ:
10944 return base_bit + 2;
1c882ea4 10945 case GT: case GTU: case UNLE:
9878760c 10946 return base_bit + 1;
1c882ea4 10947 case LT: case LTU: case UNGE:
9878760c 10948 return base_bit;
1c882ea4
GK
10949 case ORDERED: case UNORDERED:
10950 return base_bit + 3;
9878760c
RK
10951
10952 case GE: case GEU:
39a10a29 10953 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
10954 unordered position. So test that bit. For integer, this is ! LT
10955 unless this is an scc insn. */
39a10a29 10956 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
10957
10958 case LE: case LEU:
39a10a29 10959 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 10960
9878760c 10961 default:
37409796 10962 gcc_unreachable ();
9878760c
RK
10963 }
10964}
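For reference, CR field N occupies bits 4*N through 4*N+3 of the condition register, and within a field the order is LT, GT, EQ, SO/unordered; ccr_bit returns the field's base bit plus the offset the comparison tests (shifted by one for scc because of the later rotate). A tiny illustration (plain C, not GCC code):

#include <stdio.h>

/* Bit number within the CR: field N starts at bit 4*N; within a field
   the bits are 0 = LT, 1 = GT, 2 = EQ, 3 = SO/unordered.  */
static int
cr_bit (int cr_field, int bit_in_field)
{
  return 4 * cr_field + bit_in_field;
}

int
main (void)
{
  printf ("EQ bit of cr0 = %d\n", cr_bit (0, 2));   /* 2  */
  printf ("GT bit of cr2 = %d\n", cr_bit (2, 1));   /* 9  */
  printf ("SO bit of cr7 = %d\n", cr_bit (7, 3));   /* 31 */
  return 0;
}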
1ff7789b 10965\f
8d30c4ee 10966/* Return the GOT register. */
1ff7789b 10967
9390387d 10968rtx
a2369ed3 10969rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 10970{
a4f6c312
SS
10971 /* The second flow pass currently (June 1999) can't update
10972 regs_ever_live without disturbing other parts of the compiler, so
10973 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
10974 if (!can_create_pseudo_p ()
10975 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 10976 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 10977
8d30c4ee 10978 current_function_uses_pic_offset_table = 1;
3cb999d8 10979
1ff7789b
MM
10980 return pic_offset_table_rtx;
10981}
a7df97e6 10982\f
e2500fed
GK
10983/* Function to init struct machine_function.
10984 This will be called, via a pointer variable,
10985 from push_function_context. */
a7df97e6 10986
e2500fed 10987static struct machine_function *
863d938c 10988rs6000_init_machine_status (void)
a7df97e6 10989{
e2500fed 10990 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 10991}
9878760c 10992\f
0ba1b2ff
AM
10993/* These macros test for integers and extract the low-order bits. */
10994#define INT_P(X) \
10995((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
10996 && GET_MODE (X) == VOIDmode)
10997
10998#define INT_LOWPART(X) \
10999 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11000
11001int
a2369ed3 11002extract_MB (rtx op)
0ba1b2ff
AM
11003{
11004 int i;
11005 unsigned long val = INT_LOWPART (op);
11006
11007 /* If the high bit is zero, the value is the first 1 bit we find
11008 from the left. */
11009 if ((val & 0x80000000) == 0)
11010 {
37409796 11011 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11012
11013 i = 1;
11014 while (((val <<= 1) & 0x80000000) == 0)
11015 ++i;
11016 return i;
11017 }
11018
11019 /* If the high bit is set and the low bit is not, or the mask is all
11020 1's, the value is zero. */
11021 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11022 return 0;
11023
11024 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11025 from the right. */
11026 i = 31;
11027 while (((val >>= 1) & 1) != 0)
11028 --i;
11029
11030 return i;
11031}
11032
11033int
a2369ed3 11034extract_ME (rtx op)
0ba1b2ff
AM
11035{
11036 int i;
11037 unsigned long val = INT_LOWPART (op);
11038
11039 /* If the low bit is zero, the value is the first 1 bit we find from
11040 the right. */
11041 if ((val & 1) == 0)
11042 {
37409796 11043 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11044
11045 i = 30;
11046 while (((val >>= 1) & 1) == 0)
11047 --i;
11048
11049 return i;
11050 }
11051
11052 /* If the low bit is set and the high bit is not, or the mask is all
11053 1's, the value is 31. */
11054 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11055 return 31;
11056
11057 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11058 from the left. */
11059 i = 0;
11060 while (((val <<= 1) & 0x80000000) != 0)
11061 ++i;
11062
11063 return i;
11064}
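MB and ME are the rlwinm mask begin/end bit numbers in IBM bit order, where bit 0 is the most significant of the 32; for example the mask 0x00ffff00 has MB = 8 and ME = 23. A stand-alone check of the MB side, mirroring extract_MB (plain C, not GCC code; a nonzero 32-bit mask is assumed):

#include <assert.h>

/* Mask begin bit in IBM numbering (bit 0 is the most significant).  */
static int
mask_begin (unsigned long val)
{
  int i;

  if ((val & 0x80000000ul) == 0)        /* first 1 bit from the left */
    {
      i = 1;
      while (((val <<= 1) & 0x80000000ul) == 0)
        ++i;
      return i;
    }

  /* High bit set and low bit clear, or all 1's: MB is 0.  */
  if ((val & 1) == 0 || (val & 0xfffffffful) == 0xfffffffful)
    return 0;

  /* Wrap-around mask: first 0 bit from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;
  return i;
}

int
main (void)
{
  assert (mask_begin (0x00ffff00ul) == 8);    /* ME would be 23 */
  assert (mask_begin (0xfffffffful) == 0);
  return 0;
}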
11065
c4501e62
JJ
11066/* Locate some local-dynamic symbol still in use by this function
11067 so that we can print its name in some tls_ld pattern. */
11068
11069static const char *
863d938c 11070rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11071{
11072 rtx insn;
11073
11074 if (cfun->machine->some_ld_name)
11075 return cfun->machine->some_ld_name;
11076
11077 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11078 if (INSN_P (insn)
11079 && for_each_rtx (&PATTERN (insn),
11080 rs6000_get_some_local_dynamic_name_1, 0))
11081 return cfun->machine->some_ld_name;
11082
37409796 11083 gcc_unreachable ();
c4501e62
JJ
11084}
11085
11086/* Helper function for rs6000_get_some_local_dynamic_name. */
11087
11088static int
a2369ed3 11089rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11090{
11091 rtx x = *px;
11092
11093 if (GET_CODE (x) == SYMBOL_REF)
11094 {
11095 const char *str = XSTR (x, 0);
11096 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11097 {
11098 cfun->machine->some_ld_name = str;
11099 return 1;
11100 }
11101 }
11102
11103 return 0;
11104}
11105
85b776df
AM
11106/* Write out a function code label. */
11107
11108void
11109rs6000_output_function_entry (FILE *file, const char *fname)
11110{
11111 if (fname[0] != '.')
11112 {
11113 switch (DEFAULT_ABI)
11114 {
11115 default:
37409796 11116 gcc_unreachable ();
85b776df
AM
11117
11118 case ABI_AIX:
11119 if (DOT_SYMBOLS)
11120 putc ('.', file);
11121 else
11122 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11123 break;
11124
11125 case ABI_V4:
11126 case ABI_DARWIN:
11127 break;
11128 }
11129 }
11130 if (TARGET_AIX)
11131 RS6000_OUTPUT_BASENAME (file, fname);
11132 else
11133 assemble_name (file, fname);
11134}
11135
9878760c
RK
11136/* Print an operand. Recognize special options, documented below. */
11137
38c1f2d7 11138#if TARGET_ELF
d9407988 11139#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11140#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11141#else
11142#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11143#define SMALL_DATA_REG 0
ba5e43aa
MM
11144#endif
11145
9878760c 11146void
a2369ed3 11147print_operand (FILE *file, rtx x, int code)
9878760c
RK
11148{
11149 int i;
a260abc9 11150 HOST_WIDE_INT val;
0ba1b2ff 11151 unsigned HOST_WIDE_INT uval;
9878760c
RK
11152
11153 switch (code)
11154 {
a8b3aeda 11155 case '.':
a85d226b
RK
11156 /* Write out an instruction after the call which may be replaced
11157 with glue code by the loader. This depends on the AIX version. */
11158 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11159 return;
11160
81eace42
GK
11161 /* %a is output_address. */
11162
9854d9ed
RK
11163 case 'A':
11164 /* If X is a constant integer whose low-order 5 bits are zero,
11165 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11166 in the AIX assembler where "sri" with a zero shift count
20e26713 11167 writes a trash instruction. */
9854d9ed 11168 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11169 putc ('l', file);
9854d9ed 11170 else
76229ac8 11171 putc ('r', file);
9854d9ed
RK
11172 return;
11173
11174 case 'b':
e2c953b6
DE
11175 /* If constant, low-order 16 bits of constant, unsigned.
11176 Otherwise, write normally. */
11177 if (INT_P (x))
11178 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11179 else
11180 print_operand (file, x, 0);
cad12a8d
RK
11181 return;
11182
a260abc9
DE
11183 case 'B':
11184 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11185 for 64-bit mask direction. */
9390387d 11186 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11187 return;
a260abc9 11188
81eace42
GK
11189 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11190 output_operand. */
11191
423c1189
AH
11192 case 'c':
11193 /* X is a CR register. Print the number of the GT bit of the CR. */
11194 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11195 output_operand_lossage ("invalid %%c value");
11196 else
11197 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11198 return;
11199
11200 case 'D':
cef6b86c 11201 /* Like 'J' but get to the GT bit only. */
37409796 11202 gcc_assert (GET_CODE (x) == REG);
423c1189 11203
cef6b86c
EB
11204 /* Bit 1 is GT bit. */
11205 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11206
cef6b86c
EB
11207 /* Add one for shift count in rlinm for scc. */
11208 fprintf (file, "%d", i + 1);
423c1189
AH
11209 return;
11210
9854d9ed 11211 case 'E':
39a10a29 11212 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11213 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11214 output_operand_lossage ("invalid %%E value");
78fbdbf7 11215 else
39a10a29 11216 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11217 return;
9854d9ed
RK
11218
11219 case 'f':
11220 /* X is a CR register. Print the shift count needed to move it
11221 to the high-order four bits. */
11222 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11223 output_operand_lossage ("invalid %%f value");
11224 else
9ebbca7d 11225 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11226 return;
11227
11228 case 'F':
11229 /* Similar, but print the count for the rotate in the opposite
11230 direction. */
11231 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11232 output_operand_lossage ("invalid %%F value");
11233 else
9ebbca7d 11234 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11235 return;
11236
11237 case 'G':
11238 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11239 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11240 if (GET_CODE (x) != CONST_INT)
11241 output_operand_lossage ("invalid %%G value");
11242 else if (INTVAL (x) >= 0)
76229ac8 11243 putc ('z', file);
9854d9ed 11244 else
76229ac8 11245 putc ('m', file);
9854d9ed 11246 return;
e2c953b6 11247
9878760c 11248 case 'h':
a4f6c312
SS
11249 /* If constant, output low-order five bits. Otherwise, write
11250 normally. */
9878760c 11251 if (INT_P (x))
5f59ecb7 11252 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11253 else
11254 print_operand (file, x, 0);
11255 return;
11256
64305719 11257 case 'H':
a4f6c312
SS
11258 /* If constant, output low-order six bits. Otherwise, write
11259 normally. */
64305719 11260 if (INT_P (x))
5f59ecb7 11261 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11262 else
11263 print_operand (file, x, 0);
11264 return;
11265
9854d9ed
RK
11266 case 'I':
11267 /* Print `i' if this is a constant, else nothing. */
9878760c 11268 if (INT_P (x))
76229ac8 11269 putc ('i', file);
9878760c
RK
11270 return;
11271
9854d9ed
RK
11272 case 'j':
11273 /* Write the bit number in CCR for jump. */
11274 i = ccr_bit (x, 0);
11275 if (i == -1)
11276 output_operand_lossage ("invalid %%j code");
9878760c 11277 else
9854d9ed 11278 fprintf (file, "%d", i);
9878760c
RK
11279 return;
11280
9854d9ed
RK
11281 case 'J':
11282 /* Similar, but add one for shift count in rlinm for scc and pass
11283 scc flag to `ccr_bit'. */
11284 i = ccr_bit (x, 1);
11285 if (i == -1)
11286 output_operand_lossage ("invalid %%J code");
11287 else
a0466a68
RK
11288 /* If we want bit 31, write a shift count of zero, not 32. */
11289 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11290 return;
11291
9854d9ed
RK
11292 case 'k':
11293 /* X must be a constant. Write the 1's complement of the
11294 constant. */
9878760c 11295 if (! INT_P (x))
9854d9ed 11296 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11297 else
11298 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11299 return;
11300
81eace42 11301 case 'K':
9ebbca7d
GK
11302 /* X must be a symbolic constant on ELF. Write an
11303 expression suitable for an 'addi' that adds in the low 16
11304 bits of the MEM. */
11305 if (GET_CODE (x) != CONST)
11306 {
11307 print_operand_address (file, x);
11308 fputs ("@l", file);
11309 }
11310 else
11311 {
11312 if (GET_CODE (XEXP (x, 0)) != PLUS
11313 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11314 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11315 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11316 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11317 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11318 fputs ("@l", file);
ed8d2920
MM
11319 /* For GNU as, there must be a non-alphanumeric character
11320 between 'l' and the number. The '-' is added by
11321 print_operand() already. */
11322 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11323 fputs ("+", file);
9ebbca7d
GK
11324 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11325 }
81eace42
GK
11326 return;
11327
11328 /* %l is output_asm_label. */
9ebbca7d 11329
9854d9ed
RK
11330 case 'L':
11331 /* Write second word of DImode or DFmode reference. Works on register
11332 or non-indexed memory only. */
11333 if (GET_CODE (x) == REG)
fb5c67a7 11334 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11335 else if (GET_CODE (x) == MEM)
11336 {
11337 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11338 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11339 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11340 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11341 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11342 UNITS_PER_WORD));
6fb5fa3c
DB
11343 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11344 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11345 UNITS_PER_WORD));
9854d9ed 11346 else
d7624dc0
RK
11347 output_address (XEXP (adjust_address_nv (x, SImode,
11348 UNITS_PER_WORD),
11349 0));
ed8908e7 11350
ba5e43aa 11351 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11352 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11353 reg_names[SMALL_DATA_REG]);
9854d9ed 11354 }
9878760c 11355 return;
f676971a 11356
9878760c
RK
11357 case 'm':
11358 /* MB value for a mask operand. */
b1765bde 11359 if (! mask_operand (x, SImode))
9878760c
RK
11360 output_operand_lossage ("invalid %%m value");
11361
0ba1b2ff 11362 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11363 return;
11364
11365 case 'M':
11366 /* ME value for a mask operand. */
b1765bde 11367 if (! mask_operand (x, SImode))
a260abc9 11368 output_operand_lossage ("invalid %%M value");
9878760c 11369
0ba1b2ff 11370 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11371 return;
11372
81eace42
GK
11373 /* %n outputs the negative of its operand. */
11374
9878760c
RK
11375 case 'N':
11376 /* Write the number of elements in the vector times 4. */
11377 if (GET_CODE (x) != PARALLEL)
11378 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11379 else
11380 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11381 return;
11382
11383 case 'O':
11384 /* Similar, but subtract 1 first. */
11385 if (GET_CODE (x) != PARALLEL)
1427100a 11386 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11387 else
11388 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11389 return;
11390
9854d9ed
RK
11391 case 'p':
11392 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11393 if (! INT_P (x)
2bfcf297 11394 || INT_LOWPART (x) < 0
9854d9ed
RK
11395 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11396 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11397 else
11398 fprintf (file, "%d", i);
9854d9ed
RK
11399 return;
11400
9878760c
RK
11401 case 'P':
11402 /* The operand must be an indirect memory reference. The result
8bb418a3 11403 is the register name. */
9878760c
RK
11404 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11405 || REGNO (XEXP (x, 0)) >= 32)
11406 output_operand_lossage ("invalid %%P value");
e2c953b6 11407 else
fb5c67a7 11408 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11409 return;
11410
dfbdccdb
GK
11411 case 'q':
11412 /* This outputs the logical code corresponding to a boolean
11413 expression. The expression may have one or both operands
39a10a29 11414 negated (if one, only the first one). For condition register
c4ad648e
AM
11415 logical operations, it will also treat the negated
11416 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11417 {
63bc1d05 11418 const char *const *t = 0;
dfbdccdb
GK
11419 const char *s;
11420 enum rtx_code code = GET_CODE (x);
11421 static const char * const tbl[3][3] = {
11422 { "and", "andc", "nor" },
11423 { "or", "orc", "nand" },
11424 { "xor", "eqv", "xor" } };
11425
11426 if (code == AND)
11427 t = tbl[0];
11428 else if (code == IOR)
11429 t = tbl[1];
11430 else if (code == XOR)
11431 t = tbl[2];
11432 else
11433 output_operand_lossage ("invalid %%q value");
11434
11435 if (GET_CODE (XEXP (x, 0)) != NOT)
11436 s = t[0];
11437 else
11438 {
11439 if (GET_CODE (XEXP (x, 1)) == NOT)
11440 s = t[2];
11441 else
11442 s = t[1];
11443 }
f676971a 11444
dfbdccdb
GK
11445 fputs (s, file);
11446 }
11447 return;
11448
2c4a9cff
DE
11449 case 'Q':
11450 if (TARGET_MFCRF)
3b6ce0af 11451 fputc (',', file);
5efb1046 11452 /* FALLTHRU */
2c4a9cff
DE
11453 else
11454 return;
11455
9854d9ed
RK
11456 case 'R':
11457 /* X is a CR register. Print the mask for `mtcrf'. */
11458 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11459 output_operand_lossage ("invalid %%R value");
11460 else
9ebbca7d 11461 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11462 return;
9854d9ed
RK
11463
11464 case 's':
11465 /* Low 5 bits of 32 - value */
11466 if (! INT_P (x))
11467 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11468 else
11469 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11470 return;
9854d9ed 11471
a260abc9 11472 case 'S':
0ba1b2ff 11473 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11474 CONST_INT 32-bit mask is considered sign-extended so any
11475 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11476 if (! mask64_operand (x, DImode))
a260abc9
DE
11477 output_operand_lossage ("invalid %%S value");
11478
0ba1b2ff 11479 uval = INT_LOWPART (x);
a260abc9 11480
0ba1b2ff 11481 if (uval & 1) /* Clear Left */
a260abc9 11482 {
f099d360
GK
11483#if HOST_BITS_PER_WIDE_INT > 64
11484 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11485#endif
0ba1b2ff 11486 i = 64;
a260abc9 11487 }
0ba1b2ff 11488 else /* Clear Right */
a260abc9 11489 {
0ba1b2ff 11490 uval = ~uval;
f099d360
GK
11491#if HOST_BITS_PER_WIDE_INT > 64
11492 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11493#endif
0ba1b2ff 11494 i = 63;
a260abc9 11495 }
0ba1b2ff
AM
11496 while (uval != 0)
11497 --i, uval >>= 1;
37409796 11498 gcc_assert (i >= 0);
0ba1b2ff
AM
11499 fprintf (file, "%d", i);
11500 return;
a260abc9 11501
a3170dc6
AH
11502 case 't':
11503 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11504 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11505
11506 /* Bit 3 is OV bit. */
11507 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11508
11509 /* If we want bit 31, write a shift count of zero, not 32. */
11510 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11511 return;
11512
cccf3bdc
DE
11513 case 'T':
11514 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11515 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11516 && REGNO (x) != CTR_REGNO))
cccf3bdc 11517 output_operand_lossage ("invalid %%T value");
1de43f85 11518 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11519 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11520 else
11521 fputs ("ctr", file);
11522 return;
11523
9854d9ed 11524 case 'u':
802a0058 11525 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11526 if (! INT_P (x))
11527 output_operand_lossage ("invalid %%u value");
e2c953b6 11528 else
f676971a 11529 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11530 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11531 return;
11532
802a0058
MM
11533 case 'v':
11534 /* High-order 16 bits of constant for use in signed operand. */
11535 if (! INT_P (x))
11536 output_operand_lossage ("invalid %%v value");
e2c953b6 11537 else
134c32f6
DE
11538 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11539 (INT_LOWPART (x) >> 16) & 0xffff);
11540 return;
802a0058 11541
9854d9ed
RK
11542 case 'U':
11543 /* Print `u' if this has an auto-increment or auto-decrement. */
11544 if (GET_CODE (x) == MEM
11545 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11546 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11547 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11548 putc ('u', file);
9854d9ed 11549 return;
9878760c 11550
e0cd0770
JC
11551 case 'V':
11552 /* Print the trap code for this operand. */
11553 switch (GET_CODE (x))
11554 {
11555 case EQ:
11556 fputs ("eq", file); /* 4 */
11557 break;
11558 case NE:
11559 fputs ("ne", file); /* 24 */
11560 break;
11561 case LT:
11562 fputs ("lt", file); /* 16 */
11563 break;
11564 case LE:
11565 fputs ("le", file); /* 20 */
11566 break;
11567 case GT:
11568 fputs ("gt", file); /* 8 */
11569 break;
11570 case GE:
11571 fputs ("ge", file); /* 12 */
11572 break;
11573 case LTU:
11574 fputs ("llt", file); /* 2 */
11575 break;
11576 case LEU:
11577 fputs ("lle", file); /* 6 */
11578 break;
11579 case GTU:
11580 fputs ("lgt", file); /* 1 */
11581 break;
11582 case GEU:
11583 fputs ("lge", file); /* 5 */
11584 break;
11585 default:
37409796 11586 gcc_unreachable ();
e0cd0770
JC
11587 }
11588 break;
11589
9854d9ed
RK
11590 case 'w':
11591 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11592 normally. */
11593 if (INT_P (x))
f676971a 11594 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11595 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11596 else
11597 print_operand (file, x, 0);
9878760c
RK
11598 return;
11599
9854d9ed 11600 case 'W':
e2c953b6 11601 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11602 val = (GET_CODE (x) == CONST_INT
11603 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11604
11605 if (val < 0)
11606 i = -1;
9854d9ed 11607 else
e2c953b6
DE
11608 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11609 if ((val <<= 1) < 0)
11610 break;
11611
11612#if HOST_BITS_PER_WIDE_INT == 32
11613 if (GET_CODE (x) == CONST_INT && i >= 0)
11614 i += 32; /* zero-extend high-part was all 0's */
11615 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11616 {
11617 val = CONST_DOUBLE_LOW (x);
11618
37409796
NS
11619 gcc_assert (val);
11620 if (val < 0)
e2c953b6
DE
11621 --i;
11622 else
11623 for ( ; i < 64; i++)
11624 if ((val <<= 1) < 0)
11625 break;
11626 }
11627#endif
11628
11629 fprintf (file, "%d", i + 1);
9854d9ed 11630 return;
9878760c 11631
9854d9ed
RK
11632 case 'X':
11633 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11634 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11635 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11636 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11637 putc ('x', file);
9854d9ed 11638 return;
9878760c 11639
9854d9ed
RK
11640 case 'Y':
11641 /* Like 'L', for third word of TImode */
11642 if (GET_CODE (x) == REG)
fb5c67a7 11643 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11644 else if (GET_CODE (x) == MEM)
9878760c 11645 {
9854d9ed
RK
11646 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11647 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11648 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11649 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11650 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11651 else
d7624dc0 11652 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11653 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11654 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11655 reg_names[SMALL_DATA_REG]);
9878760c
RK
11656 }
11657 return;
f676971a 11658
9878760c 11659 case 'z':
b4ac57ab
RS
11660 /* X is a SYMBOL_REF. Write out the name preceded by a
11661 period and without any trailing data in brackets. Used for function
4d30c363
MM
11662 names. If we are configured for System V (or the embedded ABI) on
11663 the PowerPC, do not emit the period, since those systems do not use
11664 TOCs and the like. */
37409796 11665 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11666
c4ad648e
AM
11667 /* Mark the decl as referenced so that cgraph will output the
11668 function. */
9bf6462a 11669 if (SYMBOL_REF_DECL (x))
c4ad648e 11670 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11671
85b776df 11672 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11673 if (TARGET_MACHO)
11674 {
11675 const char *name = XSTR (x, 0);
a031e781 11676#if TARGET_MACHO
3b48085e 11677 if (MACHOPIC_INDIRECT
11abc112
MM
11678 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11679 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11680#endif
11681 assemble_name (file, name);
11682 }
85b776df 11683 else if (!DOT_SYMBOLS)
9739c90c 11684 assemble_name (file, XSTR (x, 0));
85b776df
AM
11685 else
11686 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11687 return;
11688
9854d9ed
RK
11689 case 'Z':
11690 /* Like 'L', for last word of TImode. */
11691 if (GET_CODE (x) == REG)
fb5c67a7 11692 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11693 else if (GET_CODE (x) == MEM)
11694 {
11695 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11696 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11697 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11698 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11699 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11700 else
d7624dc0 11701 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11702 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11703 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11704 reg_names[SMALL_DATA_REG]);
9854d9ed 11705 }
5c23c401 11706 return;
0ac081f6 11707
a3170dc6 11708 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11709 case 'y':
11710 {
11711 rtx tmp;
11712
37409796 11713 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11714
11715 tmp = XEXP (x, 0);
11716
90d3ff1c 11717 /* Ugly hack because %y is overloaded. */
8ef65e3d 11718 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11719 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11720 || GET_MODE (x) == TFmode
11721 || GET_MODE (x) == TImode))
a3170dc6
AH
11722 {
11723 /* Handle [reg]. */
11724 if (GET_CODE (tmp) == REG)
11725 {
11726 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11727 break;
11728 }
11729 /* Handle [reg+UIMM]. */
11730 else if (GET_CODE (tmp) == PLUS &&
11731 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11732 {
11733 int x;
11734
37409796 11735 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11736
11737 x = INTVAL (XEXP (tmp, 1));
11738 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11739 break;
11740 }
11741
11742 /* Fall through. Must be [reg+reg]. */
11743 }
850e8d3d
DN
11744 if (TARGET_ALTIVEC
11745 && GET_CODE (tmp) == AND
11746 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11747 && INTVAL (XEXP (tmp, 1)) == -16)
11748 tmp = XEXP (tmp, 0);
0ac081f6 11749 if (GET_CODE (tmp) == REG)
c62f2db5 11750 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11751 else
0ac081f6 11752 {
37409796 11753 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11754 && REG_P (XEXP (tmp, 0))
11755 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11756
0ac081f6
AH
11757 if (REGNO (XEXP (tmp, 0)) == 0)
11758 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11759 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11760 else
11761 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11762 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11763 }
0ac081f6
AH
11764 break;
11765 }
f676971a 11766
9878760c
RK
11767 case 0:
11768 if (GET_CODE (x) == REG)
11769 fprintf (file, "%s", reg_names[REGNO (x)]);
11770 else if (GET_CODE (x) == MEM)
11771 {
11772 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11773 know the width from the mode. */
11774 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11775 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11776 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11777 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11778 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11779 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
11780 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11781 output_address (XEXP (XEXP (x, 0), 1));
9878760c 11782 else
a54d04b7 11783 output_address (XEXP (x, 0));
9878760c
RK
11784 }
11785 else
a54d04b7 11786 output_addr_const (file, x);
a85d226b 11787 return;
9878760c 11788
c4501e62
JJ
11789 case '&':
11790 assemble_name (file, rs6000_get_some_local_dynamic_name ());
11791 return;
11792
9878760c
RK
11793 default:
11794 output_operand_lossage ("invalid %%xn code");
11795 }
11796}
11797\f
11798/* Print the address of an operand. */
11799
11800void
a2369ed3 11801print_operand_address (FILE *file, rtx x)
9878760c
RK
11802{
11803 if (GET_CODE (x) == REG)
4697a36c 11804 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
11805 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
11806 || GET_CODE (x) == LABEL_REF)
9878760c
RK
11807 {
11808 output_addr_const (file, x);
ba5e43aa 11809 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11810 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11811 reg_names[SMALL_DATA_REG]);
37409796
NS
11812 else
11813 gcc_assert (!TARGET_TOC);
9878760c
RK
11814 }
11815 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
11816 {
9024f4b8 11817 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 11818 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
11819 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
11820 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 11821 else
4697a36c
MM
11822 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
11823 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
11824 }
11825 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
11826 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
11827 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
11828#if TARGET_ELF
11829 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11830 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
11831 {
11832 output_addr_const (file, XEXP (x, 1));
11833 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11834 }
c859cda6
DJ
11835#endif
11836#if TARGET_MACHO
11837 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11838 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
11839 {
11840 fprintf (file, "lo16(");
11841 output_addr_const (file, XEXP (x, 1));
11842 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11843 }
3cb999d8 11844#endif
4d588c14 11845 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 11846 {
2bfcf297 11847 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 11848 {
2bfcf297
DB
11849 rtx contains_minus = XEXP (x, 1);
11850 rtx minus, symref;
11851 const char *name;
f676971a 11852
9ebbca7d 11853 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 11854 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
11855 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11856 contains_minus = XEXP (contains_minus, 0);
11857
2bfcf297
DB
11858 minus = XEXP (contains_minus, 0);
11859 symref = XEXP (minus, 0);
11860 XEXP (contains_minus, 0) = symref;
11861 if (TARGET_ELF)
11862 {
11863 char *newname;
11864
11865 name = XSTR (symref, 0);
11866 newname = alloca (strlen (name) + sizeof ("@toc"));
11867 strcpy (newname, name);
11868 strcat (newname, "@toc");
11869 XSTR (symref, 0) = newname;
11870 }
11871 output_addr_const (file, XEXP (x, 1));
11872 if (TARGET_ELF)
11873 XSTR (symref, 0) = name;
9ebbca7d
GK
11874 XEXP (contains_minus, 0) = minus;
11875 }
11876 else
11877 output_addr_const (file, XEXP (x, 1));
11878
11879 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11880 }
9878760c 11881 else
37409796 11882 gcc_unreachable ();
9878760c
RK
11883}
11884\f
88cad84b 11885/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
11886 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11887 is defined. It also needs to handle DI-mode objects on 64-bit
11888 targets. */
11889
11890static bool
a2369ed3 11891rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 11892{
f4f4921e 11893#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11894 /* Special handling for SI values. */
84dcde01 11895 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11896 {
301d03af 11897 static int recurse = 0;
f676971a 11898
301d03af
RS
11899 /* For -mrelocatable, we mark all addresses that need to be fixed up
11900 in the .fixup section. */
11901 if (TARGET_RELOCATABLE
d6b5193b
RS
11902 && in_section != toc_section
11903 && in_section != text_section
4325ca90 11904 && !unlikely_text_section_p (in_section)
301d03af
RS
11905 && !recurse
11906 && GET_CODE (x) != CONST_INT
11907 && GET_CODE (x) != CONST_DOUBLE
11908 && CONSTANT_P (x))
11909 {
11910 char buf[256];
11911
11912 recurse = 1;
11913 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11914 fixuplabelno++;
11915 ASM_OUTPUT_LABEL (asm_out_file, buf);
11916 fprintf (asm_out_file, "\t.long\t(");
11917 output_addr_const (asm_out_file, x);
11918 fprintf (asm_out_file, ")@fixup\n");
11919 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11920 ASM_OUTPUT_ALIGN (asm_out_file, 2);
11921 fprintf (asm_out_file, "\t.long\t");
11922 assemble_name (asm_out_file, buf);
11923 fprintf (asm_out_file, "\n\t.previous\n");
11924 recurse = 0;
11925 return true;
11926 }
11927 /* Remove initial .'s to turn a -mcall-aixdesc function
11928 address into the address of the descriptor, not the function
11929 itself. */
11930 else if (GET_CODE (x) == SYMBOL_REF
11931 && XSTR (x, 0)[0] == '.'
11932 && DEFAULT_ABI == ABI_AIX)
11933 {
11934 const char *name = XSTR (x, 0);
11935 while (*name == '.')
11936 name++;
11937
11938 fprintf (asm_out_file, "\t.long\t%s\n", name);
11939 return true;
11940 }
11941 }
f4f4921e 11942#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
11943 return default_assemble_integer (x, size, aligned_p);
11944}
93638d7a
AM
11945
11946#ifdef HAVE_GAS_HIDDEN
11947/* Emit an assembler directive to set symbol visibility for DECL to
11948 VISIBILITY_TYPE. */
11949
5add3202 11950static void
a2369ed3 11951rs6000_assemble_visibility (tree decl, int vis)
93638d7a 11952{
93638d7a
AM
11953 /* Functions need to have their entry point symbol visibility set as
11954 well as their descriptor symbol visibility. */
85b776df
AM
11955 if (DEFAULT_ABI == ABI_AIX
11956 && DOT_SYMBOLS
11957 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 11958 {
25fdb4dc 11959 static const char * const visibility_types[] = {
c4ad648e 11960 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
11961 };
11962
11963 const char *name, *type;
93638d7a
AM
11964
11965 name = ((* targetm.strip_name_encoding)
11966 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 11967 type = visibility_types[vis];
93638d7a 11968
25fdb4dc
RH
11969 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11970 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 11971 }
25fdb4dc
RH
11972 else
11973 default_assemble_visibility (decl, vis);
93638d7a
AM
11974}
11975#endif
301d03af 11976\f
39a10a29 11977enum rtx_code
a2369ed3 11978rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
11979{
11980 /* Reversal of FP compares takes care -- an ordered compare
11981 becomes an unordered compare and vice versa. */
f676971a 11982 if (mode == CCFPmode
bc9ec0e0
GK
11983 && (!flag_finite_math_only
11984 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11985 || code == UNEQ || code == LTGT))
bab6226b 11986 return reverse_condition_maybe_unordered (code);
39a10a29 11987 else
bab6226b 11988 return reverse_condition (code);
39a10a29
GK
11989}
11990
39a10a29
GK
11991/* Generate a compare for CODE. Return a brand-new rtx that
11992 represents the result of the compare. */
a4f6c312 11993
39a10a29 11994static rtx
a2369ed3 11995rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
11996{
11997 enum machine_mode comp_mode;
11998 rtx compare_result;
11999
12000 if (rs6000_compare_fp_p)
12001 comp_mode = CCFPmode;
12002 else if (code == GTU || code == LTU
c4ad648e 12003 || code == GEU || code == LEU)
39a10a29 12004 comp_mode = CCUNSmode;
60934f9c
NS
12005 else if ((code == EQ || code == NE)
12006 && GET_CODE (rs6000_compare_op0) == SUBREG
12007 && GET_CODE (rs6000_compare_op1) == SUBREG
12008 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12009 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12010 /* These are unsigned values, perhaps there will be a later
12011 ordering compare that can be shared with this one.
12012 Unfortunately we cannot detect the signedness of the operands
12013 for non-subregs. */
12014 comp_mode = CCUNSmode;
39a10a29
GK
12015 else
12016 comp_mode = CCmode;
12017
12018 /* First, the compare. */
12019 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12020
cef6b86c 12021 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12022 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12023 && rs6000_compare_fp_p)
a3170dc6 12024 {
64022b5d 12025 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12026 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12027
12028 if (op_mode == VOIDmode)
12029 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12030
cef6b86c
EB
12031 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12032 This explains the following mess. */
423c1189 12033
a3170dc6
AH
12034 switch (code)
12035 {
423c1189 12036 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12037 switch (op_mode)
12038 {
12039 case SFmode:
12040 cmp = flag_unsafe_math_optimizations
12041 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12042 rs6000_compare_op1)
12043 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12044 rs6000_compare_op1);
12045 break;
12046
12047 case DFmode:
12048 cmp = flag_unsafe_math_optimizations
12049 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12050 rs6000_compare_op1)
12051 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12052 rs6000_compare_op1);
12053 break;
12054
17caeff2
JM
12055 case TFmode:
12056 cmp = flag_unsafe_math_optimizations
12057 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12058 rs6000_compare_op1)
12059 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12060 rs6000_compare_op1);
12061 break;
12062
37409796
NS
12063 default:
12064 gcc_unreachable ();
12065 }
a3170dc6 12066 break;
bb8df8a6 12067
423c1189 12068 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12069 switch (op_mode)
12070 {
12071 case SFmode:
12072 cmp = flag_unsafe_math_optimizations
12073 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12074 rs6000_compare_op1)
12075 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12076 rs6000_compare_op1);
12077 break;
bb8df8a6 12078
37409796
NS
12079 case DFmode:
12080 cmp = flag_unsafe_math_optimizations
12081 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12082 rs6000_compare_op1)
12083 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12084 rs6000_compare_op1);
12085 break;
12086
17caeff2
JM
12087 case TFmode:
12088 cmp = flag_unsafe_math_optimizations
12089 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12090 rs6000_compare_op1)
12091 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12092 rs6000_compare_op1);
12093 break;
12094
37409796
NS
12095 default:
12096 gcc_unreachable ();
12097 }
a3170dc6 12098 break;
bb8df8a6 12099
423c1189 12100 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12101 switch (op_mode)
12102 {
12103 case SFmode:
12104 cmp = flag_unsafe_math_optimizations
12105 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12106 rs6000_compare_op1)
12107 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12108 rs6000_compare_op1);
12109 break;
bb8df8a6 12110
37409796
NS
12111 case DFmode:
12112 cmp = flag_unsafe_math_optimizations
12113 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12114 rs6000_compare_op1)
12115 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12116 rs6000_compare_op1);
12117 break;
12118
17caeff2
JM
12119 case TFmode:
12120 cmp = flag_unsafe_math_optimizations
12121 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12122 rs6000_compare_op1)
12123 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12124 rs6000_compare_op1);
12125 break;
12126
37409796
NS
12127 default:
12128 gcc_unreachable ();
12129 }
a3170dc6 12130 break;
4d4cbc0e 12131 default:
37409796 12132 gcc_unreachable ();
a3170dc6
AH
12133 }
12134
12135 /* Synthesize LE and GE from LT/GT || EQ. */
12136 if (code == LE || code == GE || code == LEU || code == GEU)
12137 {
a3170dc6
AH
12138 emit_insn (cmp);
12139
12140 switch (code)
12141 {
12142 case LE: code = LT; break;
12143 case GE: code = GT; break;
12144 case LEU: code = LT; break;
12145 case GEU: code = GT; break;
37409796 12146 default: gcc_unreachable ();
a3170dc6
AH
12147 }
12148
a3170dc6
AH
12149 compare_result2 = gen_reg_rtx (CCFPmode);
12150
12151 /* Do the EQ. */
37409796
NS
12152 switch (op_mode)
12153 {
12154 case SFmode:
12155 cmp = flag_unsafe_math_optimizations
12156 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12157 rs6000_compare_op1)
12158 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12159 rs6000_compare_op1);
12160 break;
12161
12162 case DFmode:
12163 cmp = flag_unsafe_math_optimizations
12164 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12165 rs6000_compare_op1)
12166 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12167 rs6000_compare_op1);
12168 break;
12169
17caeff2
JM
12170 case TFmode:
12171 cmp = flag_unsafe_math_optimizations
12172 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12173 rs6000_compare_op1)
12174 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12175 rs6000_compare_op1);
12176 break;
12177
37409796
NS
12178 default:
12179 gcc_unreachable ();
12180 }
a3170dc6
AH
12181 emit_insn (cmp);
12182
a3170dc6 12183 /* OR them together. */
64022b5d
AH
12184 or_result = gen_reg_rtx (CCFPmode);
12185 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12186 compare_result2);
a3170dc6
AH
12187 compare_result = or_result;
12188 code = EQ;
12189 }
12190 else
12191 {
a3170dc6 12192 if (code == NE || code == LTGT)
a3170dc6 12193 code = NE;
423c1189
AH
12194 else
12195 code = EQ;
a3170dc6
AH
12196 }
12197
12198 emit_insn (cmp);
12199 }
12200 else
de17c25f
DE
12201 {
12202 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12203 CLOBBERs to match cmptf_internal2 pattern. */
12204 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12205 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12206 && !TARGET_IEEEQUAD
de17c25f
DE
12207 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12208 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12209 gen_rtvec (9,
12210 gen_rtx_SET (VOIDmode,
12211 compare_result,
12212 gen_rtx_COMPARE (comp_mode,
12213 rs6000_compare_op0,
12214 rs6000_compare_op1)),
12215 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12216 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12217 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12218 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12219 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12220 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12221 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12222 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
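 /* A comparison whose second operand is an UNSPEC_SP_TEST unspec is a
 stack-protector canary check; use the stack_protect_test patterns,
 which produce a CCEQ result. */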
12223 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12224 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12225 {
12226 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12227 comp_mode = CCEQmode;
12228 compare_result = gen_reg_rtx (CCEQmode);
12229 if (TARGET_64BIT)
12230 emit_insn (gen_stack_protect_testdi (compare_result,
12231 rs6000_compare_op0, op1));
12232 else
12233 emit_insn (gen_stack_protect_testsi (compare_result,
12234 rs6000_compare_op0, op1));
12235 }
de17c25f
DE
12236 else
12237 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12238 gen_rtx_COMPARE (comp_mode,
12239 rs6000_compare_op0,
12240 rs6000_compare_op1)));
12241 }
f676971a 12242
ca5adc63 12243 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12244 under flag_finite_math_only we don't bother. */
39a10a29 12245 if (rs6000_compare_fp_p
e7108df9 12246 && !flag_finite_math_only
8ef65e3d 12247 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12248 && (code == LE || code == GE
12249 || code == UNEQ || code == LTGT
12250 || code == UNGT || code == UNLT))
12251 {
12252 enum rtx_code or1, or2;
12253 rtx or1_rtx, or2_rtx, compare2_rtx;
12254 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12255
39a10a29
GK
12256 switch (code)
12257 {
12258 case LE: or1 = LT; or2 = EQ; break;
12259 case GE: or1 = GT; or2 = EQ; break;
12260 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12261 case LTGT: or1 = LT; or2 = GT; break;
12262 case UNGT: or1 = UNORDERED; or2 = GT; break;
12263 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12264 default: gcc_unreachable ();
39a10a29
GK
12265 }
12266 validate_condition_mode (or1, comp_mode);
12267 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12268 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12269 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12270 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12271 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12272 const_true_rtx);
12273 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12274
12275 compare_result = or_result;
12276 code = EQ;
12277 }
12278
12279 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12280
1c563bed 12281 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12282}
12283
12284
12285/* Emit the RTL for an sCOND pattern. */
12286
12287void
a2369ed3 12288rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12289{
12290 rtx condition_rtx;
12291 enum machine_mode op_mode;
b7053a3f 12292 enum rtx_code cond_code;
39a10a29
GK
12293
12294 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12295 cond_code = GET_CODE (condition_rtx);
12296
8ef65e3d 12297 if (rs6000_compare_fp_p
423c1189
AH
12298 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12299 {
12300 rtx t;
12301
12302 PUT_MODE (condition_rtx, SImode);
12303 t = XEXP (condition_rtx, 0);
12304
37409796 12305 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12306
12307 if (cond_code == NE)
64022b5d 12308 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12309
64022b5d 12310 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12311 return;
12312 }
12313
b7053a3f
GK
12314 if (cond_code == NE
12315 || cond_code == GE || cond_code == LE
12316 || cond_code == GEU || cond_code == LEU
12317 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12318 {
12319 rtx not_result = gen_reg_rtx (CCEQmode);
12320 rtx not_op, rev_cond_rtx;
12321 enum machine_mode cc_mode;
f676971a 12322
b7053a3f
GK
12323 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12324
1c563bed 12325 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12326 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12327 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12328 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12329 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12330 }
39a10a29
GK
12331
12332 op_mode = GET_MODE (rs6000_compare_op0);
12333 if (op_mode == VOIDmode)
12334 op_mode = GET_MODE (rs6000_compare_op1);
12335
12336 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12337 {
12338 PUT_MODE (condition_rtx, DImode);
12339 convert_move (result, condition_rtx, 0);
12340 }
12341 else
12342 {
12343 PUT_MODE (condition_rtx, SImode);
12344 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12345 }
12346}
12347
39a10a29
GK
12348/* Emit a branch of kind CODE to location LOC. */
12349
12350void
a2369ed3 12351rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12352{
12353 rtx condition_rtx, loc_ref;
12354
12355 condition_rtx = rs6000_generate_compare (code);
12356 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12357 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12358 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12359 loc_ref, pc_rtx)));
12360}
12361
12a4e8c5
GK
12362/* Return the string to output a conditional branch to LABEL, which is
 12363 the text of the label operand, or NULL if the branch is really a
f676971a 12364 conditional return.
12a4e8c5
GK
12365
12366 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12367 condition code register and its mode specifies what kind of
12368 comparison we made.
12369
a0ab749a 12370 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12371
12372 INSN is the insn. */
12373
12374char *
a2369ed3 12375output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12376{
12377 static char string[64];
12378 enum rtx_code code = GET_CODE (op);
12379 rtx cc_reg = XEXP (op, 0);
12380 enum machine_mode mode = GET_MODE (cc_reg);
12381 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12382 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12383 int really_reversed = reversed ^ need_longbranch;
12384 char *s = string;
12385 const char *ccode;
12386 const char *pred;
12387 rtx note;
12388
39a10a29
GK
12389 validate_condition_mode (code, mode);
12390
12391 /* Work out which way this really branches. We could use
12392 reverse_condition_maybe_unordered here always but this
12393 makes the resulting assembler clearer. */
12a4e8c5 12394 if (really_reversed)
de40e1df
DJ
12395 {
12396 /* Reversal of FP compares takes care -- an ordered compare
12397 becomes an unordered compare and vice versa. */
12398 if (mode == CCFPmode)
12399 code = reverse_condition_maybe_unordered (code);
12400 else
12401 code = reverse_condition (code);
12402 }
12a4e8c5 12403
8ef65e3d 12404 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12405 {
12406 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12407 to the GT bit. */
37409796
NS
12408 switch (code)
12409 {
12410 case EQ:
12411 /* Opposite of GT. */
12412 code = GT;
12413 break;
12414
12415 case NE:
12416 code = UNLE;
12417 break;
12418
12419 default:
12420 gcc_unreachable ();
12421 }
a3170dc6
AH
12422 }
12423
39a10a29 12424 switch (code)
12a4e8c5
GK
12425 {
12426 /* Not all of these are actually distinct opcodes, but
12427 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12428 case NE: case LTGT:
12429 ccode = "ne"; break;
12430 case EQ: case UNEQ:
12431 ccode = "eq"; break;
f676971a 12432 case GE: case GEU:
50a0b056 12433 ccode = "ge"; break;
f676971a 12434 case GT: case GTU: case UNGT:
50a0b056 12435 ccode = "gt"; break;
f676971a 12436 case LE: case LEU:
50a0b056 12437 ccode = "le"; break;
f676971a 12438 case LT: case LTU: case UNLT:
50a0b056 12439 ccode = "lt"; break;
12a4e8c5
GK
12440 case UNORDERED: ccode = "un"; break;
12441 case ORDERED: ccode = "nu"; break;
12442 case UNGE: ccode = "nl"; break;
12443 case UNLE: ccode = "ng"; break;
12444 default:
37409796 12445 gcc_unreachable ();
12a4e8c5 12446 }
f676971a
EC
12447
12448 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12449 The old mnemonics don't have a way to specify this information. */
f4857b9b 12450 pred = "";
12a4e8c5
GK
12451 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12452 if (note != NULL_RTX)
12453 {
12454 /* PROB is the difference from 50%. */
12455 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12456
12457 /* Only hint for highly probable/improbable branches on newer
12458 cpus as static prediction overrides processor dynamic
12459 prediction. For older cpus we may as well always hint, but
12460 assume not taken for branches that are very close to 50% as a
12461 mispredicted taken branch is more expensive than a
f676971a 12462 mispredicted not-taken branch. */
ec507f2d 12463 if (rs6000_always_hint
2c9e13f3
JH
12464 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12465 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12466 {
12467 if (abs (prob) > REG_BR_PROB_BASE / 20
12468 && ((prob > 0) ^ need_longbranch))
c4ad648e 12469 pred = "+";
f4857b9b
AM
12470 else
12471 pred = "-";
12472 }
12a4e8c5 12473 }
12a4e8c5
GK
12474
12475 if (label == NULL)
94a54f47 12476 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12477 else
94a54f47 12478 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12479
37c67319 12480 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12481 Assume they'd only be the first character.... */
37c67319
GK
12482 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12483 *s++ = '%';
94a54f47 12484 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12485
12486 if (label != NULL)
12487 {
12488 /* If the branch distance was too far, we may have to use an
12489 unconditional branch to go the distance. */
12490 if (need_longbranch)
44518ddd 12491 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12492 else
12493 s += sprintf (s, ",%s", label);
12494 }
12495
12496 return string;
12497}
50a0b056 12498
64022b5d 12499/* Return the string to flip the GT bit on a CR. */
423c1189 12500char *
64022b5d 12501output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12502{
12503 static char string[64];
12504 int a, b;
12505
37409796
NS
12506 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12507 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12508
64022b5d
AH
12509 /* GT bit. */
12510 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12511 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12512
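 /* crnot copies the complement of the source GT bit into the
 destination GT bit. */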
12513 sprintf (string, "crnot %d,%d", a, b);
12514 return string;
12515}
12516
21213b4c
DP
 12517/* Return the insn index of the vector compare instruction for the given
 12518 CODE, DEST_MODE and OP_MODE, or INSN_NOT_AVAILABLE if no valid insn
 12519 is available. */
12520
12521static int
94ff898d 12522get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12523 enum machine_mode dest_mode,
12524 enum machine_mode op_mode)
12525{
12526 if (!TARGET_ALTIVEC)
12527 return INSN_NOT_AVAILABLE;
12528
12529 switch (code)
12530 {
12531 case EQ:
12532 if (dest_mode == V16QImode && op_mode == V16QImode)
12533 return UNSPEC_VCMPEQUB;
12534 if (dest_mode == V8HImode && op_mode == V8HImode)
12535 return UNSPEC_VCMPEQUH;
12536 if (dest_mode == V4SImode && op_mode == V4SImode)
12537 return UNSPEC_VCMPEQUW;
12538 if (dest_mode == V4SImode && op_mode == V4SFmode)
12539 return UNSPEC_VCMPEQFP;
12540 break;
12541 case GE:
12542 if (dest_mode == V4SImode && op_mode == V4SFmode)
12543 return UNSPEC_VCMPGEFP;
12544 case GT:
12545 if (dest_mode == V16QImode && op_mode == V16QImode)
12546 return UNSPEC_VCMPGTSB;
12547 if (dest_mode == V8HImode && op_mode == V8HImode)
12548 return UNSPEC_VCMPGTSH;
12549 if (dest_mode == V4SImode && op_mode == V4SImode)
12550 return UNSPEC_VCMPGTSW;
12551 if (dest_mode == V4SImode && op_mode == V4SFmode)
12552 return UNSPEC_VCMPGTFP;
12553 break;
12554 case GTU:
12555 if (dest_mode == V16QImode && op_mode == V16QImode)
12556 return UNSPEC_VCMPGTUB;
12557 if (dest_mode == V8HImode && op_mode == V8HImode)
12558 return UNSPEC_VCMPGTUH;
12559 if (dest_mode == V4SImode && op_mode == V4SImode)
12560 return UNSPEC_VCMPGTUW;
12561 break;
12562 default:
12563 break;
12564 }
12565 return INSN_NOT_AVAILABLE;
12566}
12567
12568/* Emit vector compare for operands OP0 and OP1 using code RCODE.
 12569 DMODE is the expected destination mode. This is a recursive function. */
12570
12571static rtx
12572rs6000_emit_vector_compare (enum rtx_code rcode,
12573 rtx op0, rtx op1,
12574 enum machine_mode dmode)
12575{
12576 int vec_cmp_insn;
12577 rtx mask;
12578 enum machine_mode dest_mode;
12579 enum machine_mode op_mode = GET_MODE (op1);
12580
37409796
NS
12581 gcc_assert (TARGET_ALTIVEC);
12582 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12583
 12584 /* Floating-point vector compare instructions use V4SImode as the destination.
 12585 Move the result to the requested mode later. */
12586 if (dmode == V4SFmode)
12587 dest_mode = V4SImode;
12588 else
12589 dest_mode = dmode;
12590
12591 mask = gen_reg_rtx (dest_mode);
12592 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12593
12594 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12595 {
12596 bool swap_operands = false;
12597 bool try_again = false;
12598 switch (rcode)
12599 {
12600 case LT:
12601 rcode = GT;
12602 swap_operands = true;
12603 try_again = true;
12604 break;
12605 case LTU:
12606 rcode = GTU;
12607 swap_operands = true;
12608 try_again = true;
12609 break;
12610 case NE:
370df7db
JC
12611 case UNLE:
12612 case UNLT:
12613 case UNGE:
12614 case UNGT:
12615 /* Invert condition and try again.
12616 e.g., A != B becomes ~(A==B). */
21213b4c 12617 {
370df7db 12618 enum rtx_code rev_code;
21213b4c 12619 enum insn_code nor_code;
d1123cde 12620 rtx eq_rtx;
370df7db
JC
12621
12622 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12623 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12624 dest_mode);
94ff898d 12625
166cdb08 12626 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12627 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12628 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12629
12630 if (dmode != dest_mode)
12631 {
12632 rtx temp = gen_reg_rtx (dest_mode);
12633 convert_move (temp, mask, 0);
12634 return temp;
12635 }
12636 return mask;
12637 }
12638 break;
12639 case GE:
12640 case GEU:
12641 case LE:
12642 case LEU:
12643 /* Try GT/GTU/LT/LTU OR EQ */
12644 {
12645 rtx c_rtx, eq_rtx;
12646 enum insn_code ior_code;
12647 enum rtx_code new_code;
12648
37409796
NS
12649 switch (rcode)
12650 {
12651 case GE:
12652 new_code = GT;
12653 break;
12654
12655 case GEU:
12656 new_code = GTU;
12657 break;
12658
12659 case LE:
12660 new_code = LT;
12661 break;
12662
12663 case LEU:
12664 new_code = LTU;
12665 break;
12666
12667 default:
12668 gcc_unreachable ();
12669 }
21213b4c
DP
12670
12671 c_rtx = rs6000_emit_vector_compare (new_code,
12672 op0, op1, dest_mode);
12673 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12674 dest_mode);
12675
166cdb08 12676 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12677 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12678 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12679 if (dmode != dest_mode)
12680 {
12681 rtx temp = gen_reg_rtx (dest_mode);
12682 convert_move (temp, mask, 0);
12683 return temp;
12684 }
12685 return mask;
12686 }
12687 break;
12688 default:
37409796 12689 gcc_unreachable ();
21213b4c
DP
12690 }
12691
12692 if (try_again)
12693 {
12694 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12695 /* You only get two chances. */
12696 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12697 }
12698
12699 if (swap_operands)
12700 {
12701 rtx tmp;
12702 tmp = op0;
12703 op0 = op1;
12704 op1 = tmp;
12705 }
12706 }
12707
915167f5
GK
12708 emit_insn (gen_rtx_SET (VOIDmode, mask,
12709 gen_rtx_UNSPEC (dest_mode,
12710 gen_rtvec (2, op0, op1),
12711 vec_cmp_insn)));
21213b4c
DP
12712 if (dmode != dest_mode)
12713 {
12714 rtx temp = gen_reg_rtx (dest_mode);
12715 convert_move (temp, mask, 0);
12716 return temp;
12717 }
12718 return mask;
12719}
12720
 12721/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE if
 12722 no valid insn exists for the given mode. */
12723
12724static int
12725get_vsel_insn (enum machine_mode mode)
12726{
12727 switch (mode)
12728 {
12729 case V4SImode:
12730 return UNSPEC_VSEL4SI;
12731 break;
12732 case V4SFmode:
12733 return UNSPEC_VSEL4SF;
12734 break;
12735 case V8HImode:
12736 return UNSPEC_VSEL8HI;
12737 break;
12738 case V16QImode:
12739 return UNSPEC_VSEL16QI;
12740 break;
12741 default:
12742 return INSN_NOT_AVAILABLE;
12743 break;
12744 }
12745 return INSN_NOT_AVAILABLE;
12746}
12747
12748/* Emit vector select insn where DEST is destination using
12749 operands OP1, OP2 and MASK. */
12750
12751static void
12752rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12753{
12754 rtx t, temp;
12755 enum machine_mode dest_mode = GET_MODE (dest);
12756 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12757
12758 temp = gen_reg_rtx (dest_mode);
94ff898d 12759
 bb8df8a6 12760 /* For each vector element, select op1 when mask is 1, otherwise
19f1ebc7 12761 select op2. */
915167f5
GK
12762 t = gen_rtx_SET (VOIDmode, temp,
12763 gen_rtx_UNSPEC (dest_mode,
12764 gen_rtvec (3, op2, op1, mask),
12765 vsel_insn_index));
21213b4c
DP
12766 emit_insn (t);
12767 emit_move_insn (dest, temp);
12768 return;
12769}
12770
94ff898d 12771/* Emit vector conditional expression.
21213b4c
DP
12772 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
12773 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
12774
12775int
12776rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12777 rtx cond, rtx cc_op0, rtx cc_op1)
12778{
12779 enum machine_mode dest_mode = GET_MODE (dest);
12780 enum rtx_code rcode = GET_CODE (cond);
12781 rtx mask;
12782
12783 if (!TARGET_ALTIVEC)
12784 return 0;
12785
12786 /* Get the vector mask for the given relational operations. */
12787 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12788
12789 rs6000_emit_vector_select (dest, op1, op2, mask);
12790
12791 return 1;
12792}
12793
50a0b056
GK
 12794/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
 12795 operands of the last comparison is nonzero/true, and FALSE_COND if it
 12796 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 12797
50a0b056 12798int
a2369ed3 12799rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
12800{
12801 enum rtx_code code = GET_CODE (op);
12802 rtx op0 = rs6000_compare_op0;
12803 rtx op1 = rs6000_compare_op1;
12804 REAL_VALUE_TYPE c1;
3148ad6d
DJ
12805 enum machine_mode compare_mode = GET_MODE (op0);
12806 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 12807 rtx temp;
add2402e 12808 bool is_against_zero;
50a0b056 12809
a3c9585f 12810 /* These modes should always match. */
a3170dc6
AH
12811 if (GET_MODE (op1) != compare_mode
12812 /* In the isel case however, we can use a compare immediate, so
12813 op1 may be a small constant. */
12814 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 12815 return 0;
178c3eff 12816 if (GET_MODE (true_cond) != result_mode)
3148ad6d 12817 return 0;
178c3eff 12818 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
12819 return 0;
12820
50a0b056 12821 /* First, work out if the hardware can do this at all, or
a3c9585f 12822 if it's too slow.... */
50a0b056 12823 if (! rs6000_compare_fp_p)
a3170dc6
AH
12824 {
12825 if (TARGET_ISEL)
12826 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
12827 return 0;
12828 }
8ef65e3d 12829 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 12830 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 12831 return 0;
50a0b056 12832
add2402e 12833 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 12834
add2402e
GK
12835 /* A floating-point subtract might overflow, underflow, or produce
12836 an inexact result, thus changing the floating-point flags, so it
12837 can't be generated if we care about that. It's safe if one side
12838 of the construct is zero, since then no subtract will be
12839 generated. */
ebb109ad 12840 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
12841 && flag_trapping_math && ! is_against_zero)
12842 return 0;
12843
50a0b056
GK
12844 /* Eliminate half of the comparisons by switching operands, this
12845 makes the remaining code simpler. */
12846 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 12847 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
12848 {
12849 code = reverse_condition_maybe_unordered (code);
12850 temp = true_cond;
12851 true_cond = false_cond;
12852 false_cond = temp;
12853 }
12854
12855 /* UNEQ and LTGT take four instructions for a comparison with zero,
12856 it'll probably be faster to use a branch here too. */
bc9ec0e0 12857 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 12858 return 0;
f676971a 12859
50a0b056
GK
12860 if (GET_CODE (op1) == CONST_DOUBLE)
12861 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 12862
b6d08ca1 12863 /* We're going to try to implement comparisons by performing
50a0b056
GK
12864 a subtract, then comparing against zero. Unfortunately,
12865 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 12866 know that the operand is finite and the comparison
50a0b056 12867 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 12868 if (HONOR_INFINITIES (compare_mode)
50a0b056 12869 && code != GT && code != UNGE
045572c7 12870 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
12871 /* Constructs of the form (a OP b ? a : b) are safe. */
12872 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 12873 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
12874 && ! rtx_equal_p (op1, true_cond))))
12875 return 0;
add2402e 12876
50a0b056
GK
12877 /* At this point we know we can use fsel. */
12878
12879 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
12880 if (! is_against_zero)
12881 {
12882 temp = gen_reg_rtx (compare_mode);
12883 emit_insn (gen_rtx_SET (VOIDmode, temp,
12884 gen_rtx_MINUS (compare_mode, op0, op1)));
12885 op0 = temp;
12886 op1 = CONST0_RTX (compare_mode);
12887 }
50a0b056
GK
12888
12889 /* If we don't care about NaNs we can reduce some of the comparisons
12890 down to faster ones. */
bc9ec0e0 12891 if (! HONOR_NANS (compare_mode))
50a0b056
GK
12892 switch (code)
12893 {
12894 case GT:
12895 code = LE;
12896 temp = true_cond;
12897 true_cond = false_cond;
12898 false_cond = temp;
12899 break;
12900 case UNGE:
12901 code = GE;
12902 break;
12903 case UNEQ:
12904 code = EQ;
12905 break;
12906 default:
12907 break;
12908 }
12909
12910 /* Now, reduce everything down to a GE. */
12911 switch (code)
12912 {
12913 case GE:
12914 break;
12915
12916 case LE:
3148ad6d
DJ
12917 temp = gen_reg_rtx (compare_mode);
12918 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12919 op0 = temp;
12920 break;
12921
12922 case ORDERED:
3148ad6d
DJ
12923 temp = gen_reg_rtx (compare_mode);
12924 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
12925 op0 = temp;
12926 break;
12927
12928 case EQ:
3148ad6d 12929 temp = gen_reg_rtx (compare_mode);
f676971a 12930 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
12931 gen_rtx_NEG (compare_mode,
12932 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
12933 op0 = temp;
12934 break;
12935
12936 case UNGE:
bc9ec0e0 12937 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 12938 temp = gen_reg_rtx (result_mode);
50a0b056 12939 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 12940 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12941 gen_rtx_GE (VOIDmode,
12942 op0, op1),
12943 true_cond, false_cond)));
bc9ec0e0
GK
12944 false_cond = true_cond;
12945 true_cond = temp;
50a0b056 12946
3148ad6d
DJ
12947 temp = gen_reg_rtx (compare_mode);
12948 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12949 op0 = temp;
12950 break;
12951
12952 case GT:
bc9ec0e0 12953 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 12954 temp = gen_reg_rtx (result_mode);
50a0b056 12955 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 12956 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12957 gen_rtx_GE (VOIDmode,
12958 op0, op1),
12959 true_cond, false_cond)));
bc9ec0e0
GK
12960 true_cond = false_cond;
12961 false_cond = temp;
50a0b056 12962
3148ad6d
DJ
12963 temp = gen_reg_rtx (compare_mode);
12964 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12965 op0 = temp;
12966 break;
12967
12968 default:
37409796 12969 gcc_unreachable ();
50a0b056
GK
12970 }
12971
12972 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 12973 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12974 gen_rtx_GE (VOIDmode,
12975 op0, op1),
12976 true_cond, false_cond)));
12977 return 1;
12978}
12979
a3170dc6
AH
12980/* Same as above, but for ints (isel). */
12981
12982static int
a2369ed3 12983rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
12984{
12985 rtx condition_rtx, cr;
12986
12987 /* All isel implementations thus far are 32-bits. */
12988 if (GET_MODE (rs6000_compare_op0) != SImode)
12989 return 0;
12990
12991 /* We still have to do the compare, because isel doesn't do a
12992 compare, it just looks at the CRx bits set by a previous compare
12993 instruction. */
12994 condition_rtx = rs6000_generate_compare (GET_CODE (op));
12995 cr = XEXP (condition_rtx, 0);
12996
12997 if (GET_MODE (cr) == CCmode)
12998 emit_insn (gen_isel_signed (dest, condition_rtx,
12999 true_cond, false_cond, cr));
13000 else
13001 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13002 true_cond, false_cond, cr));
13003
13004 return 1;
13005}
13006
13007const char *
a2369ed3 13008output_isel (rtx *operands)
a3170dc6
AH
13009{
13010 enum rtx_code code;
13011
13012 code = GET_CODE (operands[1]);
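 /* isel tests a single CR bit, so GE/GEU/LE/LEU/NE are handled by
 outputting the reversed condition and swapping the true and false
 operands. */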
13013 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13014 {
13015 PUT_CODE (operands[1], reverse_condition (code));
13016 return "isel %0,%3,%2,%j1";
13017 }
13018 else
13019 return "isel %0,%2,%3,%j1";
13020}
13021
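/* Emit RTL to compute the minimum or maximum of OP0 and OP1 into DEST,
 where CODE is SMIN, SMAX, UMIN or UMAX, using a conditional move on a
 signed (GE) or unsigned (GEU) comparison. */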
50a0b056 13022void
a2369ed3 13023rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13024{
13025 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13026 enum rtx_code c;
50a0b056 13027 rtx target;
5dc8d536
AH
13028
13029 if (code == SMAX || code == SMIN)
13030 c = GE;
13031 else
13032 c = GEU;
13033
50a0b056 13034 if (code == SMAX || code == UMAX)
f676971a 13035 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13036 op0, op1, mode, 0);
13037 else
f676971a 13038 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13039 op1, op0, mode, 0);
37409796 13040 gcc_assert (target);
50a0b056
GK
13041 if (target != dest)
13042 emit_move_insn (dest, target);
13043}
46c07df8 13044
915167f5
GK
13045/* Emit instructions to perform a load-reserved/store-conditional operation.
13046 The operation performed is an atomic
13047 (set M (CODE:MODE M OP))
13048 If not NULL, BEFORE is atomically set to M before the operation, and
13049 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13050 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13051 Either OP or M may be wrapped in a NOT operation. */
13052
13053void
13054rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13055 rtx m, rtx op, rtx before_param, rtx after_param,
13056 bool sync_p)
13057{
13058 enum machine_mode used_mode;
13059 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13060 rtx used_m;
13061 rtvec vec;
13062 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13063 rtx shift = NULL_RTX;
bb8df8a6 13064
915167f5
GK
13065 if (sync_p)
13066 emit_insn (gen_memory_barrier ());
bb8df8a6 13067
915167f5
GK
13068 if (GET_CODE (m) == NOT)
13069 used_m = XEXP (m, 0);
13070 else
13071 used_m = m;
13072
13073 /* If this is smaller than SImode, we'll have to use SImode with
13074 adjustments. */
13075 if (mode == QImode || mode == HImode)
13076 {
13077 rtx newop, oldop;
13078
13079 if (MEM_ALIGN (used_m) >= 32)
13080 {
13081 int ishift = 0;
13082 if (BYTES_BIG_ENDIAN)
13083 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13084
915167f5 13085 shift = GEN_INT (ishift);
c75c6d11 13086 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13087 }
13088 else
13089 {
13090 rtx addrSI, aligned_addr;
a9c9d3fa 13091 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13092
c75c6d11
JJ
13093 addrSI = gen_lowpart_common (SImode,
13094 force_reg (Pmode, XEXP (used_m, 0)));
13095 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13096 shift = gen_reg_rtx (SImode);
13097
13098 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13099 GEN_INT (shift_mask)));
13100 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13101
13102 aligned_addr = expand_binop (Pmode, and_optab,
13103 XEXP (used_m, 0),
13104 GEN_INT (-4), NULL_RTX,
13105 1, OPTAB_LIB_WIDEN);
13106 used_m = change_address (used_m, SImode, aligned_addr);
13107 set_mem_align (used_m, 32);
915167f5 13108 }
c75c6d11
JJ
13109 /* It's safe to keep the old alias set of USED_M, because
13110 the operation is atomic and only affects the original
13111 USED_M. */
13112 if (GET_CODE (m) == NOT)
13113 m = gen_rtx_NOT (SImode, used_m);
13114 else
13115 m = used_m;
915167f5
GK
13116
13117 if (GET_CODE (op) == NOT)
13118 {
13119 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13120 oldop = gen_rtx_NOT (SImode, oldop);
13121 }
13122 else
13123 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13124
915167f5
GK
13125 switch (code)
13126 {
13127 case IOR:
13128 case XOR:
13129 newop = expand_binop (SImode, and_optab,
13130 oldop, GEN_INT (imask), NULL_RTX,
13131 1, OPTAB_LIB_WIDEN);
13132 emit_insn (gen_ashlsi3 (newop, newop, shift));
13133 break;
13134
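 /* For AND, the bits outside the narrow value must stay set so that
 the rest of the word is left unchanged; OR in the complement of
 the mask and rotate (rather than shift) it into place. */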
13135 case AND:
13136 newop = expand_binop (SImode, ior_optab,
13137 oldop, GEN_INT (~imask), NULL_RTX,
13138 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13139 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13140 break;
13141
13142 case PLUS:
9f0076e5 13143 case MINUS:
915167f5
GK
13144 {
13145 rtx mask;
bb8df8a6 13146
915167f5
GK
13147 newop = expand_binop (SImode, and_optab,
13148 oldop, GEN_INT (imask), NULL_RTX,
13149 1, OPTAB_LIB_WIDEN);
13150 emit_insn (gen_ashlsi3 (newop, newop, shift));
13151
13152 mask = gen_reg_rtx (SImode);
13153 emit_move_insn (mask, GEN_INT (imask));
13154 emit_insn (gen_ashlsi3 (mask, mask, shift));
13155
9f0076e5
DE
13156 if (code == PLUS)
13157 newop = gen_rtx_PLUS (SImode, m, newop);
13158 else
13159 newop = gen_rtx_MINUS (SImode, m, newop);
13160 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13161 newop = gen_rtx_IOR (SImode, newop,
13162 gen_rtx_AND (SImode,
13163 gen_rtx_NOT (SImode, mask),
13164 m));
13165 break;
13166 }
13167
13168 default:
13169 gcc_unreachable ();
13170 }
13171
a9c9d3fa
GK
13172 if (GET_CODE (m) == NOT)
13173 {
13174 rtx mask, xorm;
13175
13176 mask = gen_reg_rtx (SImode);
13177 emit_move_insn (mask, GEN_INT (imask));
13178 emit_insn (gen_ashlsi3 (mask, mask, shift));
13179
13180 xorm = gen_rtx_XOR (SImode, used_m, mask);
13181 /* Depending on the value of 'op', the XOR or the operation might
13182 be able to be simplified away. */
13183 newop = simplify_gen_binary (code, SImode, xorm, newop);
13184 }
915167f5
GK
13185 op = newop;
13186 used_mode = SImode;
13187 before = gen_reg_rtx (used_mode);
13188 after = gen_reg_rtx (used_mode);
13189 }
13190 else
13191 {
13192 used_mode = mode;
13193 before = before_param;
13194 after = after_param;
13195
13196 if (before == NULL_RTX)
13197 before = gen_reg_rtx (used_mode);
13198 if (after == NULL_RTX)
13199 after = gen_reg_rtx (used_mode);
13200 }
bb8df8a6 13201
9f0076e5
DE
13202 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13203 && used_mode != mode)
915167f5
GK
13204 the_op = op; /* Computed above. */
13205 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13206 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13207 else
13208 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13209
13210 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13211 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13212 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13213 gen_rtx_UNSPEC (used_mode,
13214 gen_rtvec (1, the_op),
13215 UNSPEC_SYNC_OP));
915167f5
GK
13216 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13217
9f0076e5 13218 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13219 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13220 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13221 else
13222 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13223 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13224
13225 /* Shift and mask the return values properly. */
13226 if (used_mode != mode && before_param)
13227 {
13228 emit_insn (gen_lshrsi3 (before, before, shift));
13229 convert_move (before_param, before, 1);
13230 }
13231
13232 if (used_mode != mode && after_param)
13233 {
13234 emit_insn (gen_lshrsi3 (after, after, shift));
13235 convert_move (after_param, after, 1);
13236 }
13237
13238 /* The previous sequence will end with a branch that's dependent on
13239 the conditional store, so placing an isync will ensure that no
13240 other instructions (especially, no load or store instructions)
13241 can start before the atomic operation completes. */
13242 if (sync_p)
13243 emit_insn (gen_isync ());
13244}
13245
b52110d4
DE
13246/* A subroutine of the atomic operation splitters. Jump to LABEL if
13247 COND is true. Mark the jump as unlikely to be taken. */
13248
13249static void
13250emit_unlikely_jump (rtx cond, rtx label)
13251{
13252 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13253 rtx x;
13254
13255 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13256 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13257 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13258}
13259
13260/* A subroutine of the atomic operation splitters. Emit a load-locked
13261 instruction in MODE. */
13262
13263static void
13264emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13265{
13266 rtx (*fn) (rtx, rtx) = NULL;
13267 if (mode == SImode)
13268 fn = gen_load_locked_si;
13269 else if (mode == DImode)
13270 fn = gen_load_locked_di;
13271 emit_insn (fn (reg, mem));
13272}
13273
13274/* A subroutine of the atomic operation splitters. Emit a store-conditional
13275 instruction in MODE. */
13276
13277static void
13278emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13279{
13280 rtx (*fn) (rtx, rtx, rtx) = NULL;
13281 if (mode == SImode)
13282 fn = gen_store_conditional_si;
13283 else if (mode == DImode)
13284 fn = gen_store_conditional_di;
13285
9f0076e5 13286 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13287 if (PPC405_ERRATUM77)
13288 emit_insn (gen_memory_barrier ());
13289
13290 emit_insn (fn (res, mem, val));
13291}
13292
ea2c620c 13293/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13294 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13295 operand of the binary operator. BEFORE and AFTER are optional locations to
 13296 return the value of MEM either before or after the operation. SCRATCH is
13297 a scratch register. */
13298
13299void
13300rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13301 rtx before, rtx after, rtx scratch)
13302{
13303 enum machine_mode mode = GET_MODE (mem);
13304 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13305
13306 emit_insn (gen_memory_barrier ());
13307
13308 label = gen_label_rtx ();
13309 emit_label (label);
13310 label = gen_rtx_LABEL_REF (VOIDmode, label);
13311
13312 if (before == NULL_RTX)
13313 before = scratch;
13314 emit_load_locked (mode, before, mem);
13315
13316 if (code == NOT)
13317 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
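 /* Express AND as an UNSPEC_AND to avoid a clobber in the matching
 pattern, as in rs6000_split_compare_and_swapqhi below. */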
13318 else if (code == AND)
13319 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13320 else
13321 x = gen_rtx_fmt_ee (code, mode, before, val);
13322
13323 if (after != NULL_RTX)
13324 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13325 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13326
13327 emit_store_conditional (mode, cond, mem, scratch);
13328
13329 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13330 emit_unlikely_jump (x, label);
13331
13332 emit_insn (gen_isync ());
13333}
13334
b52110d4
DE
13335/* Expand an atomic compare and swap operation. MEM is the memory on which
13336 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13337 value to be stored. SCRATCH is a scratch GPR. */
13338
13339void
13340rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13341 rtx scratch)
13342{
13343 enum machine_mode mode = GET_MODE (mem);
13344 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13345
13346 emit_insn (gen_memory_barrier ());
13347
13348 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13349 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13350 emit_label (XEXP (label1, 0));
13351
13352 emit_load_locked (mode, retval, mem);
13353
13354 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13355 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13356
13357 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13358 emit_unlikely_jump (x, label2);
13359
13360 emit_move_insn (scratch, newval);
13361 emit_store_conditional (mode, cond, mem, scratch);
13362
13363 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13364 emit_unlikely_jump (x, label1);
13365
13366 emit_insn (gen_isync ());
13367 emit_label (XEXP (label2, 0));
13368}
13369
13370/* Expand an atomic test and set operation. MEM is the memory on which
13371 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13372
13373void
13374rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13375{
13376 enum machine_mode mode = GET_MODE (mem);
13377 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13378
13379 emit_insn (gen_memory_barrier ());
13380
13381 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13382 emit_label (XEXP (label, 0));
13383
13384 emit_load_locked (mode, retval, mem);
13385 emit_move_insn (scratch, val);
13386 emit_store_conditional (mode, cond, mem, scratch);
13387
13388 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13389 emit_unlikely_jump (x, label);
13390
13391 emit_insn (gen_isync ());
13392}
13393
9fc75b97
DE
13394void
13395rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13396{
13397 enum machine_mode mode = GET_MODE (mem);
13398 rtx addrSI, align, wdst, shift, mask;
13399 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13400 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13401
13402 /* Shift amount for subword relative to aligned word. */
13403 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13404 shift = gen_reg_rtx (SImode);
13405 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13406 GEN_INT (shift_mask)));
13407 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13408
13409 /* Shift and mask old value into position within word. */
13410 oldval = convert_modes (SImode, mode, oldval, 1);
13411 oldval = expand_binop (SImode, and_optab,
13412 oldval, GEN_INT (imask), NULL_RTX,
13413 1, OPTAB_LIB_WIDEN);
13414 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13415
13416 /* Shift and mask new value into position within word. */
13417 newval = convert_modes (SImode, mode, newval, 1);
13418 newval = expand_binop (SImode, and_optab,
13419 newval, GEN_INT (imask), NULL_RTX,
13420 1, OPTAB_LIB_WIDEN);
13421 emit_insn (gen_ashlsi3 (newval, newval, shift));
13422
13423 /* Mask for insertion. */
13424 mask = gen_reg_rtx (SImode);
13425 emit_move_insn (mask, GEN_INT (imask));
13426 emit_insn (gen_ashlsi3 (mask, mask, shift));
13427
13428 /* Address of aligned word containing subword. */
13429 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13430 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13431 mem = change_address (mem, SImode, align);
13432 set_mem_align (mem, 32);
13433 MEM_VOLATILE_P (mem) = 1;
13434
13435 wdst = gen_reg_rtx (SImode);
13436 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13437 oldval, newval, mem));
13438
13439 emit_move_insn (dst, gen_lowpart (mode, wdst));
13440}
13441
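/* Worked example for the shift/mask computation above (illustrative):
   a QImode operand at an address congruent to 1 mod 4 on a big-endian
   target gives

       shift = (addr << 3) & 0x18;    # rlwinm: 1 * 8 = 8
       shift ^= 0x18;                 # 8 ^ 24 = 16, the position of
                                      #   byte 1 in the aligned word
       mask  = 0xFF << 16;            # insertion mask 0x00FF0000

   OLDVAL and NEWVAL are masked and shifted the same way, and the retry
   loop then operates on the containing aligned SImode word.  */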
13442void
13443rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13444 rtx oldval, rtx newval, rtx mem,
13445 rtx scratch)
13446{
13447 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13448
13449 emit_insn (gen_memory_barrier ());
13450 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13451 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13452 emit_label (XEXP (label1, 0));
13453
13454 emit_load_locked (SImode, scratch, mem);
13455
13456 /* Mask subword within loaded value for comparison with oldval.
13457 Use UNSPEC_AND to avoid clobber.  */
13458 emit_insn (gen_rtx_SET (SImode, dest,
13459 gen_rtx_UNSPEC (SImode,
13460 gen_rtvec (2, scratch, mask),
13461 UNSPEC_AND)));
13462
13463 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13464 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13465
13466 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13467 emit_unlikely_jump (x, label2);
13468
13469 /* Clear subword within loaded value for insertion of new value. */
13470 emit_insn (gen_rtx_SET (SImode, scratch,
13471 gen_rtx_AND (SImode,
13472 gen_rtx_NOT (SImode, mask), scratch)));
13473 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13474 emit_store_conditional (SImode, cond, mem, scratch);
13475
13476 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13477 emit_unlikely_jump (x, label1);
13478
13479 emit_insn (gen_isync ());
13480 emit_label (XEXP (label2, 0));
13481}
13482
13483
b52110d4 13484 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13485 multi-register moves. It will emit at most one instruction for
13486 each register that is accessed; that is, it won't emit li/lis pairs
13487 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13488 register. */
46c07df8 13489
46c07df8 13490void
a9baceb1 13491rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13492{
a9baceb1
GK
13493 /* The register number of the first register being moved. */
13494 int reg;
13495 /* The mode that is to be moved. */
13496 enum machine_mode mode;
13497 /* The mode that the move is being done in, and its size. */
13498 enum machine_mode reg_mode;
13499 int reg_mode_size;
13500 /* The number of registers that will be moved. */
13501 int nregs;
13502
13503 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13504 mode = GET_MODE (dst);
c8b622ff 13505 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13506 if (FP_REGNO_P (reg))
7393f7f8 13507 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13508 else if (ALTIVEC_REGNO_P (reg))
13509 reg_mode = V16QImode;
8521c414
JM
13510 else if (TARGET_E500_DOUBLE && mode == TFmode)
13511 reg_mode = DFmode;
a9baceb1
GK
13512 else
13513 reg_mode = word_mode;
13514 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13515
37409796 13516 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13517
a9baceb1
GK
13518 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13519 {
13520 /* Move register range backwards, if we might have destructive
13521 overlap. */
13522 int i;
13523 for (i = nregs - 1; i >= 0; i--)
f676971a 13524 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13525 simplify_gen_subreg (reg_mode, dst, mode,
13526 i * reg_mode_size),
13527 simplify_gen_subreg (reg_mode, src, mode,
13528 i * reg_mode_size)));
13529 }
46c07df8
HP
13530 else
13531 {
a9baceb1
GK
13532 int i;
13533 int j = -1;
13534 bool used_update = false;
46c07df8 13535
c1e55850 13536 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13537 {
13538 rtx breg;
3a1f863f 13539
a9baceb1
GK
13540 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13541 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13542 {
13543 rtx delta_rtx;
a9baceb1 13544 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13545 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13546 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13547 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13548 emit_insn (TARGET_32BIT
13549 ? gen_addsi3 (breg, breg, delta_rtx)
13550 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13551 src = replace_equiv_address (src, breg);
3a1f863f 13552 }
d04b6e6e 13553 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13554 {
13e2e16e 13555 rtx basereg;
c1e55850
GK
13556 basereg = gen_rtx_REG (Pmode, reg);
13557 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13558 src = replace_equiv_address (src, basereg);
c1e55850 13559 }
3a1f863f 13560
0423421f
AM
13561 breg = XEXP (src, 0);
13562 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13563 breg = XEXP (breg, 0);
13564
13565 /* If the base register we are using to address memory is
13566 also a destination reg, then change that register last. */
13567 if (REG_P (breg)
13568 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13569 && REGNO (breg) < REGNO (dst) + nregs)
13570 j = REGNO (breg) - REGNO (dst);
c4ad648e 13571 }
46c07df8 13572
a9baceb1 13573 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13574 {
13575 rtx breg;
13576
a9baceb1
GK
13577 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13578 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13579 {
13580 rtx delta_rtx;
a9baceb1 13581 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13582 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13583 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13584 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13585
13586 /* We have to update the breg before doing the store.
13587 Use store with update, if available. */
13588
13589 if (TARGET_UPDATE)
13590 {
a9baceb1 13591 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13592 emit_insn (TARGET_32BIT
13593 ? (TARGET_POWERPC64
13594 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13595 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13596 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13597 used_update = true;
3a1f863f
DE
13598 }
13599 else
a9baceb1
GK
13600 emit_insn (TARGET_32BIT
13601 ? gen_addsi3 (breg, breg, delta_rtx)
13602 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13603 dst = replace_equiv_address (dst, breg);
3a1f863f 13604 }
37409796 13605 else
d04b6e6e 13606 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13607 }
13608
46c07df8 13609 for (i = 0; i < nregs; i++)
f676971a 13610 {
3a1f863f
DE
13611 /* Calculate index to next subword. */
13612 ++j;
f676971a 13613 if (j == nregs)
3a1f863f 13614 j = 0;
46c07df8 13615
112cdef5 13616 /* If compiler already emitted move of first word by
a9baceb1 13617 store with update, no need to do anything. */
3a1f863f 13618 if (j == 0 && used_update)
a9baceb1 13619 continue;
f676971a 13620
a9baceb1
GK
13621 emit_insn (gen_rtx_SET (VOIDmode,
13622 simplify_gen_subreg (reg_mode, dst, mode,
13623 j * reg_mode_size),
13624 simplify_gen_subreg (reg_mode, src, mode,
13625 j * reg_mode_size)));
3a1f863f 13626 }
46c07df8
HP
13627 }
13628}
13629
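/* Illustrative example: on a 32-bit target, splitting
   (set (reg:DI 4) (reg:DI 3)) with the routine above yields two SImode
   moves.  Because REGNO (src) < REGNO (dst), the words are emitted in
   reverse order so that r4, shared by source and destination, is read
   before it is overwritten:

       (set (reg:SI 5) (reg:SI 4))
       (set (reg:SI 4) (reg:SI 3))  */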
12a4e8c5 13630\f
a4f6c312
SS
13631/* This page contains routines that are used to determine what the
13632 function prologue and epilogue code will do and write them out. */
9878760c 13633
a4f6c312
SS
13634/* Return the first fixed-point register that is required to be
13635 saved. 32 if none. */
9878760c
RK
13636
13637int
863d938c 13638first_reg_to_save (void)
9878760c
RK
13639{
13640 int first_reg;
13641
13642 /* Find lowest numbered live register. */
13643 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13644 if (df_regs_ever_live_p (first_reg)
a38d360d 13645 && (! call_used_regs[first_reg]
1db02437 13646 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13647 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13648 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13649 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13650 break;
13651
ee890fe2 13652#if TARGET_MACHO
93638d7a
AM
13653 if (flag_pic
13654 && current_function_uses_pic_offset_table
13655 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13656 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13657#endif
13658
9878760c
RK
13659 return first_reg;
13660}
13661
13662/* Similar, for FP regs. */
13663
13664int
863d938c 13665first_fp_reg_to_save (void)
9878760c
RK
13666{
13667 int first_reg;
13668
13669 /* Find lowest numbered live register. */
13670 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13671 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13672 break;
13673
13674 return first_reg;
13675}
00b960c7
AH
13676
13677/* Similar, for AltiVec regs. */
13678
13679static int
863d938c 13680first_altivec_reg_to_save (void)
00b960c7
AH
13681{
13682 int i;
13683
13684 /* Stack frame remains as is unless we are in AltiVec ABI. */
13685 if (! TARGET_ALTIVEC_ABI)
13686 return LAST_ALTIVEC_REGNO + 1;
13687
22fa69da 13688 /* On Darwin, the unwind routines are compiled without
982afe02 13689 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13690 altivec registers when necessary. */
13691 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13692 && ! TARGET_ALTIVEC)
13693 return FIRST_ALTIVEC_REGNO + 20;
13694
00b960c7
AH
13695 /* Find lowest numbered live register. */
13696 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13697 if (df_regs_ever_live_p (i))
00b960c7
AH
13698 break;
13699
13700 return i;
13701}
13702
13703/* Return a 32-bit mask of the AltiVec registers we need to set in
13704 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
13705 the 32-bit word is 0. */
13706
13707static unsigned int
863d938c 13708compute_vrsave_mask (void)
00b960c7
AH
13709{
13710 unsigned int i, mask = 0;
13711
22fa69da 13712 /* On Darwin, the unwind routines are compiled without
982afe02 13713 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13714 call-saved altivec registers when necessary. */
13715 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13716 && ! TARGET_ALTIVEC)
13717 mask |= 0xFFF;
13718
00b960c7
AH
13719 /* First, find out if we use _any_ altivec registers. */
13720 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13721 if (df_regs_ever_live_p (i))
00b960c7
AH
13722 mask |= ALTIVEC_REG_BIT (i);
13723
13724 if (mask == 0)
13725 return mask;
13726
00b960c7
AH
13727 /* Next, remove the argument registers from the set. These must
13728 be in the VRSAVE mask set by the caller, so we don't need to add
13729 them in again. More importantly, the mask we compute here is
13730 used to generate CLOBBERs in the set_vrsave insn, and we do not
13731 wish the argument registers to die. */
a6cf80f2 13732 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13733 mask &= ~ALTIVEC_REG_BIT (i);
13734
13735 /* Similarly, remove the return value from the set. */
13736 {
13737 bool yes = false;
13738 diddle_return_value (is_altivec_return_reg, &yes);
13739 if (yes)
13740 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13741 }
13742
13743 return mask;
13744}
13745
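/* Illustrative example: if V20 is the only live AltiVec register, the
   mask computed above has bit 20 set, counting from the most
   significant bit as described, i.e. 0x00000800, before the argument
   and return-value registers are removed from it.  */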
d62294f5 13746/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13747 size of prologues/epilogues by calling our own save/restore-the-world
13748 routines. */
d62294f5
FJ
13749
13750static void
f57fe068
AM
13751compute_save_world_info (rs6000_stack_t *info_ptr)
13752{
13753 info_ptr->world_save_p = 1;
13754 info_ptr->world_save_p
13755 = (WORLD_SAVE_P (info_ptr)
13756 && DEFAULT_ABI == ABI_DARWIN
13757 && ! (current_function_calls_setjmp && flag_exceptions)
13758 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13759 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13760 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13761 && info_ptr->cr_save_p);
f676971a 13762
d62294f5
FJ
13763 /* This will not work in conjunction with sibcalls. Make sure there
13764 are none. (This check is expensive, but seldom executed.) */
f57fe068 13765 if (WORLD_SAVE_P (info_ptr))
f676971a 13766 {
d62294f5
FJ
13767 rtx insn;
13768 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13769 if ( GET_CODE (insn) == CALL_INSN
13770 && SIBLING_CALL_P (insn))
13771 {
13772 info_ptr->world_save_p = 0;
13773 break;
13774 }
d62294f5 13775 }
f676971a 13776
f57fe068 13777 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13778 {
13779 /* Even if we're not touching VRsave, make sure there's room on the
13780 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13781 will attempt to save it. */
d62294f5
FJ
13782 info_ptr->vrsave_size = 4;
13783
13784 /* "Save" the VRsave register too if we're saving the world. */
13785 if (info_ptr->vrsave_mask == 0)
c4ad648e 13786 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13787
13788 /* Because the Darwin register save/restore routines only handle
c4ad648e 13789 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 13790 check. */
37409796
NS
13791 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
13792 && (info_ptr->first_altivec_reg_save
13793 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 13794 }
f676971a 13795 return;
d62294f5
FJ
13796}
13797
13798
00b960c7 13799static void
a2369ed3 13800is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
13801{
13802 bool *yes = (bool *) xyes;
13803 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
13804 *yes = true;
13805}
13806
4697a36c
MM
13807\f
13808/* Calculate the stack information for the current function. This is
13809 complicated by having two separate calling sequences, the AIX calling
13810 sequence and the V.4 calling sequence.
13811
592696dd 13812 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 13813 32-bit 64-bit
4697a36c 13814 SP----> +---------------------------------------+
a260abc9 13815 | back chain to caller | 0 0
4697a36c 13816 +---------------------------------------+
a260abc9 13817 | saved CR | 4 8 (8-11)
4697a36c 13818 +---------------------------------------+
a260abc9 13819 | saved LR | 8 16
4697a36c 13820 +---------------------------------------+
a260abc9 13821 | reserved for compilers | 12 24
4697a36c 13822 +---------------------------------------+
a260abc9 13823 | reserved for binders | 16 32
4697a36c 13824 +---------------------------------------+
a260abc9 13825 | saved TOC pointer | 20 40
4697a36c 13826 +---------------------------------------+
a260abc9 13827 | Parameter save area (P) | 24 48
4697a36c 13828 +---------------------------------------+
a260abc9 13829 | Alloca space (A) | 24+P etc.
802a0058 13830 +---------------------------------------+
a7df97e6 13831 | Local variable space (L) | 24+P+A
4697a36c 13832 +---------------------------------------+
a7df97e6 13833 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 13834 +---------------------------------------+
00b960c7
AH
13835 | Save area for AltiVec registers (W) | 24+P+A+L+X
13836 +---------------------------------------+
13837 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
13838 +---------------------------------------+
13839 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 13840 +---------------------------------------+
00b960c7
AH
13841 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
13842 +---------------------------------------+
13843 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
13844 +---------------------------------------+
13845 old SP->| back chain to caller's caller |
13846 +---------------------------------------+
13847
5376a30c
KR
13848 The required alignment for AIX configurations is two words (i.e., 8
13849 or 16 bytes).
13850
13851
4697a36c
MM
13852 V.4 stack frames look like:
13853
13854 SP----> +---------------------------------------+
13855 | back chain to caller | 0
13856 +---------------------------------------+
5eb387b8 13857 | caller's saved LR | 4
4697a36c
MM
13858 +---------------------------------------+
13859 | Parameter save area (P) | 8
13860 +---------------------------------------+
a7df97e6 13861 | Alloca space (A) | 8+P
f676971a 13862 +---------------------------------------+
a7df97e6 13863 | Varargs save area (V) | 8+P+A
f676971a 13864 +---------------------------------------+
a7df97e6 13865 | Local variable space (L) | 8+P+A+V
f676971a 13866 +---------------------------------------+
a7df97e6 13867 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 13868 +---------------------------------------+
00b960c7
AH
13869 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
13870 +---------------------------------------+
13871 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
13872 +---------------------------------------+
13873 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
13874 +---------------------------------------+
c4ad648e
AM
13875 | SPE: area for 64-bit GP registers |
13876 +---------------------------------------+
13877 | SPE alignment padding |
13878 +---------------------------------------+
00b960c7 13879 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 13880 +---------------------------------------+
00b960c7 13881 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 13882 +---------------------------------------+
00b960c7 13883 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
13884 +---------------------------------------+
13885 old SP->| back chain to caller's caller |
13886 +---------------------------------------+
b6c9286a 13887
5376a30c
KR
13888 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13889 given. (But note below and in sysv4.h that we require only 8 and
13890 may round up the size of our stack frame anyway. The historical
13891 reason is early versions of powerpc-linux which didn't properly
13892 align the stack at program startup. A happy side-effect is that
13893 -mno-eabi libraries can be used with -meabi programs.)
13894
50d440bc 13895 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
13896 the stack alignment requirements may differ. If -mno-eabi is not
13897 given, the required stack alignment is 8 bytes; if -mno-eabi is
13898 given, the required alignment is 16 bytes. (But see V.4 comment
13899 above.) */
4697a36c 13900
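/* Small worked example (illustrative): a 32-bit AIX-ABI function whose
   lowest saved registers are GPR30 and FPR30 gets, from
   rs6000_stack_info below,

       fp_size        = 8 * (64 - 62) = 16
       gp_size        = 4 * (32 - 30) = 8
       fp_save_offset = -16            (FPRs highest in the frame)
       gp_save_offset = -16 - 8 = -24  (GPRs just below them)

   where the offsets are negative displacements from the incoming (old)
   stack pointer shown in the diagrams above.  */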
61b2fbe7
MM
13901#ifndef ABI_STACK_BOUNDARY
13902#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13903#endif
13904
d1d0c603 13905static rs6000_stack_t *
863d938c 13906rs6000_stack_info (void)
4697a36c 13907{
022123e6 13908 static rs6000_stack_t info;
4697a36c 13909 rs6000_stack_t *info_ptr = &info;
327e5343 13910 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 13911 int ehrd_size;
64045029 13912 int save_align;
44688022 13913 HOST_WIDE_INT non_fixed_size;
4697a36c 13914
022123e6 13915 memset (&info, 0, sizeof (info));
4697a36c 13916
c19de7aa
AH
13917 if (TARGET_SPE)
13918 {
13919 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 13920 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
13921 cfun->machine->insn_chain_scanned_p
13922 = spe_func_has_64bit_regs_p () + 1;
13923 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
13924 }
13925
a4f6c312 13926 /* Select which calling sequence. */
178274da 13927 info_ptr->abi = DEFAULT_ABI;
9878760c 13928
a4f6c312 13929 /* Calculate which registers need to be saved & save area size. */
4697a36c 13930 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 13931 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 13932 even if it currently looks like we won't. */
2bfcf297 13933 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
13934 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
13935 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
13936 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
13937 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
13938 else
13939 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 13940
a3170dc6
AH
13941 /* For the SPE, we have an additional upper 32-bits on each GPR.
13942 Ideally we should save the entire 64-bits only when the upper
13943 half is used in SIMD instructions. Since we only record
13944 registers live (not the size they are used in), this proves
13945 difficult because we'd have to traverse the instruction chain at
13946 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
13947 so we opt to save the GPRs in 64-bits whenever even one register
13948 gets used in 64-bits. Otherwise, all the registers in the frame
13949 get saved in 32-bits.
a3170dc6 13950
c19de7aa 13951 So... when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 13952 traditional GP save area will be empty. */
c19de7aa 13953 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13954 info_ptr->gp_size = 0;
13955
4697a36c
MM
13956 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13957 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13958
00b960c7
AH
13959 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13960 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13961 - info_ptr->first_altivec_reg_save);
13962
592696dd 13963 /* Does this function call anything? */
71f123ca
FS
13964 info_ptr->calls_p = (! current_function_is_leaf
13965 || cfun->machine->ra_needs_full_frame);
b6c9286a 13966
a4f6c312 13967 /* Determine if we need to save the link register. */
022123e6
AM
13968 if ((DEFAULT_ABI == ABI_AIX
13969 && current_function_profile
13970 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
13971#ifdef TARGET_RELOCATABLE
13972 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13973#endif
13974 || (info_ptr->first_fp_reg_save != 64
13975 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 13976 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 13977 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
13978 || info_ptr->calls_p
13979 || rs6000_ra_ever_killed ())
4697a36c
MM
13980 {
13981 info_ptr->lr_save_p = 1;
1de43f85 13982 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
13983 }
13984
9ebbca7d 13985 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
13986 if (df_regs_ever_live_p (CR2_REGNO)
13987 || df_regs_ever_live_p (CR3_REGNO)
13988 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
13989 {
13990 info_ptr->cr_save_p = 1;
178274da 13991 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
13992 info_ptr->cr_size = reg_size;
13993 }
13994
83720594
RH
13995 /* If the current function calls __builtin_eh_return, then we need
13996 to allocate stack space for registers that will hold data for
13997 the exception handler. */
13998 if (current_function_calls_eh_return)
13999 {
14000 unsigned int i;
14001 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14002 continue;
a3170dc6
AH
14003
14004 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14005 ehrd_size = i * (TARGET_SPE_ABI
14006 && info_ptr->spe_64bit_regs_used != 0
14007 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14008 }
14009 else
14010 ehrd_size = 0;
14011
592696dd 14012 /* Determine various sizes. */
4697a36c
MM
14013 info_ptr->reg_size = reg_size;
14014 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14015 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14016 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14017 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14018 if (FRAME_GROWS_DOWNWARD)
14019 info_ptr->vars_size
5b667039
JJ
14020 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14021 + info_ptr->parm_size,
7d5175e1 14022 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14023 - (info_ptr->fixed_size + info_ptr->vars_size
14024 + info_ptr->parm_size);
00b960c7 14025
c19de7aa 14026 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14027 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
14028 else
14029 info_ptr->spe_gp_size = 0;
14030
4d774ff8
HP
14031 if (TARGET_ALTIVEC_ABI)
14032 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14033 else
4d774ff8
HP
14034 info_ptr->vrsave_mask = 0;
14035
14036 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14037 info_ptr->vrsave_size = 4;
14038 else
14039 info_ptr->vrsave_size = 0;
b6c9286a 14040
d62294f5
FJ
14041 compute_save_world_info (info_ptr);
14042
592696dd 14043 /* Calculate the offsets. */
178274da 14044 switch (DEFAULT_ABI)
4697a36c 14045 {
b6c9286a 14046 case ABI_NONE:
24d304eb 14047 default:
37409796 14048 gcc_unreachable ();
b6c9286a
MM
14049
14050 case ABI_AIX:
ee890fe2 14051 case ABI_DARWIN:
b6c9286a
MM
14052 info_ptr->fp_save_offset = - info_ptr->fp_size;
14053 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14054
14055 if (TARGET_ALTIVEC_ABI)
14056 {
14057 info_ptr->vrsave_save_offset
14058 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14059
982afe02 14060 /* Align stack so vector save area is on a quadword boundary.
9278121c 14061 The padding goes above the vectors. */
00b960c7
AH
14062 if (info_ptr->altivec_size != 0)
14063 info_ptr->altivec_padding_size
9278121c 14064 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14065 else
14066 info_ptr->altivec_padding_size = 0;
14067
14068 info_ptr->altivec_save_offset
14069 = info_ptr->vrsave_save_offset
14070 - info_ptr->altivec_padding_size
14071 - info_ptr->altivec_size;
9278121c
GK
14072 gcc_assert (info_ptr->altivec_size == 0
14073 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14074
14075 /* Adjust for AltiVec case. */
14076 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14077 }
14078 else
14079 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14080 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14081 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14082 break;
14083
14084 case ABI_V4:
b6c9286a
MM
14085 info_ptr->fp_save_offset = - info_ptr->fp_size;
14086 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14087 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14088
c19de7aa 14089 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14090 {
14091 /* Align stack so SPE GPR save area is aligned on a
14092 double-word boundary. */
14093 if (info_ptr->spe_gp_size != 0)
14094 info_ptr->spe_padding_size
14095 = 8 - (-info_ptr->cr_save_offset % 8);
14096 else
14097 info_ptr->spe_padding_size = 0;
14098
14099 info_ptr->spe_gp_save_offset
14100 = info_ptr->cr_save_offset
14101 - info_ptr->spe_padding_size
14102 - info_ptr->spe_gp_size;
14103
14104 /* Adjust for SPE case. */
022123e6 14105 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14106 }
a3170dc6 14107 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14108 {
14109 info_ptr->vrsave_save_offset
14110 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14111
14112 /* Align stack so vector save area is on a quadword boundary. */
14113 if (info_ptr->altivec_size != 0)
14114 info_ptr->altivec_padding_size
14115 = 16 - (-info_ptr->vrsave_save_offset % 16);
14116 else
14117 info_ptr->altivec_padding_size = 0;
14118
14119 info_ptr->altivec_save_offset
14120 = info_ptr->vrsave_save_offset
14121 - info_ptr->altivec_padding_size
14122 - info_ptr->altivec_size;
14123
14124 /* Adjust for AltiVec case. */
022123e6 14125 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14126 }
14127 else
022123e6
AM
14128 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14129 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14130 info_ptr->lr_save_offset = reg_size;
14131 break;
4697a36c
MM
14132 }
14133
64045029 14134 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14135 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14136 + info_ptr->gp_size
14137 + info_ptr->altivec_size
14138 + info_ptr->altivec_padding_size
a3170dc6
AH
14139 + info_ptr->spe_gp_size
14140 + info_ptr->spe_padding_size
00b960c7
AH
14141 + ehrd_size
14142 + info_ptr->cr_size
022123e6 14143 + info_ptr->vrsave_size,
64045029 14144 save_align);
00b960c7 14145
44688022 14146 non_fixed_size = (info_ptr->vars_size
ff381587 14147 + info_ptr->parm_size
5b667039 14148 + info_ptr->save_size);
ff381587 14149
44688022
AM
14150 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14151 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14152
14153 /* Determine if we need to allocate any stack frame:
14154
a4f6c312
SS
14155 For AIX we need to push the stack if a frame pointer is needed
14156 (because the stack might be dynamically adjusted), if we are
14157 debugging, if we make calls, or if the sum of fp_save, gp_save,
14158 and local variables are more than the space needed to save all
14159 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14160 + 18*8 = 288 (GPR13 reserved).
ff381587 14161
a4f6c312
SS
14162 For V.4 we don't have the stack cushion that AIX uses, but assume
14163 that the debugger can handle stackless frames. */
ff381587
MM
14164
14165 if (info_ptr->calls_p)
14166 info_ptr->push_p = 1;
14167
178274da 14168 else if (DEFAULT_ABI == ABI_V4)
44688022 14169 info_ptr->push_p = non_fixed_size != 0;
ff381587 14170
178274da
AM
14171 else if (frame_pointer_needed)
14172 info_ptr->push_p = 1;
14173
14174 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14175 info_ptr->push_p = 1;
14176
ff381587 14177 else
44688022 14178 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14179
a4f6c312 14180 /* Zero offsets if we're not saving those registers. */
8dda1a21 14181 if (info_ptr->fp_size == 0)
4697a36c
MM
14182 info_ptr->fp_save_offset = 0;
14183
8dda1a21 14184 if (info_ptr->gp_size == 0)
4697a36c
MM
14185 info_ptr->gp_save_offset = 0;
14186
00b960c7
AH
14187 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14188 info_ptr->altivec_save_offset = 0;
14189
14190 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14191 info_ptr->vrsave_save_offset = 0;
14192
c19de7aa
AH
14193 if (! TARGET_SPE_ABI
14194 || info_ptr->spe_64bit_regs_used == 0
14195 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14196 info_ptr->spe_gp_save_offset = 0;
14197
c81fc13e 14198 if (! info_ptr->lr_save_p)
4697a36c
MM
14199 info_ptr->lr_save_offset = 0;
14200
c81fc13e 14201 if (! info_ptr->cr_save_p)
4697a36c
MM
14202 info_ptr->cr_save_offset = 0;
14203
14204 return info_ptr;
14205}
14206
c19de7aa
AH
14207/* Return true if the current function uses any GPRs in 64-bit SIMD
14208 mode. */
14209
14210static bool
863d938c 14211spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14212{
14213 rtx insns, insn;
14214
14215 /* Functions that save and restore all the call-saved registers will
14216 need to save/restore the registers in 64-bits. */
14217 if (current_function_calls_eh_return
14218 || current_function_calls_setjmp
14219 || current_function_has_nonlocal_goto)
14220 return true;
14221
14222 insns = get_insns ();
14223
14224 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14225 {
14226 if (INSN_P (insn))
14227 {
14228 rtx i;
14229
b5a5beb9
AH
14230 /* FIXME: This should be implemented with attributes...
14231
14232 (set_attr "spe64" "true")....then,
14233 if (get_spe64(insn)) return true;
14234
14235 It's the only reliable way to do the stuff below. */
14236
c19de7aa 14237 i = PATTERN (insn);
f82f556d
AH
14238 if (GET_CODE (i) == SET)
14239 {
14240 enum machine_mode mode = GET_MODE (SET_SRC (i));
14241
14242 if (SPE_VECTOR_MODE (mode))
14243 return true;
17caeff2 14244 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14245 return true;
14246 }
c19de7aa
AH
14247 }
14248 }
14249
14250 return false;
14251}
14252
d1d0c603 14253static void
a2369ed3 14254debug_stack_info (rs6000_stack_t *info)
9878760c 14255{
d330fd93 14256 const char *abi_string;
24d304eb 14257
c81fc13e 14258 if (! info)
4697a36c
MM
14259 info = rs6000_stack_info ();
14260
14261 fprintf (stderr, "\nStack information for function %s:\n",
14262 ((current_function_decl && DECL_NAME (current_function_decl))
14263 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14264 : "<unknown>"));
14265
24d304eb
RK
14266 switch (info->abi)
14267 {
b6c9286a
MM
14268 default: abi_string = "Unknown"; break;
14269 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14270 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14271 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14272 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14273 }
14274
14275 fprintf (stderr, "\tABI = %5s\n", abi_string);
14276
00b960c7
AH
14277 if (TARGET_ALTIVEC_ABI)
14278 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14279
a3170dc6
AH
14280 if (TARGET_SPE_ABI)
14281 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14282
4697a36c
MM
14283 if (info->first_gp_reg_save != 32)
14284 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14285
14286 if (info->first_fp_reg_save != 64)
14287 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14288
00b960c7
AH
14289 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14290 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14291 info->first_altivec_reg_save);
14292
4697a36c
MM
14293 if (info->lr_save_p)
14294 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14295
4697a36c
MM
14296 if (info->cr_save_p)
14297 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14298
00b960c7
AH
14299 if (info->vrsave_mask)
14300 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14301
4697a36c
MM
14302 if (info->push_p)
14303 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14304
14305 if (info->calls_p)
14306 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14307
4697a36c
MM
14308 if (info->gp_save_offset)
14309 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14310
14311 if (info->fp_save_offset)
14312 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14313
00b960c7
AH
14314 if (info->altivec_save_offset)
14315 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14316 info->altivec_save_offset);
14317
a3170dc6
AH
14318 if (info->spe_gp_save_offset)
14319 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14320 info->spe_gp_save_offset);
14321
00b960c7
AH
14322 if (info->vrsave_save_offset)
14323 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14324 info->vrsave_save_offset);
14325
4697a36c
MM
14326 if (info->lr_save_offset)
14327 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14328
14329 if (info->cr_save_offset)
14330 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14331
14332 if (info->varargs_save_offset)
14333 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14334
14335 if (info->total_size)
d1d0c603
JJ
14336 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14337 info->total_size);
4697a36c 14338
4697a36c 14339 if (info->vars_size)
d1d0c603
JJ
14340 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14341 info->vars_size);
4697a36c
MM
14342
14343 if (info->parm_size)
14344 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14345
14346 if (info->fixed_size)
14347 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14348
14349 if (info->gp_size)
14350 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14351
a3170dc6
AH
14352 if (info->spe_gp_size)
14353 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14354
4697a36c
MM
14355 if (info->fp_size)
14356 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14357
00b960c7
AH
14358 if (info->altivec_size)
14359 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14360
14361 if (info->vrsave_size)
14362 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14363
14364 if (info->altivec_padding_size)
14365 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14366 info->altivec_padding_size);
14367
a3170dc6
AH
14368 if (info->spe_padding_size)
14369 fprintf (stderr, "\tspe_padding_size = %5d\n",
14370 info->spe_padding_size);
14371
4697a36c
MM
14372 if (info->cr_size)
14373 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14374
14375 if (info->save_size)
14376 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14377
14378 if (info->reg_size != 4)
14379 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14380
14381 fprintf (stderr, "\n");
9878760c 14382}
71f123ca
FS
14383
14384rtx
a2369ed3 14385rs6000_return_addr (int count, rtx frame)
71f123ca 14386{
a4f6c312
SS
14387 /* Currently we don't optimize very well between prolog and body
14388 code and for PIC code the code can be actually quite bad, so
14389 don't try to be too clever here. */
f1384257 14390 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14391 {
14392 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14393
14394 return
14395 gen_rtx_MEM
14396 (Pmode,
14397 memory_address
14398 (Pmode,
14399 plus_constant (copy_to_reg
14400 (gen_rtx_MEM (Pmode,
14401 memory_address (Pmode, frame))),
14402 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14403 }
14404
8c29550d 14405 cfun->machine->ra_need_lr = 1;
1de43f85 14406 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14407}
14408
5e1bf043
DJ
14409/* Say whether a function is a candidate for sibcall handling or not.
14410 We do not allow indirect calls to be optimized into sibling calls.
14411 Also, we can't do it if there are any vector parameters; there's
14412 nowhere to put the VRsave code so it works; note that functions with
14413 vector parameters are required to have a prototype, so the argument
14414 type info must be available here. (The tail recursion case can work
14415 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14416static bool
a2369ed3 14417rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14418{
14419 tree type;
4977bab6 14420 if (decl)
5e1bf043
DJ
14421 {
14422 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14423 {
4977bab6 14424 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14425 type; type = TREE_CHAIN (type))
14426 {
c15b529f 14427 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14428 return false;
5e1bf043 14429 }
c4ad648e 14430 }
5e1bf043 14431 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14432 || ((*targetm.binds_local_p) (decl)
14433 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14434 {
4977bab6 14435 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14436
14437 if (!lookup_attribute ("longcall", attr_list)
14438 || lookup_attribute ("shortcall", attr_list))
4977bab6 14439 return true;
2bcc50d0 14440 }
5e1bf043 14441 }
4977bab6 14442 return false;
5e1bf043
DJ
14443}
14444
e7e64a25
AS
14445/* Return NULL if INSN is valid within a low-overhead loop.
14446 Otherwise return why doloop cannot be applied.
9419649c
DE
14447 PowerPC uses the COUNT register for branch on table instructions. */
14448
e7e64a25 14449static const char *
3101faab 14450rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14451{
14452 if (CALL_P (insn))
e7e64a25 14453 return "Function call in the loop.";
9419649c
DE
14454
14455 if (JUMP_P (insn)
14456 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14457 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14458 return "Computed branch in the loop.";
9419649c 14459
e7e64a25 14460 return NULL;
9419649c
DE
14461}
14462
71f123ca 14463static int
863d938c 14464rs6000_ra_ever_killed (void)
71f123ca
FS
14465{
14466 rtx top;
5e1bf043
DJ
14467 rtx reg;
14468 rtx insn;
71f123ca 14469
dd292d0a 14470 if (current_function_is_thunk)
71f123ca 14471 return 0;
eb0424da 14472
36f7e964
AH
14473 /* regs_ever_live has LR marked as used if any sibcalls are present,
14474 but this should not force saving and restoring in the
14475 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14476 clobbers LR, so that is inappropriate. */
36f7e964 14477
5e1bf043
DJ
14478 /* Also, the prologue can generate a store into LR that
14479 doesn't really count, like this:
36f7e964 14480
5e1bf043
DJ
14481 move LR->R0
14482 bcl to set PIC register
14483 move LR->R31
14484 move R0->LR
36f7e964
AH
14485
14486 When we're called from the epilogue, we need to avoid counting
14487 this as a store. */
f676971a 14488
71f123ca
FS
14489 push_topmost_sequence ();
14490 top = get_insns ();
14491 pop_topmost_sequence ();
1de43f85 14492 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14493
5e1bf043
DJ
14494 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14495 {
14496 if (INSN_P (insn))
14497 {
022123e6
AM
14498 if (CALL_P (insn))
14499 {
14500 if (!SIBLING_CALL_P (insn))
14501 return 1;
14502 }
1de43f85 14503 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14504 return 1;
36f7e964
AH
14505 else if (set_of (reg, insn) != NULL_RTX
14506 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14507 return 1;
14508 }
14509 }
14510 return 0;
71f123ca 14511}
4697a36c 14512\f
9ebbca7d 14513/* Emit instructions needed to load the TOC register.
c7ca610e 14514 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14515 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14516
14517void
a2369ed3 14518rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14519{
6fb5fa3c 14520 rtx dest;
1db02437 14521 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14522
7f970b70 14523 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14524 {
7f970b70 14525 char buf[30];
e65a3857 14526 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14527
14528 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14529 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14530 if (flag_pic == 2)
14531 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14532 else
14533 got = rs6000_got_sym ();
14534 tmp1 = tmp2 = dest;
14535 if (!fromprolog)
14536 {
14537 tmp1 = gen_reg_rtx (Pmode);
14538 tmp2 = gen_reg_rtx (Pmode);
14539 }
6fb5fa3c
DB
14540 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14541 emit_move_insn (tmp1,
1de43f85 14542 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14543 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14544 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14545 }
14546 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14547 {
6fb5fa3c 14548 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14549 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14550 }
14551 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14552 {
14553 char buf[30];
20b71b17
AM
14554 rtx temp0 = (fromprolog
14555 ? gen_rtx_REG (Pmode, 0)
14556 : gen_reg_rtx (Pmode));
20b71b17 14557
20b71b17
AM
14558 if (fromprolog)
14559 {
ccbca5e4 14560 rtx symF, symL;
38c1f2d7 14561
20b71b17
AM
14562 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14563 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14564
20b71b17
AM
14565 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14566 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14567
6fb5fa3c
DB
14568 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14569 emit_move_insn (dest,
1de43f85 14570 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14571 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14572 }
14573 else
20b71b17
AM
14574 {
14575 rtx tocsym;
20b71b17
AM
14576
14577 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14578 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14579 emit_move_insn (dest,
1de43f85 14580 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14581 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14582 }
6fb5fa3c 14583 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14584 }
20b71b17
AM
14585 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14586 {
14587 /* This is for AIX code running in non-PIC ELF32. */
14588 char buf[30];
14589 rtx realsym;
14590 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14591 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14592
6fb5fa3c
DB
14593 emit_insn (gen_elf_high (dest, realsym));
14594 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14595 }
37409796 14596 else
9ebbca7d 14597 {
37409796 14598 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14599
9ebbca7d 14600 if (TARGET_32BIT)
6fb5fa3c 14601 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14602 else
6fb5fa3c 14603 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14604 }
14605}
14606
d1d0c603
JJ
14607/* Emit instructions to restore the link register after determining where
14608 its value has been stored. */
14609
14610void
14611rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14612{
14613 rs6000_stack_t *info = rs6000_stack_info ();
14614 rtx operands[2];
14615
14616 operands[0] = source;
14617 operands[1] = scratch;
14618
14619 if (info->lr_save_p)
14620 {
14621 rtx frame_rtx = stack_pointer_rtx;
14622 HOST_WIDE_INT sp_offset = 0;
14623 rtx tmp;
14624
14625 if (frame_pointer_needed
14626 || current_function_calls_alloca
14627 || info->total_size > 32767)
14628 {
0be76840 14629 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14630 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14631 frame_rtx = operands[1];
14632 }
14633 else if (info->push_p)
14634 sp_offset = info->total_size;
14635
14636 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14637 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14638 emit_move_insn (tmp, operands[0]);
14639 }
14640 else
1de43f85 14641 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14642}
14643
4862826d 14644static GTY(()) alias_set_type set = -1;
f103e34d 14645
4862826d 14646alias_set_type
863d938c 14647get_TOC_alias_set (void)
9ebbca7d 14648{
f103e34d
GK
14649 if (set == -1)
14650 set = new_alias_set ();
14651 return set;
f676971a 14652}
9ebbca7d 14653
c1207243 14654/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14655 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14656 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14657#if TARGET_ELF
3c9eb5f4 14658static int
f676971a 14659uses_TOC (void)
9ebbca7d 14660{
c4501e62 14661 rtx insn;
38c1f2d7 14662
c4501e62
JJ
14663 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14664 if (INSN_P (insn))
14665 {
14666 rtx pat = PATTERN (insn);
14667 int i;
9ebbca7d 14668
f676971a 14669 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14670 for (i = 0; i < XVECLEN (pat, 0); i++)
14671 {
14672 rtx sub = XVECEXP (pat, 0, i);
14673 if (GET_CODE (sub) == USE)
14674 {
14675 sub = XEXP (sub, 0);
14676 if (GET_CODE (sub) == UNSPEC
14677 && XINT (sub, 1) == UNSPEC_TOC)
14678 return 1;
14679 }
14680 }
14681 }
14682 return 0;
9ebbca7d 14683}
c954844a 14684#endif
38c1f2d7 14685
9ebbca7d 14686rtx
f676971a 14687create_TOC_reference (rtx symbol)
9ebbca7d 14688{
b3a13419 14689 if (!can_create_pseudo_p ())
6fb5fa3c 14690 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14691 return gen_rtx_PLUS (Pmode,
a8a05998 14692 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14693 gen_rtx_CONST (Pmode,
14694 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14695 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14696}
38c1f2d7 14697
fc4767bb
JJ
14698/* If _Unwind_* has been called from within the same module,
14699 toc register is not guaranteed to be saved to 40(1) on function
14700 entry. Save it there in that case. */
c7ca610e 14701
9ebbca7d 14702void
863d938c 14703rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14704{
14705 rtx mem;
14706 rtx stack_top = gen_reg_rtx (Pmode);
14707 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14708 rtx opcode = gen_reg_rtx (SImode);
14709 rtx tocompare = gen_reg_rtx (SImode);
14710 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14711
8308679f 14712 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14713 emit_move_insn (stack_top, mem);
14714
8308679f
DE
14715 mem = gen_frame_mem (Pmode,
14716 gen_rtx_PLUS (Pmode, stack_top,
14717 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14718 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14719 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14720 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14721 : 0xE8410028, SImode));
9ebbca7d 14722
fc4767bb 14723 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14724 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14725 no_toc_save_needed);
9ebbca7d 14726
8308679f
DE
14727 mem = gen_frame_mem (Pmode,
14728 gen_rtx_PLUS (Pmode, stack_top,
14729 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14730 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14731 emit_label (no_toc_save_needed);
9ebbca7d 14732}
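/* Note (illustrative decoding): the two TOCOMPARE constants above are
   the TOC reload the linker places after a cross-module call --
   0x80410014 is "lwz r2,20(r1)" for 32-bit and 0xE8410028 is
   "ld r2,40(r1)" for 64-bit.  If the instruction at the saved return
   address is not that reload, the call was intramodule and the TOC was
   never saved, so the code above stores r2 into the TOC save slot
   itself.  */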
38c1f2d7 14733\f
0be76840
DE
14734/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14735 and the change to the stack pointer. */
ba4828e0 14736
9ebbca7d 14737static void
863d938c 14738rs6000_emit_stack_tie (void)
9ebbca7d 14739{
0be76840
DE
14740 rtx mem = gen_frame_mem (BLKmode,
14741 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14742
9ebbca7d
GK
14743 emit_insn (gen_stack_tie (mem));
14744}
38c1f2d7 14745
9ebbca7d
GK
14746/* Emit the correct code for allocating stack space, as insns.
14747 If COPY_R12, make sure a copy of the old frame is left in r12.
14748 The generated code may use hard register 0 as a temporary. */
14749
14750static void
a2369ed3 14751rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14752{
9ebbca7d
GK
14753 rtx insn;
14754 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14755 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14756 rtx todec = gen_int_mode (-size, Pmode);
14757
14758 if (INTVAL (todec) != -size)
14759 {
d4ee4d25 14760 warning (0, "stack frame too large");
61168ff1
RS
14761 emit_insn (gen_trap ());
14762 return;
14763 }
a157febd
GK
14764
14765 if (current_function_limit_stack)
14766 {
14767 if (REG_P (stack_limit_rtx)
f676971a 14768 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14769 && REGNO (stack_limit_rtx) <= 31)
14770 {
5b71a4e7 14771 emit_insn (TARGET_32BIT
9ebbca7d
GK
14772 ? gen_addsi3 (tmp_reg,
14773 stack_limit_rtx,
14774 GEN_INT (size))
14775 : gen_adddi3 (tmp_reg,
14776 stack_limit_rtx,
14777 GEN_INT (size)));
5b71a4e7 14778
9ebbca7d
GK
14779 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14780 const0_rtx));
a157febd
GK
14781 }
14782 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 14783 && TARGET_32BIT
f607bc57 14784 && DEFAULT_ABI == ABI_V4)
a157febd 14785 {
9ebbca7d 14786 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
14787 gen_rtx_PLUS (Pmode,
14788 stack_limit_rtx,
9ebbca7d 14789 GEN_INT (size)));
5b71a4e7 14790
9ebbca7d
GK
14791 emit_insn (gen_elf_high (tmp_reg, toload));
14792 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
14793 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14794 const0_rtx));
a157febd
GK
14795 }
14796 else
d4ee4d25 14797 warning (0, "stack limit expression is not supported");
a157febd
GK
14798 }
14799
9ebbca7d
GK
14800 if (copy_r12 || ! TARGET_UPDATE)
14801 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
14802
38c1f2d7
MM
14803 if (TARGET_UPDATE)
14804 {
9ebbca7d 14805 if (size > 32767)
38c1f2d7 14806 {
9ebbca7d 14807 /* Need a note here so that try_split doesn't get confused. */
9390387d 14808 if (get_last_insn () == NULL_RTX)
2e040219 14809 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
14810 insn = emit_move_insn (tmp_reg, todec);
14811 try_split (PATTERN (insn), insn, 0);
14812 todec = tmp_reg;
38c1f2d7 14813 }
5b71a4e7
DE
14814
14815 insn = emit_insn (TARGET_32BIT
14816 ? gen_movsi_update (stack_reg, stack_reg,
14817 todec, stack_reg)
c4ad648e 14818 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 14819 todec, stack_reg));
38c1f2d7
MM
14820 }
14821 else
14822 {
5b71a4e7
DE
14823 insn = emit_insn (TARGET_32BIT
14824 ? gen_addsi3 (stack_reg, stack_reg, todec)
14825 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
14826 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
14827 gen_rtx_REG (Pmode, 12));
14828 }
f676971a 14829
9ebbca7d 14830 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 14831 REG_NOTES (insn) =
9ebbca7d 14832 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 14833 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
14834 gen_rtx_PLUS (Pmode, stack_reg,
14835 GEN_INT (-size))),
14836 REG_NOTES (insn));
14837}
14838
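/* Illustrative sketch: for a small 32-bit frame with TARGET_UPDATE,
   the allocation above is a single store-with-update that also writes
   the back chain,

       stwu r1,-size(r1)

   Frames larger than 32767 bytes first load -size into r0 and use the
   indexed form (stwux), while -mno-update targets adjust r1 with an
   add and then store the old stack pointer, copied to r12, as the back
   chain explicitly.  */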
a4f6c312
SS
14839/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14840 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14841 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
14842 deduce these equivalences by itself so it wasn't necessary to hold
14843 its hand so much. */
9ebbca7d
GK
14844
14845static void
f676971a 14846rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 14847 rtx reg2, rtx rreg)
9ebbca7d
GK
14848{
14849 rtx real, temp;
14850
e56c4463
JL
14851 /* copy_rtx will not make unique copies of registers, so we need to
14852 ensure we don't have unwanted sharing here. */
14853 if (reg == reg2)
14854 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14855
14856 if (reg == rreg)
14857 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14858
9ebbca7d
GK
14859 real = copy_rtx (PATTERN (insn));
14860
89e7058f
AH
14861 if (reg2 != NULL_RTX)
14862 real = replace_rtx (real, reg2, rreg);
f676971a
EC
14863
14864 real = replace_rtx (real, reg,
9ebbca7d
GK
14865 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14866 STACK_POINTER_REGNUM),
14867 GEN_INT (val)));
f676971a 14868
9ebbca7d
GK
14869 /* We expect that 'real' is either a SET or a PARALLEL containing
14870 SETs (and possibly other stuff). In a PARALLEL, all the SETs
14871 are important so they all have to be marked RTX_FRAME_RELATED_P. */
14872
14873 if (GET_CODE (real) == SET)
14874 {
14875 rtx set = real;
f676971a 14876
9ebbca7d
GK
14877 temp = simplify_rtx (SET_SRC (set));
14878 if (temp)
14879 SET_SRC (set) = temp;
14880 temp = simplify_rtx (SET_DEST (set));
14881 if (temp)
14882 SET_DEST (set) = temp;
14883 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 14884 {
9ebbca7d
GK
14885 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14886 if (temp)
14887 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 14888 }
38c1f2d7 14889 }
37409796 14890 else
9ebbca7d
GK
14891 {
14892 int i;
37409796
NS
14893
14894 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
14895 for (i = 0; i < XVECLEN (real, 0); i++)
14896 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14897 {
14898 rtx set = XVECEXP (real, 0, i);
f676971a 14899
9ebbca7d
GK
14900 temp = simplify_rtx (SET_SRC (set));
14901 if (temp)
14902 SET_SRC (set) = temp;
14903 temp = simplify_rtx (SET_DEST (set));
14904 if (temp)
14905 SET_DEST (set) = temp;
14906 if (GET_CODE (SET_DEST (set)) == MEM)
14907 {
14908 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14909 if (temp)
14910 XEXP (SET_DEST (set), 0) = temp;
14911 }
14912 RTX_FRAME_RELATED_P (set) = 1;
14913 }
14914 }
c19de7aa
AH
14915
14916 if (TARGET_SPE)
14917 real = spe_synthesize_frame_save (real);
14918
9ebbca7d
GK
14919 RTX_FRAME_RELATED_P (insn) = 1;
14920 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14921 real,
14922 REG_NOTES (insn));
38c1f2d7
MM
14923}
14924
c19de7aa
AH
14925/* Given an SPE frame note, return a PARALLEL of SETs with the
14926 original note, plus a synthetic register save. */
14927
14928static rtx
a2369ed3 14929spe_synthesize_frame_save (rtx real)
c19de7aa
AH
14930{
14931 rtx synth, offset, reg, real2;
14932
14933 if (GET_CODE (real) != SET
14934 || GET_MODE (SET_SRC (real)) != V2SImode)
14935 return real;
14936
 14937	  /* For the SPE, registers saved in 64 bits get a PARALLEL for their
 14938	     frame-related note.  The parallel contains a set of the register
41f3a930 14939	     being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
14940 This is so we can differentiate between 64-bit and 32-bit saves.
14941 Words cannot describe this nastiness. */
14942
37409796
NS
14943 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14944 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14945 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
14946
14947 /* Transform:
14948 (set (mem (plus (reg x) (const y)))
14949 (reg z))
14950 into:
14951 (set (mem (plus (reg x) (const y+4)))
41f3a930 14952 (reg z+1200))
c19de7aa
AH
14953 */
14954
14955 real2 = copy_rtx (real);
14956 PUT_MODE (SET_DEST (real2), SImode);
14957 reg = SET_SRC (real2);
14958 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14959 synth = copy_rtx (real2);
14960
14961 if (BYTES_BIG_ENDIAN)
14962 {
14963 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14964 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14965 }
14966
14967 reg = SET_SRC (synth);
41f3a930 14968
c19de7aa 14969 synth = replace_rtx (synth, reg,
41f3a930 14970 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
14971
14972 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14973 synth = replace_rtx (synth, offset,
14974 GEN_INT (INTVAL (offset)
14975 + (BYTES_BIG_ENDIAN ? 0 : 4)));
14976
14977 RTX_FRAME_RELATED_P (synth) = 1;
14978 RTX_FRAME_RELATED_P (real2) = 1;
14979 if (BYTES_BIG_ENDIAN)
14980 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14981 else
14982 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14983
14984 return real;
14985}
14986
00b960c7
AH
14987/* Returns an insn that has a vrsave set operation with the
14988 appropriate CLOBBERs. */
14989
14990static rtx
a2369ed3 14991generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
14992{
14993 int nclobs, i;
14994 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 14995 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 14996
a004eb82
AH
14997 clobs[0]
14998 = gen_rtx_SET (VOIDmode,
14999 vrsave,
15000 gen_rtx_UNSPEC_VOLATILE (SImode,
15001 gen_rtvec (2, reg, vrsave),
3aca4bff 15002 UNSPECV_SET_VRSAVE));
00b960c7
AH
15003
15004 nclobs = 1;
15005
9aa86737
AH
15006 /* We need to clobber the registers in the mask so the scheduler
15007 does not move sets to VRSAVE before sets of AltiVec registers.
15008
15009 However, if the function receives nonlocal gotos, reload will set
15010 all call saved registers live. We will end up with:
15011
15012 (set (reg 999) (mem))
15013 (parallel [ (set (reg vrsave) (unspec blah))
15014 (clobber (reg 999))])
15015
15016 The clobber will cause the store into reg 999 to be dead, and
15017 flow will attempt to delete an epilogue insn. In this case, we
15018 need an unspec use/set of the register. */
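  /* Roughly, the PARALLEL built below looks like

       (parallel [(set (reg:SI vrsave) (unspec_volatile ... UNSPECV_SET_VRSAVE))
                  (clobber (reg:V4SI vN))
                  ...
                  (set (reg:V4SI vM) (unspec:V4SI [(reg:V4SI vM)] 27))])

     with one element for each AltiVec register in info->vrsave_mask.  */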
00b960c7
AH
15019
15020 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15021 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15022 {
15023 if (!epiloguep || call_used_regs [i])
15024 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15025 gen_rtx_REG (V4SImode, i));
15026 else
15027 {
15028 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15029
15030 clobs[nclobs++]
a004eb82
AH
15031 = gen_rtx_SET (VOIDmode,
15032 reg,
15033 gen_rtx_UNSPEC (V4SImode,
15034 gen_rtvec (1, reg), 27));
9aa86737
AH
15035 }
15036 }
00b960c7
AH
15037
15038 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15039
15040 for (i = 0; i < nclobs; ++i)
15041 XVECEXP (insn, 0, i) = clobs[i];
15042
15043 return insn;
15044}
15045
89e7058f
AH
15046/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15047 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
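/* AltiVec and E500-double saves, and SPE saves whose offset does not fit
   the instruction's offset field, load the offset into r11 and use a
   register-indexed address; the frame-related note still records the save
   at the constant offset.  */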
15048
15049static void
f676971a 15050emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15051 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15052{
15053 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15054 rtx replacea, replaceb;
15055
15056 int_rtx = GEN_INT (offset);
15057
15058 /* Some cases that need register indexed addressing. */
15059 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 15060 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15061 || (TARGET_SPE_ABI
15062 && SPE_VECTOR_MODE (mode)
15063 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15064 {
 15065	 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15066 flow path of instructions in the prologue. */
89e7058f
AH
15067 offset_rtx = gen_rtx_REG (Pmode, 11);
15068 emit_move_insn (offset_rtx, int_rtx);
15069
15070 replacea = offset_rtx;
15071 replaceb = int_rtx;
15072 }
15073 else
15074 {
15075 offset_rtx = int_rtx;
15076 replacea = NULL_RTX;
15077 replaceb = NULL_RTX;
15078 }
15079
15080 reg = gen_rtx_REG (mode, regno);
15081 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15082 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15083
15084 insn = emit_move_insn (mem, reg);
15085
15086 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15087}
15088
a3170dc6
AH
15089/* Emit an offset memory reference suitable for a frame store, while
15090 converting to a valid addressing mode. */
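/* For SPE vector and E500-double modes the constant offset is first moved
   into FIXED_SCRATCH and a reg+reg address is formed, since those memory
   instructions cannot encode an arbitrary immediate offset.  */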
15091
15092static rtx
a2369ed3 15093gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15094{
15095 rtx int_rtx, offset_rtx;
15096
15097 int_rtx = GEN_INT (offset);
15098
4d4cbc0e
AH
15099 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
15100 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15101 {
15102 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15103 emit_move_insn (offset_rtx, int_rtx);
15104 }
15105 else
15106 offset_rtx = int_rtx;
15107
0be76840 15108 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15109}
15110
6d0a8091
DJ
 15111/* Look for user-defined global regs.  We should not save and restore these,
 15112   and cannot use stmw/lmw if there are any in their range.  */
15113
15114static bool
15115no_global_regs_above (int first_greg)
15116{
15117 int i;
15118 for (i = 0; i < 32 - first_greg; i++)
15119 if (global_regs[first_greg + i])
15120 return false;
15121 return true;
15122}
15123
699c914a
MS
15124#ifndef TARGET_FIX_AND_CONTINUE
15125#define TARGET_FIX_AND_CONTINUE 0
15126#endif
15127
52ff33d0
NF
15128/* Determine whether the gp REG is really used. */
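/* Roughly: REG is live and call-saved, or REG is the PIC offset table
   register and a TOC or PIC base pointer is needed under the current ABI
   and -fpic settings.  */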
15129
15130static bool
15131rs6000_reg_live_or_pic_offset_p (int reg)
15132{
6fb5fa3c 15133 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15134 && (!call_used_regs[reg]
15135 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15136 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15137 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15138 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15139 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15140}
15141
9ebbca7d
GK
15142/* Emit function prologue as insns. */
15143
9878760c 15144void
863d938c 15145rs6000_emit_prologue (void)
9878760c 15146{
4697a36c 15147 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15148 enum machine_mode reg_mode = Pmode;
327e5343 15149 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15150 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15151 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15152 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15153 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15154 rtx insn;
15155 int saving_FPRs_inline;
15156 int using_store_multiple;
15157 HOST_WIDE_INT sp_offset = 0;
f676971a 15158
699c914a
MS
15159 if (TARGET_FIX_AND_CONTINUE)
15160 {
15161 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15162 address by modifying the first 5 instructions of the function
699c914a
MS
15163 to branch to the overriding function. This is necessary to
15164 permit function pointers that point to the old function to
15165 actually forward to the new function. */
15166 emit_insn (gen_nop ());
15167 emit_insn (gen_nop ());
de2ab0ca 15168 emit_insn (gen_nop ());
699c914a
MS
15169 emit_insn (gen_nop ());
15170 emit_insn (gen_nop ());
15171 }
15172
15173 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15174 {
15175 reg_mode = V2SImode;
15176 reg_size = 8;
15177 }
a3170dc6 15178
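  /* Decide up front whether the GPRs can be saved with a single
     store-multiple instruction and whether the FPRs are saved inline or
     through an out-of-line save routine.  */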
9ebbca7d 15179 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15180 && (!TARGET_SPE_ABI
15181 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15182 && info->first_gp_reg_save < 31
15183 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15184 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15185 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15186 || current_function_calls_eh_return
8c29550d 15187 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15188
15189 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15190 if (! WORLD_SAVE_P (info)
15191 && info->push_p
acd0b319
AM
15192 && (DEFAULT_ABI == ABI_V4
15193 || current_function_calls_eh_return))
9ebbca7d
GK
15194 {
15195 if (info->total_size < 32767)
15196 sp_offset = info->total_size;
15197 else
15198 frame_reg_rtx = frame_ptr_rtx;
f676971a 15199 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15200 (frame_reg_rtx != sp_reg_rtx
15201 && (info->cr_save_p
15202 || info->lr_save_p
15203 || info->first_fp_reg_save < 64
15204 || info->first_gp_reg_save < 32
15205 )));
15206 if (frame_reg_rtx != sp_reg_rtx)
15207 rs6000_emit_stack_tie ();
15208 }
15209
d62294f5 15210 /* Handle world saves specially here. */
f57fe068 15211 if (WORLD_SAVE_P (info))
d62294f5
FJ
15212 {
15213 int i, j, sz;
15214 rtx treg;
15215 rtvec p;
22fa69da 15216 rtx reg0;
d62294f5
FJ
15217
15218 /* save_world expects lr in r0. */
22fa69da 15219 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15220 if (info->lr_save_p)
c4ad648e 15221 {
22fa69da 15222 insn = emit_move_insn (reg0,
1de43f85 15223 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15224 RTX_FRAME_RELATED_P (insn) = 1;
15225 }
d62294f5
FJ
15226
15227 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15228 assumptions about the offsets of various bits of the stack
992d08b1 15229 frame. */
37409796
NS
15230 gcc_assert (info->gp_save_offset == -220
15231 && info->fp_save_offset == -144
15232 && info->lr_save_offset == 8
15233 && info->cr_save_offset == 4
15234 && info->push_p
15235 && info->lr_save_p
15236 && (!current_function_calls_eh_return
15237 || info->ehrd_offset == -432)
15238 && info->vrsave_save_offset == -224
22fa69da 15239 && info->altivec_save_offset == -416);
d62294f5
FJ
15240
15241 treg = gen_rtx_REG (SImode, 11);
15242 emit_move_insn (treg, GEN_INT (-info->total_size));
15243
15244 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15245 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15246
15247 /* Preserve CR2 for save_world prologues */
22fa69da 15248 sz = 5;
d62294f5
FJ
15249 sz += 32 - info->first_gp_reg_save;
15250 sz += 64 - info->first_fp_reg_save;
15251 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15252 p = rtvec_alloc (sz);
15253 j = 0;
15254 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15255 gen_rtx_REG (SImode,
1de43f85 15256 LR_REGNO));
d62294f5 15257 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15258 gen_rtx_SYMBOL_REF (Pmode,
15259 "*save_world"));
d62294f5 15260 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15261 properly. */
15262 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15263 {
15264 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15265 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15266 GEN_INT (info->fp_save_offset
15267 + sp_offset + 8 * i));
0be76840 15268 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15269
15270 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15271 }
d62294f5 15272 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15273 {
15274 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15275 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15276 GEN_INT (info->altivec_save_offset
15277 + sp_offset + 16 * i));
0be76840 15278 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15279
15280 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15281 }
d62294f5 15282 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15283 {
15284 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15285 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15286 GEN_INT (info->gp_save_offset
15287 + sp_offset + reg_size * i));
0be76840 15288 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15289
15290 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15291 }
15292
15293 {
15294 /* CR register traditionally saved as CR2. */
15295 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15296 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15297 GEN_INT (info->cr_save_offset
15298 + sp_offset));
0be76840 15299 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15300
15301 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15302 }
22fa69da
GK
 15303	 /* Explain the use of R0.  */
15304 if (info->lr_save_p)
15305 {
15306 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15307 GEN_INT (info->lr_save_offset
15308 + sp_offset));
15309 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15310
22fa69da
GK
15311 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15312 }
15313 /* Explain what happens to the stack pointer. */
15314 {
15315 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15316 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15317 }
d62294f5
FJ
15318
15319 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15320 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15321 treg, GEN_INT (-info->total_size));
15322 sp_offset = info->total_size;
d62294f5
FJ
15323 }
15324
9ebbca7d 15325 /* If we use the link register, get it into r0. */
f57fe068 15326 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15327 {
52ff33d0
NF
15328 rtx addr, reg, mem;
15329
f8a57be8 15330 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15331 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15332 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15333
15334 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15335 GEN_INT (info->lr_save_offset + sp_offset));
15336 reg = gen_rtx_REG (Pmode, 0);
15337 mem = gen_rtx_MEM (Pmode, addr);
15338 /* This should not be of rs6000_sr_alias_set, because of
15339 __builtin_return_address. */
15340
15341 insn = emit_move_insn (mem, reg);
15342 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15343 NULL_RTX, NULL_RTX);
f8a57be8 15344 }
9ebbca7d
GK
15345
15346 /* If we need to save CR, put it into r12. */
f57fe068 15347 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15348 {
f8a57be8 15349 rtx set;
f676971a 15350
9ebbca7d 15351 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15352 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15353 RTX_FRAME_RELATED_P (insn) = 1;
15354 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15355 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15356 But that's OK. All we have to do is specify that _one_ condition
15357 code register is saved in this stack slot. The thrower's epilogue
15358 will then restore all the call-saved registers.
15359 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15360 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15361 gen_rtx_REG (SImode, CR2_REGNO));
15362 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15363 set,
15364 REG_NOTES (insn));
9ebbca7d
GK
15365 }
15366
a4f6c312
SS
 15367	  /* Do any required saving of FPRs.  If only one or two to save, do
 15368	     it ourselves.  Otherwise, call a function.  */
f57fe068 15369 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15370 {
15371 int i;
15372 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15373 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15374 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15375 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15376 info->first_fp_reg_save + i,
15377 info->fp_save_offset + sp_offset + 8 * i,
15378 info->total_size);
9ebbca7d 15379 }
f57fe068 15380 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15381 {
15382 int i;
15383 char rname[30];
520a57c8 15384 const char *alloc_rname;
9ebbca7d
GK
15385 rtvec p;
15386 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15387
15388 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15389 gen_rtx_REG (Pmode,
1de43f85 15390 LR_REGNO));
9ebbca7d
GK
15391 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15392 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15393 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15394 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15395 gen_rtx_SYMBOL_REF (Pmode,
15396 alloc_rname));
15397 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15398 {
15399 rtx addr, reg, mem;
15400 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15401 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15402 GEN_INT (info->fp_save_offset
9ebbca7d 15403 + sp_offset + 8*i));
0be76840 15404 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15405
15406 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15407 }
15408 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15409 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15410 NULL_RTX, NULL_RTX);
15411 }
b6c9286a 15412
9ebbca7d
GK
15413 /* Save GPRs. This is done as a PARALLEL if we are using
15414 the store-multiple instructions. */
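  /* Three strategies follow: one store-multiple PARALLEL, individual
     64-bit SPE stores through a save-area pointer, or individual
     word/doubleword stores for everything else.  */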
f57fe068 15415 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15416 {
308c142a 15417 rtvec p;
9ebbca7d
GK
15418 int i;
15419 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15420 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15421 {
15422 rtx addr, reg, mem;
15423 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15424 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15425 GEN_INT (info->gp_save_offset
15426 + sp_offset
9ebbca7d 15427 + reg_size * i));
0be76840 15428 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15429
15430 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15431 }
15432 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15433 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15434 NULL_RTX, NULL_RTX);
b6c9286a 15435 }
52ff33d0
NF
15436 else if (!WORLD_SAVE_P (info)
15437 && TARGET_SPE_ABI
15438 && info->spe_64bit_regs_used != 0
15439 && info->first_gp_reg_save != 32)
15440 {
15441 int i;
15442 rtx spe_save_area_ptr;
15443 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15444 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15445 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15446
15447 /* Determine whether we can address all of the registers that need
15448 to be saved with an offset from the stack pointer that fits in
15449 the small const field for SPE memory instructions. */
15450 int spe_regs_addressable_via_sp
15451 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15452 + (32 - info->first_gp_reg_save - 1) * reg_size);
15453 int spe_offset;
15454
15455 if (spe_regs_addressable_via_sp)
15456 {
15457 spe_save_area_ptr = sp_reg_rtx;
15458 spe_offset = info->spe_gp_save_offset + sp_offset;
15459 }
15460 else
15461 {
15462 /* Make r11 point to the start of the SPE save area. We need
15463 to be careful here if r11 is holding the static chain. If
15464 it is, then temporarily save it in r0. We would use r0 as
15465 our base register here, but using r0 as a base register in
15466 loads and stores means something different from what we
15467 would like. */
15468 if (using_static_chain_p)
15469 {
15470 rtx r0 = gen_rtx_REG (Pmode, 0);
15471
15472 gcc_assert (info->first_gp_reg_save > 11);
15473
15474 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15475 }
15476
15477 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15478 emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
15479 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15480
15481 spe_offset = 0;
15482 }
15483
15484 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15485 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15486 {
15487 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15488 rtx offset, addr, mem;
15489
15490 /* We're doing all this to ensure that the offset fits into
15491 the immediate offset of 'evstdd'. */
15492 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15493
15494 offset = GEN_INT (reg_size * i + spe_offset);
15495 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15496 mem = gen_rtx_MEM (V2SImode, addr);
15497
15498 insn = emit_move_insn (mem, reg);
15499
15500 rs6000_frame_related (insn, spe_save_area_ptr,
15501 info->spe_gp_save_offset
15502 + sp_offset + reg_size * i,
15503 offset, const0_rtx);
15504 }
15505
15506 /* Move the static chain pointer back. */
15507 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15508 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15509 }
f57fe068 15510 else if (!WORLD_SAVE_P (info))
b6c9286a 15511 {
9ebbca7d
GK
15512 int i;
15513 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15514 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15515 {
15516 rtx addr, reg, mem;
15517 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15518
52ff33d0
NF
15519 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15520 GEN_INT (info->gp_save_offset
15521 + sp_offset
15522 + reg_size * i));
15523 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15524
52ff33d0
NF
15525 insn = emit_move_insn (mem, reg);
15526 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15527 NULL_RTX, NULL_RTX);
15528 }
9ebbca7d
GK
15529 }
15530
83720594
RH
15531 /* ??? There's no need to emit actual instructions here, but it's the
15532 easiest way to get the frame unwind information emitted. */
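  /* The stores below place the EH return data registers (and, on AIX, a
     pretend save of r2) at known frame offsets so the unwinder can
     restore them on an exceptional return.  */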
22fa69da 15533 if (current_function_calls_eh_return)
83720594 15534 {
78e1b90d
DE
15535 unsigned int i, regno;
15536
fc4767bb
JJ
15537 /* In AIX ABI we need to pretend we save r2 here. */
15538 if (TARGET_AIX)
15539 {
15540 rtx addr, reg, mem;
15541
15542 reg = gen_rtx_REG (reg_mode, 2);
15543 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15544 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15545 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15546
15547 insn = emit_move_insn (mem, reg);
f676971a 15548 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15549 NULL_RTX, NULL_RTX);
15550 PATTERN (insn) = gen_blockage ();
15551 }
15552
83720594
RH
15553 for (i = 0; ; ++i)
15554 {
83720594
RH
15555 regno = EH_RETURN_DATA_REGNO (i);
15556 if (regno == INVALID_REGNUM)
15557 break;
15558
89e7058f
AH
15559 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15560 info->ehrd_offset + sp_offset
15561 + reg_size * (int) i,
15562 info->total_size);
83720594
RH
15563 }
15564 }
15565
9ebbca7d 15566 /* Save CR if we use any that must be preserved. */
f57fe068 15567 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15568 {
15569 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15570 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15571 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15572 /* See the large comment above about why CR2_REGNO is used. */
15573 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15574
9ebbca7d
GK
15575 /* If r12 was used to hold the original sp, copy cr into r0 now
15576 that it's free. */
15577 if (REGNO (frame_reg_rtx) == 12)
15578 {
f8a57be8
GK
15579 rtx set;
15580
9ebbca7d 15581 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15582 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15583 RTX_FRAME_RELATED_P (insn) = 1;
15584 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15585 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15586 set,
15587 REG_NOTES (insn));
f676971a 15588
9ebbca7d
GK
15589 }
15590 insn = emit_move_insn (mem, cr_save_rtx);
15591
f676971a 15592 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15593 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15594 }
15595
f676971a 15596 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15597 for which it was done previously. */
f57fe068 15598 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15599 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15600 {
bcb2d701 15601 if (info->total_size < 32767)
2b2c2fe5 15602 sp_offset = info->total_size;
bcb2d701
EC
15603 else
15604 frame_reg_rtx = frame_ptr_rtx;
15605 rs6000_emit_allocate_stack (info->total_size,
15606 (frame_reg_rtx != sp_reg_rtx
15607 && ((info->altivec_size != 0)
15608 || (info->vrsave_mask != 0)
15609 )));
15610 if (frame_reg_rtx != sp_reg_rtx)
15611 rs6000_emit_stack_tie ();
2b2c2fe5 15612 }
9ebbca7d
GK
15613
15614 /* Set frame pointer, if needed. */
15615 if (frame_pointer_needed)
15616 {
7d5175e1 15617 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15618 sp_reg_rtx);
15619 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15620 }
9878760c 15621
2b2c2fe5
EC
15622 /* Save AltiVec registers if needed. Save here because the red zone does
15623 not include AltiVec registers. */
15624 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15625 {
15626 int i;
15627
 15628	      /* There should be a non-inline version of this, for when we
 15629	         are saving lots of vector registers.  */
15630 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15631 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15632 {
15633 rtx areg, savereg, mem;
15634 int offset;
15635
15636 offset = info->altivec_save_offset + sp_offset
15637 + 16 * (i - info->first_altivec_reg_save);
15638
15639 savereg = gen_rtx_REG (V4SImode, i);
15640
15641 areg = gen_rtx_REG (Pmode, 0);
15642 emit_move_insn (areg, GEN_INT (offset));
15643
15644 /* AltiVec addressing mode is [reg+reg]. */
15645 mem = gen_frame_mem (V4SImode,
15646 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15647
15648 insn = emit_move_insn (mem, savereg);
15649
15650 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15651 areg, GEN_INT (offset));
15652 }
15653 }
15654
15655 /* VRSAVE is a bit vector representing which AltiVec registers
15656 are used. The OS uses this to determine which vector
15657 registers to save on a context switch. We need to save
15658 VRSAVE on the stack frame, add whatever AltiVec registers we
15659 used in this function, and do the corresponding magic in the
15660 epilogue. */
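  /* In outline: read the current VRSAVE value into a GPR, store that
     original value in the frame (skipped for world saves), OR in the mask
     of AltiVec registers this function uses, and write the result back to
     VRSAVE.  */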
15661
15662 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15663 && info->vrsave_mask != 0)
15664 {
15665 rtx reg, mem, vrsave;
15666 int offset;
15667
15668 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15669 as frame_reg_rtx and r11 as the static chain pointer for
15670 nested functions. */
15671 reg = gen_rtx_REG (SImode, 0);
15672 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15673 if (TARGET_MACHO)
15674 emit_insn (gen_get_vrsave_internal (reg));
15675 else
15676 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15677
15678 if (!WORLD_SAVE_P (info))
15679 {
15680 /* Save VRSAVE. */
15681 offset = info->vrsave_save_offset + sp_offset;
15682 mem = gen_frame_mem (SImode,
15683 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15684 GEN_INT (offset)));
15685 insn = emit_move_insn (mem, reg);
15686 }
15687
15688 /* Include the registers in the mask. */
15689 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15690
15691 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15692 }
15693
1db02437 15694 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15695 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15696 || (DEFAULT_ABI == ABI_V4
15697 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15698 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15699 {
15700 /* If emit_load_toc_table will use the link register, we need to save
15701 it. We use R12 for this purpose because emit_load_toc_table
15702 can use register 0. This allows us to use a plain 'blr' to return
15703 from the procedure more often. */
15704 int save_LR_around_toc_setup = (TARGET_ELF
15705 && DEFAULT_ABI != ABI_AIX
15706 && flag_pic
15707 && ! info->lr_save_p
15708 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15709 if (save_LR_around_toc_setup)
15710 {
1de43f85 15711 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15712
c4ad648e 15713 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15714 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15715
c4ad648e 15716 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15717
c4ad648e 15718 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15719 RTX_FRAME_RELATED_P (insn) = 1;
15720 }
15721 else
15722 rs6000_emit_load_toc_table (TRUE);
15723 }
ee890fe2 15724
fcce224d 15725#if TARGET_MACHO
ee890fe2
SS
15726 if (DEFAULT_ABI == ABI_DARWIN
15727 && flag_pic && current_function_uses_pic_offset_table)
15728 {
1de43f85 15729 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15730 rtx src = machopic_function_base_sym ();
ee890fe2 15731
6d0a8091
DJ
15732 /* Save and restore LR locally around this call (in R0). */
15733 if (!info->lr_save_p)
6fb5fa3c 15734 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15735
6fb5fa3c 15736 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15737
6fb5fa3c
DB
15738 emit_move_insn (gen_rtx_REG (Pmode,
15739 RS6000_PIC_OFFSET_TABLE_REGNUM),
15740 lr);
6d0a8091
DJ
15741
15742 if (!info->lr_save_p)
6fb5fa3c 15743 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15744 }
fcce224d 15745#endif
9ebbca7d
GK
15746}
15747
9ebbca7d 15748/* Write function prologue. */
a4f6c312 15749
08c148a8 15750static void
f676971a 15751rs6000_output_function_prologue (FILE *file,
a2369ed3 15752 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15753{
15754 rs6000_stack_t *info = rs6000_stack_info ();
15755
4697a36c
MM
15756 if (TARGET_DEBUG_STACK)
15757 debug_stack_info (info);
9878760c 15758
a4f6c312
SS
15759 /* Write .extern for any function we will call to save and restore
15760 fp values. */
15761 if (info->first_fp_reg_save < 64
15762 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15763 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15764 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15765 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15766 RESTORE_FP_SUFFIX);
9878760c 15767
c764f757
RK
15768 /* Write .extern for AIX common mode routines, if needed. */
15769 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15770 {
f6709c70
JW
15771 fputs ("\t.extern __mulh\n", file);
15772 fputs ("\t.extern __mull\n", file);
15773 fputs ("\t.extern __divss\n", file);
15774 fputs ("\t.extern __divus\n", file);
15775 fputs ("\t.extern __quoss\n", file);
15776 fputs ("\t.extern __quous\n", file);
c764f757
RK
15777 common_mode_defined = 1;
15778 }
9878760c 15779
9ebbca7d 15780 if (! HAVE_prologue)
979721f8 15781 {
9ebbca7d 15782 start_sequence ();
9dda4cc8 15783
a4f6c312
SS
15784 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
15785 the "toplevel" insn chain. */
2e040219 15786 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15787 rs6000_emit_prologue ();
2e040219 15788 emit_note (NOTE_INSN_DELETED);
178c3eff 15789
a3c9585f 15790 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15791 {
15792 rtx insn;
15793 unsigned addr = 0;
15794 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15795 {
15796 INSN_ADDRESSES_NEW (insn, addr);
15797 addr += 4;
15798 }
15799 }
9dda4cc8 15800
9ebbca7d 15801 if (TARGET_DEBUG_STACK)
a4f6c312 15802 debug_rtx_list (get_insns (), 100);
c9d691e9 15803 final (get_insns (), file, FALSE);
9ebbca7d 15804 end_sequence ();
979721f8
MM
15805 }
15806
9ebbca7d
GK
15807 rs6000_pic_labelno++;
15808}
f676971a 15809
9ebbca7d 15810/* Emit function epilogue as insns.
9878760c 15811
9ebbca7d
GK
15812 At present, dwarf2out_frame_debug_expr doesn't understand
15813 register restores, so we don't bother setting RTX_FRAME_RELATED_P
15814 anywhere in the epilogue. Most of the insns below would in any case
15815 need special notes to explain where r11 is in relation to the stack. */
9878760c 15816
9ebbca7d 15817void
a2369ed3 15818rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
15819{
15820 rs6000_stack_t *info;
15821 int restoring_FPRs_inline;
15822 int using_load_multiple;
d296e02e 15823 int using_mtcr_multiple;
9ebbca7d
GK
15824 int use_backchain_to_restore_sp;
15825 int sp_offset = 0;
15826 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
15827 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 15828 enum machine_mode reg_mode = Pmode;
327e5343 15829 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15830 int i;
15831
c19de7aa
AH
15832 info = rs6000_stack_info ();
15833
15834 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15835 {
15836 reg_mode = V2SImode;
15837 reg_size = 8;
15838 }
15839
9ebbca7d 15840 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15841 && (!TARGET_SPE_ABI
15842 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15843 && info->first_gp_reg_save < 31
15844 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15845 restoring_FPRs_inline = (sibcall
83720594 15846 || current_function_calls_eh_return
9ebbca7d
GK
15847 || info->first_fp_reg_save == 64
15848 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 15849 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
15850 || current_function_calls_alloca
15851 || info->total_size > 32767);
d296e02e 15852 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
15853 || rs6000_cpu == PROCESSOR_PPC603
15854 || rs6000_cpu == PROCESSOR_PPC750
15855 || optimize_size);
15856
f57fe068 15857 if (WORLD_SAVE_P (info))
d62294f5
FJ
15858 {
15859 int i, j;
15860 char rname[30];
15861 const char *alloc_rname;
15862 rtvec p;
15863
15864 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
 15865	 stack slot (which is not likely to be our caller).
15866 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
15867 rest_world is similar, except any R10 parameter is ignored.
15868 The exception-handling stuff that was here in 2.95 is no
15869 longer necessary. */
d62294f5
FJ
15870
15871 p = rtvec_alloc (9
15872 + 1
f676971a 15873 + 32 - info->first_gp_reg_save
c4ad648e
AM
15874 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
15875 + 63 + 1 - info->first_fp_reg_save);
d62294f5 15876
c4ad648e
AM
15877 strcpy (rname, ((current_function_calls_eh_return) ?
15878 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
15879 alloc_rname = ggc_strdup (rname);
15880
15881 j = 0;
15882 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
15883 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 15884 gen_rtx_REG (Pmode,
1de43f85 15885 LR_REGNO));
d62294f5 15886 RTVEC_ELT (p, j++)
c4ad648e 15887 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 15888 /* The instruction pattern requires a clobber here;
c4ad648e 15889 it is shared with the restVEC helper. */
d62294f5 15890 RTVEC_ELT (p, j++)
c4ad648e 15891 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
15892
15893 {
c4ad648e
AM
15894 /* CR register traditionally saved as CR2. */
15895 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15896 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15897 GEN_INT (info->cr_save_offset));
0be76840 15898 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15899
15900 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
15901 }
15902
15903 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15904 {
15905 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15906 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15907 GEN_INT (info->gp_save_offset
15908 + reg_size * i));
0be76840 15909 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15910
15911 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15912 }
d62294f5 15913 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15914 {
15915 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15916 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15917 GEN_INT (info->altivec_save_offset
15918 + 16 * i));
0be76840 15919 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15920
15921 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15922 }
d62294f5 15923 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
15924 {
15925 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15926 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15927 GEN_INT (info->fp_save_offset
15928 + 8 * i));
0be76840 15929 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15930
15931 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15932 }
d62294f5 15933 RTVEC_ELT (p, j++)
c4ad648e 15934 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 15935 RTVEC_ELT (p, j++)
c4ad648e 15936 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 15937 RTVEC_ELT (p, j++)
c4ad648e 15938 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 15939 RTVEC_ELT (p, j++)
c4ad648e 15940 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 15941 RTVEC_ELT (p, j++)
c4ad648e 15942 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
15943 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15944
15945 return;
15946 }
15947
2b2c2fe5 15948 /* Set sp_offset based on the stack push from the prologue. */
bcb2d701 15949 if (info->total_size < 32767)
2b2c2fe5 15950 sp_offset = info->total_size;
f676971a 15951
9aa86737
AH
15952 /* Restore AltiVec registers if needed. */
15953 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15954 {
15955 int i;
15956
15957 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15958 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15959 {
15960 rtx addr, areg, mem;
15961
15962 areg = gen_rtx_REG (Pmode, 0);
15963 emit_move_insn
15964 (areg, GEN_INT (info->altivec_save_offset
15965 + sp_offset
15966 + 16 * (i - info->first_altivec_reg_save)));
15967
15968 /* AltiVec addressing mode is [reg+reg]. */
15969 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 15970 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
15971
15972 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15973 }
15974 }
15975
15976 /* Restore VRSAVE if needed. */
44688022 15977 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 15978 && info->vrsave_mask != 0)
9aa86737
AH
15979 {
15980 rtx addr, mem, reg;
15981
15982 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15983 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 15984 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
15985 reg = gen_rtx_REG (SImode, 12);
15986 emit_move_insn (reg, mem);
15987
15988 emit_insn (generate_set_vrsave (reg, info, 1));
15989 }
15990
2b2c2fe5
EC
15991 sp_offset = 0;
15992
15993 /* If we have a frame pointer, a call to alloca, or a large stack
15994 frame, restore the old stack pointer using the backchain. Otherwise,
15995 we know what size to update it with. */
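  /* When the backchain is used under V.4, the restored value goes into
     r11 first; the stack pointer itself is not updated until every saved
     register has been reloaded (see the stack tie further down).  */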
15996 if (use_backchain_to_restore_sp)
15997 {
15998 /* Under V.4, don't reset the stack pointer until after we're done
15999 loading the saved registers. */
16000 if (DEFAULT_ABI == ABI_V4)
16001 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16002
16003 emit_move_insn (frame_reg_rtx,
16004 gen_rtx_MEM (Pmode, sp_reg_rtx));
16005 }
16006 else if (info->push_p)
16007 {
16008 if (DEFAULT_ABI == ABI_V4
16009 || current_function_calls_eh_return)
16010 sp_offset = info->total_size;
16011 else
16012 {
16013 emit_insn (TARGET_32BIT
16014 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16015 GEN_INT (info->total_size))
16016 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16017 GEN_INT (info->total_size)));
16018 }
16019 }
16020
9ebbca7d
GK
16021 /* Get the old lr if we saved it. */
16022 if (info->lr_save_p)
b6c9286a 16023 {
a3170dc6
AH
16024 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16025 info->lr_save_offset + sp_offset);
ba4828e0 16026
9ebbca7d 16027 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16028 }
f676971a 16029
9ebbca7d
GK
16030 /* Get the old cr if we saved it. */
16031 if (info->cr_save_p)
16032 {
16033 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16034 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16035 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16036
9ebbca7d
GK
16037 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16038 }
f676971a 16039
9ebbca7d 16040 /* Set LR here to try to overlap restores below. */
4697a36c 16041 if (info->lr_save_p)
1de43f85 16042 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16043 gen_rtx_REG (Pmode, 0));
f676971a 16044
83720594
RH
16045 /* Load exception handler data registers, if needed. */
16046 if (current_function_calls_eh_return)
16047 {
78e1b90d
DE
16048 unsigned int i, regno;
16049
fc4767bb
JJ
16050 if (TARGET_AIX)
16051 {
16052 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16053 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16054 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16055
16056 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16057 }
16058
83720594
RH
16059 for (i = 0; ; ++i)
16060 {
a3170dc6 16061 rtx mem;
83720594
RH
16062
16063 regno = EH_RETURN_DATA_REGNO (i);
16064 if (regno == INVALID_REGNUM)
16065 break;
16066
a3170dc6
AH
16067 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16068 info->ehrd_offset + sp_offset
16069 + reg_size * (int) i);
83720594
RH
16070
16071 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16072 }
16073 }
f676971a 16074
9ebbca7d
GK
16075 /* Restore GPRs. This is done as a PARALLEL if we are using
16076 the load-multiple instructions. */
16077 if (using_load_multiple)
979721f8 16078 {
9ebbca7d
GK
16079 rtvec p;
16080 p = rtvec_alloc (32 - info->first_gp_reg_save);
16081 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16082 {
f676971a
EC
16083 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16084 GEN_INT (info->gp_save_offset
16085 + sp_offset
9ebbca7d 16086 + reg_size * i));
0be76840 16087 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16088
f676971a 16089 RTVEC_ELT (p, i) =
9ebbca7d
GK
16090 gen_rtx_SET (VOIDmode,
16091 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16092 mem);
979721f8 16093 }
9ebbca7d 16094 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16095 }
52ff33d0
NF
16096 else if (TARGET_SPE_ABI
16097 && info->spe_64bit_regs_used != 0
16098 && info->first_gp_reg_save != 32)
16099 {
16100 rtx spe_save_area_ptr;
16101 /* Determine whether we can address all of the registers that need
16102 to be saved with an offset from the stack pointer that fits in
16103 the small const field for SPE memory instructions. */
16104 int spe_regs_addressable_via_sp
16105 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16106 + (32 - info->first_gp_reg_save - 1) * reg_size);
16107 int spe_offset;
16108
16109 if (spe_regs_addressable_via_sp)
16110 {
16111 spe_save_area_ptr = frame_reg_rtx;
16112 spe_offset = info->spe_gp_save_offset + sp_offset;
16113 }
16114 else
16115 {
16116 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16117 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16118 There's no need to worry here because the static chain is passed
16119 anew to every function. */
16120 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16121
16122 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
16123 GEN_INT (info->spe_gp_save_offset + sp_offset)));
16124
16125 spe_offset = 0;
16126 }
16127
16128 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16129 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16130 {
16131 rtx offset, addr, mem;
16132
16133 /* We're doing all this to ensure that the immediate offset
16134 fits into the immediate field of 'evldd'. */
16135 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16136
16137 offset = GEN_INT (spe_offset + reg_size * i);
16138 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16139 mem = gen_rtx_MEM (V2SImode, addr);
16140
16141 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16142 mem);
16143 }
16144 }
9ebbca7d
GK
16145 else
16146 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16147 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16148 {
f676971a
EC
16149 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16150 GEN_INT (info->gp_save_offset
16151 + sp_offset
9ebbca7d 16152 + reg_size * i));
0be76840 16153 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16154
f676971a 16155 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16156 info->first_gp_reg_save + i), mem);
9ebbca7d 16157 }
9878760c 16158
9ebbca7d
GK
 16159	 /* Restore FPRs if we need to do it without calling a function.  */
16160 if (restoring_FPRs_inline)
16161 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16162 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16163 && ! call_used_regs[info->first_fp_reg_save+i]))
16164 {
16165 rtx addr, mem;
16166 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16167 GEN_INT (info->fp_save_offset
16168 + sp_offset
a4f6c312 16169 + 8 * i));
0be76840 16170 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16171
f676971a 16172 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16173 info->first_fp_reg_save + i),
16174 mem);
16175 }
8d30c4ee 16176
9ebbca7d
GK
 16177	 /* If we saved CR, restore it here.  Just the fields that were used.  */
16178 if (info->cr_save_p)
979721f8 16179 {
9ebbca7d 16180 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16181 int count = 0;
f676971a 16182
d296e02e 16183 if (using_mtcr_multiple)
979721f8 16184 {
9ebbca7d 16185 for (i = 0; i < 8; i++)
6fb5fa3c 16186 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16187 count++;
37409796 16188 gcc_assert (count);
e35b9579
GK
16189 }
16190
d296e02e 16191 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16192 {
16193 rtvec p;
16194 int ndx;
f676971a 16195
e35b9579 16196 p = rtvec_alloc (count);
9ebbca7d 16197
e35b9579 16198 ndx = 0;
9ebbca7d 16199 for (i = 0; i < 8; i++)
6fb5fa3c 16200 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16201 {
16202 rtvec r = rtvec_alloc (2);
16203 RTVEC_ELT (r, 0) = r12_rtx;
16204 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16205 RTVEC_ELT (p, ndx) =
f676971a 16206 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16207 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16208 ndx++;
9ebbca7d
GK
16209 }
16210 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16211 gcc_assert (ndx == count);
979721f8
MM
16212 }
16213 else
9ebbca7d 16214 for (i = 0; i < 8; i++)
6fb5fa3c 16215 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16216 {
f676971a 16217 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16218 CR0_REGNO+i),
16219 r12_rtx));
979721f8 16220 }
979721f8
MM
16221 }
16222
9ebbca7d 16223 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16224 have been done. */
16225 if (frame_reg_rtx != sp_reg_rtx)
16226 {
16227 /* This blockage is needed so that sched doesn't decide to move
16228 the sp change before the register restores. */
16229 rs6000_emit_stack_tie ();
52ff33d0
NF
16230 if (TARGET_SPE_ABI
16231 && info->spe_64bit_regs_used != 0
16232 && info->first_gp_reg_save != 32)
16233 emit_insn (gen_addsi3 (sp_reg_rtx, gen_rtx_REG (Pmode, 11),
16234 GEN_INT (-(info->spe_gp_save_offset + sp_offset))));
16235 else
16236 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16237 }
16238 else if (sp_offset != 0)
16239 emit_insn (TARGET_32BIT
16240 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16241 GEN_INT (sp_offset))
16242 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16243 GEN_INT (sp_offset)));
b6c9286a 16244
83720594
RH
16245 if (current_function_calls_eh_return)
16246 {
16247 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16248 emit_insn (TARGET_32BIT
83720594
RH
16249 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16250 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16251 }
16252
9ebbca7d
GK
16253 if (!sibcall)
16254 {
16255 rtvec p;
16256 if (! restoring_FPRs_inline)
16257 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16258 else
16259 p = rtvec_alloc (2);
b6c9286a 16260
e35b9579 16261 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16262 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16263 gen_rtx_REG (Pmode,
1de43f85 16264 LR_REGNO));
9ebbca7d
GK
16265
16266 /* If we have to restore more than two FP registers, branch to the
16267 restore function. It will return to our caller. */
16268 if (! restoring_FPRs_inline)
16269 {
16270 int i;
16271 char rname[30];
520a57c8 16272 const char *alloc_rname;
979721f8 16273
f676971a 16274 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16275 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16276 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16277 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16278 gen_rtx_SYMBOL_REF (Pmode,
16279 alloc_rname));
b6c9286a 16280
9ebbca7d
GK
16281 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16282 {
16283 rtx addr, mem;
16284 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16285 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16286 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16287
f676971a 16288 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16289 gen_rtx_SET (VOIDmode,
16290 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16291 mem);
b6c9286a
MM
16292 }
16293 }
f676971a 16294
9ebbca7d 16295 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16296 }
9878760c
RK
16297}
16298
16299/* Write function epilogue. */
16300
08c148a8 16301static void
f676971a 16302rs6000_output_function_epilogue (FILE *file,
a2369ed3 16303 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16304{
9ebbca7d 16305 if (! HAVE_epilogue)
9878760c 16306 {
9ebbca7d
GK
16307 rtx insn = get_last_insn ();
16308 /* If the last insn was a BARRIER, we don't have to write anything except
16309 the trace table. */
16310 if (GET_CODE (insn) == NOTE)
16311 insn = prev_nonnote_insn (insn);
16312 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16313 {
9ebbca7d
GK
16314 /* This is slightly ugly, but at least we don't have two
16315 copies of the epilogue-emitting code. */
16316 start_sequence ();
16317
16318 /* A NOTE_INSN_DELETED is supposed to be at the start
16319 and end of the "toplevel" insn chain. */
2e040219 16320 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16321 rs6000_emit_epilogue (FALSE);
2e040219 16322 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16323
a3c9585f 16324 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16325 {
16326 rtx insn;
16327 unsigned addr = 0;
16328 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16329 {
16330 INSN_ADDRESSES_NEW (insn, addr);
16331 addr += 4;
16332 }
16333 }
16334
9ebbca7d 16335 if (TARGET_DEBUG_STACK)
a4f6c312 16336 debug_rtx_list (get_insns (), 100);
c9d691e9 16337 final (get_insns (), file, FALSE);
9ebbca7d 16338 end_sequence ();
4697a36c 16339 }
9878760c 16340 }
b4ac57ab 16341
efdba735
SH
16342#if TARGET_MACHO
16343 macho_branch_islands ();
0e5da0be
GK
16344 /* Mach-O doesn't support labels at the end of objects, so if
16345 it looks like we might want one, insert a NOP. */
16346 {
16347 rtx insn = get_last_insn ();
16348 while (insn
16349 && NOTE_P (insn)
a38e7aa5 16350 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16351 insn = PREV_INSN (insn);
f676971a
EC
16352 if (insn
16353 && (LABEL_P (insn)
0e5da0be 16354 || (NOTE_P (insn)
a38e7aa5 16355 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16356 fputs ("\tnop\n", file);
16357 }
16358#endif
16359
9b30bae2 16360 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16361 on its format.
16362
16363 We don't output a traceback table if -finhibit-size-directive was
16364 used. The documentation for -finhibit-size-directive reads
16365 ``don't output a @code{.size} assembler directive, or anything
16366 else that would cause trouble if the function is split in the
16367 middle, and the two halves are placed at locations far apart in
16368 memory.'' The traceback table has this property, since it
16369 includes the offset from the start of the function to the
4d30c363
MM
16370 traceback table itself.
16371
 16372	 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 16373 different traceback table. */
57ac7be9 16374 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16375 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16376 {
69c75916 16377 const char *fname = NULL;
3ac88239 16378 const char *language_string = lang_hooks.name;
6041bf2f 16379 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16380 int i;
57ac7be9 16381 int optional_tbtab;
8097c268 16382 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16383
16384 if (rs6000_traceback == traceback_full)
16385 optional_tbtab = 1;
16386 else if (rs6000_traceback == traceback_part)
16387 optional_tbtab = 0;
16388 else
16389 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16390
69c75916
AM
16391 if (optional_tbtab)
16392 {
16393 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16394 while (*fname == '.') /* V.4 encodes . in the name */
16395 fname++;
16396
16397 /* Need label immediately before tbtab, so we can compute
16398 its offset from the function start. */
16399 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16400 ASM_OUTPUT_LABEL (file, fname);
16401 }
314fc5a9
ILT
16402
16403 /* The .tbtab pseudo-op can only be used for the first eight
16404 expressions, since it can't handle the possibly variable
16405 length fields that follow. However, if you omit the optional
16406 fields, the assembler outputs zeros for all optional fields
 16407	 anyway, giving each variable-length field its minimum length
 16408	 (as defined in sys/debug.h).  Thus we cannot use the .tbtab
 16409	 pseudo-op at all.  */
16410
16411 /* An all-zero word flags the start of the tbtab, for debuggers
16412 that have to find it by searching forward from the entry
16413 point or from the current pc. */
19d2d16f 16414 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16415
16416 /* Tbtab format type. Use format type 0. */
19d2d16f 16417 fputs ("\t.byte 0,", file);
314fc5a9 16418
5fc921c1
DE
16419 /* Language type. Unfortunately, there does not seem to be any
16420 official way to discover the language being compiled, so we
16421 use language_string.
16422 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16423 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16424 a number, so for now use 9. */
5fc921c1 16425 if (! strcmp (language_string, "GNU C"))
314fc5a9 16426 i = 0;
6de9cd9a
DN
16427 else if (! strcmp (language_string, "GNU F77")
16428 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16429 i = 1;
8b83775b 16430 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16431 i = 2;
5fc921c1
DE
16432 else if (! strcmp (language_string, "GNU Ada"))
16433 i = 3;
56438901
AM
16434 else if (! strcmp (language_string, "GNU C++")
16435 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16436 i = 9;
9517ead8
AG
16437 else if (! strcmp (language_string, "GNU Java"))
16438 i = 13;
5fc921c1
DE
16439 else if (! strcmp (language_string, "GNU Objective-C"))
16440 i = 14;
314fc5a9 16441 else
37409796 16442 gcc_unreachable ();
314fc5a9
ILT
16443 fprintf (file, "%d,", i);
16444
16445 /* 8 single bit fields: global linkage (not set for C extern linkage,
16446 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16447 from start of procedure stored in tbtab, internal function, function
16448 has controlled storage, function has no toc, function uses fp,
16449 function logs/aborts fp operations. */
16450 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16451 fprintf (file, "%d,",
16452 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16453
16454 /* 6 bitfields: function is interrupt handler, name present in
16455 proc table, function calls alloca, on condition directives
16456 (controls stack walks, 3 bits), saves condition reg, saves
16457 link reg. */
16458 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16459 set up as a frame pointer, even when there is no alloca call. */
16460 fprintf (file, "%d,",
6041bf2f
DE
16461 ((optional_tbtab << 6)
16462 | ((optional_tbtab & frame_pointer_needed) << 5)
16463 | (info->cr_save_p << 1)
16464 | (info->lr_save_p)));
314fc5a9 16465
6041bf2f 16466 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16467 (6 bits). */
16468 fprintf (file, "%d,",
4697a36c 16469 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16470
16471 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16472 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16473
6041bf2f
DE
16474 if (optional_tbtab)
16475 {
16476 /* Compute the parameter info from the function decl argument
16477 list. */
16478 tree decl;
16479 int next_parm_info_bit = 31;
314fc5a9 16480
6041bf2f
DE
16481 for (decl = DECL_ARGUMENTS (current_function_decl);
16482 decl; decl = TREE_CHAIN (decl))
16483 {
16484 rtx parameter = DECL_INCOMING_RTL (decl);
16485 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16486
6041bf2f
DE
16487 if (GET_CODE (parameter) == REG)
16488 {
ebb109ad 16489 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16490 {
16491 int bits;
16492
16493 float_parms++;
16494
37409796
NS
16495 switch (mode)
16496 {
16497 case SFmode:
16498 bits = 0x2;
16499 break;
16500
16501 case DFmode:
7393f7f8 16502 case DDmode:
37409796 16503 case TFmode:
7393f7f8 16504 case TDmode:
37409796
NS
16505 bits = 0x3;
16506 break;
16507
16508 default:
16509 gcc_unreachable ();
16510 }
6041bf2f
DE
16511
16512 /* If only one bit will fit, don't or in this entry. */
16513 if (next_parm_info_bit > 0)
16514 parm_info |= (bits << (next_parm_info_bit - 1));
16515 next_parm_info_bit -= 2;
16516 }
16517 else
16518 {
16519 fixed_parms += ((GET_MODE_SIZE (mode)
16520 + (UNITS_PER_WORD - 1))
16521 / UNITS_PER_WORD);
16522 next_parm_info_bit -= 1;
16523 }
16524 }
16525 }
16526 }
314fc5a9
ILT
16527
16528 /* Number of fixed point parameters. */
16529 /* This is actually the number of words of fixed point parameters; thus
16530 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16531 fprintf (file, "%d,", fixed_parms);
16532
16533 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16534 all on stack. */
16535 /* This is actually the number of fp registers that hold parameters;
16536 and thus the maximum value is 13. */
16537 /* Set parameters on stack bit if parameters are not in their original
16538 registers, regardless of whether they are on the stack? Xlc
16539 seems to set the bit when not optimizing. */
16540 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16541
6041bf2f
DE
16542 if (! optional_tbtab)
16543 return;
16544
314fc5a9
ILT
16545 /* Optional fields follow. Some are variable length. */
16546
16547 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16548 11 double float. */
16549 /* There is an entry for each parameter in a register, in the order that
16550 they occur in the parameter list. Any intervening arguments on the
16551 stack are ignored. If the list overflows a long (max possible length
16552 34 bits) then completely leave off all elements that don't fit. */
16553 /* Only emit this long if there was at least one parameter. */
16554 if (fixed_parms || float_parms)
16555 fprintf (file, "\t.long %d\n", parm_info);
16556
16557 /* Offset from start of code to tb table. */
19d2d16f 16558 fputs ("\t.long ", file);
314fc5a9 16559 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16560 if (TARGET_AIX)
16561 RS6000_OUTPUT_BASENAME (file, fname);
16562 else
16563 assemble_name (file, fname);
16564 putc ('-', file);
16565 rs6000_output_function_entry (file, fname);
19d2d16f 16566 putc ('\n', file);
314fc5a9
ILT
16567
16568 /* Interrupt handler mask. */
16569 /* Omit this long, since we never set the interrupt handler bit
16570 above. */
16571
16572 /* Number of CTL (controlled storage) anchors. */
16573 /* Omit this long, since the has_ctl bit is never set above. */
16574
16575 /* Displacement into stack of each CTL anchor. */
16576 /* Omit this list of longs, because there are no CTL anchors. */
16577
16578 /* Length of function name. */
69c75916
AM
16579 if (*fname == '*')
16580 ++fname;
296b8152 16581 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16582
16583 /* Function name. */
16584 assemble_string (fname, strlen (fname));
16585
16586 /* Register for alloca automatic storage; this is always reg 31.
16587 Only emit this if the alloca bit was set above. */
16588 if (frame_pointer_needed)
19d2d16f 16589 fputs ("\t.byte 31\n", file);
b1765bde
DE
16590
16591 fputs ("\t.align 2\n", file);
9b30bae2 16592 }
9878760c 16593}
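
/* A minimal standalone sketch of the parameter-type encoding built in the
   traceback code above: each floating-point register parameter contributes
   two bits (0x2 = single, 0x3 = double/extended), each fixed-point register
   parameter contributes one zero bit, all packed left-adjusted from bit 31.
   The helper and its arguments are hypothetical, for illustration only, and
   are guarded out so they do not participate in the build.  */
#if 0
static int
example_pack_parm_info (const int *parm_kind, int n_parms)
{
  /* parm_kind[i] is 0 for a fixed-point parameter, 0x2 for a single
     float and 0x3 for a double/extended float, mirroring `bits' in the
     loop above.  */
  int parm_info = 0;
  int next_bit = 31;
  int i;

  for (i = 0; i < n_parms; i++)
    {
      if (parm_kind[i])
	{
	  if (next_bit > 0)
	    parm_info |= parm_kind[i] << (next_bit - 1);
	  next_bit -= 2;
	}
      else
	next_bit -= 1;
    }

  /* A (double, int, float) parameter list thus yields 0x3 at bits 31-30,
     a zero bit at 29 and 0x2 at bits 28-27, i.e. the bit pattern
     0xd0000000.  */
  return parm_info;
}
#endif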
17167fd8 16594\f
a4f6c312
SS
16595/* A C compound statement that outputs the assembler code for a thunk
16596 function, used to implement C++ virtual function calls with
16597 multiple inheritance. The thunk acts as a wrapper around a virtual
16598 function, adjusting the implicit object parameter before handing
16599 control off to the real function.
16600
16601 First, emit code to add the integer DELTA to the location that
16602 contains the incoming first argument. Assume that this argument
16603 contains a pointer, and is the one used to pass the `this' pointer
16604 in C++. This is the incoming argument *before* the function
16605 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16606 values of all other incoming arguments.
17167fd8
MM
16607
16608 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16609 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16610 not touch the return address. Hence returning from FUNCTION will
16611 return to whoever called the current `thunk'.
17167fd8 16612
a4f6c312
SS
16613 The effect must be as if FUNCTION had been called directly with the
16614 adjusted first argument. This macro is responsible for emitting
16615 all of the code for a thunk function; output_function_prologue()
16616 and output_function_epilogue() are not invoked.
17167fd8 16617
a4f6c312
SS
16618 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16619 been extracted from it.) It might possibly be useful on some
16620 targets, but probably not.
17167fd8 16621
a4f6c312
SS
16622 If you do not define this macro, the target-independent code in the
16623 C++ frontend will generate a less efficient heavyweight thunk that
16624 calls FUNCTION instead of jumping to it. The generic approach does
16625 not support varargs. */
17167fd8 16626
3961e8fe 16627static void
f676971a
EC
16628rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16629 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16630 tree function)
17167fd8 16631{
5b71a4e7 16632 rtx this, insn, funexp;
17167fd8 16633
5b71a4e7 16634 reload_completed = 1;
fe3ad572 16635 epilogue_completed = 1;
56a7189a 16636
5b71a4e7 16637 /* Mark the end of the (empty) prologue. */
2e040219 16638 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16639
5b71a4e7
DE
16640 /* Find the "this" pointer. If the function returns a structure,
16641 the structure return pointer is in r3. */
61f71b34 16642 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16643 this = gen_rtx_REG (Pmode, 4);
56a7189a 16644 else
5b71a4e7 16645 this = gen_rtx_REG (Pmode, 3);
17167fd8 16646
5b71a4e7
DE
16647 /* Apply the constant offset, if required. */
16648 if (delta)
16649 {
16650 rtx delta_rtx = GEN_INT (delta);
16651 emit_insn (TARGET_32BIT
16652 ? gen_addsi3 (this, this, delta_rtx)
16653 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16654 }
16655
5b71a4e7
DE
16656 /* Apply the offset from the vtable, if required. */
16657 if (vcall_offset)
17167fd8 16658 {
5b71a4e7
DE
16659 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16660 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16661
5b71a4e7 16662 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16663 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16664 {
16665 emit_insn (TARGET_32BIT
16666 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16667 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16668 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16669 }
16670 else
16671 {
16672 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16673
16674 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16675 }
5b71a4e7
DE
16676 emit_insn (TARGET_32BIT
16677 ? gen_addsi3 (this, this, tmp)
16678 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16679 }
16680
5b71a4e7
DE
16681 /* Generate a tail call to the target function. */
16682 if (!TREE_USED (function))
16683 {
16684 assemble_external (function);
16685 TREE_USED (function) = 1;
16686 }
16687 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16688 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16689
16690#if TARGET_MACHO
ab82a49f 16691 if (MACHOPIC_INDIRECT)
5b71a4e7 16692 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16693#endif
5b71a4e7
DE
16694
16695 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16696 generate sibcall RTL explicitly. */
5b71a4e7
DE
16697 insn = emit_call_insn (
16698 gen_rtx_PARALLEL (VOIDmode,
16699 gen_rtvec (4,
16700 gen_rtx_CALL (VOIDmode,
16701 funexp, const0_rtx),
16702 gen_rtx_USE (VOIDmode, const0_rtx),
16703 gen_rtx_USE (VOIDmode,
16704 gen_rtx_REG (SImode,
1de43f85 16705 LR_REGNO)),
5b71a4e7
DE
16706 gen_rtx_RETURN (VOIDmode))));
16707 SIBLING_CALL_P (insn) = 1;
16708 emit_barrier ();
16709
16710 /* Run just enough of rest_of_compilation to get the insns emitted.
16711 There's not really enough bulk here to make other passes such as
 16712	     instruction scheduling worthwhile.  Note that use_thunk calls
16713 assemble_start_function and assemble_end_function. */
16714 insn = get_insns ();
55e092c4 16715 insn_locators_alloc ();
5b71a4e7
DE
16716 shorten_branches (insn);
16717 final_start_function (insn, file, 1);
c9d691e9 16718 final (insn, file, 1);
5b71a4e7
DE
16719 final_end_function ();
16720
16721 reload_completed = 0;
fe3ad572 16722 epilogue_completed = 0;
9ebbca7d 16723}
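
/* A rough C-level sketch of the pointer adjustment the thunk above performs
   before the sibling call.  The name this_ptr and the use of `long' for
   Pmode-sized values are illustrative assumptions, and the block is guarded
   out so it does not participate in the build.  */
#if 0
static void *
example_thunk_adjust (void *this_ptr, long delta, long vcall_offset)
{
  char *p = (char *) this_ptr + delta;	    /* Apply the constant DELTA.  */

  if (vcall_offset)
    {
      char *vtable = *(char **) p;	    /* Load the vtable pointer.  */
      p += *(long *) (vtable + vcall_offset); /* Add the slot's value.  */
    }

  /* Control then transfers directly to FUNCTION with `p' as the first
     argument; no return address is pushed, so FUNCTION returns to the
     thunk's caller.  */
  return p;
}
#endif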
9ebbca7d
GK
16724\f
16725/* A quick summary of the various types of 'constant-pool tables'
16726 under PowerPC:
16727
f676971a 16728 Target Flags Name One table per
9ebbca7d
GK
16729 AIX (none) AIX TOC object file
16730 AIX -mfull-toc AIX TOC object file
16731 AIX -mminimal-toc AIX minimal TOC translation unit
16732 SVR4/EABI (none) SVR4 SDATA object file
16733 SVR4/EABI -fpic SVR4 pic object file
16734 SVR4/EABI -fPIC SVR4 PIC translation unit
16735 SVR4/EABI -mrelocatable EABI TOC function
16736 SVR4/EABI -maix AIX TOC object file
f676971a 16737 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16738 AIX minimal TOC translation unit
16739
16740 Name Reg. Set by entries contains:
16741 made by addrs? fp? sum?
16742
16743 AIX TOC 2 crt0 as Y option option
16744 AIX minimal TOC 30 prolog gcc Y Y option
16745 SVR4 SDATA 13 crt0 gcc N Y N
16746 SVR4 pic 30 prolog ld Y not yet N
16747 SVR4 PIC 30 prolog gcc Y option option
16748 EABI TOC 30 prolog gcc Y option option
16749
16750*/
16751
9ebbca7d
GK
16752/* Hash functions for the hash table. */
16753
16754static unsigned
a2369ed3 16755rs6000_hash_constant (rtx k)
9ebbca7d 16756{
46b33600
RH
16757 enum rtx_code code = GET_CODE (k);
16758 enum machine_mode mode = GET_MODE (k);
16759 unsigned result = (code << 3) ^ mode;
16760 const char *format;
16761 int flen, fidx;
f676971a 16762
46b33600
RH
16763 format = GET_RTX_FORMAT (code);
16764 flen = strlen (format);
16765 fidx = 0;
9ebbca7d 16766
46b33600
RH
16767 switch (code)
16768 {
16769 case LABEL_REF:
16770 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16771
16772 case CONST_DOUBLE:
16773 if (mode != VOIDmode)
16774 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16775 flen = 2;
16776 break;
16777
16778 case CODE_LABEL:
16779 fidx = 3;
16780 break;
16781
16782 default:
16783 break;
16784 }
9ebbca7d
GK
16785
16786 for (; fidx < flen; fidx++)
16787 switch (format[fidx])
16788 {
16789 case 's':
16790 {
16791 unsigned i, len;
16792 const char *str = XSTR (k, fidx);
16793 len = strlen (str);
16794 result = result * 613 + len;
16795 for (i = 0; i < len; i++)
16796 result = result * 613 + (unsigned) str[i];
17167fd8
MM
16797 break;
16798 }
9ebbca7d
GK
16799 case 'u':
16800 case 'e':
16801 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
16802 break;
16803 case 'i':
16804 case 'n':
16805 result = result * 613 + (unsigned) XINT (k, fidx);
16806 break;
16807 case 'w':
16808 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
16809 result = result * 613 + (unsigned) XWINT (k, fidx);
16810 else
16811 {
16812 size_t i;
9390387d 16813 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
16814 result = result * 613 + (unsigned) (XWINT (k, fidx)
16815 >> CHAR_BIT * i);
16816 }
16817 break;
09501938
DE
16818 case '0':
16819 break;
9ebbca7d 16820 default:
37409796 16821 gcc_unreachable ();
9ebbca7d 16822 }
46b33600 16823
9ebbca7d
GK
16824 return result;
16825}
16826
16827static unsigned
a2369ed3 16828toc_hash_function (const void *hash_entry)
9ebbca7d 16829{
f676971a 16830 const struct toc_hash_struct *thc =
a9098fd0
GK
16831 (const struct toc_hash_struct *) hash_entry;
16832 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
16833}
16834
16835/* Compare H1 and H2 for equivalence. */
16836
16837static int
a2369ed3 16838toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
16839{
16840 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
16841 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
16842
a9098fd0
GK
16843 if (((const struct toc_hash_struct *) h1)->key_mode
16844 != ((const struct toc_hash_struct *) h2)->key_mode)
16845 return 0;
16846
5692c7bc 16847 return rtx_equal_p (r1, r2);
9ebbca7d
GK
16848}
16849
28e510bd
MM
16850/* These are the names given by the C++ front-end to vtables, and
16851 vtable-like objects. Ideally, this logic should not be here;
16852 instead, there should be some programmatic way of inquiring as
16853 to whether or not an object is a vtable. */
16854
16855#define VTABLE_NAME_P(NAME) \
9390387d 16856 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
16857 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
16858 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 16859 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 16860 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
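/* For example, with a hypothetical mangled name held in a local variable
   spelled `name' (note that the macro reads the variable `name', not its
   NAME argument):

     const char *name = "_ZTV6Widget";
     ... VTABLE_NAME_P (name) ...

   the test is true, since "_ZTV" is the C++ ABI prefix for a vtable;
   "_ZTT", "_ZTI" and "_ZTC" cover VTTs, typeinfo objects and construction
   vtables, and "_vt." covers the old ABI.  */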
28e510bd
MM
16861
16862void
a2369ed3 16863rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
16864{
16865 /* Currently C++ toc references to vtables can be emitted before it
16866 is decided whether the vtable is public or private. If this is
16867 the case, then the linker will eventually complain that there is
f676971a 16868 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
16869 we emit the TOC reference to reference the symbol and not the
16870 section. */
16871 const char *name = XSTR (x, 0);
54ee9799 16872
f676971a 16873 if (VTABLE_NAME_P (name))
54ee9799
DE
16874 {
16875 RS6000_OUTPUT_BASENAME (file, name);
16876 }
16877 else
16878 assemble_name (file, name);
28e510bd
MM
16879}
16880
a4f6c312
SS
16881/* Output a TOC entry. We derive the entry name from what is being
16882 written. */
9878760c
RK
16883
16884void
a2369ed3 16885output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
16886{
16887 char buf[256];
3cce094d 16888 const char *name = buf;
ec940faa 16889 const char *real_name;
9878760c 16890 rtx base = x;
16fdeb48 16891 HOST_WIDE_INT offset = 0;
9878760c 16892
37409796 16893 gcc_assert (!TARGET_NO_TOC);
4697a36c 16894
9ebbca7d
GK
16895 /* When the linker won't eliminate them, don't output duplicate
16896 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
16897 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
16898 CODE_LABELs. */
16899 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
16900 {
16901 struct toc_hash_struct *h;
16902 void * * found;
f676971a 16903
17211ab5 16904 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 16905 time because GGC is not initialized at that point. */
17211ab5 16906 if (toc_hash_table == NULL)
f676971a 16907 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
16908 toc_hash_eq, NULL);
16909
9ebbca7d
GK
16910 h = ggc_alloc (sizeof (*h));
16911 h->key = x;
a9098fd0 16912 h->key_mode = mode;
9ebbca7d 16913 h->labelno = labelno;
f676971a 16914
9ebbca7d
GK
16915 found = htab_find_slot (toc_hash_table, h, 1);
16916 if (*found == NULL)
16917 *found = h;
f676971a 16918 else /* This is indeed a duplicate.
9ebbca7d
GK
16919 Set this label equal to that label. */
16920 {
16921 fputs ("\t.set ", file);
16922 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
16923 fprintf (file, "%d,", labelno);
16924 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 16925 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
16926 found)->labelno));
16927 return;
16928 }
16929 }
16930
16931 /* If we're going to put a double constant in the TOC, make sure it's
16932 aligned properly when strict alignment is on. */
ff1720ed
RK
16933 if (GET_CODE (x) == CONST_DOUBLE
16934 && STRICT_ALIGNMENT
a9098fd0 16935 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
16936 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
16937 ASM_OUTPUT_ALIGN (file, 3);
16938 }
16939
4977bab6 16940 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 16941
37c37a57
RK
16942 /* Handle FP constants specially. Note that if we have a minimal
16943 TOC, things we put here aren't actually in the TOC, so we can allow
16944 FP constants. */
00b79d54
BE
16945 if (GET_CODE (x) == CONST_DOUBLE &&
16946 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
16947 {
16948 REAL_VALUE_TYPE rv;
16949 long k[4];
16950
16951 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16952 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16953 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16954 else
16955 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
16956
16957 if (TARGET_64BIT)
16958 {
16959 if (TARGET_MINIMAL_TOC)
16960 fputs (DOUBLE_INT_ASM_OP, file);
16961 else
16962 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16963 k[0] & 0xffffffff, k[1] & 0xffffffff,
16964 k[2] & 0xffffffff, k[3] & 0xffffffff);
16965 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16966 k[0] & 0xffffffff, k[1] & 0xffffffff,
16967 k[2] & 0xffffffff, k[3] & 0xffffffff);
16968 return;
16969 }
16970 else
16971 {
16972 if (TARGET_MINIMAL_TOC)
16973 fputs ("\t.long ", file);
16974 else
16975 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16976 k[0] & 0xffffffff, k[1] & 0xffffffff,
16977 k[2] & 0xffffffff, k[3] & 0xffffffff);
16978 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16979 k[0] & 0xffffffff, k[1] & 0xffffffff,
16980 k[2] & 0xffffffff, k[3] & 0xffffffff);
16981 return;
16982 }
16983 }
00b79d54
BE
16984 else if (GET_CODE (x) == CONST_DOUBLE &&
16985 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 16986 {
042259f2
DE
16987 REAL_VALUE_TYPE rv;
16988 long k[2];
0adc764e 16989
042259f2 16990 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16991
16992 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16993 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16994 else
16995 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 16996
13ded975
DE
16997 if (TARGET_64BIT)
16998 {
16999 if (TARGET_MINIMAL_TOC)
2bfcf297 17000 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17001 else
2f0552b6
AM
17002 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17003 k[0] & 0xffffffff, k[1] & 0xffffffff);
17004 fprintf (file, "0x%lx%08lx\n",
17005 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17006 return;
17007 }
1875cc88 17008 else
13ded975
DE
17009 {
17010 if (TARGET_MINIMAL_TOC)
2bfcf297 17011 fputs ("\t.long ", file);
13ded975 17012 else
2f0552b6
AM
17013 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17014 k[0] & 0xffffffff, k[1] & 0xffffffff);
17015 fprintf (file, "0x%lx,0x%lx\n",
17016 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17017 return;
17018 }
9878760c 17019 }
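  /* Worked example: the binary64 constant 1.0 has the image
     0x3ff00000 00000000, so without TARGET_MINIMAL_TOC the entry would be
     "\t.tc FD_3ff00000_0[TC],0x3ff0000000000000" on a 64-bit target, or
     "\t.tc FD_3ff00000_0[TC],0x3ff00000,0x0" on a 32-bit one.  */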
00b79d54
BE
17020 else if (GET_CODE (x) == CONST_DOUBLE &&
17021 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17022 {
042259f2
DE
17023 REAL_VALUE_TYPE rv;
17024 long l;
9878760c 17025
042259f2 17026 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17027 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17028 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17029 else
17030 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17031
31bfaa0b
DE
17032 if (TARGET_64BIT)
17033 {
17034 if (TARGET_MINIMAL_TOC)
2bfcf297 17035 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17036 else
2f0552b6
AM
17037 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17038 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17039 return;
17040 }
042259f2 17041 else
31bfaa0b
DE
17042 {
17043 if (TARGET_MINIMAL_TOC)
2bfcf297 17044 fputs ("\t.long ", file);
31bfaa0b 17045 else
2f0552b6
AM
17046 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17047 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17048 return;
17049 }
042259f2 17050 }
f176e826 17051 else if (GET_MODE (x) == VOIDmode
a9098fd0 17052 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17053 {
e2c953b6 17054 unsigned HOST_WIDE_INT low;
042259f2
DE
17055 HOST_WIDE_INT high;
17056
17057 if (GET_CODE (x) == CONST_DOUBLE)
17058 {
17059 low = CONST_DOUBLE_LOW (x);
17060 high = CONST_DOUBLE_HIGH (x);
17061 }
17062 else
17063#if HOST_BITS_PER_WIDE_INT == 32
17064 {
17065 low = INTVAL (x);
0858c623 17066 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17067 }
17068#else
17069 {
c4ad648e
AM
17070 low = INTVAL (x) & 0xffffffff;
17071 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17072 }
17073#endif
9878760c 17074
a9098fd0
GK
 17075	  /* TOC entries are always Pmode-sized, but since this
 17076	     is a big-endian machine, if we're putting smaller
 17077	     integer constants in the TOC we have to pad them.
17078 (This is still a win over putting the constants in
17079 a separate constant pool, because then we'd have
02a4ec28
FS
17080 to have both a TOC entry _and_ the actual constant.)
17081
17082 For a 32-bit target, CONST_INT values are loaded and shifted
17083 entirely within `low' and can be stored in one TOC entry. */
17084
37409796
NS
17085 /* It would be easy to make this work, but it doesn't now. */
17086 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17087
17088 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17089 {
17090#if HOST_BITS_PER_WIDE_INT == 32
17091 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17092 POINTER_SIZE, &low, &high, 0);
17093#else
17094 low |= high << 32;
17095 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17096 high = (HOST_WIDE_INT) low >> 32;
17097 low &= 0xffffffff;
17098#endif
17099 }
a9098fd0 17100
13ded975
DE
17101 if (TARGET_64BIT)
17102 {
17103 if (TARGET_MINIMAL_TOC)
2bfcf297 17104 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17105 else
2f0552b6
AM
17106 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17107 (long) high & 0xffffffff, (long) low & 0xffffffff);
17108 fprintf (file, "0x%lx%08lx\n",
17109 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17110 return;
17111 }
1875cc88 17112 else
13ded975 17113 {
02a4ec28
FS
17114 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17115 {
17116 if (TARGET_MINIMAL_TOC)
2bfcf297 17117 fputs ("\t.long ", file);
02a4ec28 17118 else
2bfcf297 17119 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17120 (long) high & 0xffffffff, (long) low & 0xffffffff);
17121 fprintf (file, "0x%lx,0x%lx\n",
17122 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17123 }
13ded975 17124 else
02a4ec28
FS
17125 {
17126 if (TARGET_MINIMAL_TOC)
2bfcf297 17127 fputs ("\t.long ", file);
02a4ec28 17128 else
2f0552b6
AM
17129 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17130 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17131 }
13ded975
DE
17132 return;
17133 }
9878760c
RK
17134 }
17135
17136 if (GET_CODE (x) == CONST)
17137 {
37409796 17138 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17139
9878760c
RK
17140 base = XEXP (XEXP (x, 0), 0);
17141 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17142 }
f676971a 17143
37409796
NS
17144 switch (GET_CODE (base))
17145 {
17146 case SYMBOL_REF:
17147 name = XSTR (base, 0);
17148 break;
17149
17150 case LABEL_REF:
17151 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17152 CODE_LABEL_NUMBER (XEXP (base, 0)));
17153 break;
17154
17155 case CODE_LABEL:
17156 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17157 break;
17158
17159 default:
17160 gcc_unreachable ();
17161 }
9878760c 17162
772c5265 17163 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17164 if (TARGET_MINIMAL_TOC)
2bfcf297 17165 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17166 else
17167 {
b6c9286a 17168 fprintf (file, "\t.tc %s", real_name);
9878760c 17169
1875cc88 17170 if (offset < 0)
16fdeb48 17171 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17172 else if (offset)
16fdeb48 17173 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17174
19d2d16f 17175 fputs ("[TC],", file);
1875cc88 17176 }
581bc4de
MM
17177
17178 /* Currently C++ toc references to vtables can be emitted before it
17179 is decided whether the vtable is public or private. If this is
17180 the case, then the linker will eventually complain that there is
17181 a TOC reference to an unknown section. Thus, for vtables only,
17182 we emit the TOC reference to reference the symbol and not the
17183 section. */
28e510bd 17184 if (VTABLE_NAME_P (name))
581bc4de 17185 {
54ee9799 17186 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17187 if (offset < 0)
16fdeb48 17188 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17189 else if (offset > 0)
16fdeb48 17190 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17191 }
17192 else
17193 output_addr_const (file, x);
19d2d16f 17194 putc ('\n', file);
9878760c
RK
17195}
17196\f
17197/* Output an assembler pseudo-op to write an ASCII string of N characters
17198 starting at P to FILE.
17199
17200 On the RS/6000, we have to do this using the .byte operation and
17201 write out special characters outside the quoted string.
17202 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17203 so we must artificially break them up early. */
9878760c
RK
17204
17205void
a2369ed3 17206output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17207{
17208 char c;
17209 int i, count_string;
d330fd93
KG
17210 const char *for_string = "\t.byte \"";
17211 const char *for_decimal = "\t.byte ";
17212 const char *to_close = NULL;
9878760c
RK
17213
17214 count_string = 0;
17215 for (i = 0; i < n; i++)
17216 {
17217 c = *p++;
17218 if (c >= ' ' && c < 0177)
17219 {
17220 if (for_string)
17221 fputs (for_string, file);
17222 putc (c, file);
17223
17224 /* Write two quotes to get one. */
17225 if (c == '"')
17226 {
17227 putc (c, file);
17228 ++count_string;
17229 }
17230
17231 for_string = NULL;
17232 for_decimal = "\"\n\t.byte ";
17233 to_close = "\"\n";
17234 ++count_string;
17235
17236 if (count_string >= 512)
17237 {
17238 fputs (to_close, file);
17239
17240 for_string = "\t.byte \"";
17241 for_decimal = "\t.byte ";
17242 to_close = NULL;
17243 count_string = 0;
17244 }
17245 }
17246 else
17247 {
17248 if (for_decimal)
17249 fputs (for_decimal, file);
17250 fprintf (file, "%d", c);
17251
17252 for_string = "\n\t.byte \"";
17253 for_decimal = ", ";
17254 to_close = "\n";
17255 count_string = 0;
17256 }
17257 }
17258
17259 /* Now close the string if we have written one. Then end the line. */
17260 if (to_close)
9ebbca7d 17261 fputs (to_close, file);
9878760c
RK
17262}
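
/* As a worked example, the five input bytes 'a', 'b', '"', 'c', '\n'
   come out as

	.byte "ab""c"
	.byte 10

   with the interior quote doubled inside the quoted run and the
   unprintable newline emitted in decimal on a fresh .byte directive.  */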
17263\f
17264/* Generate a unique section name for FILENAME for a section type
17265 represented by SECTION_DESC. Output goes into BUF.
17266
17267 SECTION_DESC can be any string, as long as it is different for each
17268 possible section type.
17269
17270 We name the section in the same manner as xlc. The name begins with an
17271 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17272 names) with the last period replaced by the string SECTION_DESC. If
17273 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17274 the name. */
9878760c
RK
17275
17276void
f676971a 17277rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17278 const char *section_desc)
9878760c 17279{
9ebbca7d 17280 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17281 char *p;
17282 int len;
9878760c
RK
17283
17284 after_last_slash = filename;
17285 for (q = filename; *q; q++)
11e5fe42
RK
17286 {
17287 if (*q == '/')
17288 after_last_slash = q + 1;
17289 else if (*q == '.')
17290 last_period = q;
17291 }
9878760c 17292
11e5fe42 17293 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17294 *buf = (char *) xmalloc (len);
9878760c
RK
17295
17296 p = *buf;
17297 *p++ = '_';
17298
17299 for (q = after_last_slash; *q; q++)
17300 {
11e5fe42 17301 if (q == last_period)
c4ad648e 17302 {
9878760c
RK
17303 strcpy (p, section_desc);
17304 p += strlen (section_desc);
e3981aab 17305 break;
c4ad648e 17306 }
9878760c 17307
e9a780ec 17308 else if (ISALNUM (*q))
c4ad648e 17309 *p++ = *q;
9878760c
RK
17310 }
17311
11e5fe42 17312 if (last_period == 0)
9878760c
RK
17313 strcpy (p, section_desc);
17314 else
17315 *p = '\0';
17316}
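
/* Usage sketch (the descriptor string is a made-up example): for FILENAME
   "src/foo.c" and SECTION_DESC ".bss_" the buffer receives "_foo.bss_";
   the directory prefix is dropped and everything from the last period on
   is replaced by the descriptor.  A FILENAME without a period, such as
   "foo", also yields "_foo.bss_", with the descriptor simply appended.  */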
e165f3f0 17317\f
a4f6c312 17318/* Emit profile function. */
411707f4 17319
411707f4 17320void
a2369ed3 17321output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17322{
858081ad
AH
17323 /* Non-standard profiling for kernels, which just saves LR then calls
17324 _mcount without worrying about arg saves. The idea is to change
17325 the function prologue as little as possible as it isn't easy to
17326 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17327 if (TARGET_PROFILE_KERNEL)
17328 return;
17329
8480e480
CC
17330 if (DEFAULT_ABI == ABI_AIX)
17331 {
9739c90c
JJ
17332#ifndef NO_PROFILE_COUNTERS
17333# define NO_PROFILE_COUNTERS 0
17334#endif
f676971a 17335 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17336 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17337 else
17338 {
17339 char buf[30];
17340 const char *label_name;
17341 rtx fun;
411707f4 17342
9739c90c
JJ
17343 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17344 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17345 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17346
9739c90c
JJ
17347 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17348 fun, Pmode);
17349 }
8480e480 17350 }
ee890fe2
SS
17351 else if (DEFAULT_ABI == ABI_DARWIN)
17352 {
d5fa86ba 17353 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17354 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17355
17356 /* Be conservative and always set this, at least for now. */
17357 current_function_uses_pic_offset_table = 1;
17358
17359#if TARGET_MACHO
17360 /* For PIC code, set up a stub and collect the caller's address
17361 from r0, which is where the prologue puts it. */
11abc112
MM
17362 if (MACHOPIC_INDIRECT
17363 && current_function_uses_pic_offset_table)
17364 caller_addr_regno = 0;
ee890fe2
SS
17365#endif
17366 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17367 0, VOIDmode, 1,
17368 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17369 }
411707f4
CC
17370}
17371
a4f6c312 17372/* Write function profiler code. */
e165f3f0
RK
17373
17374void
a2369ed3 17375output_function_profiler (FILE *file, int labelno)
e165f3f0 17376{
3daf36a4 17377 char buf[100];
e165f3f0 17378
38c1f2d7 17379 switch (DEFAULT_ABI)
3daf36a4 17380 {
38c1f2d7 17381 default:
37409796 17382 gcc_unreachable ();
38c1f2d7
MM
17383
17384 case ABI_V4:
09eeeacb
AM
17385 if (!TARGET_32BIT)
17386 {
d4ee4d25 17387 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17388 return;
17389 }
ffcfcb5f 17390 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17391 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17392 if (NO_PROFILE_COUNTERS)
17393 {
17394 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17395 reg_names[0], reg_names[1]);
17396 }
17397 else if (TARGET_SECURE_PLT && flag_pic)
17398 {
17399 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17400 reg_names[0], reg_names[1]);
17401 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17402 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17403 reg_names[12], reg_names[12]);
17404 assemble_name (file, buf);
17405 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17406 assemble_name (file, buf);
17407 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17408 }
17409 else if (flag_pic == 1)
38c1f2d7 17410 {
dfdfa60f 17411 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17412 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17413 reg_names[0], reg_names[1]);
17167fd8 17414 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17415 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17416 assemble_name (file, buf);
17167fd8 17417 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17418 }
9ebbca7d 17419 else if (flag_pic > 1)
38c1f2d7 17420 {
71625f3d
AM
17421 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17422 reg_names[0], reg_names[1]);
9ebbca7d 17423 /* Now, we need to get the address of the label. */
71625f3d 17424 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17425 assemble_name (file, buf);
9ebbca7d
GK
17426 fputs ("-.\n1:", file);
17427 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17428 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17429 reg_names[0], reg_names[11]);
17430 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17431 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17432 }
38c1f2d7
MM
17433 else
17434 {
17167fd8 17435 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17436 assemble_name (file, buf);
dfdfa60f 17437 fputs ("@ha\n", file);
71625f3d
AM
17438 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17439 reg_names[0], reg_names[1]);
a260abc9 17440 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17441 assemble_name (file, buf);
17167fd8 17442 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17443 }
17444
50d440bc 17445 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17446 fprintf (file, "\tbl %s%s\n",
17447 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17448 break;
17449
17450 case ABI_AIX:
ee890fe2 17451 case ABI_DARWIN:
ffcfcb5f
AM
17452 if (!TARGET_PROFILE_KERNEL)
17453 {
a3c9585f 17454 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17455 }
17456 else
17457 {
37409796 17458 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17459
17460 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17461 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17462
6de9cd9a 17463 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17464 {
17465 asm_fprintf (file, "\tstd %s,24(%s)\n",
17466 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17467 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17468 asm_fprintf (file, "\tld %s,24(%s)\n",
17469 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17470 }
17471 else
17472 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17473 }
38c1f2d7
MM
17474 break;
17475 }
e165f3f0 17476}
a251ffd0 17477
b54cf83a 17478\f
44cd321e
PS
17479
17480/* The following variable value is the last issued insn. */
17481
17482static rtx last_scheduled_insn;
17483
17484/* The following variable helps to balance issuing of load and
17485 store instructions */
17486
17487static int load_store_pendulum;
17488
b54cf83a
DE
17489/* Power4 load update and store update instructions are cracked into a
17490 load or store and an integer insn which are executed in the same cycle.
17491 Branches have their own dispatch slot which does not count against the
17492 GCC issue rate, but it changes the program flow so there are no other
17493 instructions to issue in this cycle. */
17494
17495static int
f676971a
EC
17496rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17497 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17498 rtx insn, int more)
b54cf83a 17499{
44cd321e 17500 last_scheduled_insn = insn;
b54cf83a
DE
17501 if (GET_CODE (PATTERN (insn)) == USE
17502 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17503 {
17504 cached_can_issue_more = more;
17505 return cached_can_issue_more;
17506 }
17507
17508 if (insn_terminates_group_p (insn, current_group))
17509 {
17510 cached_can_issue_more = 0;
17511 return cached_can_issue_more;
17512 }
b54cf83a 17513
d296e02e
AP
 17514	  /* If the insn has no reservation but we reach here anyway, treat it
	     as not consuming an issue slot.  */
17515 if (recog_memoized (insn) < 0)
17516 return more;
17517
ec507f2d 17518 if (rs6000_sched_groups)
b54cf83a 17519 {
cbe26ab8 17520 if (is_microcoded_insn (insn))
44cd321e 17521 cached_can_issue_more = 0;
cbe26ab8 17522 else if (is_cracked_insn (insn))
44cd321e
PS
17523 cached_can_issue_more = more > 2 ? more - 2 : 0;
17524 else
17525 cached_can_issue_more = more - 1;
17526
17527 return cached_can_issue_more;
b54cf83a 17528 }
165b263e 17529
d296e02e
AP
17530 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17531 return 0;
17532
44cd321e
PS
17533 cached_can_issue_more = more - 1;
17534 return cached_can_issue_more;
b54cf83a
DE
17535}
17536
a251ffd0
TG
17537/* Adjust the cost of a scheduling dependency. Return the new cost of
17538 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17539
c237e94a 17540static int
0a4f0294 17541rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17542{
44cd321e 17543 enum attr_type attr_type;
a251ffd0 17544
44cd321e 17545 if (! recog_memoized (insn))
a251ffd0
TG
17546 return 0;
17547
44cd321e 17548 switch (REG_NOTE_KIND (link))
a251ffd0 17549 {
44cd321e
PS
17550 case REG_DEP_TRUE:
17551 {
17552 /* Data dependency; DEP_INSN writes a register that INSN reads
17553 some cycles later. */
17554
17555 /* Separate a load from a narrower, dependent store. */
17556 if (rs6000_sched_groups
17557 && GET_CODE (PATTERN (insn)) == SET
17558 && GET_CODE (PATTERN (dep_insn)) == SET
17559 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17560 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17561 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17562 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17563 return cost + 14;
17564
17565 attr_type = get_attr_type (insn);
17566
17567 switch (attr_type)
17568 {
17569 case TYPE_JMPREG:
17570 /* Tell the first scheduling pass about the latency between
17571 a mtctr and bctr (and mtlr and br/blr). The first
17572 scheduling pass will not know about this latency since
17573 the mtctr instruction, which has the latency associated
17574 to it, will be generated by reload. */
17575 return TARGET_POWER ? 5 : 4;
17576 case TYPE_BRANCH:
17577 /* Leave some extra cycles between a compare and its
17578 dependent branch, to inhibit expensive mispredicts. */
17579 if ((rs6000_cpu_attr == CPU_PPC603
17580 || rs6000_cpu_attr == CPU_PPC604
17581 || rs6000_cpu_attr == CPU_PPC604E
17582 || rs6000_cpu_attr == CPU_PPC620
17583 || rs6000_cpu_attr == CPU_PPC630
17584 || rs6000_cpu_attr == CPU_PPC750
17585 || rs6000_cpu_attr == CPU_PPC7400
17586 || rs6000_cpu_attr == CPU_PPC7450
17587 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17588 || rs6000_cpu_attr == CPU_POWER5
17589 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17590 && recog_memoized (dep_insn)
17591 && (INSN_CODE (dep_insn) >= 0))
982afe02 17592
44cd321e
PS
17593 switch (get_attr_type (dep_insn))
17594 {
17595 case TYPE_CMP:
17596 case TYPE_COMPARE:
17597 case TYPE_DELAYED_COMPARE:
17598 case TYPE_IMUL_COMPARE:
17599 case TYPE_LMUL_COMPARE:
17600 case TYPE_FPCOMPARE:
17601 case TYPE_CR_LOGICAL:
17602 case TYPE_DELAYED_CR:
17603 return cost + 2;
17604 default:
17605 break;
17606 }
17607 break;
17608
17609 case TYPE_STORE:
17610 case TYPE_STORE_U:
17611 case TYPE_STORE_UX:
17612 case TYPE_FPSTORE:
17613 case TYPE_FPSTORE_U:
17614 case TYPE_FPSTORE_UX:
17615 if ((rs6000_cpu == PROCESSOR_POWER6)
17616 && recog_memoized (dep_insn)
17617 && (INSN_CODE (dep_insn) >= 0))
17618 {
17619
17620 if (GET_CODE (PATTERN (insn)) != SET)
17621 /* If this happens, we have to extend this to schedule
17622 optimally. Return default for now. */
17623 return cost;
17624
17625 /* Adjust the cost for the case where the value written
17626 by a fixed point operation is used as the address
17627 gen value on a store. */
17628 switch (get_attr_type (dep_insn))
17629 {
17630 case TYPE_LOAD:
17631 case TYPE_LOAD_U:
17632 case TYPE_LOAD_UX:
17633 case TYPE_CNTLZ:
17634 {
17635 if (! store_data_bypass_p (dep_insn, insn))
17636 return 4;
17637 break;
17638 }
17639 case TYPE_LOAD_EXT:
17640 case TYPE_LOAD_EXT_U:
17641 case TYPE_LOAD_EXT_UX:
17642 case TYPE_VAR_SHIFT_ROTATE:
17643 case TYPE_VAR_DELAYED_COMPARE:
17644 {
17645 if (! store_data_bypass_p (dep_insn, insn))
17646 return 6;
17647 break;
17648 }
17649 case TYPE_INTEGER:
17650 case TYPE_COMPARE:
17651 case TYPE_FAST_COMPARE:
17652 case TYPE_EXTS:
17653 case TYPE_SHIFT:
17654 case TYPE_INSERT_WORD:
17655 case TYPE_INSERT_DWORD:
17656 case TYPE_FPLOAD_U:
17657 case TYPE_FPLOAD_UX:
17658 case TYPE_STORE_U:
17659 case TYPE_STORE_UX:
17660 case TYPE_FPSTORE_U:
17661 case TYPE_FPSTORE_UX:
17662 {
17663 if (! store_data_bypass_p (dep_insn, insn))
17664 return 3;
17665 break;
17666 }
17667 case TYPE_IMUL:
17668 case TYPE_IMUL2:
17669 case TYPE_IMUL3:
17670 case TYPE_LMUL:
17671 case TYPE_IMUL_COMPARE:
17672 case TYPE_LMUL_COMPARE:
17673 {
17674 if (! store_data_bypass_p (dep_insn, insn))
17675 return 17;
17676 break;
17677 }
17678 case TYPE_IDIV:
17679 {
17680 if (! store_data_bypass_p (dep_insn, insn))
17681 return 45;
17682 break;
17683 }
17684 case TYPE_LDIV:
17685 {
17686 if (! store_data_bypass_p (dep_insn, insn))
17687 return 57;
17688 break;
17689 }
17690 default:
17691 break;
17692 }
17693 }
17694 break;
17695
17696 case TYPE_LOAD:
17697 case TYPE_LOAD_U:
17698 case TYPE_LOAD_UX:
17699 case TYPE_LOAD_EXT:
17700 case TYPE_LOAD_EXT_U:
17701 case TYPE_LOAD_EXT_UX:
17702 if ((rs6000_cpu == PROCESSOR_POWER6)
17703 && recog_memoized (dep_insn)
17704 && (INSN_CODE (dep_insn) >= 0))
17705 {
17706
17707 /* Adjust the cost for the case where the value written
17708 by a fixed point instruction is used within the address
17709 gen portion of a subsequent load(u)(x) */
17710 switch (get_attr_type (dep_insn))
17711 {
17712 case TYPE_LOAD:
17713 case TYPE_LOAD_U:
17714 case TYPE_LOAD_UX:
17715 case TYPE_CNTLZ:
17716 {
17717 if (set_to_load_agen (dep_insn, insn))
17718 return 4;
17719 break;
17720 }
17721 case TYPE_LOAD_EXT:
17722 case TYPE_LOAD_EXT_U:
17723 case TYPE_LOAD_EXT_UX:
17724 case TYPE_VAR_SHIFT_ROTATE:
17725 case TYPE_VAR_DELAYED_COMPARE:
17726 {
17727 if (set_to_load_agen (dep_insn, insn))
17728 return 6;
17729 break;
17730 }
17731 case TYPE_INTEGER:
17732 case TYPE_COMPARE:
17733 case TYPE_FAST_COMPARE:
17734 case TYPE_EXTS:
17735 case TYPE_SHIFT:
17736 case TYPE_INSERT_WORD:
17737 case TYPE_INSERT_DWORD:
17738 case TYPE_FPLOAD_U:
17739 case TYPE_FPLOAD_UX:
17740 case TYPE_STORE_U:
17741 case TYPE_STORE_UX:
17742 case TYPE_FPSTORE_U:
17743 case TYPE_FPSTORE_UX:
17744 {
17745 if (set_to_load_agen (dep_insn, insn))
17746 return 3;
17747 break;
17748 }
17749 case TYPE_IMUL:
17750 case TYPE_IMUL2:
17751 case TYPE_IMUL3:
17752 case TYPE_LMUL:
17753 case TYPE_IMUL_COMPARE:
17754 case TYPE_LMUL_COMPARE:
17755 {
17756 if (set_to_load_agen (dep_insn, insn))
17757 return 17;
17758 break;
17759 }
17760 case TYPE_IDIV:
17761 {
17762 if (set_to_load_agen (dep_insn, insn))
17763 return 45;
17764 break;
17765 }
17766 case TYPE_LDIV:
17767 {
17768 if (set_to_load_agen (dep_insn, insn))
17769 return 57;
17770 break;
17771 }
17772 default:
17773 break;
17774 }
17775 }
17776 break;
17777
17778 case TYPE_FPLOAD:
17779 if ((rs6000_cpu == PROCESSOR_POWER6)
17780 && recog_memoized (dep_insn)
17781 && (INSN_CODE (dep_insn) >= 0)
17782 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17783 return 2;
17784
17785 default:
17786 break;
17787 }
c9dbf840 17788
a251ffd0 17789 /* Fall out to return default cost. */
44cd321e
PS
17790 }
17791 break;
17792
17793 case REG_DEP_OUTPUT:
17794 /* Output dependency; DEP_INSN writes a register that INSN writes some
17795 cycles later. */
17796 if ((rs6000_cpu == PROCESSOR_POWER6)
17797 && recog_memoized (dep_insn)
17798 && (INSN_CODE (dep_insn) >= 0))
17799 {
17800 attr_type = get_attr_type (insn);
17801
17802 switch (attr_type)
17803 {
17804 case TYPE_FP:
17805 if (get_attr_type (dep_insn) == TYPE_FP)
17806 return 1;
17807 break;
17808 case TYPE_FPLOAD:
17809 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
17810 return 2;
17811 break;
17812 default:
17813 break;
17814 }
17815 }
17816 case REG_DEP_ANTI:
17817 /* Anti dependency; DEP_INSN reads a register that INSN writes some
17818 cycles later. */
17819 return 0;
17820
17821 default:
17822 gcc_unreachable ();
a251ffd0
TG
17823 }
17824
17825 return cost;
17826}
b6c9286a 17827
cbe26ab8 17828/* The function returns true if INSN is microcoded.
839a4992 17829 Return false otherwise. */
cbe26ab8
DN
17830
17831static bool
17832is_microcoded_insn (rtx insn)
17833{
17834 if (!insn || !INSN_P (insn)
17835 || GET_CODE (PATTERN (insn)) == USE
17836 || GET_CODE (PATTERN (insn)) == CLOBBER)
17837 return false;
17838
d296e02e
AP
17839 if (rs6000_cpu_attr == CPU_CELL)
17840 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
17841
ec507f2d 17842 if (rs6000_sched_groups)
cbe26ab8
DN
17843 {
17844 enum attr_type type = get_attr_type (insn);
17845 if (type == TYPE_LOAD_EXT_U
17846 || type == TYPE_LOAD_EXT_UX
17847 || type == TYPE_LOAD_UX
17848 || type == TYPE_STORE_UX
17849 || type == TYPE_MFCR)
c4ad648e 17850 return true;
cbe26ab8
DN
17851 }
17852
17853 return false;
17854}
17855
cbe26ab8
DN
17856/* The function returns true if INSN is cracked into 2 instructions
17857 by the processor (and therefore occupies 2 issue slots). */
17858
17859static bool
17860is_cracked_insn (rtx insn)
17861{
17862 if (!insn || !INSN_P (insn)
17863 || GET_CODE (PATTERN (insn)) == USE
17864 || GET_CODE (PATTERN (insn)) == CLOBBER)
17865 return false;
17866
ec507f2d 17867 if (rs6000_sched_groups)
cbe26ab8
DN
17868 {
17869 enum attr_type type = get_attr_type (insn);
17870 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
17871 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
17872 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
17873 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
17874 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
17875 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
17876 || type == TYPE_IDIV || type == TYPE_LDIV
17877 || type == TYPE_INSERT_WORD)
17878 return true;
cbe26ab8
DN
17879 }
17880
17881 return false;
17882}
17883
17884/* The function returns true if INSN can be issued only from
a3c9585f 17885 the branch slot. */
cbe26ab8
DN
17886
17887static bool
17888is_branch_slot_insn (rtx insn)
17889{
17890 if (!insn || !INSN_P (insn)
17891 || GET_CODE (PATTERN (insn)) == USE
17892 || GET_CODE (PATTERN (insn)) == CLOBBER)
17893 return false;
17894
ec507f2d 17895 if (rs6000_sched_groups)
cbe26ab8
DN
17896 {
17897 enum attr_type type = get_attr_type (insn);
17898 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 17899 return true;
cbe26ab8
DN
17900 return false;
17901 }
17902
17903 return false;
17904}
79ae11c4 17905
44cd321e
PS
 17906/* The function returns true if out_insn sets a value that is
 17907   used in the address generation computation of in_insn.  */
17908static bool
17909set_to_load_agen (rtx out_insn, rtx in_insn)
17910{
17911 rtx out_set, in_set;
17912
17913 /* For performance reasons, only handle the simple case where
17914 both loads are a single_set. */
17915 out_set = single_set (out_insn);
17916 if (out_set)
17917 {
17918 in_set = single_set (in_insn);
17919 if (in_set)
17920 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
17921 }
17922
17923 return false;
17924}
17925
 17926/* Return true if the target storage location of out_insn is adjacent
 17927   to the target storage location of in_insn.  */
17929
17930static bool
17931adjacent_mem_locations (rtx insn1, rtx insn2)
17932{
17933
e3a0e200
PB
17934 rtx a = get_store_dest (PATTERN (insn1));
17935 rtx b = get_store_dest (PATTERN (insn2));
17936
44cd321e
PS
17937 if ((GET_CODE (XEXP (a, 0)) == REG
17938 || (GET_CODE (XEXP (a, 0)) == PLUS
17939 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
17940 && (GET_CODE (XEXP (b, 0)) == REG
17941 || (GET_CODE (XEXP (b, 0)) == PLUS
17942 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
17943 {
17944 HOST_WIDE_INT val0 = 0, val1 = 0;
17945 rtx reg0, reg1;
17946 int val_diff;
17947
17948 if (GET_CODE (XEXP (a, 0)) == PLUS)
17949 {
17950 reg0 = XEXP (XEXP (a, 0), 0);
17951 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
17952 }
17953 else
17954 reg0 = XEXP (a, 0);
17955
17956 if (GET_CODE (XEXP (b, 0)) == PLUS)
17957 {
17958 reg1 = XEXP (XEXP (b, 0), 0);
17959 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
17960 }
17961 else
17962 reg1 = XEXP (b, 0);
17963
17964 val_diff = val1 - val0;
17965
17966 return ((REGNO (reg0) == REGNO (reg1))
17967 && (val_diff == INTVAL (MEM_SIZE (a))
17968 || val_diff == -INTVAL (MEM_SIZE (b))));
17969 }
17970
17971 return false;
17972}
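
/* For instance, two stores whose addresses are 8(r9) and 12(r9), each with
   a 4-byte MEM_SIZE, are adjacent in either order: the base registers match
   and the offsets differ by exactly the size of one of the accesses.  */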
17973
a4f6c312 17974/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
17975 priority INSN_PRIORITY (INSN). Increase the priority to execute the
17976 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
17977 define this macro if you do not need to adjust the scheduling
17978 priorities of insns. */
bef84347 17979
c237e94a 17980static int
a2369ed3 17981rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 17982{
a4f6c312
SS
17983 /* On machines (like the 750) which have asymmetric integer units,
 17984     where one integer unit can do multiplies and divides and the other
17985 can't, reduce the priority of multiply/divide so it is scheduled
17986 before other integer operations. */
bef84347
VM
17987
17988#if 0
2c3c49de 17989 if (! INSN_P (insn))
bef84347
VM
17990 return priority;
17991
17992 if (GET_CODE (PATTERN (insn)) == USE)
17993 return priority;
17994
17995 switch (rs6000_cpu_attr) {
17996 case CPU_PPC750:
17997 switch (get_attr_type (insn))
17998 {
17999 default:
18000 break;
18001
18002 case TYPE_IMUL:
18003 case TYPE_IDIV:
3cb999d8
DE
18004 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18005 priority, priority);
bef84347
VM
18006 if (priority >= 0 && priority < 0x01000000)
18007 priority >>= 3;
18008 break;
18009 }
18010 }
18011#endif
18012
44cd321e 18013 if (insn_must_be_first_in_group (insn)
79ae11c4 18014 && reload_completed
f676971a 18015 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18016 && rs6000_sched_restricted_insns_priority)
18017 {
18018
c4ad648e
AM
18019 /* Prioritize insns that can be dispatched only in the first
18020 dispatch slot. */
79ae11c4 18021 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18022 /* Attach highest priority to insn. This means that in
18023 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18024 precede 'priority' (critical path) considerations. */
f676971a 18025 return current_sched_info->sched_max_insns_priority;
79ae11c4 18026 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18027 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18028 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18029 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18030 return (priority + 1);
18031 }
79ae11c4 18032
44cd321e
PS
18033 if (rs6000_cpu == PROCESSOR_POWER6
18034 && ((load_store_pendulum == -2 && is_load_insn (insn))
18035 || (load_store_pendulum == 2 && is_store_insn (insn))))
18036 /* Attach highest priority to insn if the scheduler has just issued two
18037 stores and this instruction is a load, or two loads and this instruction
18038 is a store. Power6 wants loads and stores scheduled alternately
18039 when possible */
18040 return current_sched_info->sched_max_insns_priority;
18041
bef84347
VM
18042 return priority;
18043}
18044
d296e02e
AP
18045/* Return true if the instruction is nonpipelined on the Cell. */
18046static bool
18047is_nonpipeline_insn (rtx insn)
18048{
18049 enum attr_type type;
18050 if (!insn || !INSN_P (insn)
18051 || GET_CODE (PATTERN (insn)) == USE
18052 || GET_CODE (PATTERN (insn)) == CLOBBER)
18053 return false;
18054
18055 type = get_attr_type (insn);
18056 if (type == TYPE_IMUL
18057 || type == TYPE_IMUL2
18058 || type == TYPE_IMUL3
18059 || type == TYPE_LMUL
18060 || type == TYPE_IDIV
18061 || type == TYPE_LDIV
18062 || type == TYPE_SDIV
18063 || type == TYPE_DDIV
18064 || type == TYPE_SSQRT
18065 || type == TYPE_DSQRT
18066 || type == TYPE_MFCR
18067 || type == TYPE_MFCRF
18068 || type == TYPE_MFJMPR)
18069 {
18070 return true;
18071 }
18072 return false;
18073}
18074
18075
a4f6c312
SS
18076/* Return how many instructions the machine can issue per cycle. */
18077
c237e94a 18078static int
863d938c 18079rs6000_issue_rate (void)
b6c9286a 18080{
3317bab1
DE
18081 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18082 if (!reload_completed)
18083 return 1;
18084
b6c9286a 18085 switch (rs6000_cpu_attr) {
3cb999d8
DE
18086 case CPU_RIOS1: /* ? */
18087 case CPU_RS64A:
18088 case CPU_PPC601: /* ? */
ed947a96 18089 case CPU_PPC7450:
3cb999d8 18090 return 3;
b54cf83a 18091 case CPU_PPC440:
b6c9286a 18092 case CPU_PPC603:
bef84347 18093 case CPU_PPC750:
ed947a96 18094 case CPU_PPC7400:
be12c2b0 18095 case CPU_PPC8540:
d296e02e 18096 case CPU_CELL:
f676971a 18097 return 2;
3cb999d8 18098 case CPU_RIOS2:
b6c9286a 18099 case CPU_PPC604:
19684119 18100 case CPU_PPC604E:
b6c9286a 18101 case CPU_PPC620:
3cb999d8 18102 case CPU_PPC630:
b6c9286a 18103 return 4;
cbe26ab8 18104 case CPU_POWER4:
ec507f2d 18105 case CPU_POWER5:
44cd321e 18106 case CPU_POWER6:
cbe26ab8 18107 return 5;
b6c9286a
MM
18108 default:
18109 return 1;
18110 }
18111}
18112
be12c2b0
VM
18113/* Return how many instructions to look ahead for better insn
18114 scheduling. */
18115
18116static int
863d938c 18117rs6000_use_sched_lookahead (void)
be12c2b0
VM
18118{
18119 if (rs6000_cpu_attr == CPU_PPC8540)
18120 return 4;
d296e02e
AP
18121 if (rs6000_cpu_attr == CPU_CELL)
18122 return (reload_completed ? 8 : 0);
be12c2b0
VM
18123 return 0;
18124}
18125
d296e02e
AP
18126/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
18127static int
18128rs6000_use_sched_lookahead_guard (rtx insn)
18129{
18130 if (rs6000_cpu_attr != CPU_CELL)
18131 return 1;
18132
18133 if (insn == NULL_RTX || !INSN_P (insn))
18134 abort ();
982afe02 18135
d296e02e
AP
18136 if (!reload_completed
18137 || is_nonpipeline_insn (insn)
18138 || is_microcoded_insn (insn))
18139 return 0;
18140
18141 return 1;
18142}
18143
569fa502
DN
 18144/* Determine if PAT refers to memory.  */
18145
18146static bool
18147is_mem_ref (rtx pat)
18148{
18149 const char * fmt;
18150 int i, j;
18151 bool ret = false;
18152
18153 if (GET_CODE (pat) == MEM)
18154 return true;
18155
18156 /* Recursively process the pattern. */
18157 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18158
18159 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18160 {
18161 if (fmt[i] == 'e')
18162 ret |= is_mem_ref (XEXP (pat, i));
18163 else if (fmt[i] == 'E')
18164 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18165 ret |= is_mem_ref (XVECEXP (pat, i, j));
18166 }
18167
18168 return ret;
18169}
18170
18171/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18172
569fa502
DN
18173static bool
18174is_load_insn1 (rtx pat)
18175{
18176 if (!pat || pat == NULL_RTX)
18177 return false;
18178
18179 if (GET_CODE (pat) == SET)
18180 return is_mem_ref (SET_SRC (pat));
18181
18182 if (GET_CODE (pat) == PARALLEL)
18183 {
18184 int i;
18185
18186 for (i = 0; i < XVECLEN (pat, 0); i++)
18187 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18188 return true;
18189 }
18190
18191 return false;
18192}
18193
18194/* Determine if INSN loads from memory. */
18195
18196static bool
18197is_load_insn (rtx insn)
18198{
18199 if (!insn || !INSN_P (insn))
18200 return false;
18201
18202 if (GET_CODE (insn) == CALL_INSN)
18203 return false;
18204
18205 return is_load_insn1 (PATTERN (insn));
18206}
18207
18208/* Determine if PAT is a PATTERN of a store insn. */
18209
18210static bool
18211is_store_insn1 (rtx pat)
18212{
18213 if (!pat || pat == NULL_RTX)
18214 return false;
18215
18216 if (GET_CODE (pat) == SET)
18217 return is_mem_ref (SET_DEST (pat));
18218
18219 if (GET_CODE (pat) == PARALLEL)
18220 {
18221 int i;
18222
18223 for (i = 0; i < XVECLEN (pat, 0); i++)
18224 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18225 return true;
18226 }
18227
18228 return false;
18229}
18230
18231/* Determine if INSN stores to memory. */
18232
18233static bool
18234is_store_insn (rtx insn)
18235{
18236 if (!insn || !INSN_P (insn))
18237 return false;
18238
18239 return is_store_insn1 (PATTERN (insn));
18240}
18241
e3a0e200
PB
18242/* Return the dest of a store insn. */
18243
18244static rtx
18245get_store_dest (rtx pat)
18246{
18247 gcc_assert (is_store_insn1 (pat));
18248
18249 if (GET_CODE (pat) == SET)
18250 return SET_DEST (pat);
18251 else if (GET_CODE (pat) == PARALLEL)
18252 {
18253 int i;
18254
18255 for (i = 0; i < XVECLEN (pat, 0); i++)
18256 {
18257 rtx inner_pat = XVECEXP (pat, 0, i);
18258 if (GET_CODE (inner_pat) == SET
18259 && is_mem_ref (SET_DEST (inner_pat)))
18260 return inner_pat;
18261 }
18262 }
18263 /* We shouldn't get here, because we should have either a simple
18264 store insn or a store with update, both of which are covered above. */
18265 gcc_unreachable ();
18266}
18267
569fa502
DN
18268/* Returns whether the dependence between INSN and NEXT is considered
18269 costly by the given target. */
18270
18271static bool
b198261f 18272rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18273{
b198261f
MK
18274 rtx insn;
18275 rtx next;
18276
aabcd309 18277 /* If the flag is not enabled, no dependence is considered costly;
f676971a 18278 allow all dependent insns in the same group.
569fa502
DN
18279 This is the most aggressive option. */
18280 if (rs6000_sched_costly_dep == no_dep_costly)
18281 return false;
18282
f676971a 18283 /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
18284 do not allow dependent instructions in the same group.
18285 This is the most conservative option. */
18286 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18287 return true;
569fa502 18288
b198261f
MK
18289 insn = DEP_PRO (dep);
18290 next = DEP_CON (dep);
18291
f676971a
EC
18292 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18293 && is_load_insn (next)
569fa502
DN
18294 && is_store_insn (insn))
18295 /* Prevent load after store in the same group. */
18296 return true;
18297
18298 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18299 && is_load_insn (next)
569fa502 18300 && is_store_insn (insn)
e2f6ff94 18301 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18302 /* Prevent load after store in the same group if it is a true
18303 dependence. */
569fa502 18304 return true;
f676971a
EC
18305
18306 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18307 and will not be scheduled in the same group. */
18308 if (rs6000_sched_costly_dep <= max_dep_latency
18309 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18310 return true;
18311
18312 return false;
18313}
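
/* Editor's sketch (not part of rs6000.c): a plain-C restatement of the policy
   rs6000_is_costly_dependence encodes, with the -msched-costly-dep settings
   spelled out as an enum and the insn queries reduced to flags.  The names
   below are illustrative only.  */

enum costly_dep_setting
{
  NO_DEP_COSTLY,        /* nothing is costly; pack groups freely        */
  ALL_DEPS_COSTLY,      /* every dependence splits the group            */
  TRUE_STORE_TO_LOAD,   /* only a true store->load dependence splits    */
  STORE_TO_LOAD,        /* any store->load dependence splits            */
  LATENCY_AT_LEAST_N    /* numeric setting: latency >= N splits         */
};

static int
dep_is_costly (enum costly_dep_setting setting, int n,
               int producer_is_store, int consumer_is_load,
               int is_true_dep, int cost, int distance)
{
  switch (setting)
    {
    case NO_DEP_COSTLY:
      return 0;
    case ALL_DEPS_COSTLY:
      return 1;
    case STORE_TO_LOAD:
      return producer_is_store && consumer_is_load;
    case TRUE_STORE_TO_LOAD:
      return producer_is_store && consumer_is_load && is_true_dep;
    case LATENCY_AT_LEAST_N:
      return (cost - distance) >= n;
    }
  return 0;
}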
18314
f676971a 18315/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18316 skipping any "non-active" insns - insns that will not actually occupy
18317 an issue slot. Return NULL_RTX if such an insn is not found. */
18318
18319static rtx
18320get_next_active_insn (rtx insn, rtx tail)
18321{
f489aff8 18322 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18323 return NULL_RTX;
18324
f489aff8 18325 while (1)
cbe26ab8 18326 {
f489aff8
AM
18327 insn = NEXT_INSN (insn);
18328 if (insn == NULL_RTX || insn == tail)
18329 return NULL_RTX;
cbe26ab8 18330
f489aff8
AM
18331 if (CALL_P (insn)
18332 || JUMP_P (insn)
18333 || (NONJUMP_INSN_P (insn)
18334 && GET_CODE (PATTERN (insn)) != USE
18335 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18336 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18337 break;
18338 }
18339 return insn;
cbe26ab8
DN
18340}
18341
44cd321e
PS
18342/* We are about to begin issuing insns for this clock cycle. */
18343
18344static int
18345rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18346 rtx *ready ATTRIBUTE_UNUSED,
18347 int *pn_ready ATTRIBUTE_UNUSED,
18348 int clock_var ATTRIBUTE_UNUSED)
18349{
d296e02e
AP
18350 int n_ready = *pn_ready;
18351
44cd321e
PS
18352 if (sched_verbose)
18353 fprintf (dump, "// rs6000_sched_reorder :\n");
18354
d296e02e
AP
18355 /* Reorder the ready list if the next insn to issue
18356 is a nonpipelined insn. */
18357 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18358 {
18359 if (is_nonpipeline_insn (ready[n_ready - 1])
18360 && (recog_memoized (ready[n_ready - 2]) > 0))
18361 /* Simply swap first two insns. */
18362 {
18363 rtx tmp = ready[n_ready - 1];
18364 ready[n_ready - 1] = ready[n_ready - 2];
18365 ready[n_ready - 2] = tmp;
18366 }
18367 }
18368
44cd321e
PS
18369 if (rs6000_cpu == PROCESSOR_POWER6)
18370 load_store_pendulum = 0;
18371
18372 return rs6000_issue_rate ();
18373}
18374
18375/* Like rs6000_sched_reorder, but called after issuing each insn. */
18376
18377static int
18378rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18379 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18380{
18381 if (sched_verbose)
18382 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18383
18384 /* For Power6, we need to handle some special cases to try and keep the
18385 store queue from overflowing and triggering expensive flushes.
18386
18387 This code monitors how load and store instructions are being issued
18388 and skews the ready list one way or the other to increase the likelihood
18389 that a desired instruction is issued at the proper time.
18390
18391 A couple of things are done. First, we maintain a "load_store_pendulum"
18392 to track the current state of load/store issue.
18393
18394 - If the pendulum is at zero, then no loads or stores have been
18395 issued in the current cycle so we do nothing.
18396
18397 - If the pendulum is 1, then a single load has been issued in this
18398 cycle and we attempt to locate another load in the ready list to
18399 issue with it.
18400
2f8e468b 18401 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18402 issued in this cycle, so we increase the priority of the first load
18403 in the ready list to increase its likelihood of being chosen first
18404 in the next cycle.
18405
18406 - If the pendulum is -1, then a single store has been issued in this
18407 cycle and we attempt to locate another store in the ready list to
18408 issue with it, preferring a store to an adjacent memory location to
18409 facilitate store pairing in the store queue.
18410
18411 - If the pendulum is 2, then two loads have already been
18412 issued in this cycle, so we increase the priority of the first store
18413 in the ready list to increase its likelihood of being chosen first
18414 in the next cycle.
18415
18416 - If the pendulum < -2 or > 2, then do nothing.
18417
18418 Note: This code covers the most common scenarios. There exist
18419 non-load/store instructions which make use of the LSU and which
18420 would need to be accounted for to strictly model the behavior
18421 of the machine. Those instructions are currently unaccounted
18422 for to help minimize the compile-time overhead of this code.
18423 */
18424 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18425 {
18426 int pos;
18427 int i;
18428 rtx tmp;
18429
18430 if (is_store_insn (last_scheduled_insn))
18431 /* Issuing a store, swing the load_store_pendulum to the left */
18432 load_store_pendulum--;
18433 else if (is_load_insn (last_scheduled_insn))
18434 /* Issuing a load, swing the load_store_pendulum to the right */
18435 load_store_pendulum++;
18436 else
18437 return cached_can_issue_more;
18438
18439 /* If the pendulum is balanced, or there is only one instruction on
18440 the ready list, then all is well, so return. */
18441 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18442 return cached_can_issue_more;
18443
18444 if (load_store_pendulum == 1)
18445 {
18446 /* A load has been issued in this cycle. Scan the ready list
18447 for another load to issue with it */
18448 pos = *pn_ready-1;
18449
18450 while (pos >= 0)
18451 {
18452 if (is_load_insn (ready[pos]))
18453 {
18454 /* Found a load. Move it to the head of the ready list,
18455 and adjust its priority so that it is more likely to
18456 stay there */
18457 tmp = ready[pos];
18458 for (i=pos; i<*pn_ready-1; i++)
18459 ready[i] = ready[i + 1];
18460 ready[*pn_ready-1] = tmp;
18461 if (INSN_PRIORITY_KNOWN (tmp))
18462 INSN_PRIORITY (tmp)++;
18463 break;
18464 }
18465 pos--;
18466 }
18467 }
18468 else if (load_store_pendulum == -2)
18469 {
18470 /* Two stores have been issued in this cycle. Increase the
18471 priority of the first load in the ready list to favor it for
18472 issuing in the next cycle. */
18473 pos = *pn_ready-1;
18474
18475 while (pos >= 0)
18476 {
18477 if (is_load_insn (ready[pos])
18478 && INSN_PRIORITY_KNOWN (ready[pos]))
18479 {
18480 INSN_PRIORITY (ready[pos])++;
18481
18482 /* Adjust the pendulum to account for the fact that a load
18483 was found and increased in priority. This is to prevent
18484 increasing the priority of multiple loads */
18485 load_store_pendulum--;
18486
18487 break;
18488 }
18489 pos--;
18490 }
18491 }
18492 else if (load_store_pendulum == -1)
18493 {
18494 /* A store has been issued in this cycle. Scan the ready list for
18495 another store to issue with it, preferring a store to an adjacent
18496 memory location */
18497 int first_store_pos = -1;
18498
18499 pos = *pn_ready-1;
18500
18501 while (pos >= 0)
18502 {
18503 if (is_store_insn (ready[pos]))
18504 {
18505 /* Maintain the index of the first store found on the
18506 list */
18507 if (first_store_pos == -1)
18508 first_store_pos = pos;
18509
18510 if (is_store_insn (last_scheduled_insn)
18511 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18512 {
18513 /* Found an adjacent store. Move it to the head of the
18514 ready list, and adjust its priority so that it is
18515 more likely to stay there */
18516 tmp = ready[pos];
18517 for (i=pos; i<*pn_ready-1; i++)
18518 ready[i] = ready[i + 1];
18519 ready[*pn_ready-1] = tmp;
18520 if (INSN_PRIORITY_KNOWN (tmp))
18521 INSN_PRIORITY (tmp)++;
18522 first_store_pos = -1;
18523
18524 break;
18525 }
18526 }
18527 pos--;
18528 }
18529
18530 if (first_store_pos >= 0)
18531 {
18532 /* An adjacent store wasn't found, but a non-adjacent store was,
18533 so move the non-adjacent store to the front of the ready
18534 list, and adjust its priority so that it is more likely to
18535 stay there. */
18536 tmp = ready[first_store_pos];
18537 for (i=first_store_pos; i<*pn_ready-1; i++)
18538 ready[i] = ready[i + 1];
18539 ready[*pn_ready-1] = tmp;
18540 if (INSN_PRIORITY_KNOWN (tmp))
18541 INSN_PRIORITY (tmp)++;
18542 }
18543 }
18544 else if (load_store_pendulum == 2)
18545 {
18546 /* Two loads have been issued in this cycle. Increase the priority
18547 of the first store in the ready list to favor it for issuing in
18548 the next cycle. */
18549 pos = *pn_ready-1;
18550
18551 while (pos >= 0)
18552 {
18553 if (is_store_insn (ready[pos])
18554 && INSN_PRIORITY_KNOWN (ready[pos]))
18555 {
18556 INSN_PRIORITY (ready[pos])++;
18557
18558 /* Adjust the pendulum to account for the fact that a store
18559 was found and increased in priority. This is to prevent
18560 increasing the priority of multiple stores */
18561 load_store_pendulum++;
18562
18563 break;
18564 }
18565 pos--;
18566 }
18567 }
18568 }
18569
18570 return cached_can_issue_more;
18571}
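
/* Editor's note (not part of rs6000.c): the Power6 "pendulum" above is just
   a signed counter of loads (+) and stores (-) issued in the current cycle.
   A minimal plain-C restatement of the bookkeeping and of what each value
   asks the reorder hook to do:

     +1  pair the load with another ready load
     -1  pair the store with another (ideally adjacent) ready store
     +2  raise the priority of the first ready store for the next cycle
     -2  raise the priority of the first ready load for the next cycle
     0, or beyond +/-2: leave the ready list alone.  */

static int
update_pendulum (int pendulum, int issued_store, int issued_load)
{
  if (issued_store)
    pendulum--;         /* swing left: one more store this cycle */
  else if (issued_load)
    pendulum++;         /* swing right: one more load this cycle */
  return pendulum;
}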
18572
839a4992 18573/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18574 of group WHICH_GROUP.
18575
18576 If WHICH_GROUP == current_group, this function will return true if INSN
18577 causes the termination of the current group (i.e., the dispatch group to
18578 which INSN belongs). This means that INSN will be the last insn in the
18579 group it belongs to.
18580
18581 If WHICH_GROUP == previous_group, this function will return true if INSN
18582 causes the termination of the previous group (i.e., the dispatch group that
18583 precedes the group to which INSN belongs). This means that INSN will be
18584 the first insn in the group it belongs to. */
18585
18586static bool
18587insn_terminates_group_p (rtx insn, enum group_termination which_group)
18588{
44cd321e 18589 bool first, last;
cbe26ab8
DN
18590
18591 if (! insn)
18592 return false;
569fa502 18593
44cd321e
PS
18594 first = insn_must_be_first_in_group (insn);
18595 last = insn_must_be_last_in_group (insn);
cbe26ab8 18596
44cd321e 18597 if (first && last)
cbe26ab8
DN
18598 return true;
18599
18600 if (which_group == current_group)
44cd321e 18601 return last;
cbe26ab8 18602 else if (which_group == previous_group)
44cd321e
PS
18603 return first;
18604
18605 return false;
18606}
18607
18608
18609static bool
18610insn_must_be_first_in_group (rtx insn)
18611{
18612 enum attr_type type;
18613
18614 if (!insn
18615 || insn == NULL_RTX
18616 || GET_CODE (insn) == NOTE
18617 || GET_CODE (PATTERN (insn)) == USE
18618 || GET_CODE (PATTERN (insn)) == CLOBBER)
18619 return false;
18620
18621 switch (rs6000_cpu)
cbe26ab8 18622 {
44cd321e
PS
18623 case PROCESSOR_POWER5:
18624 if (is_cracked_insn (insn))
18625 return true;
18626 case PROCESSOR_POWER4:
18627 if (is_microcoded_insn (insn))
18628 return true;
18629
18630 if (!rs6000_sched_groups)
18631 return false;
18632
18633 type = get_attr_type (insn);
18634
18635 switch (type)
18636 {
18637 case TYPE_MFCR:
18638 case TYPE_MFCRF:
18639 case TYPE_MTCR:
18640 case TYPE_DELAYED_CR:
18641 case TYPE_CR_LOGICAL:
18642 case TYPE_MTJMPR:
18643 case TYPE_MFJMPR:
18644 case TYPE_IDIV:
18645 case TYPE_LDIV:
18646 case TYPE_LOAD_L:
18647 case TYPE_STORE_C:
18648 case TYPE_ISYNC:
18649 case TYPE_SYNC:
18650 return true;
18651 default:
18652 break;
18653 }
18654 break;
18655 case PROCESSOR_POWER6:
18656 type = get_attr_type (insn);
18657
18658 switch (type)
18659 {
18660 case TYPE_INSERT_DWORD:
18661 case TYPE_EXTS:
18662 case TYPE_CNTLZ:
18663 case TYPE_SHIFT:
18664 case TYPE_VAR_SHIFT_ROTATE:
18665 case TYPE_TRAP:
18666 case TYPE_IMUL:
18667 case TYPE_IMUL2:
18668 case TYPE_IMUL3:
18669 case TYPE_LMUL:
18670 case TYPE_IDIV:
18671 case TYPE_INSERT_WORD:
18672 case TYPE_DELAYED_COMPARE:
18673 case TYPE_IMUL_COMPARE:
18674 case TYPE_LMUL_COMPARE:
18675 case TYPE_FPCOMPARE:
18676 case TYPE_MFCR:
18677 case TYPE_MTCR:
18678 case TYPE_MFJMPR:
18679 case TYPE_MTJMPR:
18680 case TYPE_ISYNC:
18681 case TYPE_SYNC:
18682 case TYPE_LOAD_L:
18683 case TYPE_STORE_C:
18684 case TYPE_LOAD_U:
18685 case TYPE_LOAD_UX:
18686 case TYPE_LOAD_EXT_UX:
18687 case TYPE_STORE_U:
18688 case TYPE_STORE_UX:
18689 case TYPE_FPLOAD_U:
18690 case TYPE_FPLOAD_UX:
18691 case TYPE_FPSTORE_U:
18692 case TYPE_FPSTORE_UX:
18693 return true;
18694 default:
18695 break;
18696 }
18697 break;
18698 default:
18699 break;
18700 }
18701
18702 return false;
18703}
18704
18705static bool
18706insn_must_be_last_in_group (rtx insn)
18707{
18708 enum attr_type type;
18709
18710 if (!insn
18711 || insn == NULL_RTX
18712 || GET_CODE (insn) == NOTE
18713 || GET_CODE (PATTERN (insn)) == USE
18714 || GET_CODE (PATTERN (insn)) == CLOBBER)
18715 return false;
18716
18717 switch (rs6000_cpu) {
18718 case PROCESSOR_POWER4:
18719 case PROCESSOR_POWER5:
18720 if (is_microcoded_insn (insn))
18721 return true;
18722
18723 if (is_branch_slot_insn (insn))
18724 return true;
18725
18726 break;
18727 case PROCESSOR_POWER6:
18728 type = get_attr_type (insn);
18729
18730 switch (type)
18731 {
18732 case TYPE_EXTS:
18733 case TYPE_CNTLZ:
18734 case TYPE_SHIFT:
18735 case TYPE_VAR_SHIFT_ROTATE:
18736 case TYPE_TRAP:
18737 case TYPE_IMUL:
18738 case TYPE_IMUL2:
18739 case TYPE_IMUL3:
18740 case TYPE_LMUL:
18741 case TYPE_IDIV:
18742 case TYPE_DELAYED_COMPARE:
18743 case TYPE_IMUL_COMPARE:
18744 case TYPE_LMUL_COMPARE:
18745 case TYPE_FPCOMPARE:
18746 case TYPE_MFCR:
18747 case TYPE_MTCR:
18748 case TYPE_MFJMPR:
18749 case TYPE_MTJMPR:
18750 case TYPE_ISYNC:
18751 case TYPE_SYNC:
18752 case TYPE_LOAD_L:
18753 case TYPE_STORE_C:
18754 return true;
18755 default:
18756 break;
cbe26ab8 18757 }
44cd321e
PS
18758 break;
18759 default:
18760 break;
18761 }
cbe26ab8
DN
18762
18763 return false;
18764}
18765
839a4992 18766/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18767 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18768
18769static bool
18770is_costly_group (rtx *group_insns, rtx next_insn)
18771{
18772 int i;
cbe26ab8
DN
18773 int issue_rate = rs6000_issue_rate ();
18774
18775 for (i = 0; i < issue_rate; i++)
18776 {
e2f6ff94
MK
18777 sd_iterator_def sd_it;
18778 dep_t dep;
cbe26ab8 18779 rtx insn = group_insns[i];
b198261f 18780
cbe26ab8 18781 if (!insn)
c4ad648e 18782 continue;
b198261f 18783
e2f6ff94 18784 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 18785 {
b198261f
MK
18786 rtx next = DEP_CON (dep);
18787
18788 if (next == next_insn
18789 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
18790 return true;
c4ad648e 18791 }
cbe26ab8
DN
18792 }
18793
18794 return false;
18795}
18796
f676971a 18797/* Utility of the function redefine_groups.
cbe26ab8
DN
18798 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
18799 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
18800 to keep it "far" (in a separate group) from GROUP_INSNS, following
18801 one of the following schemes, depending on the value of the flag
18802 -minsert-sched-nops = X:
18803 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 18804 in order to force NEXT_INSN into a separate group.
f676971a
EC
18805 (2) X < sched_finish_regroup_exact: insert exactly X nops.
18806 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
18807 insertion (has a group just ended, how many vacant issue slots remain in the
18808 last group, and how many dispatch groups were encountered so far). */
18809
f676971a 18810static int
c4ad648e
AM
18811force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
18812 rtx next_insn, bool *group_end, int can_issue_more,
18813 int *group_count)
cbe26ab8
DN
18814{
18815 rtx nop;
18816 bool force;
18817 int issue_rate = rs6000_issue_rate ();
18818 bool end = *group_end;
18819 int i;
18820
18821 if (next_insn == NULL_RTX)
18822 return can_issue_more;
18823
18824 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
18825 return can_issue_more;
18826
18827 force = is_costly_group (group_insns, next_insn);
18828 if (!force)
18829 return can_issue_more;
18830
18831 if (sched_verbose > 6)
18832 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 18833 *group_count ,can_issue_more);
cbe26ab8
DN
18834
18835 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
18836 {
18837 if (*group_end)
c4ad648e 18838 can_issue_more = 0;
cbe26ab8
DN
18839
18840 /* Since only a branch can be issued in the last issue_slot, it is
18841 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
18842 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
18843 in this case the last nop will start a new group and the branch
18844 will be forced to the new group. */
cbe26ab8 18845 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 18846 can_issue_more--;
cbe26ab8
DN
18847
18848 while (can_issue_more > 0)
c4ad648e 18849 {
9390387d 18850 nop = gen_nop ();
c4ad648e
AM
18851 emit_insn_before (nop, next_insn);
18852 can_issue_more--;
18853 }
cbe26ab8
DN
18854
18855 *group_end = true;
18856 return 0;
f676971a 18857 }
cbe26ab8
DN
18858
18859 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
18860 {
18861 int n_nops = rs6000_sched_insert_nops;
18862
f676971a 18863 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 18864 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 18865 if (can_issue_more == 0)
c4ad648e 18866 can_issue_more = issue_rate;
cbe26ab8
DN
18867 can_issue_more--;
18868 if (can_issue_more == 0)
c4ad648e
AM
18869 {
18870 can_issue_more = issue_rate - 1;
18871 (*group_count)++;
18872 end = true;
18873 for (i = 0; i < issue_rate; i++)
18874 {
18875 group_insns[i] = 0;
18876 }
18877 }
cbe26ab8
DN
18878
18879 while (n_nops > 0)
c4ad648e
AM
18880 {
18881 nop = gen_nop ();
18882 emit_insn_before (nop, next_insn);
18883 if (can_issue_more == issue_rate - 1) /* new group begins */
18884 end = false;
18885 can_issue_more--;
18886 if (can_issue_more == 0)
18887 {
18888 can_issue_more = issue_rate - 1;
18889 (*group_count)++;
18890 end = true;
18891 for (i = 0; i < issue_rate; i++)
18892 {
18893 group_insns[i] = 0;
18894 }
18895 }
18896 n_nops--;
18897 }
cbe26ab8
DN
18898
18899 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 18900 can_issue_more++;
cbe26ab8 18901
c4ad648e
AM
18902 /* Is next_insn going to start a new group? */
18903 *group_end
18904 = (end
cbe26ab8
DN
18905 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18906 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18907 || (can_issue_more < issue_rate &&
c4ad648e 18908 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18909 if (*group_end && end)
c4ad648e 18910 (*group_count)--;
cbe26ab8
DN
18911
18912 if (sched_verbose > 6)
c4ad648e
AM
18913 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
18914 *group_count, can_issue_more);
f676971a
EC
18915 return can_issue_more;
18916 }
cbe26ab8
DN
18917
18918 return can_issue_more;
18919}
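
/* Editor's sketch (not part of rs6000.c): how many nops force_new_group
   emits for the two -minsert-sched-nops schemes, reduced to plain C.
   The "exact" scheme pads out the current group; a numeric argument N
   always inserts exactly N nops.  */

static int
nops_to_insert (int exact_scheme, int n, int can_issue_more,
                int next_is_branch)
{
  if (exact_scheme)
    {
      if (can_issue_more == 0)
        return 0;
      /* Only a branch may occupy the last issue slot, so a non-branch
         needs one nop fewer to be pushed into a fresh group.  */
      return next_is_branch ? can_issue_more : can_issue_more - 1;
    }
  return n;     /* fixed number of nops, independent of the slot state */
}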
18920
18921/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 18922 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
18923 form in practice. It tries to achieve this synchronization by forcing the
18924 estimated processor grouping on the compiler (as opposed to the function
18925 'pad_groups' which tries to force the scheduler's grouping on the processor).
18926
18927 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
18928 examines the (estimated) dispatch groups that will be formed by the processor
18929 dispatcher. It marks these group boundaries to reflect the estimated
18930 processor grouping, overriding the grouping that the scheduler had marked.
18931 Depending on the value of the flag '-minsert-sched-nops' this function can
18932 force certain insns into separate groups or force a certain distance between
18933 them by inserting nops, for example, if there exists a "costly dependence"
18934 between the insns.
18935
18936 The function estimates the group boundaries that the processor will form as
0fa2e4df 18937 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
18938 each insn. A subsequent insn will start a new group if one of the following
18939 4 cases applies:
18940 - no more vacant issue slots remain in the current dispatch group.
18941 - only the last issue slot, which is the branch slot, is vacant, but the next
18942 insn is not a branch.
18943 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
18944 which means that a cracked insn (which occupies two issue slots) can't be
18945 issued in this group.
f676971a 18946 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
18947 start a new group. */
18948
18949static int
18950redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18951{
18952 rtx insn, next_insn;
18953 int issue_rate;
18954 int can_issue_more;
18955 int slot, i;
18956 bool group_end;
18957 int group_count = 0;
18958 rtx *group_insns;
18959
18960 /* Initialize. */
18961 issue_rate = rs6000_issue_rate ();
18962 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 18963 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
18964 {
18965 group_insns[i] = 0;
18966 }
18967 can_issue_more = issue_rate;
18968 slot = 0;
18969 insn = get_next_active_insn (prev_head_insn, tail);
18970 group_end = false;
18971
18972 while (insn != NULL_RTX)
18973 {
18974 slot = (issue_rate - can_issue_more);
18975 group_insns[slot] = insn;
18976 can_issue_more =
c4ad648e 18977 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 18978 if (insn_terminates_group_p (insn, current_group))
c4ad648e 18979 can_issue_more = 0;
cbe26ab8
DN
18980
18981 next_insn = get_next_active_insn (insn, tail);
18982 if (next_insn == NULL_RTX)
c4ad648e 18983 return group_count + 1;
cbe26ab8 18984
c4ad648e
AM
18985 /* Is next_insn going to start a new group? */
18986 group_end
18987 = (can_issue_more == 0
18988 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18989 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18990 || (can_issue_more < issue_rate &&
18991 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18992
f676971a 18993 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
18994 next_insn, &group_end, can_issue_more,
18995 &group_count);
cbe26ab8
DN
18996
18997 if (group_end)
c4ad648e
AM
18998 {
18999 group_count++;
19000 can_issue_more = 0;
19001 for (i = 0; i < issue_rate; i++)
19002 {
19003 group_insns[i] = 0;
19004 }
19005 }
cbe26ab8
DN
19006
19007 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19008 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19009 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19010 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19011
19012 insn = next_insn;
19013 if (can_issue_more == 0)
c4ad648e
AM
19014 can_issue_more = issue_rate;
19015 } /* while */
cbe26ab8
DN
19016
19017 return group_count;
19018}
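
/* Editor's sketch (not part of rs6000.c): the four "start a new dispatch
   group" cases listed in the comment above, written out as one predicate
   over plain ints.  */

static int
starts_new_group (int vacant_slots, int issue_rate,
                  int next_is_branch, int next_is_cracked,
                  int next_must_be_first)
{
  return vacant_slots == 0                          /* group already full         */
         || (vacant_slots == 1 && !next_is_branch)  /* only the branch slot left  */
         || (vacant_slots <= 2 && next_is_cracked)  /* cracked insn needs 2 slots */
         || (vacant_slots < issue_rate && next_must_be_first);
}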
19019
19020/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19021 dispatch group boundaries that the scheduler had marked. Pad with nops
19022 any dispatch groups which have vacant issue slots, in order to force the
19023 scheduler's grouping on the processor dispatcher. The function
19024 returns the number of dispatch groups found. */
19025
19026static int
19027pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19028{
19029 rtx insn, next_insn;
19030 rtx nop;
19031 int issue_rate;
19032 int can_issue_more;
19033 int group_end;
19034 int group_count = 0;
19035
19036 /* Initialize issue_rate. */
19037 issue_rate = rs6000_issue_rate ();
19038 can_issue_more = issue_rate;
19039
19040 insn = get_next_active_insn (prev_head_insn, tail);
19041 next_insn = get_next_active_insn (insn, tail);
19042
19043 while (insn != NULL_RTX)
19044 {
19045 can_issue_more =
19046 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19047
19048 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19049
19050 if (next_insn == NULL_RTX)
c4ad648e 19051 break;
cbe26ab8
DN
19052
19053 if (group_end)
c4ad648e
AM
19054 {
19055 /* If the scheduler had marked group termination at this location
19056 (between insn and next_insn), and neither insn nor next_insn will
19057 force group termination, pad the group with nops to force group
19058 termination. */
19059 if (can_issue_more
19060 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19061 && !insn_terminates_group_p (insn, current_group)
19062 && !insn_terminates_group_p (next_insn, previous_group))
19063 {
9390387d 19064 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19065 can_issue_more--;
19066
19067 while (can_issue_more)
19068 {
19069 nop = gen_nop ();
19070 emit_insn_before (nop, next_insn);
19071 can_issue_more--;
19072 }
19073 }
19074
19075 can_issue_more = issue_rate;
19076 group_count++;
19077 }
cbe26ab8
DN
19078
19079 insn = next_insn;
19080 next_insn = get_next_active_insn (insn, tail);
19081 }
19082
19083 return group_count;
19084}
19085
44cd321e
PS
19086/* We're beginning a new block. Initialize data structures as necessary. */
19087
19088static void
19089rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19090 int sched_verbose ATTRIBUTE_UNUSED,
19091 int max_ready ATTRIBUTE_UNUSED)
982afe02 19092{
44cd321e
PS
19093 last_scheduled_insn = NULL_RTX;
19094 load_store_pendulum = 0;
19095}
19096
cbe26ab8
DN
19097/* The following function is called at the end of scheduling BB.
19098 After reload, it inserts nops at insn group bundling. */
19099
19100static void
38f391a5 19101rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19102{
19103 int n_groups;
19104
19105 if (sched_verbose)
19106 fprintf (dump, "=== Finishing schedule.\n");
19107
ec507f2d 19108 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19109 {
19110 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19111 return;
cbe26ab8
DN
19112
19113 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19114 n_groups = pad_groups (dump, sched_verbose,
19115 current_sched_info->prev_head,
19116 current_sched_info->next_tail);
cbe26ab8 19117 else
c4ad648e
AM
19118 n_groups = redefine_groups (dump, sched_verbose,
19119 current_sched_info->prev_head,
19120 current_sched_info->next_tail);
cbe26ab8
DN
19121
19122 if (sched_verbose >= 6)
19123 {
19124 fprintf (dump, "ngroups = %d\n", n_groups);
19125 print_rtl (dump, current_sched_info->prev_head);
19126 fprintf (dump, "Done finish_sched\n");
19127 }
19128 }
19129}
b6c9286a 19130\f
b6c9286a
MM
19131/* Length in units of the trampoline for entering a nested function. */
19132
19133int
863d938c 19134rs6000_trampoline_size (void)
b6c9286a
MM
19135{
19136 int ret = 0;
19137
19138 switch (DEFAULT_ABI)
19139 {
19140 default:
37409796 19141 gcc_unreachable ();
b6c9286a
MM
19142
19143 case ABI_AIX:
8f802bfb 19144 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19145 break;
19146
4dabc42d 19147 case ABI_DARWIN:
b6c9286a 19148 case ABI_V4:
03a7e1a5 19149 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19150 break;
b6c9286a
MM
19151 }
19152
19153 return ret;
19154}
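
/* Editor's illustration (not part of rs6000.c): trampolines only arise for
   GNU C nested functions whose address escapes.  Taking the address below
   makes the compiler build rs6000_trampoline_size () bytes of trampoline on
   the caller's stack at run time.  */

static int
apply_twice (int (*fn) (int), int x)
{
  return fn (fn (x));
}

static int
outer (int base)
{
  int add_base (int v) { return v + base; }     /* nested function (GNU C)   */
  return apply_twice (add_base, 1);             /* address taken: trampoline */
}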
19155
19156/* Emit RTL insns to initialize the variable parts of a trampoline.
19157 FNADDR is an RTX for the address of the function's pure code.
19158 CXT is an RTX for the static chain value for the function. */
19159
19160void
a2369ed3 19161rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19162{
8bd04c56 19163 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19164 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19165
19166 switch (DEFAULT_ABI)
19167 {
19168 default:
37409796 19169 gcc_unreachable ();
b6c9286a 19170
8bd04c56 19171/* Macros to shorten the code expansions below. */
9613eaff 19172#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19173#define MEM_PLUS(addr,offset) \
9613eaff 19174 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19175
b6c9286a
MM
19176 /* Under AIX, just build the 3-word function descriptor. */
19177 case ABI_AIX:
8bd04c56 19178 {
9613eaff
SH
19179 rtx fn_reg = gen_reg_rtx (Pmode);
19180 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19181 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19182 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19183 emit_move_insn (MEM_DEREF (addr), fn_reg);
19184 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19185 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19186 }
b6c9286a
MM
19187 break;
19188
4dabc42d
TC
19189 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19190 case ABI_DARWIN:
b6c9286a 19191 case ABI_V4:
9613eaff 19192 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19193 FALSE, VOIDmode, 4,
9613eaff 19194 addr, Pmode,
eaf1bcf1 19195 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19196 fnaddr, Pmode,
19197 ctx_reg, Pmode);
b6c9286a 19198 break;
b6c9286a
MM
19199 }
19200
19201 return;
19202}
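
/* Editor's sketch (not part of rs6000.c): the 3-word AIX function
   descriptor that the ABI_AIX case above copies and patches, one
   pointer-sized word each (regsize is 4 for -m32, 8 for -m64).  */

struct aix_function_descriptor
{
  void *entry_point;    /* code address, taken from the target's descriptor */
  void *toc_value;      /* TOC (r2) value for the callee                    */
  void *static_chain;   /* environment pointer for the nested function      */
};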
7509c759
MM
19203
19204\f
91d231cb 19205/* Table of valid machine attributes. */
a4f6c312 19206
91d231cb 19207const struct attribute_spec rs6000_attribute_table[] =
7509c759 19208{
91d231cb 19209 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19210 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19211 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19212 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19213 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19214 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19215#ifdef SUBTARGET_ATTRIBUTE_TABLE
19216 SUBTARGET_ATTRIBUTE_TABLE,
19217#endif
a5c76ee6 19218 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19219};
7509c759 19220
8bb418a3
ZL
19221/* Handle the "altivec" attribute. The attribute may have
19222 arguments as follows:
f676971a 19223
8bb418a3
ZL
19224 __attribute__((altivec(vector__)))
19225 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19226 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19227
19228 and may appear more than once (e.g., 'vector bool char') in a
19229 given declaration. */
19230
19231static tree
f90ac3f0
UP
19232rs6000_handle_altivec_attribute (tree *node,
19233 tree name ATTRIBUTE_UNUSED,
19234 tree args,
8bb418a3
ZL
19235 int flags ATTRIBUTE_UNUSED,
19236 bool *no_add_attrs)
19237{
19238 tree type = *node, result = NULL_TREE;
19239 enum machine_mode mode;
19240 int unsigned_p;
19241 char altivec_type
19242 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19243 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19244 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19245 : '?');
8bb418a3
ZL
19246
19247 while (POINTER_TYPE_P (type)
19248 || TREE_CODE (type) == FUNCTION_TYPE
19249 || TREE_CODE (type) == METHOD_TYPE
19250 || TREE_CODE (type) == ARRAY_TYPE)
19251 type = TREE_TYPE (type);
19252
19253 mode = TYPE_MODE (type);
19254
f90ac3f0
UP
19255 /* Check for invalid AltiVec type qualifiers. */
19256 if (type == long_unsigned_type_node || type == long_integer_type_node)
19257 {
19258 if (TARGET_64BIT)
19259 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19260 else if (rs6000_warn_altivec_long)
d4ee4d25 19261 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19262 }
19263 else if (type == long_long_unsigned_type_node
19264 || type == long_long_integer_type_node)
19265 error ("use of %<long long%> in AltiVec types is invalid");
19266 else if (type == double_type_node)
19267 error ("use of %<double%> in AltiVec types is invalid");
19268 else if (type == long_double_type_node)
19269 error ("use of %<long double%> in AltiVec types is invalid");
19270 else if (type == boolean_type_node)
19271 error ("use of boolean types in AltiVec types is invalid");
19272 else if (TREE_CODE (type) == COMPLEX_TYPE)
19273 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19274 else if (DECIMAL_FLOAT_MODE_P (mode))
19275 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19276
19277 switch (altivec_type)
19278 {
19279 case 'v':
8df83eae 19280 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19281 switch (mode)
19282 {
c4ad648e
AM
19283 case SImode:
19284 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19285 break;
19286 case HImode:
19287 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19288 break;
19289 case QImode:
19290 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19291 break;
19292 case SFmode: result = V4SF_type_node; break;
19293 /* If the user says 'vector int bool', we may be handed the 'bool'
19294 attribute _before_ the 'vector' attribute, and so select the
19295 proper type in the 'b' case below. */
19296 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19297 result = type;
19298 default: break;
8bb418a3
ZL
19299 }
19300 break;
19301 case 'b':
19302 switch (mode)
19303 {
c4ad648e
AM
19304 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19305 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19306 case QImode: case V16QImode: result = bool_V16QI_type_node;
19307 default: break;
8bb418a3
ZL
19308 }
19309 break;
19310 case 'p':
19311 switch (mode)
19312 {
c4ad648e
AM
19313 case V8HImode: result = pixel_V8HI_type_node;
19314 default: break;
8bb418a3
ZL
19315 }
19316 default: break;
19317 }
19318
7958a2a6
FJ
19319 if (result && result != type && TYPE_READONLY (type))
19320 result = build_qualified_type (result, TYPE_QUAL_CONST);
19321
8bb418a3
ZL
19322 *no_add_attrs = true; /* No need to hang on to the attribute. */
19323
f90ac3f0 19324 if (result)
8bb418a3
ZL
19325 *node = reconstruct_complex_type (*node, result);
19326
19327 return NULL_TREE;
19328}
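
/* Editor's illustration (not part of rs6000.c; typedef names are made up):
   what the AltiVec keyword forms boil down to.  With -maltivec the front
   end wraps the element type in the "altivec" attribute, and the handler
   above rewrites it into the corresponding 16-byte vector type.  */

typedef int           __attribute__ ((altivec (vector__))) vec_int4;    /* ~ vector signed int    */
typedef unsigned char __attribute__ ((altivec (vector__))) vec_uchar16; /* ~ vector unsigned char */

static int
altivec_vectors_are_16_bytes (void)
{
  return sizeof (vec_int4) == 16 && sizeof (vec_uchar16) == 16;
}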
19329
f18eca82
ZL
19330/* AltiVec defines four built-in scalar types that serve as vector
19331 elements; we must teach the compiler how to mangle them. */
19332
19333static const char *
3101faab 19334rs6000_mangle_type (const_tree type)
f18eca82 19335{
608063c3
JB
19336 type = TYPE_MAIN_VARIANT (type);
19337
19338 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19339 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19340 return NULL;
19341
f18eca82
ZL
19342 if (type == bool_char_type_node) return "U6__boolc";
19343 if (type == bool_short_type_node) return "U6__bools";
19344 if (type == pixel_type_node) return "u7__pixel";
19345 if (type == bool_int_type_node) return "U6__booli";
19346
337bde91
DE
19347 /* Mangle IBM extended float long double as `g' (__float128) on
19348 powerpc*-linux where long-double-64 previously was the default. */
19349 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19350 && TARGET_ELF
19351 && TARGET_LONG_DOUBLE_128
19352 && !TARGET_IEEEQUAD)
19353 return "g";
19354
f18eca82
ZL
19355 /* For all other types, use normal C++ mangling. */
19356 return NULL;
19357}
19358
a5c76ee6
ZW
19359/* Handle a "longcall" or "shortcall" attribute; arguments as in
19360 struct attribute_spec.handler. */
a4f6c312 19361
91d231cb 19362static tree
f676971a
EC
19363rs6000_handle_longcall_attribute (tree *node, tree name,
19364 tree args ATTRIBUTE_UNUSED,
19365 int flags ATTRIBUTE_UNUSED,
a2369ed3 19366 bool *no_add_attrs)
91d231cb
JM
19367{
19368 if (TREE_CODE (*node) != FUNCTION_TYPE
19369 && TREE_CODE (*node) != FIELD_DECL
19370 && TREE_CODE (*node) != TYPE_DECL)
19371 {
5c498b10 19372 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19373 IDENTIFIER_POINTER (name));
19374 *no_add_attrs = true;
19375 }
6a4cee5f 19376
91d231cb 19377 return NULL_TREE;
7509c759
MM
19378}
19379
a5c76ee6
ZW
19380/* Set longcall attributes on all functions declared when
19381 rs6000_default_long_calls is true. */
19382static void
a2369ed3 19383rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19384{
19385 if (rs6000_default_long_calls
19386 && (TREE_CODE (type) == FUNCTION_TYPE
19387 || TREE_CODE (type) == METHOD_TYPE))
19388 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19389 NULL_TREE,
19390 TYPE_ATTRIBUTES (type));
16d6f994
EC
19391
19392#if TARGET_MACHO
19393 darwin_set_default_type_attributes (type);
19394#endif
a5c76ee6
ZW
19395}
19396
3cb999d8
DE
19397/* Return a reference suitable for calling a function with the
19398 longcall attribute. */
a4f6c312 19399
9390387d 19400rtx
a2369ed3 19401rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19402{
d330fd93 19403 const char *call_name;
6a4cee5f
MM
19404 tree node;
19405
19406 if (GET_CODE (call_ref) != SYMBOL_REF)
19407 return call_ref;
19408
19409 /* System V adds '.' to the internal name, so skip them. */
19410 call_name = XSTR (call_ref, 0);
19411 if (*call_name == '.')
19412 {
19413 while (*call_name == '.')
19414 call_name++;
19415
19416 node = get_identifier (call_name);
39403d82 19417 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19418 }
19419
19420 return force_reg (Pmode, call_ref);
19421}
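
/* Editor's illustration (not part of rs6000.c): what the attribute handled
   in this file looks like at the source level.  A longcall callee is
   reached through an indirect call via CTR instead of a direct `bl', and
   -mlongcall (rs6000_default_long_calls) applies this to every call.  */

extern void far_away_function (void) __attribute__ ((longcall));

void
call_far (void)
{
  far_away_function ();   /* call goes through the register returned by
                             rs6000_longcall_ref */
}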
7509c759 19422\f
77ccdfed
EC
19423#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19424#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19425#endif
19426
19427/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19428 struct attribute_spec.handler. */
19429static tree
19430rs6000_handle_struct_attribute (tree *node, tree name,
19431 tree args ATTRIBUTE_UNUSED,
19432 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19433{
19434 tree *type = NULL;
19435 if (DECL_P (*node))
19436 {
19437 if (TREE_CODE (*node) == TYPE_DECL)
19438 type = &TREE_TYPE (*node);
19439 }
19440 else
19441 type = node;
19442
19443 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19444 || TREE_CODE (*type) == UNION_TYPE)))
19445 {
19446 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19447 *no_add_attrs = true;
19448 }
19449
19450 else if ((is_attribute_p ("ms_struct", name)
19451 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19452 || ((is_attribute_p ("gcc_struct", name)
19453 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19454 {
19455 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19456 IDENTIFIER_POINTER (name));
19457 *no_add_attrs = true;
19458 }
19459
19460 return NULL_TREE;
19461}
19462
19463static bool
3101faab 19464rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19465{
19466 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19467 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19468 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19469}
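
/* Editor's illustration (not part of rs6000.c): per-structure selection of
   the record layout that rs6000_ms_bitfield_layout_p reports.  */

struct __attribute__ ((ms_struct)) ms_layout
{
  char c;
  int  bits : 3;        /* laid out with the Microsoft bitfield rules */
};

struct __attribute__ ((gcc_struct)) gcc_layout
{
  char c;
  int  bits : 3;        /* laid out with the default GCC rules */
};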
19470\f
b64a1b53
RH
19471#ifdef USING_ELFOS_H
19472
d6b5193b 19473/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19474
d6b5193b
RS
19475static void
19476rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19477{
19478 if (DEFAULT_ABI == ABI_AIX
19479 && TARGET_MINIMAL_TOC
19480 && !TARGET_RELOCATABLE)
19481 {
19482 if (!toc_initialized)
19483 {
19484 toc_initialized = 1;
19485 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19486 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19487 fprintf (asm_out_file, "\t.tc ");
19488 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19489 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19490 fprintf (asm_out_file, "\n");
19491
19492 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19493 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19494 fprintf (asm_out_file, " = .+32768\n");
19495 }
19496 else
19497 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19498 }
19499 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19500 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19501 else
19502 {
19503 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19504 if (!toc_initialized)
19505 {
19506 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19507 fprintf (asm_out_file, " = .+32768\n");
19508 toc_initialized = 1;
19509 }
19510 }
19511}
19512
19513/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19514
b64a1b53 19515static void
d6b5193b
RS
19516rs6000_elf_asm_init_sections (void)
19517{
19518 toc_section
19519 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19520
19521 sdata2_section
19522 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19523 SDATA2_SECTION_ASM_OP);
19524}
19525
19526/* Implement TARGET_SELECT_RTX_SECTION. */
19527
19528static section *
f676971a 19529rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19530 unsigned HOST_WIDE_INT align)
7509c759 19531{
a9098fd0 19532 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19533 return toc_section;
7509c759 19534 else
d6b5193b 19535 return default_elf_select_rtx_section (mode, x, align);
7509c759 19536}
d9407988 19537\f
d1908feb
JJ
19538/* For a SYMBOL_REF, set generic flags and then perform some
19539 target-specific processing.
19540
d1908feb
JJ
19541 When the AIX ABI is requested on a non-AIX system, replace the
19542 function name with the real name (with a leading .) rather than the
19543 function descriptor name. This saves a lot of overriding code to
19544 read the prefixes. */
d9407988 19545
fb49053f 19546static void
a2369ed3 19547rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19548{
d1908feb 19549 default_encode_section_info (decl, rtl, first);
b2003250 19550
d1908feb
JJ
19551 if (first
19552 && TREE_CODE (decl) == FUNCTION_DECL
19553 && !TARGET_AIX
19554 && DEFAULT_ABI == ABI_AIX)
d9407988 19555 {
c6a2438a 19556 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19557 size_t len = strlen (XSTR (sym_ref, 0));
19558 char *str = alloca (len + 2);
19559 str[0] = '.';
19560 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19561 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19562 }
d9407988
MM
19563}
19564
21d9bb3f
PB
19565static inline bool
19566compare_section_name (const char *section, const char *template)
19567{
19568 int len;
19569
19570 len = strlen (template);
19571 return (strncmp (section, template, len) == 0
19572 && (section[len] == 0 || section[len] == '.'));
19573}
19574
c1b7d95a 19575bool
3101faab 19576rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19577{
19578 if (rs6000_sdata == SDATA_NONE)
19579 return false;
19580
7482ad25
AF
19581 /* We want to merge strings, so we never consider them small data. */
19582 if (TREE_CODE (decl) == STRING_CST)
19583 return false;
19584
19585 /* Functions are never in the small data area. */
19586 if (TREE_CODE (decl) == FUNCTION_DECL)
19587 return false;
19588
0e5dbd9b
DE
19589 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19590 {
19591 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
19592 if (compare_section_name (section, ".sdata")
19593 || compare_section_name (section, ".sdata2")
19594 || compare_section_name (section, ".gnu.linkonce.s")
19595 || compare_section_name (section, ".sbss")
19596 || compare_section_name (section, ".sbss2")
19597 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
19598 || strcmp (section, ".PPC.EMB.sdata0") == 0
19599 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19600 return true;
19601 }
19602 else
19603 {
19604 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19605
19606 if (size > 0
307b599c 19607 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19608 /* If it's not public, and we're not going to reference it there,
19609 there's no need to put it in the small data section. */
0e5dbd9b
DE
19610 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19611 return true;
19612 }
19613
19614 return false;
19615}
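
/* Editor's illustration (not part of rs6000.c; the flags shown are just an
   example): with a small-data model selected, e.g. -msdata=data -G 8,
   objects no larger than the -G threshold are what
   rs6000_elf_in_small_data_p accepts, so they can be addressed off the
   small-data base register.  */

int small_counter;              /* <= g_switch_value: candidate for .sdata    */
char big_buffer[4096];          /* above the -G limit: stays in .data or .bss */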
19616
b91da81f 19617#endif /* USING_ELFOS_H */
aacd3885
RS
19618\f
19619/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19620
aacd3885 19621static bool
3101faab 19622rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
19623{
19624 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19625}
a6c2a102 19626\f
000034eb 19627/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19628 ADDR can be effectively incremented by incrementing REG.
19629
19630 r0 is special and we must not select it as an address
19631 register by this routine since our caller will try to
19632 increment the returned register via an "la" instruction. */
000034eb 19633
9390387d 19634rtx
a2369ed3 19635find_addr_reg (rtx addr)
000034eb
DE
19636{
19637 while (GET_CODE (addr) == PLUS)
19638 {
02441cd6
JL
19639 if (GET_CODE (XEXP (addr, 0)) == REG
19640 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19641 addr = XEXP (addr, 0);
02441cd6
JL
19642 else if (GET_CODE (XEXP (addr, 1)) == REG
19643 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19644 addr = XEXP (addr, 1);
19645 else if (CONSTANT_P (XEXP (addr, 0)))
19646 addr = XEXP (addr, 1);
19647 else if (CONSTANT_P (XEXP (addr, 1)))
19648 addr = XEXP (addr, 0);
19649 else
37409796 19650 gcc_unreachable ();
000034eb 19651 }
37409796
NS
19652 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19653 return addr;
000034eb
DE
19654}
19655
a6c2a102 19656void
a2369ed3 19657rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19658{
19659 fatal_insn ("bad address", op);
19660}
c8023011 19661
ee890fe2
SS
19662#if TARGET_MACHO
19663
efdba735 19664static tree branch_island_list = 0;
ee890fe2 19665
efdba735
SH
19666/* Remember to generate a branch island for far calls to the given
19667 function. */
ee890fe2 19668
f676971a 19669static void
c4ad648e
AM
19670add_compiler_branch_island (tree label_name, tree function_name,
19671 int line_number)
ee890fe2 19672{
efdba735 19673 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19674 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19675 TREE_CHAIN (branch_island) = branch_island_list;
19676 branch_island_list = branch_island;
ee890fe2
SS
19677}
19678
efdba735
SH
19679#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19680#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19681#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19682 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19683
efdba735
SH
19684/* Generate far-jump branch islands for everything on the
19685 branch_island_list. Invoked immediately after the last instruction
19686 of the epilogue has been emitted; the branch-islands must be
19687 appended to, and contiguous with, the function body. Mach-O stubs
19688 are generated in machopic_output_stub(). */
ee890fe2 19689
efdba735
SH
19690static void
19691macho_branch_islands (void)
19692{
19693 char tmp_buf[512];
19694 tree branch_island;
19695
19696 for (branch_island = branch_island_list;
19697 branch_island;
19698 branch_island = TREE_CHAIN (branch_island))
19699 {
19700 const char *label =
19701 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19702 const char *name =
11abc112 19703 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19704 char name_buf[512];
19705 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19706 if (name[0] == '*' || name[0] == '&')
19707 strcpy (name_buf, name+1);
19708 else
19709 {
19710 name_buf[0] = '_';
19711 strcpy (name_buf+1, name);
19712 }
19713 strcpy (tmp_buf, "\n");
19714 strcat (tmp_buf, label);
ee890fe2 19715#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19716 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19717 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19718#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19719 if (flag_pic)
19720 {
19721 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19722 strcat (tmp_buf, label);
19723 strcat (tmp_buf, "_pic\n");
19724 strcat (tmp_buf, label);
19725 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19726
efdba735
SH
19727 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19728 strcat (tmp_buf, name_buf);
19729 strcat (tmp_buf, " - ");
19730 strcat (tmp_buf, label);
19731 strcat (tmp_buf, "_pic)\n");
f676971a 19732
efdba735 19733 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19734
efdba735
SH
19735 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19736 strcat (tmp_buf, name_buf);
19737 strcat (tmp_buf, " - ");
19738 strcat (tmp_buf, label);
19739 strcat (tmp_buf, "_pic)\n");
f676971a 19740
efdba735
SH
19741 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19742 }
19743 else
19744 {
19745 strcat (tmp_buf, ":\nlis r12,hi16(");
19746 strcat (tmp_buf, name_buf);
19747 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19748 strcat (tmp_buf, name_buf);
19749 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19750 }
19751 output_asm_insn (tmp_buf, 0);
ee890fe2 19752#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19753 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19754 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19755#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19756 }
ee890fe2 19757
efdba735 19758 branch_island_list = 0;
ee890fe2
SS
19759}
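
/* Editor's sketch (not part of rs6000.c): a branch island is only needed
   when the callee lies outside the reach of the PowerPC `bl' instruction,
   whose 24-bit, word-aligned displacement spans roughly +/- 32 MB.  */

static int
bl_can_reach (long long caller_pc, long long callee_pc)
{
  long long delta = callee_pc - caller_pc;
  return delta >= -0x2000000LL && delta <= 0x1FFFFFCLL;
}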
19760
19761/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
19762 already there or not. */
19763
efdba735 19764static int
a2369ed3 19765no_previous_def (tree function_name)
ee890fe2 19766{
efdba735
SH
19767 tree branch_island;
19768 for (branch_island = branch_island_list;
19769 branch_island;
19770 branch_island = TREE_CHAIN (branch_island))
19771 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19772 return 0;
19773 return 1;
19774}
19775
19776/* GET_PREV_LABEL gets the label name from the previous definition of
19777 the function. */
19778
efdba735 19779static tree
a2369ed3 19780get_prev_label (tree function_name)
ee890fe2 19781{
efdba735
SH
19782 tree branch_island;
19783 for (branch_island = branch_island_list;
19784 branch_island;
19785 branch_island = TREE_CHAIN (branch_island))
19786 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19787 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19788 return 0;
19789}
19790
75b1b789
MS
19791#ifndef DARWIN_LINKER_GENERATES_ISLANDS
19792#define DARWIN_LINKER_GENERATES_ISLANDS 0
19793#endif
19794
19795/* KEXTs still need branch islands. */
19796#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
19797 || flag_mkernel || flag_apple_kext)
19798
ee890fe2 19799/* INSN is either a function call or a millicode call. It may have an
f676971a 19800 unconditional jump in its delay slot.
ee890fe2
SS
19801
19802 CALL_DEST is the routine we are calling. */
19803
19804char *
c4ad648e
AM
19805output_call (rtx insn, rtx *operands, int dest_operand_number,
19806 int cookie_operand_number)
ee890fe2
SS
19807{
19808 static char buf[256];
75b1b789
MS
19809 if (DARWIN_GENERATE_ISLANDS
19810 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 19811 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
19812 {
19813 tree labelname;
efdba735 19814 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 19815
ee890fe2
SS
19816 if (no_previous_def (funname))
19817 {
ee890fe2
SS
19818 rtx label_rtx = gen_label_rtx ();
19819 char *label_buf, temp_buf[256];
19820 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
19821 CODE_LABEL_NUMBER (label_rtx));
19822 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
19823 labelname = get_identifier (label_buf);
a38e7aa5 19824 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
19825 }
19826 else
19827 labelname = get_prev_label (funname);
19828
efdba735
SH
19829 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
19830 instruction will reach 'foo', otherwise link as 'bl L42'".
19831 "L42" should be a 'branch island', that will do a far jump to
19832 'foo'. Branch islands are generated in
19833 macho_branch_islands(). */
ee890fe2 19834 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 19835 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
19836 }
19837 else
efdba735
SH
19838 sprintf (buf, "bl %%z%d", dest_operand_number);
19839 return buf;
ee890fe2
SS
19840}
19841
ee890fe2
SS
19842/* Generate PIC and indirect symbol stubs. */
19843
19844void
a2369ed3 19845machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
19846{
19847 unsigned int length;
a4f6c312
SS
19848 char *symbol_name, *lazy_ptr_name;
19849 char *local_label_0;
ee890fe2
SS
19850 static int label = 0;
19851
df56a27f 19852 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 19853 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 19854
ee890fe2 19855
ee890fe2
SS
19856 length = strlen (symb);
19857 symbol_name = alloca (length + 32);
19858 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
19859
19860 lazy_ptr_name = alloca (length + 32);
19861 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
19862
ee890fe2 19863 if (flag_pic == 2)
56c779bc 19864 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 19865 else
56c779bc 19866 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
19867
19868 if (flag_pic == 2)
19869 {
19870 fprintf (file, "\t.align 5\n");
19871
19872 fprintf (file, "%s:\n", stub);
19873 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19874
876455fa 19875 label++;
89da1f32 19876 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 19877 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 19878
19879 fprintf (file, "\tmflr r0\n");
19880 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
19881 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
19882 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
19883 lazy_ptr_name, local_label_0);
19884 fprintf (file, "\tmtlr r0\n");
19885 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
19886 (TARGET_64BIT ? "ldu" : "lwzu"),
19887 lazy_ptr_name, local_label_0);
19888 fprintf (file, "\tmtctr r12\n");
19889 fprintf (file, "\tbctr\n");
19890 }
19891 else
19892 {
19893 fprintf (file, "\t.align 4\n");
19894
19895 fprintf (file, "%s:\n", stub);
19896 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19897
19898 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
19899 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
19900 (TARGET_64BIT ? "ldu" : "lwzu"),
19901 lazy_ptr_name);
19902 fprintf (file, "\tmtctr r12\n");
19903 fprintf (file, "\tbctr\n");
19904 }
f676971a 19905
56c779bc 19906 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
19907 fprintf (file, "%s:\n", lazy_ptr_name);
19908 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19909 fprintf (file, "%sdyld_stub_binding_helper\n",
19910 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
19911}
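/* Illustration (hypothetical names; not part of the original source):
   with flag_pic == 2 on 32-bit Darwin the routine above prints a stub
   of roughly this shape, where "_foo", "L_foo$stub", "L_foo$lazy_ptr"
   and the "$spb" label stand in for the names produced by
   GEN_SYMBOL_NAME_FOR_SYMBOL, GEN_LAZY_PTR_NAME_FOR_SYMBOL and the
   static label counter:

	.align 5
   L_foo$stub:
	.indirect_symbol _foo
	mflr r0
	bcl 20,31,"L00000000001$spb"
   "L00000000001$spb":
	mflr r11
	addis r11,r11,ha16(L_foo$lazy_ptr-"L00000000001$spb")
	mtlr r0
	lwzu r12,lo16(L_foo$lazy_ptr-"L00000000001$spb")(r11)
	mtctr r12
	bctr

   and then, in the lazy symbol pointer section:

   L_foo$lazy_ptr:
	.indirect_symbol _foo
	.long dyld_stub_binding_helper  */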
19912
19913/* Legitimize PIC addresses. If the address is already
19914 position-independent, we return ORIG. Newly generated
19915 position-independent addresses go into a reg. This is REG if non
19916 zero, otherwise we allocate register(s) as necessary. */
19917
4fbbe694 19918#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
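/* Note: SMALL_INT (X) is true exactly when INTVAL (X) fits in a signed
   16-bit immediate, i.e. -0x8000 <= INTVAL (X) <= 0x7fff; adding 0x8000
   in unsigned arithmetic folds both bounds into a single compare.  */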
19919
19920rtx
f676971a 19921rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 19922 rtx reg)
19923{
19924 rtx base, offset;
19925
19926 if (reg == NULL && ! reload_in_progress && ! reload_completed)
19927 reg = gen_reg_rtx (Pmode);
19928
19929 if (GET_CODE (orig) == CONST)
19930 {
37409796
NS
19931 rtx reg_temp;
19932
ee890fe2
SS
19933 if (GET_CODE (XEXP (orig, 0)) == PLUS
19934 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
19935 return orig;
19936
37409796 19937 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 19938
37409796
NS
19939 /* Use a different reg for the intermediate value, as
19940 it will be marked UNCHANGING. */
b3a13419 19941 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
19942 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
19943 Pmode, reg_temp);
19944 offset =
19945 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
19946 Pmode, reg);
bb8df8a6 19947
ee890fe2
SS
19948 if (GET_CODE (offset) == CONST_INT)
19949 {
19950 if (SMALL_INT (offset))
ed8908e7 19951 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
19952 else if (! reload_in_progress && ! reload_completed)
19953 offset = force_reg (Pmode, offset);
19954 else
c859cda6
DJ
19955 {
19956 rtx mem = force_const_mem (Pmode, orig);
19957 return machopic_legitimize_pic_address (mem, Pmode, reg);
19958 }
ee890fe2 19959 }
f1c25d3b 19960 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
19961 }
19962
19963 /* Fall back on generic machopic code. */
19964 return machopic_legitimize_pic_address (orig, mode, reg);
19965}
19966
c4e18b1c
GK
19967/* Output a .machine directive for the Darwin assembler, and call
19968 the generic start_file routine. */
19969
19970static void
19971rs6000_darwin_file_start (void)
19972{
94ff898d 19973 static const struct
c4e18b1c
GK
19974 {
19975 const char *arg;
19976 const char *name;
19977 int if_set;
19978 } mapping[] = {
55dbfb48 19979 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
19980 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
19981 { "power4", "ppc970", 0 },
19982 { "G5", "ppc970", 0 },
19983 { "7450", "ppc7450", 0 },
19984 { "7400", "ppc7400", MASK_ALTIVEC },
19985 { "G4", "ppc7400", 0 },
19986 { "750", "ppc750", 0 },
19987 { "740", "ppc750", 0 },
19988 { "G3", "ppc750", 0 },
19989 { "604e", "ppc604e", 0 },
19990 { "604", "ppc604", 0 },
19991 { "603e", "ppc603", 0 },
19992 { "603", "ppc603", 0 },
19993 { "601", "ppc601", 0 },
19994 { NULL, "ppc", 0 } };
19995 const char *cpu_id = "";
19996 size_t i;
94ff898d 19997
9390387d 19998 rs6000_file_start ();
192d0f89 19999 darwin_file_start ();
c4e18b1c
GK
20000
20001 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20002 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20003 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20004 && rs6000_select[i].string[0] != '\0')
20005 cpu_id = rs6000_select[i].string;
20006
20007 /* Look through the mapping array. Pick the first name that either
20008 matches the argument, has a bit set in IF_SET that is also set
20009 in the target flags, or has a NULL name. */
20010
20011 i = 0;
20012 while (mapping[i].arg != NULL
20013 && strcmp (mapping[i].arg, cpu_id) != 0
20014 && (mapping[i].if_set & target_flags) == 0)
20015 i++;
20016
20017 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20018}
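/* Examples of the mapping above (illustrative): -mcpu=G5 and
   -mcpu=power4 both produce "\t.machine ppc970", and -mcpu=7400
   produces "\t.machine ppc7400".  An entry can also be chosen through
   its IF_SET bit, e.g. the "ppc64" line is picked whenever MASK_64BIT
   is set in target_flags even though no -mcpu string matched.  */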
20019
ee890fe2 20020#endif /* TARGET_MACHO */
7c262518
RH
20021
20022#if TARGET_ELF
9b580a0b
RH
20023static int
20024rs6000_elf_reloc_rw_mask (void)
7c262518 20025{
9b580a0b
RH
20026 if (flag_pic)
20027 return 3;
20028 else if (DEFAULT_ABI == ABI_AIX)
20029 return 2;
20030 else
20031 return 0;
7c262518 20032}
d9f6800d
RH
20033
20034/* Record an element in the table of global constructors. SYMBOL is
20035 a SYMBOL_REF of the function to be called; PRIORITY is a number
20036 between 0 and MAX_INIT_PRIORITY.
20037
20038 This differs from default_named_section_asm_out_constructor in
20039 that we have special handling for -mrelocatable. */
20040
20041static void
a2369ed3 20042rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20043{
20044 const char *section = ".ctors";
20045 char buf[16];
20046
20047 if (priority != DEFAULT_INIT_PRIORITY)
20048 {
20049 sprintf (buf, ".ctors.%.5u",
20050 /* Invert the numbering so the linker puts us in the proper
20051 order; constructors are run from right to left, and the
20052 linker sorts in increasing order. */
20053 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20054 section = buf;
20055 }
20056
d6b5193b 20057 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20058 assemble_align (POINTER_SIZE);
d9f6800d
RH
20059
20060 if (TARGET_RELOCATABLE)
20061 {
20062 fputs ("\t.long (", asm_out_file);
20063 output_addr_const (asm_out_file, symbol);
20064 fputs (")@fixup\n", asm_out_file);
20065 }
20066 else
c8af3574 20067 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20068}
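/* Worked example (illustrative, assuming the usual MAX_INIT_PRIORITY
   of 65535): a constructor with priority 101 is placed in section
   ".ctors.65434".  Because the linker sorts these names in increasing
   order and constructors run from right to left, entries with smaller
   priority values land nearer the end of .ctors and therefore run
   earlier.  */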
20069
20070static void
a2369ed3 20071rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20072{
20073 const char *section = ".dtors";
20074 char buf[16];
20075
20076 if (priority != DEFAULT_INIT_PRIORITY)
20077 {
20078 sprintf (buf, ".dtors.%.5u",
20079 /* Invert the numbering so the linker puts us in the proper
20080 order; constructors are run from right to left, and the
20081 linker sorts in increasing order. */
20082 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20083 section = buf;
20084 }
20085
d6b5193b 20086 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20087 assemble_align (POINTER_SIZE);
d9f6800d
RH
20088
20089 if (TARGET_RELOCATABLE)
20090 {
20091 fputs ("\t.long (", asm_out_file);
20092 output_addr_const (asm_out_file, symbol);
20093 fputs (")@fixup\n", asm_out_file);
20094 }
20095 else
c8af3574 20096 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20097}
9739c90c
JJ
20098
20099void
a2369ed3 20100rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20101{
20102 if (TARGET_64BIT)
20103 {
20104 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20105 ASM_OUTPUT_LABEL (file, name);
20106 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20107 rs6000_output_function_entry (file, name);
20108 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20109 if (DOT_SYMBOLS)
9739c90c 20110 {
85b776df 20111 fputs ("\t.size\t", file);
9739c90c 20112 assemble_name (file, name);
85b776df
AM
20113 fputs (",24\n\t.type\t.", file);
20114 assemble_name (file, name);
20115 fputs (",@function\n", file);
20116 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20117 {
20118 fputs ("\t.globl\t.", file);
20119 assemble_name (file, name);
20120 putc ('\n', file);
20121 }
9739c90c 20122 }
85b776df
AM
20123 else
20124 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20125 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20126 rs6000_output_function_entry (file, name);
20127 fputs (":\n", file);
9739c90c
JJ
20128 return;
20129 }
20130
20131 if (TARGET_RELOCATABLE
7f970b70 20132 && !TARGET_SECURE_PLT
9739c90c 20133 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20134 && uses_TOC ())
9739c90c
JJ
20135 {
20136 char buf[256];
20137
20138 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20139
20140 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20141 fprintf (file, "\t.long ");
20142 assemble_name (file, buf);
20143 putc ('-', file);
20144 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20145 assemble_name (file, buf);
20146 putc ('\n', file);
20147 }
20148
20149 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20150 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20151
20152 if (DEFAULT_ABI == ABI_AIX)
20153 {
20154 const char *desc_name, *orig_name;
20155
20156 orig_name = (*targetm.strip_name_encoding) (name);
20157 desc_name = orig_name;
20158 while (*desc_name == '.')
20159 desc_name++;
20160
20161 if (TREE_PUBLIC (decl))
20162 fprintf (file, "\t.globl %s\n", desc_name);
20163
20164 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20165 fprintf (file, "%s:\n", desc_name);
20166 fprintf (file, "\t.long %s\n", orig_name);
20167 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20168 if (DEFAULT_ABI == ABI_AIX)
20169 fputs ("\t.long 0\n", file);
20170 fprintf (file, "\t.previous\n");
20171 }
20172 ASM_OUTPUT_LABEL (file, name);
20173}
1334b570
AM
20174
20175static void
20176rs6000_elf_end_indicate_exec_stack (void)
20177{
20178 if (TARGET_32BIT)
20179 file_end_indicate_exec_stack ();
20180}
7c262518
RH
20181#endif
20182
cbaaba19 20183#if TARGET_XCOFF
0d5817b2
DE
20184static void
20185rs6000_xcoff_asm_output_anchor (rtx symbol)
20186{
20187 char buffer[100];
20188
20189 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20190 SYMBOL_REF_BLOCK_OFFSET (symbol));
20191 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20192}
20193
7c262518 20194static void
a2369ed3 20195rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20196{
20197 fputs (GLOBAL_ASM_OP, stream);
20198 RS6000_OUTPUT_BASENAME (stream, name);
20199 putc ('\n', stream);
20200}
20201
d6b5193b
RS
20202/* A get_unnamed_decl callback, used for read-only sections. PTR
20203 points to the section string variable. */
20204
20205static void
20206rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20207{
890f9edf
OH
20208 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20209 *(const char *const *) directive,
20210 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20211}
20212
20213/* Likewise for read-write sections. */
20214
20215static void
20216rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20217{
890f9edf
OH
20218 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20219 *(const char *const *) directive,
20220 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20221}
20222
20223/* A get_unnamed_section callback, used for switching to toc_section. */
20224
20225static void
20226rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20227{
20228 if (TARGET_MINIMAL_TOC)
20229 {
20230 /* toc_section is always selected at least once from
20231 rs6000_xcoff_file_start, so this is guaranteed to
20232 always be defined once and only once in each file. */
20233 if (!toc_initialized)
20234 {
20235 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20236 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20237 toc_initialized = 1;
20238 }
20239 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20240 (TARGET_32BIT ? "" : ",3"));
20241 }
20242 else
20243 fputs ("\t.toc\n", asm_out_file);
20244}
20245
20246/* Implement TARGET_ASM_INIT_SECTIONS. */
20247
20248static void
20249rs6000_xcoff_asm_init_sections (void)
20250{
20251 read_only_data_section
20252 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20253 &xcoff_read_only_section_name);
20254
20255 private_data_section
20256 = get_unnamed_section (SECTION_WRITE,
20257 rs6000_xcoff_output_readwrite_section_asm_op,
20258 &xcoff_private_data_section_name);
20259
20260 read_only_private_data_section
20261 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20262 &xcoff_private_data_section_name);
20263
20264 toc_section
20265 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20266
20267 readonly_data_section = read_only_data_section;
20268 exception_section = data_section;
20269}
20270
9b580a0b
RH
20271static int
20272rs6000_xcoff_reloc_rw_mask (void)
20273{
20274 return 3;
20275}
20276
b275d088 20277static void
c18a5b6c
MM
20278rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20279 tree decl ATTRIBUTE_UNUSED)
7c262518 20280{
0e5dbd9b
DE
20281 int smclass;
20282 static const char * const suffix[3] = { "PR", "RO", "RW" };
20283
20284 if (flags & SECTION_CODE)
20285 smclass = 0;
20286 else if (flags & SECTION_WRITE)
20287 smclass = 2;
20288 else
20289 smclass = 1;
20290
5b5198f7 20291 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20292 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20293 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20294}
ae46c4e0 20295
d6b5193b 20296static section *
f676971a 20297rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20298 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20299{
9b580a0b 20300 if (decl_readonly_section (decl, reloc))
ae46c4e0 20301 {
0e5dbd9b 20302 if (TREE_PUBLIC (decl))
d6b5193b 20303 return read_only_data_section;
ae46c4e0 20304 else
d6b5193b 20305 return read_only_private_data_section;
ae46c4e0
RH
20306 }
20307 else
20308 {
0e5dbd9b 20309 if (TREE_PUBLIC (decl))
d6b5193b 20310 return data_section;
ae46c4e0 20311 else
d6b5193b 20312 return private_data_section;
ae46c4e0
RH
20313 }
20314}
20315
20316static void
a2369ed3 20317rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20318{
20319 const char *name;
ae46c4e0 20320
5b5198f7
DE
20321 /* Use select_section for private and uninitialized data. */
20322 if (!TREE_PUBLIC (decl)
20323 || DECL_COMMON (decl)
0e5dbd9b
DE
20324 || DECL_INITIAL (decl) == NULL_TREE
20325 || DECL_INITIAL (decl) == error_mark_node
20326 || (flag_zero_initialized_in_bss
20327 && initializer_zerop (DECL_INITIAL (decl))))
20328 return;
20329
20330 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20331 name = (*targetm.strip_name_encoding) (name);
20332 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20333}
b64a1b53 20334
fb49053f
RH
20335/* Select section for constant in constant pool.
20336
20337 On RS/6000, all constants are in the private read-only data area.
20338 However, if this is being placed in the TOC it must be output as a
20339 toc entry. */
20340
d6b5193b 20341static section *
f676971a 20342rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20343 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20344{
20345 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20346 return toc_section;
b64a1b53 20347 else
d6b5193b 20348 return read_only_private_data_section;
b64a1b53 20349}
772c5265
RH
20350
20351/* Remove any trailing [DS] or the like from the symbol name. */
20352
20353static const char *
a2369ed3 20354rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20355{
20356 size_t len;
20357 if (*name == '*')
20358 name++;
20359 len = strlen (name);
20360 if (name[len - 1] == ']')
20361 return ggc_alloc_string (name, len - 4);
20362 else
20363 return name;
20364}
20365
5add3202
DE
20366/* Section attributes. AIX is always PIC. */
20367
20368static unsigned int
a2369ed3 20369rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20370{
5b5198f7 20371 unsigned int align;
9b580a0b 20372 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20373
20374 /* Align to at least UNIT size. */
20375 if (flags & SECTION_CODE)
20376 align = MIN_UNITS_PER_WORD;
20377 else
20378 /* Increase alignment of large objects if not already stricter. */
20379 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20380 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20381 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20382
20383 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20384}
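/* Note on the return value above: the alignment is not returned
   separately but encoded as its base-2 log in the low SECTION_ENTSIZE
   bits of the flags, which rs6000_xcoff_asm_named_section later prints
   as the trailing ",%u" alignment operand of the .csect directive.  */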
a5fe455b 20385
1bc7c5b6
ZW
20386/* Output at beginning of assembler file.
20387
20388 Initialize the section names for the RS/6000 at this point.
20389
20390 Specify filename, including full path, to assembler.
20391
20392 We want to go into the TOC section so at least one .toc will be emitted.
20393 Also, in order to output proper .bs/.es pairs, we need at least one static
20394 [RW] section emitted.
20395
20396 Finally, declare mcount when profiling to make the assembler happy. */
20397
20398static void
863d938c 20399rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20400{
20401 rs6000_gen_section_name (&xcoff_bss_section_name,
20402 main_input_filename, ".bss_");
20403 rs6000_gen_section_name (&xcoff_private_data_section_name,
20404 main_input_filename, ".rw_");
20405 rs6000_gen_section_name (&xcoff_read_only_section_name,
20406 main_input_filename, ".ro_");
20407
20408 fputs ("\t.file\t", asm_out_file);
20409 output_quoted_string (asm_out_file, main_input_filename);
20410 fputc ('\n', asm_out_file);
1bc7c5b6 20411 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20412 switch_to_section (private_data_section);
20413 switch_to_section (text_section);
1bc7c5b6
ZW
20414 if (profile_flag)
20415 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20416 rs6000_file_start ();
20417}
20418
a5fe455b
ZW
20419/* Output at end of assembler file.
20420 On the RS/6000, referencing data should automatically pull in text. */
20421
20422static void
863d938c 20423rs6000_xcoff_file_end (void)
a5fe455b 20424{
d6b5193b 20425 switch_to_section (text_section);
a5fe455b 20426 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20427 switch_to_section (data_section);
a5fe455b
ZW
20428 fputs (TARGET_32BIT
20429 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20430 asm_out_file);
20431}
f1384257 20432#endif /* TARGET_XCOFF */
0e5dbd9b 20433
3c50106f
RH
20434/* Compute a (partial) cost for rtx X. Return true if the complete
20435 cost has been computed, and false if subexpressions should be
20436 scanned. In either case, *TOTAL contains the cost result. */
20437
20438static bool
1494c534 20439rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20440{
f0517163
RS
20441 enum machine_mode mode = GET_MODE (x);
20442
3c50106f
RH
20443 switch (code)
20444 {
30a555d9 20445 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20446 case CONST_INT:
066cd967
DE
20447 if (((outer_code == SET
20448 || outer_code == PLUS
20449 || outer_code == MINUS)
279bb624
DE
20450 && (satisfies_constraint_I (x)
20451 || satisfies_constraint_L (x)))
066cd967 20452 || (outer_code == AND
279bb624
DE
20453 && (satisfies_constraint_K (x)
20454 || (mode == SImode
20455 ? satisfies_constraint_L (x)
20456 : satisfies_constraint_J (x))
1990cd79
AM
20457 || mask_operand (x, mode)
20458 || (mode == DImode
20459 && mask64_operand (x, DImode))))
22e54023 20460 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20461 && (satisfies_constraint_K (x)
20462 || (mode == SImode
20463 ? satisfies_constraint_L (x)
20464 : satisfies_constraint_J (x))))
066cd967
DE
20465 || outer_code == ASHIFT
20466 || outer_code == ASHIFTRT
20467 || outer_code == LSHIFTRT
20468 || outer_code == ROTATE
20469 || outer_code == ROTATERT
d5861a7a 20470 || outer_code == ZERO_EXTRACT
066cd967 20471 || (outer_code == MULT
279bb624 20472 && satisfies_constraint_I (x))
22e54023
DE
20473 || ((outer_code == DIV || outer_code == UDIV
20474 || outer_code == MOD || outer_code == UMOD)
20475 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20476 || (outer_code == COMPARE
279bb624
DE
20477 && (satisfies_constraint_I (x)
20478 || satisfies_constraint_K (x)))
22e54023 20479 || (outer_code == EQ
279bb624
DE
20480 && (satisfies_constraint_I (x)
20481 || satisfies_constraint_K (x)
20482 || (mode == SImode
20483 ? satisfies_constraint_L (x)
20484 : satisfies_constraint_J (x))))
22e54023 20485 || (outer_code == GTU
279bb624 20486 && satisfies_constraint_I (x))
22e54023 20487 || (outer_code == LTU
279bb624 20488 && satisfies_constraint_P (x)))
066cd967
DE
20489 {
20490 *total = 0;
20491 return true;
20492 }
20493 else if ((outer_code == PLUS
4ae234b0 20494 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20495 || (outer_code == MINUS
4ae234b0 20496 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20497 || ((outer_code == SET
20498 || outer_code == IOR
20499 || outer_code == XOR)
20500 && (INTVAL (x)
20501 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20502 {
20503 *total = COSTS_N_INSNS (1);
20504 return true;
20505 }
20506 /* FALLTHRU */
20507
20508 case CONST_DOUBLE:
f6fe3a22 20509 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20510 {
f6fe3a22
DE
20511 if ((outer_code == IOR || outer_code == XOR)
20512 && CONST_DOUBLE_HIGH (x) == 0
20513 && (CONST_DOUBLE_LOW (x)
20514 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20515 {
20516 *total = 0;
20517 return true;
20518 }
20519 else if ((outer_code == AND && and64_2_operand (x, DImode))
20520 || ((outer_code == SET
20521 || outer_code == IOR
20522 || outer_code == XOR)
20523 && CONST_DOUBLE_HIGH (x) == 0))
20524 {
20525 *total = COSTS_N_INSNS (1);
20526 return true;
20527 }
066cd967
DE
20528 }
20529 /* FALLTHRU */
20530
3c50106f 20531 case CONST:
066cd967 20532 case HIGH:
3c50106f 20533 case SYMBOL_REF:
066cd967
DE
20534 case MEM:
20535 /* When optimizing for size, MEM should be slightly more expensive
20536 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20537 L1 cache latency is about two instructions. */
066cd967 20538 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20539 return true;
20540
30a555d9
DE
20541 case LABEL_REF:
20542 *total = 0;
20543 return true;
20544
3c50106f 20545 case PLUS:
f0517163 20546 if (mode == DFmode)
066cd967
DE
20547 {
20548 if (GET_CODE (XEXP (x, 0)) == MULT)
20549 {
20550 /* FNMA accounted in outer NEG. */
20551 if (outer_code == NEG)
20552 *total = rs6000_cost->dmul - rs6000_cost->fp;
20553 else
20554 *total = rs6000_cost->dmul;
20555 }
20556 else
20557 *total = rs6000_cost->fp;
20558 }
f0517163 20559 else if (mode == SFmode)
066cd967
DE
20560 {
20561 /* FNMA accounted in outer NEG. */
20562 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20563 *total = 0;
20564 else
20565 *total = rs6000_cost->fp;
20566 }
f0517163 20567 else
066cd967
DE
20568 *total = COSTS_N_INSNS (1);
20569 return false;
3c50106f 20570
52190329 20571 case MINUS:
f0517163 20572 if (mode == DFmode)
066cd967 20573 {
762c919f
JM
20574 if (GET_CODE (XEXP (x, 0)) == MULT
20575 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20576 {
20577 /* FNMA accounted in outer NEG. */
20578 if (outer_code == NEG)
762c919f 20579 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20580 else
20581 *total = rs6000_cost->dmul;
20582 }
20583 else
20584 *total = rs6000_cost->fp;
20585 }
f0517163 20586 else if (mode == SFmode)
066cd967
DE
20587 {
20588 /* FNMA accounted in outer NEG. */
20589 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20590 *total = 0;
20591 else
20592 *total = rs6000_cost->fp;
20593 }
f0517163 20594 else
c4ad648e 20595 *total = COSTS_N_INSNS (1);
066cd967 20596 return false;
3c50106f
RH
20597
20598 case MULT:
c9dbf840 20599 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20600 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20601 {
8b897cfa
RS
20602 if (INTVAL (XEXP (x, 1)) >= -256
20603 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20604 *total = rs6000_cost->mulsi_const9;
8b897cfa 20605 else
06a67bdd 20606 *total = rs6000_cost->mulsi_const;
3c50106f 20607 }
066cd967
DE
20608 /* FMA accounted in outer PLUS/MINUS. */
20609 else if ((mode == DFmode || mode == SFmode)
20610 && (outer_code == PLUS || outer_code == MINUS))
20611 *total = 0;
f0517163 20612 else if (mode == DFmode)
06a67bdd 20613 *total = rs6000_cost->dmul;
f0517163 20614 else if (mode == SFmode)
06a67bdd 20615 *total = rs6000_cost->fp;
f0517163 20616 else if (mode == DImode)
06a67bdd 20617 *total = rs6000_cost->muldi;
8b897cfa 20618 else
06a67bdd 20619 *total = rs6000_cost->mulsi;
066cd967 20620 return false;
3c50106f
RH
20621
20622 case DIV:
20623 case MOD:
f0517163
RS
20624 if (FLOAT_MODE_P (mode))
20625 {
06a67bdd
RS
20626 *total = mode == DFmode ? rs6000_cost->ddiv
20627 : rs6000_cost->sdiv;
066cd967 20628 return false;
f0517163 20629 }
5efb1046 20630 /* FALLTHRU */
3c50106f
RH
20631
20632 case UDIV:
20633 case UMOD:
627b6fe2
DJ
20634 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20635 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20636 {
20637 if (code == DIV || code == MOD)
20638 /* Shift, addze */
20639 *total = COSTS_N_INSNS (2);
20640 else
20641 /* Shift */
20642 *total = COSTS_N_INSNS (1);
20643 }
c4ad648e 20644 else
627b6fe2
DJ
20645 {
20646 if (GET_MODE (XEXP (x, 1)) == DImode)
20647 *total = rs6000_cost->divdi;
20648 else
20649 *total = rs6000_cost->divsi;
20650 }
20651 /* Add in shift and subtract for MOD. */
20652 if (code == MOD || code == UMOD)
20653 *total += COSTS_N_INSNS (2);
066cd967 20654 return false;
3c50106f 20655
32f56aad 20656 case CTZ:
3c50106f
RH
20657 case FFS:
20658 *total = COSTS_N_INSNS (4);
066cd967 20659 return false;
3c50106f 20660
32f56aad
DE
20661 case POPCOUNT:
20662 *total = COSTS_N_INSNS (6);
20663 return false;
20664
06a67bdd 20665 case NOT:
066cd967
DE
20666 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20667 {
20668 *total = 0;
20669 return false;
20670 }
20671 /* FALLTHRU */
20672
20673 case AND:
32f56aad 20674 case CLZ:
066cd967
DE
20675 case IOR:
20676 case XOR:
d5861a7a
DE
20677 case ZERO_EXTRACT:
20678 *total = COSTS_N_INSNS (1);
20679 return false;
20680
066cd967
DE
20681 case ASHIFT:
20682 case ASHIFTRT:
20683 case LSHIFTRT:
20684 case ROTATE:
20685 case ROTATERT:
d5861a7a 20686 /* Handle mul_highpart. */
066cd967
DE
20687 if (outer_code == TRUNCATE
20688 && GET_CODE (XEXP (x, 0)) == MULT)
20689 {
20690 if (mode == DImode)
20691 *total = rs6000_cost->muldi;
20692 else
20693 *total = rs6000_cost->mulsi;
20694 return true;
20695 }
d5861a7a
DE
20696 else if (outer_code == AND)
20697 *total = 0;
20698 else
20699 *total = COSTS_N_INSNS (1);
20700 return false;
20701
20702 case SIGN_EXTEND:
20703 case ZERO_EXTEND:
20704 if (GET_CODE (XEXP (x, 0)) == MEM)
20705 *total = 0;
20706 else
20707 *total = COSTS_N_INSNS (1);
066cd967 20708 return false;
06a67bdd 20709
066cd967
DE
20710 case COMPARE:
20711 case NEG:
20712 case ABS:
20713 if (!FLOAT_MODE_P (mode))
20714 {
20715 *total = COSTS_N_INSNS (1);
20716 return false;
20717 }
20718 /* FALLTHRU */
20719
20720 case FLOAT:
20721 case UNSIGNED_FLOAT:
20722 case FIX:
20723 case UNSIGNED_FIX:
06a67bdd
RS
20724 case FLOAT_TRUNCATE:
20725 *total = rs6000_cost->fp;
066cd967 20726 return false;
06a67bdd 20727
a2af5043
DJ
20728 case FLOAT_EXTEND:
20729 if (mode == DFmode)
20730 *total = 0;
20731 else
20732 *total = rs6000_cost->fp;
20733 return false;
20734
06a67bdd
RS
20735 case UNSPEC:
20736 switch (XINT (x, 1))
20737 {
20738 case UNSPEC_FRSP:
20739 *total = rs6000_cost->fp;
20740 return true;
20741
20742 default:
20743 break;
20744 }
20745 break;
20746
20747 case CALL:
20748 case IF_THEN_ELSE:
20749 if (optimize_size)
20750 {
20751 *total = COSTS_N_INSNS (1);
20752 return true;
20753 }
066cd967
DE
20754 else if (FLOAT_MODE_P (mode)
20755 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20756 {
20757 *total = rs6000_cost->fp;
20758 return false;
20759 }
06a67bdd
RS
20760 break;
20761
c0600ecd
DE
20762 case EQ:
20763 case GTU:
20764 case LTU:
22e54023
DE
20765 /* Carry bit requires mode == Pmode.
20766 NEG or PLUS already counted so only add one. */
20767 if (mode == Pmode
20768 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20769 {
22e54023
DE
20770 *total = COSTS_N_INSNS (1);
20771 return true;
20772 }
20773 if (outer_code == SET)
20774 {
20775 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20776 {
22e54023 20777 *total = COSTS_N_INSNS (2);
c0600ecd 20778 return true;
c0600ecd 20779 }
22e54023
DE
20780 else if (mode == Pmode)
20781 {
20782 *total = COSTS_N_INSNS (3);
20783 return false;
20784 }
20785 }
20786 /* FALLTHRU */
20787
20788 case GT:
20789 case LT:
20790 case UNORDERED:
20791 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
20792 {
20793 *total = COSTS_N_INSNS (2);
20794 return true;
c0600ecd 20795 }
22e54023
DE
20796 /* CC COMPARE. */
20797 if (outer_code == COMPARE)
20798 {
20799 *total = 0;
20800 return true;
20801 }
20802 break;
c0600ecd 20803
3c50106f 20804 default:
06a67bdd 20805 break;
3c50106f 20806 }
06a67bdd
RS
20807
20808 return false;
3c50106f
RH
20809}
20810
34bb030a
DE
20811/* A C expression returning the cost of moving data from a register of class
20812 CLASS1 to one of CLASS2. */
20813
20814int
f676971a 20815rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 20816 enum reg_class from, enum reg_class to)
34bb030a
DE
20817{
20818 /* Moves from/to GENERAL_REGS. */
20819 if (reg_classes_intersect_p (to, GENERAL_REGS)
20820 || reg_classes_intersect_p (from, GENERAL_REGS))
20821 {
20822 if (! reg_classes_intersect_p (to, GENERAL_REGS))
20823 from = to;
20824
20825 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
20826 return (rs6000_memory_move_cost (mode, from, 0)
20827 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
20828
c4ad648e
AM
20829 /* It's more expensive to move CR_REGS than CR0_REGS because of the
20830 shift. */
34bb030a
DE
20831 else if (from == CR_REGS)
20832 return 4;
20833
20834 else
c4ad648e 20835 /* A move will cost one instruction per GPR moved. */
c8b622ff 20836 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
20837 }
20838
c4ad648e 20839 /* Moving between two similar registers is just one instruction. */
34bb030a 20840 else if (reg_classes_intersect_p (to, from))
7393f7f8 20841 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 20842
c4ad648e 20843 /* Everything else has to go through GENERAL_REGS. */
34bb030a 20844 else
f676971a 20845 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
20846 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
20847}
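/* Illustrative costs from the function above (not exhaustive): a
   GPR-to-GPR move of an SImode value costs 2; moving between an FPR or
   AltiVec register and a GPR is charged as a store plus a load via
   rs6000_memory_move_cost; and a move out of CR_REGS costs 4 because
   of the extra shift noted in the comment.  */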
20848
20849/* A C expression returning the cost of moving data of MODE from a register to
20850 or from memory. */
20851
20852int
f676971a 20853rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 20854 int in ATTRIBUTE_UNUSED)
34bb030a
DE
20855{
20856 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 20857 return 4 * hard_regno_nregs[0][mode];
34bb030a 20858 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 20859 return 4 * hard_regno_nregs[32][mode];
34bb030a 20860 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 20861 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
20862 else
20863 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
20864}
20865
20866/* Newton-Raphson approximation of single-precision floating point divide n/d.
20867 Assumes no trapping math and finite arguments. */
20868
20869void
20870rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
20871{
20872 rtx x0, e0, e1, y1, u0, v0, one;
20873
20874 x0 = gen_reg_rtx (SFmode);
20875 e0 = gen_reg_rtx (SFmode);
20876 e1 = gen_reg_rtx (SFmode);
20877 y1 = gen_reg_rtx (SFmode);
20878 u0 = gen_reg_rtx (SFmode);
20879 v0 = gen_reg_rtx (SFmode);
20880 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
20881
20882 /* x0 = 1./d estimate */
20883 emit_insn (gen_rtx_SET (VOIDmode, x0,
20884 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
20885 UNSPEC_FRES)));
20886 /* e0 = 1. - d * x0 */
20887 emit_insn (gen_rtx_SET (VOIDmode, e0,
20888 gen_rtx_MINUS (SFmode, one,
20889 gen_rtx_MULT (SFmode, d, x0))));
20890 /* e1 = e0 + e0 * e0 */
20891 emit_insn (gen_rtx_SET (VOIDmode, e1,
20892 gen_rtx_PLUS (SFmode,
20893 gen_rtx_MULT (SFmode, e0, e0), e0)));
20894 /* y1 = x0 + e1 * x0 */
20895 emit_insn (gen_rtx_SET (VOIDmode, y1,
20896 gen_rtx_PLUS (SFmode,
20897 gen_rtx_MULT (SFmode, e1, x0), x0)));
20898 /* u0 = n * y1 */
20899 emit_insn (gen_rtx_SET (VOIDmode, u0,
20900 gen_rtx_MULT (SFmode, n, y1)));
20901 /* v0 = n - d * u0 */
20902 emit_insn (gen_rtx_SET (VOIDmode, v0,
20903 gen_rtx_MINUS (SFmode, n,
20904 gen_rtx_MULT (SFmode, d, u0))));
20905 /* res = u0 + v0 * y1 */
20906 emit_insn (gen_rtx_SET (VOIDmode, res,
20907 gen_rtx_PLUS (SFmode,
20908 gen_rtx_MULT (SFmode, v0, y1), u0)));
20909}
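/* The sequence above is the standard Newton-Raphson refinement of the
   hardware reciprocal estimate (fres), arranged for multiply-add style
   operations:

	x0 = fres(d)		reciprocal estimate
	e0 = 1 - d*x0		error of the estimate
	e1 = e0 + e0*e0
	y1 = x0 + e1*x0		i.e. x0*(1 + e0 + e0^2)

   followed by one correction of the quotient itself: u0 = n*y1,
   v0 = n - d*u0, res = u0 + v0*y1.  Each refinement step roughly
   squares the relative error of the estimate.  */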
20910
20911/* Newton-Raphson approximation of double-precision floating point divide n/d.
20912 Assumes no trapping math and finite arguments. */
20913
20914void
20915rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
20916{
20917 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
20918
20919 x0 = gen_reg_rtx (DFmode);
20920 e0 = gen_reg_rtx (DFmode);
20921 e1 = gen_reg_rtx (DFmode);
20922 e2 = gen_reg_rtx (DFmode);
20923 y1 = gen_reg_rtx (DFmode);
20924 y2 = gen_reg_rtx (DFmode);
20925 y3 = gen_reg_rtx (DFmode);
20926 u0 = gen_reg_rtx (DFmode);
20927 v0 = gen_reg_rtx (DFmode);
20928 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
20929
20930 /* x0 = 1./d estimate */
20931 emit_insn (gen_rtx_SET (VOIDmode, x0,
20932 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
20933 UNSPEC_FRES)));
20934 /* e0 = 1. - d * x0 */
20935 emit_insn (gen_rtx_SET (VOIDmode, e0,
20936 gen_rtx_MINUS (DFmode, one,
20937 gen_rtx_MULT (DFmode, d, x0))));
20938 /* y1 = x0 + e0 * x0 */
20939 emit_insn (gen_rtx_SET (VOIDmode, y1,
20940 gen_rtx_PLUS (DFmode,
20941 gen_rtx_MULT (DFmode, e0, x0), x0)));
20942 /* e1 = e0 * e0 */
20943 emit_insn (gen_rtx_SET (VOIDmode, e1,
20944 gen_rtx_MULT (DFmode, e0, e0)));
20945 /* y2 = y1 + e1 * y1 */
20946 emit_insn (gen_rtx_SET (VOIDmode, y2,
20947 gen_rtx_PLUS (DFmode,
20948 gen_rtx_MULT (DFmode, e1, y1), y1)));
20949 /* e2 = e1 * e1 */
20950 emit_insn (gen_rtx_SET (VOIDmode, e2,
20951 gen_rtx_MULT (DFmode, e1, e1)));
20952 /* y3 = y2 + e2 * y2 */
20953 emit_insn (gen_rtx_SET (VOIDmode, y3,
20954 gen_rtx_PLUS (DFmode,
20955 gen_rtx_MULT (DFmode, e2, y2), y2)));
20956 /* u0 = n * y3 */
20957 emit_insn (gen_rtx_SET (VOIDmode, u0,
20958 gen_rtx_MULT (DFmode, n, y3)));
20959 /* v0 = n - d * u0 */
20960 emit_insn (gen_rtx_SET (VOIDmode, v0,
20961 gen_rtx_MINUS (DFmode, n,
20962 gen_rtx_MULT (DFmode, d, u0))));
20963 /* res = u0 + v0 * y3 */
20964 emit_insn (gen_rtx_SET (VOIDmode, res,
20965 gen_rtx_PLUS (DFmode,
20966 gen_rtx_MULT (DFmode, v0, y3), u0)));
20967}
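/* Same scheme as rs6000_emit_swdivsf above, but with three refinement
   steps instead of one: starting from the hardware estimate x0 ~= 1/d,

	e0 = 1 - d*x0,   y1 = x0 + e0*x0,
	e1 = e0*e0,      y2 = y1 + e1*y1,
	e2 = e1*e1,      y3 = y2 + e2*y2,

   and then the usual quotient correction u0 = n*y3, v0 = n - d*u0,
   res = u0 + v0*y3.  The extra steps are needed because each iteration
   only roughly doubles the number of correct bits and the result must
   be good to double precision.  */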
20968
20969
20970/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
20971 target, and SRC is the argument operand. */
20972
20973void
20974rs6000_emit_popcount (rtx dst, rtx src)
20975{
20976 enum machine_mode mode = GET_MODE (dst);
20977 rtx tmp1, tmp2;
20978
20979 tmp1 = gen_reg_rtx (mode);
20980
20981 if (mode == SImode)
20982 {
20983 emit_insn (gen_popcntbsi2 (tmp1, src));
20984 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
20985 NULL_RTX, 0);
20986 tmp2 = force_reg (SImode, tmp2);
20987 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
20988 }
20989 else
20990 {
20991 emit_insn (gen_popcntbdi2 (tmp1, src));
20992 tmp2 = expand_mult (DImode, tmp1,
20993 GEN_INT ((HOST_WIDE_INT)
20994 0x01010101 << 32 | 0x01010101),
20995 NULL_RTX, 0);
20996 tmp2 = force_reg (DImode, tmp2);
20997 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
20998 }
20999}
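/* How the SImode expansion above works (the DImode case is analogous,
   with a 64-bit 0x0101... constant and a shift by 56):

	tmp1 = popcntb (src)	   each byte of tmp1 holds the population
				   count of the corresponding byte of src
	tmp2 = tmp1 * 0x01010101   the multiply sums all four byte counts
				   into the most significant byte
	dst  = tmp2 >> 24	   extract that sum

   This relies on each per-byte count being at most 8, so the partial
   sums cannot carry into the next byte.  */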
21000
21001
21002/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21003 target, and SRC is the argument operand. */
21004
21005void
21006rs6000_emit_parity (rtx dst, rtx src)
21007{
21008 enum machine_mode mode = GET_MODE (dst);
21009 rtx tmp;
21010
21011 tmp = gen_reg_rtx (mode);
21012 if (mode == SImode)
21013 {
21014 /* Is mult+shift >= shift+xor+shift+xor? */
21015 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21016 {
21017 rtx tmp1, tmp2, tmp3, tmp4;
21018
21019 tmp1 = gen_reg_rtx (SImode);
21020 emit_insn (gen_popcntbsi2 (tmp1, src));
21021
21022 tmp2 = gen_reg_rtx (SImode);
21023 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21024 tmp3 = gen_reg_rtx (SImode);
21025 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21026
21027 tmp4 = gen_reg_rtx (SImode);
21028 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21029 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21030 }
21031 else
21032 rs6000_emit_popcount (tmp, src);
21033 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21034 }
21035 else
21036 {
21037 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21038 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21039 {
21040 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21041
21042 tmp1 = gen_reg_rtx (DImode);
21043 emit_insn (gen_popcntbdi2 (tmp1, src));
21044
21045 tmp2 = gen_reg_rtx (DImode);
21046 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21047 tmp3 = gen_reg_rtx (DImode);
21048 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21049
21050 tmp4 = gen_reg_rtx (DImode);
21051 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21052 tmp5 = gen_reg_rtx (DImode);
21053 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21054
21055 tmp6 = gen_reg_rtx (DImode);
21056 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21057 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21058 }
21059 else
21060 rs6000_emit_popcount (tmp, src);
21061 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21062 }
21063}
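/* Both branches above finish with an AND against 1: when the multiply
   is cheap the parity is just the low bit of the popcount.  The
   xor-folding branch avoids the multiply: starting from the per-byte
   counts produced by popcntb, the shift/xor pairs (by 16 and 8 for
   SImode; by 32, 16 and 8 for DImode) xor every count byte into the
   low byte, whose bit 0 is the sum of the byte counts modulo 2,
   i.e. the parity of SRC.  */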
21064
21065/* Return an RTX representing where to find the function value of a
21066 function returning MODE. */
21067static rtx
21068rs6000_complex_function_value (enum machine_mode mode)
21069{
21070 unsigned int regno;
21071 rtx r1, r2;
21072 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21073 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21074
18f63bfa
AH
21075 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21076 regno = FP_ARG_RETURN;
354ed18f
AH
21077 else
21078 {
18f63bfa 21079 regno = GP_ARG_RETURN;
ded9bf77 21080
18f63bfa
AH
21081 /* 32-bit is OK since it'll go in r3/r4. */
21082 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21083 return gen_rtx_REG (mode, regno);
21084 }
21085
18f63bfa
AH
21086 if (inner_bytes >= 8)
21087 return gen_rtx_REG (mode, regno);
21088
ded9bf77
AH
21089 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21090 const0_rtx);
21091 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21092 GEN_INT (inner_bytes));
ded9bf77
AH
21093 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21094}
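/* Example (illustrative): for an SCmode return with hard floats the
   inner mode is SFmode and inner_bytes is 4, so neither early return
   above is taken and the value is described as a PARALLEL of the real
   part in f1 (offset 0) and the imaginary part in f2 (offset 4).  */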
21095
a6ebc39a
AH
21096/* Define how to find the value returned by a function.
21097 VALTYPE is the data type of the value (as a tree).
21098 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21099 otherwise, FUNC is 0.
21100
21101 On the SPE, both FPs and vectors are returned in r3.
21102
21103 On RS/6000 an integer value is in r3 and a floating-point value is in
21104 fp1, unless -msoft-float. */
21105
21106rtx
586de218 21107rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21108{
21109 enum machine_mode mode;
2a8fa26c 21110 unsigned int regno;
a6ebc39a 21111
594a51fe
SS
21112 /* Special handling for structs in darwin64. */
21113 if (rs6000_darwin64_abi
21114 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21115 && TREE_CODE (valtype) == RECORD_TYPE
21116 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21117 {
21118 CUMULATIVE_ARGS valcum;
21119 rtx valret;
21120
0b5383eb 21121 valcum.words = 0;
594a51fe
SS
21122 valcum.fregno = FP_ARG_MIN_REG;
21123 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21124 /* Do a trial code generation as if this were going to be passed as
21125 an argument; if any part goes in memory, we return NULL. */
21126 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21127 if (valret)
21128 return valret;
21129 /* Otherwise fall through to standard ABI rules. */
21130 }
21131
0e67400a
FJ
21132 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21133 {
21134 /* A long long return value needs to be split when using -mpowerpc64 with the 32-bit ABI. */
21135 return gen_rtx_PARALLEL (DImode,
21136 gen_rtvec (2,
21137 gen_rtx_EXPR_LIST (VOIDmode,
21138 gen_rtx_REG (SImode, GP_ARG_RETURN),
21139 const0_rtx),
21140 gen_rtx_EXPR_LIST (VOIDmode,
21141 gen_rtx_REG (SImode,
21142 GP_ARG_RETURN + 1),
21143 GEN_INT (4))));
21144 }
0f086e42
FJ
21145 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21146 {
21147 return gen_rtx_PARALLEL (DCmode,
21148 gen_rtvec (4,
21149 gen_rtx_EXPR_LIST (VOIDmode,
21150 gen_rtx_REG (SImode, GP_ARG_RETURN),
21151 const0_rtx),
21152 gen_rtx_EXPR_LIST (VOIDmode,
21153 gen_rtx_REG (SImode,
21154 GP_ARG_RETURN + 1),
21155 GEN_INT (4)),
21156 gen_rtx_EXPR_LIST (VOIDmode,
21157 gen_rtx_REG (SImode,
21158 GP_ARG_RETURN + 2),
21159 GEN_INT (8)),
21160 gen_rtx_EXPR_LIST (VOIDmode,
21161 gen_rtx_REG (SImode,
21162 GP_ARG_RETURN + 3),
21163 GEN_INT (12))));
21164 }
602ea4d3 21165
7348aa7f
FXC
21166 mode = TYPE_MODE (valtype);
21167 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21168 || POINTER_TYPE_P (valtype))
b78d48dd 21169 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21170
00b79d54 21171 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21172 {
21173 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21174 {
21175 switch (mode)
21176 {
21177 default:
21178 gcc_unreachable ();
21179 case SDmode:
21180 regno = GP_ARG_RETURN;
21181 break;
21182 case DDmode:
21183 regno = FP_ARG_RETURN;
21184 break;
21185 case TDmode:
21186 /* Use f2:f3 specified by the ABI. */
21187 regno = FP_ARG_RETURN + 1;
21188 break;
21189 }
21190 }
21191 else
21192 regno = GP_ARG_RETURN;
21193 }
00b79d54 21194 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21195 regno = FP_ARG_RETURN;
ded9bf77 21196 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21197 && targetm.calls.split_complex_arg)
ded9bf77 21198 return rs6000_complex_function_value (mode);
44688022 21199 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21200 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21201 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21202 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21203 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21204 && (mode == DFmode || mode == DCmode
21205 || mode == TFmode || mode == TCmode))
18f63bfa 21206 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21207 else
21208 regno = GP_ARG_RETURN;
21209
21210 return gen_rtx_REG (mode, regno);
21211}
21212
ded9bf77
AH
21213/* Define how to find the value returned by a library function
21214 assuming the value has mode MODE. */
21215rtx
21216rs6000_libcall_value (enum machine_mode mode)
21217{
21218 unsigned int regno;
21219
2e6c9641
FJ
21220 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21221 {
21222 /* A long long return value needs to be split when using -mpowerpc64 with the 32-bit ABI. */
21223 return gen_rtx_PARALLEL (DImode,
21224 gen_rtvec (2,
21225 gen_rtx_EXPR_LIST (VOIDmode,
21226 gen_rtx_REG (SImode, GP_ARG_RETURN),
21227 const0_rtx),
21228 gen_rtx_EXPR_LIST (VOIDmode,
21229 gen_rtx_REG (SImode,
21230 GP_ARG_RETURN + 1),
21231 GEN_INT (4))));
21232 }
21233
00b79d54 21234 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21235 {
21236 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21237 {
21238 switch (mode)
21239 {
21240 default:
21241 gcc_unreachable ();
21242 case SDmode:
21243 regno = GP_ARG_RETURN;
21244 break;
21245 case DDmode:
21246 regno = FP_ARG_RETURN;
21247 break;
21248 case TDmode:
21249 /* Use f2:f3 specified by the ABI. */
21250 regno = FP_ARG_RETURN + 1;
21251 break;
21252 }
21253 }
21254 else
21255 regno = GP_ARG_RETURN;
21256 }
00b79d54 21257 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21258 && TARGET_HARD_FLOAT && TARGET_FPRS)
21259 regno = FP_ARG_RETURN;
44688022
AM
21260 else if (ALTIVEC_VECTOR_MODE (mode)
21261 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21262 regno = ALTIVEC_ARG_RETURN;
42ba5130 21263 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21264 return rs6000_complex_function_value (mode);
18f63bfa 21265 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21266 && (mode == DFmode || mode == DCmode
21267 || mode == TFmode || mode == TCmode))
18f63bfa 21268 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21269 else
21270 regno = GP_ARG_RETURN;
21271
21272 return gen_rtx_REG (mode, regno);
21273}
21274
d1d0c603
JJ
21275/* Define the offset between two registers, FROM to be eliminated and its
21276 replacement TO, at the start of a routine. */
21277HOST_WIDE_INT
21278rs6000_initial_elimination_offset (int from, int to)
21279{
21280 rs6000_stack_t *info = rs6000_stack_info ();
21281 HOST_WIDE_INT offset;
21282
7d5175e1 21283 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21284 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21285 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21286 {
21287 offset = info->push_p ? 0 : -info->total_size;
21288 if (FRAME_GROWS_DOWNWARD)
5b667039 21289 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21290 }
21291 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21292 offset = FRAME_GROWS_DOWNWARD
5b667039 21293 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21294 : 0;
21295 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21296 offset = info->total_size;
21297 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21298 offset = info->push_p ? info->total_size : 0;
21299 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21300 offset = 0;
21301 else
37409796 21302 gcc_unreachable ();
d1d0c603
JJ
21303
21304 return offset;
21305}
21306
58646b77 21307/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21308
c8e4f0e9 21309static bool
3101faab 21310rs6000_is_opaque_type (const_tree type)
62e1dfcf 21311{
58646b77 21312 return (type == opaque_V2SI_type_node
2abe3e28 21313 || type == opaque_V2SF_type_node
58646b77
PB
21314 || type == opaque_p_V2SI_type_node
21315 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21316}
21317
96714395 21318static rtx
a2369ed3 21319rs6000_dwarf_register_span (rtx reg)
96714395
AH
21320{
21321 unsigned regno;
21322
4d4cbc0e
AH
21323 if (TARGET_SPE
21324 && (SPE_VECTOR_MODE (GET_MODE (reg))
21325 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
21326 ;
21327 else
96714395
AH
21328 return NULL_RTX;
21329
21330 regno = REGNO (reg);
21331
21332 /* The duality of the SPE register size wreaks all kinds of havoc.
21333 This is a way of distinguishing r0 in 32-bits from r0 in
21334 64-bits. */
21335 return
21336 gen_rtx_PARALLEL (VOIDmode,
21337 BYTES_BIG_ENDIAN
21338 ? gen_rtvec (2,
21339 gen_rtx_REG (SImode, regno + 1200),
21340 gen_rtx_REG (SImode, regno))
21341 : gen_rtvec (2,
21342 gen_rtx_REG (SImode, regno),
21343 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21344}
21345
37ea0b7e
JM
21346/* Fill in sizes for SPE register high parts in table used by unwinder. */
21347
21348static void
21349rs6000_init_dwarf_reg_sizes_extra (tree address)
21350{
21351 if (TARGET_SPE)
21352 {
21353 int i;
21354 enum machine_mode mode = TYPE_MODE (char_type_node);
21355 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21356 rtx mem = gen_rtx_MEM (BLKmode, addr);
21357 rtx value = gen_int_mode (4, mode);
21358
21359 for (i = 1201; i < 1232; i++)
21360 {
21361 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21362 HOST_WIDE_INT offset
21363 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21364
21365 emit_move_insn (adjust_address (mem, mode, offset), value);
21366 }
21367 }
21368}
21369
93c9d1ba
AM
21370/* Map internal gcc register numbers to DWARF2 register numbers. */
21371
21372unsigned int
21373rs6000_dbx_register_number (unsigned int regno)
21374{
21375 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21376 return regno;
21377 if (regno == MQ_REGNO)
21378 return 100;
1de43f85 21379 if (regno == LR_REGNO)
93c9d1ba 21380 return 108;
1de43f85 21381 if (regno == CTR_REGNO)
93c9d1ba
AM
21382 return 109;
21383 if (CR_REGNO_P (regno))
21384 return regno - CR0_REGNO + 86;
21385 if (regno == XER_REGNO)
21386 return 101;
21387 if (ALTIVEC_REGNO_P (regno))
21388 return regno - FIRST_ALTIVEC_REGNO + 1124;
21389 if (regno == VRSAVE_REGNO)
21390 return 356;
21391 if (regno == VSCR_REGNO)
21392 return 67;
21393 if (regno == SPE_ACC_REGNO)
21394 return 99;
21395 if (regno == SPEFSCR_REGNO)
21396 return 612;
21397 /* SPE high reg number. We get these values of regno from
21398 rs6000_dwarf_register_span. */
37409796
NS
21399 gcc_assert (regno >= 1200 && regno < 1232);
21400 return regno;
93c9d1ba
AM
21401}
21402
93f90be6 21403/* target hook eh_return_filter_mode */
f676971a 21404static enum machine_mode
93f90be6
FJ
21405rs6000_eh_return_filter_mode (void)
21406{
21407 return TARGET_32BIT ? SImode : word_mode;
21408}
21409
00b79d54
BE
21410/* Target hook for scalar_mode_supported_p. */
21411static bool
21412rs6000_scalar_mode_supported_p (enum machine_mode mode)
21413{
21414 if (DECIMAL_FLOAT_MODE_P (mode))
21415 return true;
21416 else
21417 return default_scalar_mode_supported_p (mode);
21418}
21419
f676971a
EC
21420/* Target hook for vector_mode_supported_p. */
21421static bool
21422rs6000_vector_mode_supported_p (enum machine_mode mode)
21423{
21424
96038623
DE
21425 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21426 return true;
21427
f676971a
EC
21428 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21429 return true;
21430
21431 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21432 return true;
21433
21434 else
21435 return false;
21436}
21437
bb8df8a6
EC
21438/* Target hook for invalid_arg_for_unprototyped_fn. */
21439static const char *
3101faab 21440invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21441{
21442 return (!rs6000_darwin64_abi
21443 && typelist == 0
21444 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21445 && (funcdecl == NULL_TREE
21446 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21447 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21448 ? N_("AltiVec argument passed to unprototyped function")
21449 : NULL;
21450}
21451
3aebbe5f
JJ
21452/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
21453 setup by using __stack_chk_fail_local hidden function instead of
21454 calling __stack_chk_fail directly. Otherwise it is better to call
21455 __stack_chk_fail directly. */
21456
21457static tree
21458rs6000_stack_protect_fail (void)
21459{
21460 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21461 ? default_hidden_stack_protect_fail ()
21462 : default_external_stack_protect_fail ();
21463}
21464
17211ab5 21465#include "gt-rs6000.h"