9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
4d3e6fae 56#include "intl.h"
59d6560b 57#include "params.h"
279bb624 58#include "tm-constrs.h"
1bc7c5b6
ZW
59#if TARGET_XCOFF
60#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61#endif
93a27b7b
ZW
62#if TARGET_MACHO
63#include "gstab.h" /* for N_SLINE */
64#endif
9b30bae2 65
7509c759
MM
66#ifndef TARGET_NO_PROTOTYPE
67#define TARGET_NO_PROTOTYPE 0
68#endif
69
9878760c
RK
70#define min(A,B) ((A) < (B) ? (A) : (B))
71#define max(A,B) ((A) > (B) ? (A) : (B))
72
d1d0c603
JJ
73/* Structure used to define the rs6000 stack */
74typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
81 int push_p; /* true if we need to allocate stack space */
82 int calls_p; /* true if the function makes any calls */
c4ad648e 83 int world_save_p; /* true if we're saving *everything*:
d62294f5 84 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
85 enum rs6000_abi abi; /* which ABI to use */
86 int gp_save_offset; /* offset to save GP regs from initial SP */
87 int fp_save_offset; /* offset to save FP regs from initial SP */
88 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
89 int lr_save_offset; /* offset to save LR from initial SP */
90 int cr_save_offset; /* offset to save CR from initial SP */
91 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
92 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
96 HOST_WIDE_INT vars_size; /* variable save area size */
97 int parm_size; /* outgoing parameter size */
98 int save_size; /* save area size */
99 int fixed_size; /* fixed size of stack frame */
100 int gp_size; /* size of saved GP registers */
101 int fp_size; /* size of saved FP registers */
102 int altivec_size; /* size of saved AltiVec registers */
103 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
106 not in save_size */
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
d1d0c603
JJ
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
111} rs6000_stack_t;
112
5b667039
JJ
113/* A C structure for machine-specific, per-function data.
114 This is added to the cfun structure. */
115typedef struct machine_function GTY(())
116{
117 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
118 int ra_needs_full_frame;
119 /* Some local-dynamic symbol. */
120 const char *some_ld_name;
121 /* Whether the instruction chain has been scanned already. */
122 int insn_chain_scanned_p;
123 /* Flags if __builtin_return_address (0) was used. */
124 int ra_need_lr;
125 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
126 varargs save area. */
127 HOST_WIDE_INT varargs_save_offset;
128} machine_function;
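/* Editor's sketch (not original source): per-function data like the above
   is reached through the current function, roughly

     if (cfun->machine->ra_needs_full_frame)
       ...build the full frame when computing the return address...

   with cfun->machine allocated by rs6000_init_machine_status, declared
   further below.  Field names mirror the structure above; treat the
   snippet as illustrative only.  */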
129
5248c961
RK
130/* Target cpu type */
131
132enum processor_type rs6000_cpu;
8e3f41e7
MM
133struct rs6000_cpu_select rs6000_select[3] =
134{
815cdc52
MM
135 /* switch name, tune arch */
136 { (const char *)0, "--with-cpu=", 1, 1 },
137 { (const char *)0, "-mcpu=", 1, 1 },
138 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 139};
5248c961 140
d296e02e
AP
141static GTY(()) bool rs6000_cell_dont_microcode;
142
ec507f2d
DE
143/* Always emit branch hint bits. */
144static GTY(()) bool rs6000_always_hint;
145
146/* Schedule instructions for group formation. */
147static GTY(()) bool rs6000_sched_groups;
148
44cd321e
PS
149/* Align branch targets. */
150static GTY(()) bool rs6000_align_branch_targets;
151
569fa502
DN
152/* Support for -msched-costly-dep option. */
153const char *rs6000_sched_costly_dep_str;
154enum rs6000_dependence_cost rs6000_sched_costly_dep;
155
cbe26ab8
DN
156/* Support for -minsert-sched-nops option. */
157const char *rs6000_sched_insert_nops_str;
158enum rs6000_nop_insertion rs6000_sched_insert_nops;
159
7ccf35ed 160/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 161static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 162
602ea4d3 163/* Size of long double. */
6fa3f289
ZW
164int rs6000_long_double_type_size;
165
602ea4d3
JJ
166/* IEEE quad extended precision long double. */
167int rs6000_ieeequad;
168
169/* Whether -mabi=altivec has appeared. */
6fa3f289
ZW
170int rs6000_altivec_abi;
171
a3170dc6
AH
172/* Nonzero if we want SPE ABI extensions. */
173int rs6000_spe_abi;
174
5da702b1
AH
175/* Nonzero if floating point operations are done in the GPRs. */
176int rs6000_float_gprs = 0;
177
594a51fe
SS
178/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
179int rs6000_darwin64_abi;
180
a0ab749a 181/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 182static GTY(()) int common_mode_defined;
c81bebd7 183
9878760c
RK
184/* Save information from a "cmpxx" operation until the branch or scc is
185 emitted. */
9878760c
RK
186rtx rs6000_compare_op0, rs6000_compare_op1;
187int rs6000_compare_fp_p;
874a0744 188
874a0744
MM
189/* Label number of the label created for -mrelocatable; we call to it
 190 so we can get the address of the GOT section.  */
191int rs6000_pic_labelno;
c81bebd7 192
b91da81f 193#ifdef USING_ELFOS_H
c81bebd7 194/* Which abi to adhere to */
9739c90c 195const char *rs6000_abi_name;
d9407988
MM
196
197/* Semantics of the small data area */
198enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
199
200/* Which small data model to use */
815cdc52 201const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
202
203/* Counter for labels which are to be placed in .fixup. */
204int fixuplabelno = 0;
874a0744 205#endif
4697a36c 206
c4501e62
JJ
207/* Bit size of immediate TLS offsets and string from which it is decoded. */
208int rs6000_tls_size = 32;
209const char *rs6000_tls_size_string;
210
b6c9286a
MM
211/* ABI enumeration available for subtarget to use. */
212enum rs6000_abi rs6000_current_abi;
213
85b776df
AM
214/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
215int dot_symbols;
216
38c1f2d7 217/* Debug flags */
815cdc52 218const char *rs6000_debug_name;
38c1f2d7
MM
219int rs6000_debug_stack; /* debug stack applications */
220int rs6000_debug_arg; /* debug argument handling */
221
aabcd309 222/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
223bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
224
58646b77
PB
225/* Built in types. */
226
227tree rs6000_builtin_types[RS6000_BTI_MAX];
228tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 229
57ac7be9
AM
230const char *rs6000_traceback_name;
231static enum {
232 traceback_default = 0,
233 traceback_none,
234 traceback_part,
235 traceback_full
236} rs6000_traceback;
237
38c1f2d7
MM
238/* Flag to say the TOC is initialized */
239int toc_initialized;
9ebbca7d 240char toc_label_name[10];
38c1f2d7 241
44cd321e
PS
242/* Cached value of rs6000_variable_issue. This is cached in
243 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
244static short cached_can_issue_more;
245
d6b5193b
RS
246static GTY(()) section *read_only_data_section;
247static GTY(()) section *private_data_section;
248static GTY(()) section *read_only_private_data_section;
249static GTY(()) section *sdata2_section;
250static GTY(()) section *toc_section;
251
a3c9585f
KH
252/* Control alignment for fields within structures. */
253/* String from -malign-XXXXX. */
025d9908
KH
254int rs6000_alignment_flags;
255
78f5898b
AH
256/* True for any options that were explicitly set. */
257struct {
df01da37 258 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 259 bool alignment; /* True if -malign- was used. */
d3603e8c 260 bool abi; /* True if -mabi=spe/nospe was used. */
78f5898b
AH
261 bool spe; /* True if -mspe= was used. */
262 bool float_gprs; /* True if -mfloat-gprs= was used. */
263 bool isel; /* True if -misel was used. */
264 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 265 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
78f5898b
AH
266} rs6000_explicit_options;
267
a3170dc6
AH
268struct builtin_description
269{
270 /* mask is not const because we're going to alter it below. This
271 nonsense will go away when we rewrite the -march infrastructure
272 to give us more target flag bits. */
273 unsigned int mask;
274 const enum insn_code icode;
275 const char *const name;
276 const enum rs6000_builtins code;
277};
8b897cfa
RS
278\f
279/* Target cpu costs. */
280
281struct processor_costs {
c4ad648e 282 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
283 const int mulsi_const; /* cost of SImode multiplication by constant. */
284 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
285 const int muldi; /* cost of DImode multiplication. */
286 const int divsi; /* cost of SImode division. */
287 const int divdi; /* cost of DImode division. */
288 const int fp; /* cost of simple SFmode and DFmode insns. */
289 const int dmul; /* cost of DFmode multiplication (and fmadd). */
290 const int sdiv; /* cost of SFmode division (fdivs). */
291 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
292 const int cache_line_size; /* cache line size in bytes. */
293 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
294 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
295 const int simultaneous_prefetches; /* number of parallel prefetch
296 operations. */
8b897cfa
RS
297};
298
299const struct processor_costs *rs6000_cost;
300
301/* Processor costs (relative to an add) */
302
303/* Instruction size costs on 32bit processors. */
304static const
305struct processor_costs size32_cost = {
06a67bdd
RS
306 COSTS_N_INSNS (1), /* mulsi */
307 COSTS_N_INSNS (1), /* mulsi_const */
308 COSTS_N_INSNS (1), /* mulsi_const9 */
309 COSTS_N_INSNS (1), /* muldi */
310 COSTS_N_INSNS (1), /* divsi */
311 COSTS_N_INSNS (1), /* divdi */
312 COSTS_N_INSNS (1), /* fp */
313 COSTS_N_INSNS (1), /* dmul */
314 COSTS_N_INSNS (1), /* sdiv */
315 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
316 32,
317 0,
318 0,
5f732aba 319 0,
8b897cfa
RS
320};
321
322/* Instruction size costs on 64bit processors. */
323static const
324struct processor_costs size64_cost = {
06a67bdd
RS
325 COSTS_N_INSNS (1), /* mulsi */
326 COSTS_N_INSNS (1), /* mulsi_const */
327 COSTS_N_INSNS (1), /* mulsi_const9 */
328 COSTS_N_INSNS (1), /* muldi */
329 COSTS_N_INSNS (1), /* divsi */
330 COSTS_N_INSNS (1), /* divdi */
331 COSTS_N_INSNS (1), /* fp */
332 COSTS_N_INSNS (1), /* dmul */
333 COSTS_N_INSNS (1), /* sdiv */
334 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
335 128,
336 0,
337 0,
5f732aba 338 0,
8b897cfa
RS
339};
340
341/* Instruction costs on RIOS1 processors. */
342static const
343struct processor_costs rios1_cost = {
06a67bdd
RS
344 COSTS_N_INSNS (5), /* mulsi */
345 COSTS_N_INSNS (4), /* mulsi_const */
346 COSTS_N_INSNS (3), /* mulsi_const9 */
347 COSTS_N_INSNS (5), /* muldi */
348 COSTS_N_INSNS (19), /* divsi */
349 COSTS_N_INSNS (19), /* divdi */
350 COSTS_N_INSNS (2), /* fp */
351 COSTS_N_INSNS (2), /* dmul */
352 COSTS_N_INSNS (19), /* sdiv */
353 COSTS_N_INSNS (19), /* ddiv */
5f732aba
DE
354 128,
355 64, /* l1 cache */
356 512, /* l2 cache */
0b11da67 357 0, /* streams */
8b897cfa
RS
358};
359
360/* Instruction costs on RIOS2 processors. */
361static const
362struct processor_costs rios2_cost = {
06a67bdd
RS
363 COSTS_N_INSNS (2), /* mulsi */
364 COSTS_N_INSNS (2), /* mulsi_const */
365 COSTS_N_INSNS (2), /* mulsi_const9 */
366 COSTS_N_INSNS (2), /* muldi */
367 COSTS_N_INSNS (13), /* divsi */
368 COSTS_N_INSNS (13), /* divdi */
369 COSTS_N_INSNS (2), /* fp */
370 COSTS_N_INSNS (2), /* dmul */
371 COSTS_N_INSNS (17), /* sdiv */
372 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
373 256,
374 256, /* l1 cache */
375 1024, /* l2 cache */
0b11da67 376 0, /* streams */
8b897cfa
RS
377};
378
379/* Instruction costs on RS64A processors. */
380static const
381struct processor_costs rs64a_cost = {
06a67bdd
RS
382 COSTS_N_INSNS (20), /* mulsi */
383 COSTS_N_INSNS (12), /* mulsi_const */
384 COSTS_N_INSNS (8), /* mulsi_const9 */
385 COSTS_N_INSNS (34), /* muldi */
386 COSTS_N_INSNS (65), /* divsi */
387 COSTS_N_INSNS (67), /* divdi */
388 COSTS_N_INSNS (4), /* fp */
389 COSTS_N_INSNS (4), /* dmul */
390 COSTS_N_INSNS (31), /* sdiv */
391 COSTS_N_INSNS (31), /* ddiv */
0b11da67 392 128,
5f732aba
DE
393 128, /* l1 cache */
394 2048, /* l2 cache */
0b11da67 395 1, /* streams */
8b897cfa
RS
396};
397
398/* Instruction costs on MPCCORE processors. */
399static const
400struct processor_costs mpccore_cost = {
06a67bdd
RS
401 COSTS_N_INSNS (2), /* mulsi */
402 COSTS_N_INSNS (2), /* mulsi_const */
403 COSTS_N_INSNS (2), /* mulsi_const9 */
404 COSTS_N_INSNS (2), /* muldi */
405 COSTS_N_INSNS (6), /* divsi */
406 COSTS_N_INSNS (6), /* divdi */
407 COSTS_N_INSNS (4), /* fp */
408 COSTS_N_INSNS (5), /* dmul */
409 COSTS_N_INSNS (10), /* sdiv */
410 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
411 32,
412 4, /* l1 cache */
413 16, /* l2 cache */
0b11da67 414 1, /* streams */
8b897cfa
RS
415};
416
417/* Instruction costs on PPC403 processors. */
418static const
419struct processor_costs ppc403_cost = {
06a67bdd
RS
420 COSTS_N_INSNS (4), /* mulsi */
421 COSTS_N_INSNS (4), /* mulsi_const */
422 COSTS_N_INSNS (4), /* mulsi_const9 */
423 COSTS_N_INSNS (4), /* muldi */
424 COSTS_N_INSNS (33), /* divsi */
425 COSTS_N_INSNS (33), /* divdi */
426 COSTS_N_INSNS (11), /* fp */
427 COSTS_N_INSNS (11), /* dmul */
428 COSTS_N_INSNS (11), /* sdiv */
429 COSTS_N_INSNS (11), /* ddiv */
0b11da67 430 32,
5f732aba
DE
431 4, /* l1 cache */
432 16, /* l2 cache */
0b11da67 433 1, /* streams */
8b897cfa
RS
434};
435
436/* Instruction costs on PPC405 processors. */
437static const
438struct processor_costs ppc405_cost = {
06a67bdd
RS
439 COSTS_N_INSNS (5), /* mulsi */
440 COSTS_N_INSNS (4), /* mulsi_const */
441 COSTS_N_INSNS (3), /* mulsi_const9 */
442 COSTS_N_INSNS (5), /* muldi */
443 COSTS_N_INSNS (35), /* divsi */
444 COSTS_N_INSNS (35), /* divdi */
445 COSTS_N_INSNS (11), /* fp */
446 COSTS_N_INSNS (11), /* dmul */
447 COSTS_N_INSNS (11), /* sdiv */
448 COSTS_N_INSNS (11), /* ddiv */
0b11da67 449 32,
5f732aba
DE
450 16, /* l1 cache */
451 128, /* l2 cache */
0b11da67 452 1, /* streams */
8b897cfa
RS
453};
454
455/* Instruction costs on PPC440 processors. */
456static const
457struct processor_costs ppc440_cost = {
06a67bdd
RS
458 COSTS_N_INSNS (3), /* mulsi */
459 COSTS_N_INSNS (2), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (3), /* muldi */
462 COSTS_N_INSNS (34), /* divsi */
463 COSTS_N_INSNS (34), /* divdi */
464 COSTS_N_INSNS (5), /* fp */
465 COSTS_N_INSNS (5), /* dmul */
466 COSTS_N_INSNS (19), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
0b11da67 468 32,
5f732aba
DE
469 32, /* l1 cache */
470 256, /* l2 cache */
0b11da67 471 1, /* streams */
8b897cfa
RS
472};
473
474/* Instruction costs on PPC601 processors. */
475static const
476struct processor_costs ppc601_cost = {
06a67bdd
RS
477 COSTS_N_INSNS (5), /* mulsi */
478 COSTS_N_INSNS (5), /* mulsi_const */
479 COSTS_N_INSNS (5), /* mulsi_const9 */
480 COSTS_N_INSNS (5), /* muldi */
481 COSTS_N_INSNS (36), /* divsi */
482 COSTS_N_INSNS (36), /* divdi */
483 COSTS_N_INSNS (4), /* fp */
484 COSTS_N_INSNS (5), /* dmul */
485 COSTS_N_INSNS (17), /* sdiv */
486 COSTS_N_INSNS (31), /* ddiv */
0b11da67 487 32,
5f732aba
DE
488 32, /* l1 cache */
489 256, /* l2 cache */
0b11da67 490 1, /* streams */
8b897cfa
RS
491};
492
493/* Instruction costs on PPC603 processors. */
494static const
495struct processor_costs ppc603_cost = {
06a67bdd
RS
496 COSTS_N_INSNS (5), /* mulsi */
497 COSTS_N_INSNS (3), /* mulsi_const */
498 COSTS_N_INSNS (2), /* mulsi_const9 */
499 COSTS_N_INSNS (5), /* muldi */
500 COSTS_N_INSNS (37), /* divsi */
501 COSTS_N_INSNS (37), /* divdi */
502 COSTS_N_INSNS (3), /* fp */
503 COSTS_N_INSNS (4), /* dmul */
504 COSTS_N_INSNS (18), /* sdiv */
505 COSTS_N_INSNS (33), /* ddiv */
0b11da67 506 32,
5f732aba
DE
507 8, /* l1 cache */
508 64, /* l2 cache */
0b11da67 509 1, /* streams */
8b897cfa
RS
510};
511
512/* Instruction costs on PPC604 processors. */
513static const
514struct processor_costs ppc604_cost = {
06a67bdd
RS
515 COSTS_N_INSNS (4), /* mulsi */
516 COSTS_N_INSNS (4), /* mulsi_const */
517 COSTS_N_INSNS (4), /* mulsi_const9 */
518 COSTS_N_INSNS (4), /* muldi */
519 COSTS_N_INSNS (20), /* divsi */
520 COSTS_N_INSNS (20), /* divdi */
521 COSTS_N_INSNS (3), /* fp */
522 COSTS_N_INSNS (3), /* dmul */
523 COSTS_N_INSNS (18), /* sdiv */
524 COSTS_N_INSNS (32), /* ddiv */
0b11da67 525 32,
5f732aba
DE
526 16, /* l1 cache */
527 512, /* l2 cache */
0b11da67 528 1, /* streams */
8b897cfa
RS
529};
530
531/* Instruction costs on PPC604e processors. */
532static const
533struct processor_costs ppc604e_cost = {
06a67bdd
RS
534 COSTS_N_INSNS (2), /* mulsi */
535 COSTS_N_INSNS (2), /* mulsi_const */
536 COSTS_N_INSNS (2), /* mulsi_const9 */
537 COSTS_N_INSNS (2), /* muldi */
538 COSTS_N_INSNS (20), /* divsi */
539 COSTS_N_INSNS (20), /* divdi */
540 COSTS_N_INSNS (3), /* fp */
541 COSTS_N_INSNS (3), /* dmul */
542 COSTS_N_INSNS (18), /* sdiv */
543 COSTS_N_INSNS (32), /* ddiv */
0b11da67 544 32,
5f732aba
DE
545 32, /* l1 cache */
546 1024, /* l2 cache */
0b11da67 547 1, /* streams */
8b897cfa
RS
548};
549
f0517163 550/* Instruction costs on PPC620 processors. */
8b897cfa
RS
551static const
552struct processor_costs ppc620_cost = {
06a67bdd
RS
553 COSTS_N_INSNS (5), /* mulsi */
554 COSTS_N_INSNS (4), /* mulsi_const */
555 COSTS_N_INSNS (3), /* mulsi_const9 */
556 COSTS_N_INSNS (7), /* muldi */
557 COSTS_N_INSNS (21), /* divsi */
558 COSTS_N_INSNS (37), /* divdi */
559 COSTS_N_INSNS (3), /* fp */
560 COSTS_N_INSNS (3), /* dmul */
561 COSTS_N_INSNS (18), /* sdiv */
562 COSTS_N_INSNS (32), /* ddiv */
0b11da67 563 128,
5f732aba
DE
564 32, /* l1 cache */
565 1024, /* l2 cache */
0b11da67 566 1, /* streams */
f0517163
RS
567};
568
569/* Instruction costs on PPC630 processors. */
570static const
571struct processor_costs ppc630_cost = {
06a67bdd
RS
572 COSTS_N_INSNS (5), /* mulsi */
573 COSTS_N_INSNS (4), /* mulsi_const */
574 COSTS_N_INSNS (3), /* mulsi_const9 */
575 COSTS_N_INSNS (7), /* muldi */
576 COSTS_N_INSNS (21), /* divsi */
577 COSTS_N_INSNS (37), /* divdi */
578 COSTS_N_INSNS (3), /* fp */
579 COSTS_N_INSNS (3), /* dmul */
580 COSTS_N_INSNS (17), /* sdiv */
581 COSTS_N_INSNS (21), /* ddiv */
0b11da67 582 128,
5f732aba
DE
583 64, /* l1 cache */
584 1024, /* l2 cache */
0b11da67 585 1, /* streams */
8b897cfa
RS
586};
587
d296e02e
AP
588/* Instruction costs on Cell processor. */
589/* COSTS_N_INSNS (1) ~ one add. */
590static const
591struct processor_costs ppccell_cost = {
592 COSTS_N_INSNS (9/2)+2, /* mulsi */
593 COSTS_N_INSNS (6/2), /* mulsi_const */
594 COSTS_N_INSNS (6/2), /* mulsi_const9 */
595 COSTS_N_INSNS (15/2)+2, /* muldi */
596 COSTS_N_INSNS (38/2), /* divsi */
597 COSTS_N_INSNS (70/2), /* divdi */
598 COSTS_N_INSNS (10/2), /* fp */
599 COSTS_N_INSNS (10/2), /* dmul */
600 COSTS_N_INSNS (74/2), /* sdiv */
601 COSTS_N_INSNS (74/2), /* ddiv */
0b11da67 602 128,
5f732aba
DE
603 32, /* l1 cache */
604 512, /* l2 cache */
605 6, /* streams */
d296e02e
AP
606};
607
8b897cfa
RS
608/* Instruction costs on PPC750 and PPC7400 processors. */
609static const
610struct processor_costs ppc750_cost = {
06a67bdd
RS
611 COSTS_N_INSNS (5), /* mulsi */
612 COSTS_N_INSNS (3), /* mulsi_const */
613 COSTS_N_INSNS (2), /* mulsi_const9 */
614 COSTS_N_INSNS (5), /* muldi */
615 COSTS_N_INSNS (17), /* divsi */
616 COSTS_N_INSNS (17), /* divdi */
617 COSTS_N_INSNS (3), /* fp */
618 COSTS_N_INSNS (3), /* dmul */
619 COSTS_N_INSNS (17), /* sdiv */
620 COSTS_N_INSNS (31), /* ddiv */
0b11da67 621 32,
5f732aba
DE
622 32, /* l1 cache */
623 512, /* l2 cache */
0b11da67 624 1, /* streams */
8b897cfa
RS
625};
626
627/* Instruction costs on PPC7450 processors. */
628static const
629struct processor_costs ppc7450_cost = {
06a67bdd
RS
630 COSTS_N_INSNS (4), /* mulsi */
631 COSTS_N_INSNS (3), /* mulsi_const */
632 COSTS_N_INSNS (3), /* mulsi_const9 */
633 COSTS_N_INSNS (4), /* muldi */
634 COSTS_N_INSNS (23), /* divsi */
635 COSTS_N_INSNS (23), /* divdi */
636 COSTS_N_INSNS (5), /* fp */
637 COSTS_N_INSNS (5), /* dmul */
638 COSTS_N_INSNS (21), /* sdiv */
639 COSTS_N_INSNS (35), /* ddiv */
0b11da67 640 32,
5f732aba
DE
641 32, /* l1 cache */
642 1024, /* l2 cache */
0b11da67 643 1, /* streams */
8b897cfa 644};
a3170dc6 645
8b897cfa
RS
646/* Instruction costs on PPC8540 processors. */
647static const
648struct processor_costs ppc8540_cost = {
06a67bdd
RS
649 COSTS_N_INSNS (4), /* mulsi */
650 COSTS_N_INSNS (4), /* mulsi_const */
651 COSTS_N_INSNS (4), /* mulsi_const9 */
652 COSTS_N_INSNS (4), /* muldi */
653 COSTS_N_INSNS (19), /* divsi */
654 COSTS_N_INSNS (19), /* divdi */
655 COSTS_N_INSNS (4), /* fp */
656 COSTS_N_INSNS (4), /* dmul */
657 COSTS_N_INSNS (29), /* sdiv */
658 COSTS_N_INSNS (29), /* ddiv */
0b11da67 659 32,
5f732aba
DE
660 32, /* l1 cache */
661 256, /* l2 cache */
0b11da67 662 1, /* prefetch streams */
8b897cfa
RS
663};
664
665/* Instruction costs on POWER4 and POWER5 processors. */
666static const
667struct processor_costs power4_cost = {
06a67bdd
RS
668 COSTS_N_INSNS (3), /* mulsi */
669 COSTS_N_INSNS (2), /* mulsi_const */
670 COSTS_N_INSNS (2), /* mulsi_const9 */
671 COSTS_N_INSNS (4), /* muldi */
672 COSTS_N_INSNS (18), /* divsi */
673 COSTS_N_INSNS (34), /* divdi */
674 COSTS_N_INSNS (3), /* fp */
675 COSTS_N_INSNS (3), /* dmul */
676 COSTS_N_INSNS (17), /* sdiv */
677 COSTS_N_INSNS (17), /* ddiv */
0b11da67 678 128,
5f732aba
DE
679 32, /* l1 cache */
680 1024, /* l2 cache */
0b11da67 681 8, /* prefetch streams */
8b897cfa
RS
682};
683
44cd321e
PS
684/* Instruction costs on POWER6 processors. */
685static const
686struct processor_costs power6_cost = {
687 COSTS_N_INSNS (8), /* mulsi */
688 COSTS_N_INSNS (8), /* mulsi_const */
689 COSTS_N_INSNS (8), /* mulsi_const9 */
690 COSTS_N_INSNS (8), /* muldi */
691 COSTS_N_INSNS (22), /* divsi */
692 COSTS_N_INSNS (28), /* divdi */
693 COSTS_N_INSNS (3), /* fp */
694 COSTS_N_INSNS (3), /* dmul */
695 COSTS_N_INSNS (13), /* sdiv */
696 COSTS_N_INSNS (16), /* ddiv */
0b11da67 697 128,
5f732aba
DE
698 64, /* l1 cache */
699 2048, /* l2 cache */
0b11da67 700 16, /* prefetch streams */
44cd321e
PS
701};
702
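/* Editor's illustration (assumption, not code from this file): one of the
   cost tables above is selected when the -mcpu/-mtune options are
   processed, roughly

     switch (rs6000_cpu)
       {
       case PROCESSOR_POWER6:
         rs6000_cost = &power6_cost;
         break;
       ... one case per table ...
       }

   and rs6000_rtx_costs then consults fields such as rs6000_cost->mulsi
   when pricing individual RTL operations.  */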
8b897cfa 703\f
a2369ed3 704static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 705static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 706static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
707static void rs6000_emit_stack_tie (void);
708static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
709static rtx spe_synthesize_frame_save (rtx);
710static bool spe_func_has_64bit_regs_p (void);
b20a9cca 711static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 712 int, HOST_WIDE_INT);
a2369ed3
DJ
713static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
714static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
715static unsigned rs6000_hash_constant (rtx);
716static unsigned toc_hash_function (const void *);
717static int toc_hash_eq (const void *, const void *);
718static int constant_pool_expr_1 (rtx, int *, int *);
719static bool constant_pool_expr_p (rtx);
d04b6e6e 720static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
721static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
722static struct machine_function * rs6000_init_machine_status (void);
723static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 724static bool no_global_regs_above (int);
5add3202 725#ifdef HAVE_GAS_HIDDEN
a2369ed3 726static void rs6000_assemble_visibility (tree, int);
5add3202 727#endif
a2369ed3
DJ
728static int rs6000_ra_ever_killed (void);
729static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 730static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 731static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 732static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 733static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 734static const char *rs6000_mangle_type (const_tree);
b86fe7b4 735extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 736static void rs6000_set_default_type_attributes (tree);
52ff33d0 737static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
738static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
739static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
740static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
741 tree);
a2369ed3 742static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 743static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 744static void rs6000_file_start (void);
7c262518 745#if TARGET_ELF
9b580a0b 746static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
747static void rs6000_elf_asm_out_constructor (rtx, int);
748static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 749static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 750static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
751static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
752 unsigned HOST_WIDE_INT);
a56d7372 753static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 754 ATTRIBUTE_UNUSED;
7c262518 755#endif
3101faab 756static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
cbaaba19 757#if TARGET_XCOFF
0d5817b2 758static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 759static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 760static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 761static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 762static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 763static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 764 unsigned HOST_WIDE_INT);
d6b5193b
RS
765static void rs6000_xcoff_unique_section (tree, int);
766static section *rs6000_xcoff_select_rtx_section
767 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
768static const char * rs6000_xcoff_strip_name_encoding (const char *);
769static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
770static void rs6000_xcoff_file_start (void);
771static void rs6000_xcoff_file_end (void);
f1384257 772#endif
a2369ed3
DJ
773static int rs6000_variable_issue (FILE *, int, rtx, int);
774static bool rs6000_rtx_costs (rtx, int, int, int *);
775static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 776static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 777static bool is_microcoded_insn (rtx);
d296e02e 778static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
779static bool is_cracked_insn (rtx);
780static bool is_branch_slot_insn (rtx);
44cd321e 781static bool is_load_insn (rtx);
e3a0e200 782static rtx get_store_dest (rtx pat);
44cd321e
PS
783static bool is_store_insn (rtx);
784static bool set_to_load_agen (rtx,rtx);
982afe02 785static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
786static int rs6000_adjust_priority (rtx, int);
787static int rs6000_issue_rate (void);
b198261f 788static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
789static rtx get_next_active_insn (rtx, rtx);
790static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
791static bool insn_must_be_first_in_group (rtx);
792static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
793static bool is_costly_group (rtx *, rtx);
794static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
795static int redefine_groups (FILE *, int, rtx, rtx);
796static int pad_groups (FILE *, int, rtx, rtx);
797static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
798static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
799static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 800static int rs6000_use_sched_lookahead (void);
d296e02e 801static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 802static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 803static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
804static tree rs6000_builtin_mul_widen_even (tree);
805static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 806static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 807
58646b77 808static void def_builtin (int, const char *, tree, int);
3101faab 809static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
810static void rs6000_init_builtins (void);
811static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
812static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
813static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
814static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
815static void altivec_init_builtins (void);
816static void rs6000_common_init_builtins (void);
c15c90bb 817static void rs6000_init_libfuncs (void);
a2369ed3 818
96038623
DE
819static void paired_init_builtins (void);
820static rtx paired_expand_builtin (tree, rtx, bool *);
821static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
822static rtx paired_expand_stv_builtin (enum insn_code, tree);
823static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
824
b20a9cca
AM
825static void enable_mask_for_builtins (struct builtin_description *, int,
826 enum rs6000_builtins,
827 enum rs6000_builtins);
7c62e993 828static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
829static void spe_init_builtins (void);
830static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 831static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
832static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
833static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
834static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
835static rs6000_stack_t *rs6000_stack_info (void);
836static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
837
838static rtx altivec_expand_builtin (tree, rtx, bool *);
839static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
840static rtx altivec_expand_st_builtin (tree, rtx, bool *);
841static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
842static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 843static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 844 const char *, tree, rtx);
b4a62fa0 845static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 846static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
847static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
848static rtx altivec_expand_vec_set_builtin (tree);
849static rtx altivec_expand_vec_ext_builtin (tree, rtx);
850static int get_element_number (tree, tree);
78f5898b 851static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 852static void rs6000_parse_tls_size_option (void);
5da702b1 853static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
854static int first_altivec_reg_to_save (void);
855static unsigned int compute_vrsave_mask (void);
9390387d 856static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
857static void is_altivec_return_reg (rtx, void *);
858static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
859int easy_vector_constant (rtx, enum machine_mode);
3101faab 860static bool rs6000_is_opaque_type (const_tree);
a2369ed3 861static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 862static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 863static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 864static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
865static rtx rs6000_tls_get_addr (void);
866static rtx rs6000_got_sym (void);
9390387d 867static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
868static const char *rs6000_get_some_local_dynamic_name (void);
869static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 870static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 871static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 872 enum machine_mode, tree);
0b5383eb
DJ
873static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
874 HOST_WIDE_INT);
875static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
876 tree, HOST_WIDE_INT);
877static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
878 HOST_WIDE_INT,
879 rtx[], int *);
880static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
881 const_tree, HOST_WIDE_INT,
882 rtx[], int *);
883static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 884static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 885static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
886static void setup_incoming_varargs (CUMULATIVE_ARGS *,
887 enum machine_mode, tree,
888 int *, int);
8cd5a4e0 889static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 890 const_tree, bool);
78a52f11
RH
891static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
892 tree, bool);
3101faab 893static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
894#if TARGET_MACHO
895static void macho_branch_islands (void);
efdba735
SH
896static int no_previous_def (tree function_name);
897static tree get_prev_label (tree function_name);
c4e18b1c 898static void rs6000_darwin_file_start (void);
efdba735
SH
899#endif
900
c35d187f 901static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 902static void rs6000_va_start (tree, rtx);
23a60a04 903static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 904static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 905static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 906static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 907static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 908 enum machine_mode);
94ff898d 909static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
910 enum machine_mode);
911static int get_vsel_insn (enum machine_mode);
912static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 913static tree rs6000_stack_protect_fail (void);
21213b4c
DP
914
915const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
916static enum machine_mode rs6000_eh_return_filter_mode (void);
917
17211ab5
GK
918/* Hash table stuff for keeping track of TOC entries. */
919
920struct toc_hash_struct GTY(())
921{
922 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
923 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
924 rtx key;
925 enum machine_mode key_mode;
926 int labelno;
927};
928
929static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
930\f
931/* Default register names. */
932char rs6000_reg_names[][8] =
933{
802a0058
MM
934 "0", "1", "2", "3", "4", "5", "6", "7",
935 "8", "9", "10", "11", "12", "13", "14", "15",
936 "16", "17", "18", "19", "20", "21", "22", "23",
937 "24", "25", "26", "27", "28", "29", "30", "31",
938 "0", "1", "2", "3", "4", "5", "6", "7",
939 "8", "9", "10", "11", "12", "13", "14", "15",
940 "16", "17", "18", "19", "20", "21", "22", "23",
941 "24", "25", "26", "27", "28", "29", "30", "31",
942 "mq", "lr", "ctr","ap",
943 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
944 "xer",
945 /* AltiVec registers. */
0cd5e3a1
AH
946 "0", "1", "2", "3", "4", "5", "6", "7",
947 "8", "9", "10", "11", "12", "13", "14", "15",
948 "16", "17", "18", "19", "20", "21", "22", "23",
949 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
950 "vrsave", "vscr",
951 /* SPE registers. */
7d5175e1
JJ
952 "spe_acc", "spefscr",
953 /* Soft frame pointer. */
954 "sfp"
c81bebd7
MM
955};
956
957#ifdef TARGET_REGNAMES
8b60264b 958static const char alt_reg_names[][8] =
c81bebd7 959{
802a0058
MM
960 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
961 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
962 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
963 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
964 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
965 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
966 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
967 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
968 "mq", "lr", "ctr", "ap",
969 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 970 "xer",
59a4c851 971 /* AltiVec registers. */
0ac081f6 972 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
973 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
974 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
975 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
976 "vrsave", "vscr",
977 /* SPE registers. */
7d5175e1
JJ
978 "spe_acc", "spefscr",
979 /* Soft frame pointer. */
980 "sfp"
c81bebd7
MM
981};
982#endif
9878760c 983\f
daf11973
MM
984#ifndef MASK_STRICT_ALIGN
985#define MASK_STRICT_ALIGN 0
986#endif
ffcfcb5f
AM
987#ifndef TARGET_PROFILE_KERNEL
988#define TARGET_PROFILE_KERNEL 0
989#endif
3961e8fe
RH
990
991/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
992#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
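/* Editor's worked example (assuming FIRST_ALTIVEC_REGNO names %v0):

     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO)      -> 0x80000000   (%v0)
     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 1)  -> 0x40000000   (%v1)
     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) -> 0x00000001   (%v31)

   so higher-numbered vector registers occupy less significant bits of
   the VRSAVE mask, matching the comment above.  */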
672a6f42
NB
993\f
994/* Initialize the GCC target structure. */
91d231cb
JM
995#undef TARGET_ATTRIBUTE_TABLE
996#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
997#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
998#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 999
301d03af
RS
1000#undef TARGET_ASM_ALIGNED_DI_OP
1001#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1002
1003/* Default unaligned ops are only provided for ELF. Find the ops needed
1004 for non-ELF systems. */
1005#ifndef OBJECT_FORMAT_ELF
cbaaba19 1006#if TARGET_XCOFF
ae6c1efd 1007/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1008 64-bit targets. */
1009#undef TARGET_ASM_UNALIGNED_HI_OP
1010#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1011#undef TARGET_ASM_UNALIGNED_SI_OP
1012#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1013#undef TARGET_ASM_UNALIGNED_DI_OP
1014#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1015#else
1016/* For Darwin. */
1017#undef TARGET_ASM_UNALIGNED_HI_OP
1018#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1019#undef TARGET_ASM_UNALIGNED_SI_OP
1020#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1021#undef TARGET_ASM_UNALIGNED_DI_OP
1022#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1023#undef TARGET_ASM_ALIGNED_DI_OP
1024#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1025#endif
1026#endif
1027
1028/* This hook deals with fixups for relocatable code and DI-mode objects
1029 in 64-bit code. */
1030#undef TARGET_ASM_INTEGER
1031#define TARGET_ASM_INTEGER rs6000_assemble_integer
1032
93638d7a
AM
1033#ifdef HAVE_GAS_HIDDEN
1034#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1035#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1036#endif
1037
c4501e62
JJ
1038#undef TARGET_HAVE_TLS
1039#define TARGET_HAVE_TLS HAVE_AS_TLS
1040
1041#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1042#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1043
08c148a8
NB
1044#undef TARGET_ASM_FUNCTION_PROLOGUE
1045#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1046#undef TARGET_ASM_FUNCTION_EPILOGUE
1047#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1048
b54cf83a
DE
1049#undef TARGET_SCHED_VARIABLE_ISSUE
1050#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1051
c237e94a
ZW
1052#undef TARGET_SCHED_ISSUE_RATE
1053#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1054#undef TARGET_SCHED_ADJUST_COST
1055#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1056#undef TARGET_SCHED_ADJUST_PRIORITY
1057#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1058#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1059#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1060#undef TARGET_SCHED_INIT
1061#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1062#undef TARGET_SCHED_FINISH
1063#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1064#undef TARGET_SCHED_REORDER
1065#define TARGET_SCHED_REORDER rs6000_sched_reorder
1066#undef TARGET_SCHED_REORDER2
1067#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1068
be12c2b0
VM
1069#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1070#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1071
d296e02e
AP
1072#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1073#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1074
7ccf35ed
DN
1075#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1076#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1077#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1078#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1079#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1080#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1081#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1082#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1083
5b900a4c
DN
1084#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1085#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1086
0ac081f6
AH
1087#undef TARGET_INIT_BUILTINS
1088#define TARGET_INIT_BUILTINS rs6000_init_builtins
1089
1090#undef TARGET_EXPAND_BUILTIN
1091#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1092
608063c3
JB
1093#undef TARGET_MANGLE_TYPE
1094#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1095
c15c90bb
ZW
1096#undef TARGET_INIT_LIBFUNCS
1097#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1098
f1384257 1099#if TARGET_MACHO
0e5dbd9b 1100#undef TARGET_BINDS_LOCAL_P
31920d83 1101#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1102#endif
0e5dbd9b 1103
77ccdfed
EC
1104#undef TARGET_MS_BITFIELD_LAYOUT_P
1105#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1106
3961e8fe
RH
1107#undef TARGET_ASM_OUTPUT_MI_THUNK
1108#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1109
3961e8fe 1110#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1111#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1112
4977bab6
ZW
1113#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1114#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1115
2e3f0db6
DJ
1116#undef TARGET_INVALID_WITHIN_DOLOOP
1117#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1118
3c50106f
RH
1119#undef TARGET_RTX_COSTS
1120#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1121#undef TARGET_ADDRESS_COST
1122#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1123
c8e4f0e9 1124#undef TARGET_VECTOR_OPAQUE_P
58646b77 1125#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1126
96714395
AH
1127#undef TARGET_DWARF_REGISTER_SPAN
1128#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1129
37ea0b7e
JM
1130#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1131#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1132
c6e8c921
GK
1133/* On rs6000, function arguments are promoted, as are function return
1134 values. */
1135#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1136#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1137#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1138#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1139
c6e8c921
GK
1140#undef TARGET_RETURN_IN_MEMORY
1141#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1142
1143#undef TARGET_SETUP_INCOMING_VARARGS
1144#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1145
1146/* Always strict argument naming on rs6000. */
1147#undef TARGET_STRICT_ARGUMENT_NAMING
1148#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1149#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1150#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1151#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1152#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1153#undef TARGET_MUST_PASS_IN_STACK
1154#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1155#undef TARGET_PASS_BY_REFERENCE
1156#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1157#undef TARGET_ARG_PARTIAL_BYTES
1158#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1159
c35d187f
RH
1160#undef TARGET_BUILD_BUILTIN_VA_LIST
1161#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1162
d7bd8aeb
JJ
1163#undef TARGET_EXPAND_BUILTIN_VA_START
1164#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1165
cd3ce9b4
JM
1166#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1167#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1168
93f90be6
FJ
1169#undef TARGET_EH_RETURN_FILTER_MODE
1170#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1171
00b79d54
BE
1172#undef TARGET_SCALAR_MODE_SUPPORTED_P
1173#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1174
f676971a
EC
1175#undef TARGET_VECTOR_MODE_SUPPORTED_P
1176#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1177
4d3e6fae
FJ
1178#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1179#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1180
78f5898b
AH
1181#undef TARGET_HANDLE_OPTION
1182#define TARGET_HANDLE_OPTION rs6000_handle_option
1183
1184#undef TARGET_DEFAULT_TARGET_FLAGS
1185#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1186 (TARGET_DEFAULT)
78f5898b 1187
3aebbe5f
JJ
1188#undef TARGET_STACK_PROTECT_FAIL
1189#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1190
445cf5eb
JM
1191/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1192 The PowerPC architecture requires only weak consistency among
1193 processors--that is, memory accesses between processors need not be
1194 sequentially consistent and memory accesses among processors can occur
1195 in any order. The ability to order memory accesses weakly provides
1196 opportunities for more efficient use of the system bus. Unless a
1197 dependency exists, the 604e allows read operations to precede store
1198 operations. */
1199#undef TARGET_RELAXED_ORDERING
1200#define TARGET_RELAXED_ORDERING true
1201
fdbe66f2
EB
1202#ifdef HAVE_AS_TLS
1203#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1204#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1205#endif
1206
aacd3885
RS
1207/* Use a 32-bit anchor range. This leads to sequences like:
1208
1209 addis tmp,anchor,high
1210 add dest,tmp,low
1211
1212 where tmp itself acts as an anchor, and can be shared between
1213 accesses to the same 64k page. */
1214#undef TARGET_MIN_ANCHOR_OFFSET
1215#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1216#undef TARGET_MAX_ANCHOR_OFFSET
1217#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1218#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1219#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1220
9c78b944
DE
1221#undef TARGET_BUILTIN_RECIPROCAL
1222#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1223
f6897b10 1224struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1225\f
0d1fbc8c
AH
1226
1227/* Value is 1 if hard register REGNO can hold a value of machine-mode
1228 MODE. */
1229static int
1230rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1231{
1232 /* The GPRs can hold any mode, but values bigger than one register
1233 cannot go past R31. */
1234 if (INT_REGNO_P (regno))
1235 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1236
a5a97921 1237 /* The float registers can only hold floating modes and DImode.
7393f7f8 1238 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1239 if (FP_REGNO_P (regno))
1240 return
96038623 1241 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1242 && (mode != TDmode || (regno % 2) == 0)
7393f7f8 1243 && mode != SDmode
0d1fbc8c
AH
1244 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1245 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1246 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1247 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1248 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1249
1250 /* The CR register can only hold CC modes. */
1251 if (CR_REGNO_P (regno))
1252 return GET_MODE_CLASS (mode) == MODE_CC;
1253
1254 if (XER_REGNO_P (regno))
1255 return mode == PSImode;
1256
 1257 /* AltiVec only in AltiVec registers. */
1258 if (ALTIVEC_REGNO_P (regno))
1259 return ALTIVEC_VECTOR_MODE (mode);
1260
1261 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1262 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1263 return 1;
1264
 1265 /* We cannot put TImode anywhere except the general registers, and it must be
1266 able to fit within the register set. */
1267
1268 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1269}
1270
1271/* Initialize rs6000_hard_regno_mode_ok_p table. */
1272static void
1273rs6000_init_hard_regno_mode_ok (void)
1274{
1275 int r, m;
1276
1277 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1278 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1279 if (rs6000_hard_regno_mode_ok (r, m))
1280 rs6000_hard_regno_mode_ok_p[m][r] = true;
1281}
1282
e4cad568
GK
1283#if TARGET_MACHO
1284/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1285
1286static void
1287darwin_rs6000_override_options (void)
1288{
1289 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1290 off. */
1291 rs6000_altivec_abi = 1;
1292 TARGET_ALTIVEC_VRSAVE = 1;
1293 if (DEFAULT_ABI == ABI_DARWIN)
1294 {
1295 if (MACHO_DYNAMIC_NO_PIC_P)
1296 {
1297 if (flag_pic)
1298 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1299 flag_pic = 0;
1300 }
1301 else if (flag_pic == 1)
1302 {
1303 flag_pic = 2;
1304 }
1305 }
1306 if (TARGET_64BIT && ! TARGET_POWERPC64)
1307 {
1308 target_flags |= MASK_POWERPC64;
1309 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1310 }
1311 if (flag_mkernel)
1312 {
1313 rs6000_default_long_calls = 1;
1314 target_flags |= MASK_SOFT_FLOAT;
1315 }
1316
1317 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1318 Altivec. */
1319 if (!flag_mkernel && !flag_apple_kext
1320 && TARGET_64BIT
1321 && ! (target_flags_explicit & MASK_ALTIVEC))
1322 target_flags |= MASK_ALTIVEC;
1323
1324 /* Unless the user (not the configurer) has explicitly overridden
 1325 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
 1326 G4 unless targeting the kernel. */
1327 if (!flag_mkernel
1328 && !flag_apple_kext
1329 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1330 && ! (target_flags_explicit & MASK_ALTIVEC)
1331 && ! rs6000_select[1].string)
1332 {
1333 target_flags |= MASK_ALTIVEC;
1334 }
1335}
1336#endif
1337
c1e55850
GK
1338/* If not otherwise specified by a target, make 'long double' equivalent to
1339 'double'. */
1340
1341#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1342#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1343#endif
1344
5248c961
RK
1345/* Override command line options. Mostly we process the processor
1346 type and sometimes adjust other TARGET_ options. */
1347
1348void
d779d0dc 1349rs6000_override_options (const char *default_cpu)
5248c961 1350{
c4d38ccb 1351 size_t i, j;
8e3f41e7 1352 struct rs6000_cpu_select *ptr;
66188a7e 1353 int set_masks;
5248c961 1354
66188a7e 1355 /* Simplifications for entries below. */
85638c0d 1356
66188a7e
GK
1357 enum {
1358 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1359 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1360 };
85638c0d 1361
66188a7e
GK
1362 /* This table occasionally claims that a processor does not support
1363 a particular feature even though it does, but the feature is slower
1364 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1365 complete description of the processor's support.
66188a7e
GK
1366
1367 Please keep this list in order, and don't forget to update the
1368 documentation in invoke.texi when adding a new processor or
1369 flag. */
5248c961
RK
1370 static struct ptt
1371 {
8b60264b
KG
1372 const char *const name; /* Canonical processor name. */
1373 const enum processor_type processor; /* Processor type enum value. */
1374 const int target_enable; /* Target flags to enable. */
8b60264b 1375 } const processor_target_table[]
66188a7e 1376 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1377 {"403", PROCESSOR_PPC403,
66188a7e 1378 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1379 {"405", PROCESSOR_PPC405,
716019c0
JM
1380 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1381 {"405fp", PROCESSOR_PPC405,
1382 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1383 {"440", PROCESSOR_PPC440,
716019c0
JM
1384 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1385 {"440fp", PROCESSOR_PPC440,
1386 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1387 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1388 {"601", PROCESSOR_PPC601,
66188a7e
GK
1389 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1390 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1391 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1392 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1393 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1394 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1395 {"620", PROCESSOR_PPC620,
1396 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1397 {"630", PROCESSOR_PPC630,
1398 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1399 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1400 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1401 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1402 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1403 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1404 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1405 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1406 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1407 /* 8548 has a dummy entry for now. */
a45bce6e 1408 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1409 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1410 {"970", PROCESSOR_POWER4,
66188a7e 1411 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1412 {"cell", PROCESSOR_CELL,
1413 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1414 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1415 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1416 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1417 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1418 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1419 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1420 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1421 {"power2", PROCESSOR_POWER,
1422 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1423 {"power3", PROCESSOR_PPC630,
1424 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1425 {"power4", PROCESSOR_POWER4,
fc091c8e 1426 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1427 {"power5", PROCESSOR_POWER5,
432218ba
DE
1428 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1429 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1430 {"power5+", PROCESSOR_POWER5,
1431 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1432 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1433 {"power6", PROCESSOR_POWER6,
e118597e 1434 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1435 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1436 {"power6x", PROCESSOR_POWER6,
1437 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1438 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1439 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1440 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1441 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1442 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1443 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1444 {"rios2", PROCESSOR_RIOS2,
1445 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1446 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1447 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1448 {"rs64", PROCESSOR_RS64A,
1449 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1450 };
5248c961 1451
ca7558fc 1452 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1453
66188a7e
GK
1454 /* Some OSs don't support saving the high part of 64-bit registers on
1455 context switch. Other OSs don't support saving Altivec registers.
1456 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1457 settings; if the user wants either, the user must explicitly specify
1458 them and we won't interfere with the user's specification. */
1459
1460 enum {
1461 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1462 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1463 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1464 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1465 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1466 };
0d1fbc8c
AH
1467
1468 rs6000_init_hard_regno_mode_ok ();
1469
c4ad648e 1470 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1471#ifdef OS_MISSING_POWERPC64
1472 if (OS_MISSING_POWERPC64)
1473 set_masks &= ~MASK_POWERPC64;
1474#endif
1475#ifdef OS_MISSING_ALTIVEC
1476 if (OS_MISSING_ALTIVEC)
1477 set_masks &= ~MASK_ALTIVEC;
1478#endif
1479
768875a8
AM
 1480 /* Don't let the processor default override flags the user set explicitly. */
1481 set_masks &= ~target_flags_explicit;
957211c3 1482
a4f6c312 1483 /* Identify the processor type. */
8e3f41e7 1484 rs6000_select[0].string = default_cpu;
3cb999d8 1485 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1486
b6a1cbae 1487 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1488 {
8e3f41e7
MM
1489 ptr = &rs6000_select[i];
1490 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1491 {
8e3f41e7
MM
1492 for (j = 0; j < ptt_size; j++)
1493 if (! strcmp (ptr->string, processor_target_table[j].name))
1494 {
1495 if (ptr->set_tune_p)
1496 rs6000_cpu = processor_target_table[j].processor;
1497
1498 if (ptr->set_arch_p)
1499 {
66188a7e
GK
1500 target_flags &= ~set_masks;
1501 target_flags |= (processor_target_table[j].target_enable
1502 & set_masks);
8e3f41e7
MM
1503 }
1504 break;
1505 }
1506
4406229e 1507 if (j == ptt_size)
8e3f41e7 1508 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1509 }
1510 }
8a61d227 1511
993f19a8 1512 if (TARGET_E500)
a3170dc6
AH
1513 rs6000_isel = 1;
1514
dff9f1b6
DE
1515 /* If we are optimizing big endian systems for space, use the load/store
1516 multiple and string instructions. */
ef792183 1517 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1518 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1519
a4f6c312
SS
1520 /* Don't allow -mmultiple or -mstring on little endian systems
1521 unless the cpu is a 750, because the hardware doesn't support the
 1522 instructions used in little endian mode, and using them causes an alignment
1523 trap. The 750 does not cause an alignment trap (except when the
1524 target is unaligned). */
bef84347 1525
b21fb038 1526 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1527 {
1528 if (TARGET_MULTIPLE)
1529 {
1530 target_flags &= ~MASK_MULTIPLE;
b21fb038 1531 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1532 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1533 }
1534
1535 if (TARGET_STRING)
1536 {
1537 target_flags &= ~MASK_STRING;
b21fb038 1538 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1539 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1540 }
1541 }
3933e0e1 1542
38c1f2d7
MM
1543 /* Set debug flags */
1544 if (rs6000_debug_name)
1545 {
bfc79d3b 1546 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1547 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1548 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1549 rs6000_debug_stack = 1;
bfc79d3b 1550 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1551 rs6000_debug_arg = 1;
1552 else
c725bd79 1553 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1554 }
1555
57ac7be9
AM
1556 if (rs6000_traceback_name)
1557 {
1558 if (! strncmp (rs6000_traceback_name, "full", 4))
1559 rs6000_traceback = traceback_full;
1560 else if (! strncmp (rs6000_traceback_name, "part", 4))
1561 rs6000_traceback = traceback_part;
1562 else if (! strncmp (rs6000_traceback_name, "no", 2))
1563 rs6000_traceback = traceback_none;
1564 else
9e637a26 1565 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1566 rs6000_traceback_name);
1567 }
1568
78f5898b
AH
1569 if (!rs6000_explicit_options.long_double)
1570 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1571
602ea4d3 1572#ifndef POWERPC_LINUX
d3603e8c 1573 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1574 rs6000_ieeequad = 1;
1575#endif
1576
0db747be
DE
1577 /* Enable Altivec ABI for AIX -maltivec. */
1578 if (TARGET_XCOFF && TARGET_ALTIVEC)
1579 rs6000_altivec_abi = 1;
1580
1581 /* Set Altivec ABI as default for PowerPC64 Linux. */
6d0ef01e
HP
1582 if (TARGET_ELF && TARGET_64BIT)
1583 {
1584 rs6000_altivec_abi = 1;
78f5898b 1585 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1586 }
1587
594a51fe
SS
1588 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1589 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1590 {
1591 rs6000_darwin64_abi = 1;
9c7956fd 1592#if TARGET_MACHO
6ac49599 1593 darwin_one_byte_bool = 1;
9c7956fd 1594#endif
d9168963
SS
1595 /* Default to natural alignment, for better performance. */
1596 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1597 }
1598
194c524a
DE
1599 /* Place FP constants in the constant pool instead of TOC
 1600 if section anchors are enabled. */
1601 if (flag_section_anchors)
1602 TARGET_NO_FP_IN_TOC = 1;
1603
c4501e62
JJ
1604 /* Handle -mtls-size option. */
1605 rs6000_parse_tls_size_option ();
1606
a7ae18e2
AH
1607#ifdef SUBTARGET_OVERRIDE_OPTIONS
1608 SUBTARGET_OVERRIDE_OPTIONS;
1609#endif
1610#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1611 SUBSUBTARGET_OVERRIDE_OPTIONS;
1612#endif
4d4cbc0e
AH
1613#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1614 SUB3TARGET_OVERRIDE_OPTIONS;
1615#endif
a7ae18e2 1616
5da702b1
AH
1617 if (TARGET_E500)
1618 {
1619 /* The e500 does not have string instructions, and we set
1620 MASK_STRING above when optimizing for size. */
1621 if ((target_flags & MASK_STRING) != 0)
1622 target_flags = target_flags & ~MASK_STRING;
1623 }
1624 else if (rs6000_select[1].string != NULL)
1625 {
1626 /* For the powerpc-eabispe configuration, we set all these by
1627 default, so let's unset them if we manually set another
1628 CPU that is not the E500. */
78f5898b 1629 if (!rs6000_explicit_options.abi)
5da702b1 1630 rs6000_spe_abi = 0;
78f5898b 1631 if (!rs6000_explicit_options.spe)
5da702b1 1632 rs6000_spe = 0;
78f5898b 1633 if (!rs6000_explicit_options.float_gprs)
5da702b1 1634 rs6000_float_gprs = 0;
78f5898b 1635 if (!rs6000_explicit_options.isel)
5da702b1
AH
1636 rs6000_isel = 0;
1637 }
b5044283 1638
eca0d5e8
JM
1639 /* Detect invalid option combinations with E500. */
1640 CHECK_E500_OPTIONS;
1641
ec507f2d 1642 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1643 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1644 && rs6000_cpu != PROCESSOR_POWER6
1645 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1646 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1647 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1648 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1649 || rs6000_cpu == PROCESSOR_POWER5
1650 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1651
ec507f2d
DE
1652 rs6000_sched_restricted_insns_priority
1653 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1654
569fa502 1655 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1656 rs6000_sched_costly_dep
1657 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1658
569fa502
DN
1659 if (rs6000_sched_costly_dep_str)
1660 {
f676971a 1661 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1662 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1663 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1664 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1665 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1666 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1667 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1668 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1669 else
c4ad648e 1670 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1671 }
1672
1673 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1674 rs6000_sched_insert_nops
1675 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1676
cbe26ab8
DN
1677 if (rs6000_sched_insert_nops_str)
1678 {
1679 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1680 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1681 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1682 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1683 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1684 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1685 else
c4ad648e 1686 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1687 }
1688
c81bebd7 1689#ifdef TARGET_REGNAMES
a4f6c312
SS
1690 /* If the user desires alternate register names, copy in the
1691 alternate names now. */
c81bebd7 1692 if (TARGET_REGNAMES)
4e135bdd 1693 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1694#endif
1695
df01da37 1696 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1697 If -maix-struct-return or -msvr4-struct-return was explicitly
1698 used, don't override with the ABI default. */
df01da37
DE
1699 if (!rs6000_explicit_options.aix_struct_ret)
1700 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1701
602ea4d3 1702 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1703 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1704
f676971a 1705 if (TARGET_TOC)
9ebbca7d 1706 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1707
301d03af
RS
1708 /* We can only guarantee the availability of DI pseudo-ops when
1709 assembling for 64-bit targets. */
ae6c1efd 1710 if (!TARGET_64BIT)
301d03af
RS
1711 {
1712 targetm.asm_out.aligned_op.di = NULL;
1713 targetm.asm_out.unaligned_op.di = NULL;
1714 }
1715
1494c534
DE
1716 /* Set branch target alignment, if not optimizing for size. */
1717 if (!optimize_size)
1718 {
d296e02e
AP
 1719 /* Cell wants branch targets aligned to 8 bytes for dual issue. */
1720 if (rs6000_cpu == PROCESSOR_CELL)
1721 {
1722 if (align_functions <= 0)
1723 align_functions = 8;
1724 if (align_jumps <= 0)
1725 align_jumps = 8;
1726 if (align_loops <= 0)
1727 align_loops = 8;
1728 }
44cd321e 1729 if (rs6000_align_branch_targets)
1494c534
DE
1730 {
1731 if (align_functions <= 0)
1732 align_functions = 16;
1733 if (align_jumps <= 0)
1734 align_jumps = 16;
1735 if (align_loops <= 0)
1736 align_loops = 16;
1737 }
1738 if (align_jumps_max_skip <= 0)
1739 align_jumps_max_skip = 15;
1740 if (align_loops_max_skip <= 0)
1741 align_loops_max_skip = 15;
1742 }
2792d578 1743
71f123ca
FS
1744 /* Arrange to save and restore machine status around nested functions. */
1745 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1746
1747 /* We should always be splitting complex arguments, but we can't break
1748 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1749 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1750 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1751
1752 /* Initialize rs6000_cost with the appropriate target costs. */
1753 if (optimize_size)
1754 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1755 else
1756 switch (rs6000_cpu)
1757 {
1758 case PROCESSOR_RIOS1:
1759 rs6000_cost = &rios1_cost;
1760 break;
1761
1762 case PROCESSOR_RIOS2:
1763 rs6000_cost = &rios2_cost;
1764 break;
1765
1766 case PROCESSOR_RS64A:
1767 rs6000_cost = &rs64a_cost;
1768 break;
1769
1770 case PROCESSOR_MPCCORE:
1771 rs6000_cost = &mpccore_cost;
1772 break;
1773
1774 case PROCESSOR_PPC403:
1775 rs6000_cost = &ppc403_cost;
1776 break;
1777
1778 case PROCESSOR_PPC405:
1779 rs6000_cost = &ppc405_cost;
1780 break;
1781
1782 case PROCESSOR_PPC440:
1783 rs6000_cost = &ppc440_cost;
1784 break;
1785
1786 case PROCESSOR_PPC601:
1787 rs6000_cost = &ppc601_cost;
1788 break;
1789
1790 case PROCESSOR_PPC603:
1791 rs6000_cost = &ppc603_cost;
1792 break;
1793
1794 case PROCESSOR_PPC604:
1795 rs6000_cost = &ppc604_cost;
1796 break;
1797
1798 case PROCESSOR_PPC604e:
1799 rs6000_cost = &ppc604e_cost;
1800 break;
1801
1802 case PROCESSOR_PPC620:
8b897cfa
RS
1803 rs6000_cost = &ppc620_cost;
1804 break;
1805
f0517163
RS
1806 case PROCESSOR_PPC630:
1807 rs6000_cost = &ppc630_cost;
1808 break;
1809
982afe02 1810 case PROCESSOR_CELL:
d296e02e
AP
1811 rs6000_cost = &ppccell_cost;
1812 break;
1813
8b897cfa
RS
1814 case PROCESSOR_PPC750:
1815 case PROCESSOR_PPC7400:
1816 rs6000_cost = &ppc750_cost;
1817 break;
1818
1819 case PROCESSOR_PPC7450:
1820 rs6000_cost = &ppc7450_cost;
1821 break;
1822
1823 case PROCESSOR_PPC8540:
1824 rs6000_cost = &ppc8540_cost;
1825 break;
1826
1827 case PROCESSOR_POWER4:
1828 case PROCESSOR_POWER5:
1829 rs6000_cost = &power4_cost;
1830 break;
1831
44cd321e
PS
1832 case PROCESSOR_POWER6:
1833 rs6000_cost = &power6_cost;
1834 break;
1835
8b897cfa 1836 default:
37409796 1837 gcc_unreachable ();
8b897cfa 1838 }
0b11da67
DE
1839
1840 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1841 set_param_value ("simultaneous-prefetches",
1842 rs6000_cost->simultaneous_prefetches);
1843 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1844 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1845 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1846 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1847 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1848 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1849
1850 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1851 can be optimized to ap = __builtin_next_arg (0). */
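/* Editorial note, an assumption added for clarity: on the non-V.4 ABIs
   (AIX, Darwin) va_list is a plain pointer type, so clearing the target
   hook below lets the generic va_start expansion, and hence the
   optimization described above, apply.  */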
1852 if (DEFAULT_ABI != ABI_V4)
1853 targetm.expand_builtin_va_start = NULL;
5248c961 1854}
5accd822 1855
7ccf35ed
DN
1856/* Implement targetm.vectorize.builtin_mask_for_load. */
1857static tree
1858rs6000_builtin_mask_for_load (void)
1859{
1860 if (TARGET_ALTIVEC)
1861 return altivec_builtin_mask_for_load;
1862 else
1863 return 0;
1864}
1865
f57d17f1
TM
1866/* Implement targetm.vectorize.builtin_conversion. */
1867static tree
1868rs6000_builtin_conversion (enum tree_code code, tree type)
1869{
1870 if (!TARGET_ALTIVEC)
1871 return NULL_TREE;
982afe02 1872
f57d17f1
TM
1873 switch (code)
1874 {
1875 case FLOAT_EXPR:
1876 switch (TYPE_MODE (type))
1877 {
1878 case V4SImode:
982afe02 1879 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1880 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1881 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1882 default:
1883 return NULL_TREE;
1884 }
1885 default:
1886 return NULL_TREE;
1887 }
1888}
1889
89d67cca
DN
1890/* Implement targetm.vectorize.builtin_mul_widen_even. */
1891static tree
1892rs6000_builtin_mul_widen_even (tree type)
1893{
1894 if (!TARGET_ALTIVEC)
1895 return NULL_TREE;
1896
1897 switch (TYPE_MODE (type))
1898 {
1899 case V8HImode:
982afe02 1900 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1901 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1902 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1903
1904 case V16QImode:
1905 return TYPE_UNSIGNED (type) ?
1906 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1907 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1908 default:
1909 return NULL_TREE;
1910 }
1911}
1912
1913/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1914static tree
1915rs6000_builtin_mul_widen_odd (tree type)
1916{
1917 if (!TARGET_ALTIVEC)
1918 return NULL_TREE;
1919
1920 switch (TYPE_MODE (type))
1921 {
1922 case V8HImode:
1923 return TYPE_UNSIGNED (type) ?
1924 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1925 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1926
1927 case V16QImode:
1928 return TYPE_UNSIGNED (type) ?
1929 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1930 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1931 default:
1932 return NULL_TREE;
1933 }
1934}
1935
5b900a4c
DN
1936
 1937/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1938 after applying N iterations. This routine does not determine
 1939 how many iterations are required to reach the desired alignment. */
1940
1941static bool
3101faab 1942rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1943{
1944 if (is_packed)
1945 return false;
1946
1947 if (TARGET_32BIT)
1948 {
1949 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1950 return true;
1951
1952 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1953 return true;
1954
1955 return false;
1956 }
1957 else
1958 {
1959 if (TARGET_MACHO)
1960 return false;
1961
 1962 /* Assume that all other types are naturally aligned. CHECKME! */
1963 return true;
1964 }
1965}
1966
5da702b1
AH
1967/* Handle generic options of the form -mfoo=yes/no.
1968 NAME is the option name.
1969 VALUE is the option value.
 1970 FLAG points to the flag in which to store 1 or 0, according to
 1971 whether the option value is 'yes' or 'no'. */
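/* Editorial example, not part of the original source: rs6000_handle_option
   below invokes this helper as, e.g.,
   rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE))
   when handling -mvrsave=yes or -mvrsave=no.  */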
993f19a8 1972static void
5da702b1 1973rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1974{
5da702b1 1975 if (value == 0)
993f19a8 1976 return;
5da702b1
AH
1977 else if (!strcmp (value, "yes"))
1978 *flag = 1;
1979 else if (!strcmp (value, "no"))
1980 *flag = 0;
08b57fb3 1981 else
5da702b1 1982 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1983}
1984
c4501e62
JJ
1985/* Validate and record the size specified with the -mtls-size option. */
1986
1987static void
863d938c 1988rs6000_parse_tls_size_option (void)
c4501e62
JJ
1989{
1990 if (rs6000_tls_size_string == 0)
1991 return;
1992 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1993 rs6000_tls_size = 16;
1994 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1995 rs6000_tls_size = 32;
1996 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1997 rs6000_tls_size = 64;
1998 else
9e637a26 1999 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2000}
2001
5accd822 2002void
a2369ed3 2003optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2004{
2e3f0db6
DJ
2005 if (DEFAULT_ABI == ABI_DARWIN)
2006 /* The Darwin libraries never set errno, so we might as well
2007 avoid calling them when that's the only reason we would. */
2008 flag_errno_math = 0;
59d6560b
DE
2009
2010 /* Double growth factor to counter reduced min jump length. */
2011 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2012
2013 /* Enable section anchors by default.
2014 Skip section anchors for Objective C and Objective C++
2015 until front-ends fixed. */
23f99493 2016 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2017 flag_section_anchors = 1;
5accd822 2018}
78f5898b
AH
2019
2020/* Implement TARGET_HANDLE_OPTION. */
2021
2022static bool
2023rs6000_handle_option (size_t code, const char *arg, int value)
2024{
2025 switch (code)
2026 {
2027 case OPT_mno_power:
2028 target_flags &= ~(MASK_POWER | MASK_POWER2
2029 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2030 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2031 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2032 break;
2033 case OPT_mno_powerpc:
2034 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2035 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2036 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2037 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2038 break;
2039 case OPT_mfull_toc:
d2894ab5
DE
2040 target_flags &= ~MASK_MINIMAL_TOC;
2041 TARGET_NO_FP_IN_TOC = 0;
2042 TARGET_NO_SUM_IN_TOC = 0;
2043 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2044#ifdef TARGET_USES_SYSV4_OPT
 2045 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
2046 just the same as -mminimal-toc. */
2047 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2048 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2049#endif
2050 break;
2051
2052#ifdef TARGET_USES_SYSV4_OPT
2053 case OPT_mtoc:
2054 /* Make -mtoc behave like -mminimal-toc. */
2055 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2056 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2057 break;
2058#endif
2059
2060#ifdef TARGET_USES_AIX64_OPT
2061 case OPT_maix64:
2062#else
2063 case OPT_m64:
2064#endif
2c9c9afd
AM
2065 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2066 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2067 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2068 break;
2069
2070#ifdef TARGET_USES_AIX64_OPT
2071 case OPT_maix32:
2072#else
2073 case OPT_m32:
2074#endif
2075 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2076 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2077 break;
2078
2079 case OPT_minsert_sched_nops_:
2080 rs6000_sched_insert_nops_str = arg;
2081 break;
2082
2083 case OPT_mminimal_toc:
2084 if (value == 1)
2085 {
d2894ab5
DE
2086 TARGET_NO_FP_IN_TOC = 0;
2087 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2088 }
2089 break;
2090
2091 case OPT_mpower:
2092 if (value == 1)
c2dba4ab
AH
2093 {
2094 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2095 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2096 }
78f5898b
AH
2097 break;
2098
2099 case OPT_mpower2:
2100 if (value == 1)
c2dba4ab
AH
2101 {
2102 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2103 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2104 }
78f5898b
AH
2105 break;
2106
2107 case OPT_mpowerpc_gpopt:
2108 case OPT_mpowerpc_gfxopt:
2109 if (value == 1)
c2dba4ab
AH
2110 {
2111 target_flags |= MASK_POWERPC;
2112 target_flags_explicit |= MASK_POWERPC;
2113 }
78f5898b
AH
2114 break;
2115
df01da37
DE
2116 case OPT_maix_struct_return:
2117 case OPT_msvr4_struct_return:
2118 rs6000_explicit_options.aix_struct_ret = true;
2119 break;
2120
78f5898b
AH
2121 case OPT_mvrsave_:
2122 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2123 break;
78f5898b
AH
2124
2125 case OPT_misel_:
2126 rs6000_explicit_options.isel = true;
2127 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2128 break;
2129
2130 case OPT_mspe_:
2131 rs6000_explicit_options.spe = true;
2132 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2133 break;
2134
2135 case OPT_mdebug_:
2136 rs6000_debug_name = arg;
2137 break;
2138
2139#ifdef TARGET_USES_SYSV4_OPT
2140 case OPT_mcall_:
2141 rs6000_abi_name = arg;
2142 break;
2143
2144 case OPT_msdata_:
2145 rs6000_sdata_name = arg;
2146 break;
2147
2148 case OPT_mtls_size_:
2149 rs6000_tls_size_string = arg;
2150 break;
2151
2152 case OPT_mrelocatable:
2153 if (value == 1)
c2dba4ab 2154 {
e0bf274f
AM
2155 target_flags |= MASK_MINIMAL_TOC;
2156 target_flags_explicit |= MASK_MINIMAL_TOC;
2157 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2158 }
78f5898b
AH
2159 break;
2160
2161 case OPT_mrelocatable_lib:
2162 if (value == 1)
c2dba4ab 2163 {
e0bf274f
AM
2164 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2165 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2166 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2167 }
78f5898b 2168 else
c2dba4ab
AH
2169 {
2170 target_flags &= ~MASK_RELOCATABLE;
2171 target_flags_explicit |= MASK_RELOCATABLE;
2172 }
78f5898b
AH
2173 break;
2174#endif
2175
2176 case OPT_mabi_:
78f5898b
AH
2177 if (!strcmp (arg, "altivec"))
2178 {
d3603e8c 2179 rs6000_explicit_options.abi = true;
78f5898b
AH
2180 rs6000_altivec_abi = 1;
2181 rs6000_spe_abi = 0;
2182 }
2183 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2184 {
2185 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2186 the default for rs6000_spe_abi to be chosen later. */
2187 rs6000_altivec_abi = 0;
2188 }
78f5898b
AH
2189 else if (! strcmp (arg, "spe"))
2190 {
d3603e8c 2191 rs6000_explicit_options.abi = true;
78f5898b
AH
2192 rs6000_spe_abi = 1;
2193 rs6000_altivec_abi = 0;
2194 if (!TARGET_SPE_ABI)
2195 error ("not configured for ABI: '%s'", arg);
2196 }
2197 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2198 {
2199 rs6000_explicit_options.abi = true;
2200 rs6000_spe_abi = 0;
2201 }
78f5898b
AH
2202
 2203 /* These are here for testing during development only; please do not
 2204 document them in the manual. */
2205 else if (! strcmp (arg, "d64"))
2206 {
2207 rs6000_darwin64_abi = 1;
2208 warning (0, "Using darwin64 ABI");
2209 }
2210 else if (! strcmp (arg, "d32"))
2211 {
2212 rs6000_darwin64_abi = 0;
2213 warning (0, "Using old darwin ABI");
2214 }
2215
602ea4d3
JJ
2216 else if (! strcmp (arg, "ibmlongdouble"))
2217 {
d3603e8c 2218 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2219 rs6000_ieeequad = 0;
2220 warning (0, "Using IBM extended precision long double");
2221 }
2222 else if (! strcmp (arg, "ieeelongdouble"))
2223 {
d3603e8c 2224 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2225 rs6000_ieeequad = 1;
2226 warning (0, "Using IEEE extended precision long double");
2227 }
2228
78f5898b
AH
2229 else
2230 {
2231 error ("unknown ABI specified: '%s'", arg);
2232 return false;
2233 }
2234 break;
2235
2236 case OPT_mcpu_:
2237 rs6000_select[1].string = arg;
2238 break;
2239
2240 case OPT_mtune_:
2241 rs6000_select[2].string = arg;
2242 break;
2243
2244 case OPT_mtraceback_:
2245 rs6000_traceback_name = arg;
2246 break;
2247
2248 case OPT_mfloat_gprs_:
2249 rs6000_explicit_options.float_gprs = true;
2250 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2251 rs6000_float_gprs = 1;
2252 else if (! strcmp (arg, "double"))
2253 rs6000_float_gprs = 2;
2254 else if (! strcmp (arg, "no"))
2255 rs6000_float_gprs = 0;
2256 else
2257 {
2258 error ("invalid option for -mfloat-gprs: '%s'", arg);
2259 return false;
2260 }
2261 break;
2262
2263 case OPT_mlong_double_:
2264 rs6000_explicit_options.long_double = true;
2265 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2266 if (value != 64 && value != 128)
2267 {
2268 error ("Unknown switch -mlong-double-%s", arg);
2269 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2270 return false;
2271 }
2272 else
2273 rs6000_long_double_type_size = value;
2274 break;
2275
2276 case OPT_msched_costly_dep_:
2277 rs6000_sched_costly_dep_str = arg;
2278 break;
2279
2280 case OPT_malign_:
2281 rs6000_explicit_options.alignment = true;
2282 if (! strcmp (arg, "power"))
2283 {
2284 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2285 some C library functions, so warn about it. The flag may be
2286 useful for performance studies from time to time though, so
2287 don't disable it entirely. */
2288 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2289 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2290 " it is incompatible with the installed C and C++ libraries");
2291 rs6000_alignment_flags = MASK_ALIGN_POWER;
2292 }
2293 else if (! strcmp (arg, "natural"))
2294 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2295 else
2296 {
2297 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2298 return false;
2299 }
2300 break;
2301 }
2302 return true;
2303}
3cfa4909
MM
2304\f
2305/* Do anything needed at the start of the asm file. */
2306
1bc7c5b6 2307static void
863d938c 2308rs6000_file_start (void)
3cfa4909 2309{
c4d38ccb 2310 size_t i;
3cfa4909 2311 char buffer[80];
d330fd93 2312 const char *start = buffer;
3cfa4909 2313 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2314 const char *default_cpu = TARGET_CPU_DEFAULT;
2315 FILE *file = asm_out_file;
2316
2317 default_file_start ();
2318
2319#ifdef TARGET_BI_ARCH
2320 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2321 default_cpu = 0;
2322#endif
3cfa4909
MM
2323
2324 if (flag_verbose_asm)
2325 {
2326 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2327 rs6000_select[0].string = default_cpu;
2328
b6a1cbae 2329 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2330 {
2331 ptr = &rs6000_select[i];
2332 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2333 {
2334 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2335 start = "";
2336 }
2337 }
2338
9c6b4ed9 2339 if (PPC405_ERRATUM77)
b0bfee6e 2340 {
9c6b4ed9 2341 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2342 start = "";
2343 }
b0bfee6e 2344
b91da81f 2345#ifdef USING_ELFOS_H
3cfa4909
MM
2346 switch (rs6000_sdata)
2347 {
2348 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2349 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2350 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2351 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2352 }
2353
2354 if (rs6000_sdata && g_switch_value)
2355 {
307b599c
MK
2356 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2357 g_switch_value);
3cfa4909
MM
2358 start = "";
2359 }
2360#endif
2361
2362 if (*start == '\0')
949ea356 2363 putc ('\n', file);
3cfa4909 2364 }
b723e82f 2365
e51917ae
JM
2366#ifdef HAVE_AS_GNU_ATTRIBUTE
2367 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2368 {
2369 fprintf (file, "\t.gnu_attribute 4, %d\n",
2370 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2371 fprintf (file, "\t.gnu_attribute 8, %d\n",
2372 (TARGET_ALTIVEC_ABI ? 2
2373 : TARGET_SPE_ABI ? 3
2374 : 1));
2375 }
e51917ae
JM
2376#endif
2377
b723e82f
JJ
2378 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2379 {
d6b5193b
RS
2380 switch_to_section (toc_section);
2381 switch_to_section (text_section);
b723e82f 2382 }
3cfa4909 2383}
c4e18b1c 2384
5248c961 2385\f
a0ab749a 2386/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2387
2388int
863d938c 2389direct_return (void)
9878760c 2390{
4697a36c
MM
2391 if (reload_completed)
2392 {
2393 rs6000_stack_t *info = rs6000_stack_info ();
2394
2395 if (info->first_gp_reg_save == 32
2396 && info->first_fp_reg_save == 64
00b960c7 2397 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2398 && ! info->lr_save_p
2399 && ! info->cr_save_p
00b960c7 2400 && info->vrsave_mask == 0
c81fc13e 2401 && ! info->push_p)
4697a36c
MM
2402 return 1;
2403 }
2404
2405 return 0;
9878760c
RK
2406}
2407
4e74d8ec
MM
2408/* Return the number of instructions it takes to form a constant in an
2409 integer register. */
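/* Illustrative examples added for exposition (not in the original source):
   0x7fff fits in a signed 16-bit immediate, so it takes a single addi;
   0x12340000 has a zero low halfword, so a single addis suffices;
   0x12345678 needs an addis/ori pair, i.e. two instructions.  */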
2410
48d72335 2411int
a2369ed3 2412num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2413{
2414 /* signed constant loadable with {cal|addi} */
547b216d 2415 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2416 return 1;
2417
4e74d8ec 2418 /* constant loadable with {cau|addis} */
547b216d
DE
2419 else if ((value & 0xffff) == 0
2420 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2421 return 1;
2422
5f59ecb7 2423#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2424 else if (TARGET_POWERPC64)
4e74d8ec 2425 {
a65c591c
DE
2426 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2427 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2428
a65c591c 2429 if (high == 0 || high == -1)
4e74d8ec
MM
2430 return 2;
2431
a65c591c 2432 high >>= 1;
4e74d8ec 2433
a65c591c 2434 if (low == 0)
4e74d8ec 2435 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2436 else
2437 return (num_insns_constant_wide (high)
e396202a 2438 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2439 }
2440#endif
2441
2442 else
2443 return 2;
2444}
2445
2446int
a2369ed3 2447num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2448{
37409796 2449 HOST_WIDE_INT low, high;
bb8df8a6 2450
37409796 2451 switch (GET_CODE (op))
0d30d435 2452 {
37409796 2453 case CONST_INT:
0d30d435 2454#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2455 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2456 && mask64_operand (op, mode))
c4ad648e 2457 return 2;
0d30d435
DE
2458 else
2459#endif
2460 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2461
37409796
NS
2462 case CONST_DOUBLE:
2463 if (mode == SFmode)
2464 {
2465 long l;
2466 REAL_VALUE_TYPE rv;
bb8df8a6 2467
37409796
NS
2468 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2469 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2470 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2471 }
a260abc9 2472
37409796
NS
2473 if (mode == VOIDmode || mode == DImode)
2474 {
2475 high = CONST_DOUBLE_HIGH (op);
2476 low = CONST_DOUBLE_LOW (op);
2477 }
2478 else
2479 {
2480 long l[2];
2481 REAL_VALUE_TYPE rv;
bb8df8a6 2482
37409796 2483 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2484 if (DECIMAL_FLOAT_MODE_P (mode))
2485 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2486 else
2487 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2488 high = l[WORDS_BIG_ENDIAN == 0];
2489 low = l[WORDS_BIG_ENDIAN != 0];
2490 }
47ad8c61 2491
37409796
NS
2492 if (TARGET_32BIT)
2493 return (num_insns_constant_wide (low)
2494 + num_insns_constant_wide (high));
2495 else
2496 {
2497 if ((high == 0 && low >= 0)
2498 || (high == -1 && low < 0))
2499 return num_insns_constant_wide (low);
bb8df8a6 2500
1990cd79 2501 else if (mask64_operand (op, mode))
37409796 2502 return 2;
bb8df8a6 2503
37409796
NS
2504 else if (low == 0)
2505 return num_insns_constant_wide (high) + 1;
bb8df8a6 2506
37409796
NS
2507 else
2508 return (num_insns_constant_wide (high)
2509 + num_insns_constant_wide (low) + 1);
2510 }
bb8df8a6 2511
37409796
NS
2512 default:
2513 gcc_unreachable ();
4e74d8ec 2514 }
4e74d8ec
MM
2515}
2516
0972012c
RS
2517/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2518 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2519 corresponding element of the vector, but for V4SFmode and V2SFmode,
2520 the corresponding "float" is interpreted as an SImode integer. */
2521
2522static HOST_WIDE_INT
2523const_vector_elt_as_int (rtx op, unsigned int elt)
2524{
2525 rtx tmp = CONST_VECTOR_ELT (op, elt);
2526 if (GET_MODE (op) == V4SFmode
2527 || GET_MODE (op) == V2SFmode)
2528 tmp = gen_lowpart (SImode, tmp);
2529 return INTVAL (tmp);
2530}
452a7d36 2531
77ccdfed 2532/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2533 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2534 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2535 all items are set to the same value and contain COPIES replicas of the
2536 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2537 operand and the others are set to the value of the operand's msb. */
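/* Illustrative note added for exposition (not in the original source):
   for the V4SImode constant {5, 5, 5, 5}, the first attempt made by
   easy_altivec_constant (STEP == 1, COPIES == 1) succeeds, and the
   vector can be materialized with a single "vspltisw vD,5".  */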
2538
2539static bool
2540vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2541{
66180ff3
PB
2542 enum machine_mode mode = GET_MODE (op);
2543 enum machine_mode inner = GET_MODE_INNER (mode);
2544
2545 unsigned i;
2546 unsigned nunits = GET_MODE_NUNITS (mode);
2547 unsigned bitsize = GET_MODE_BITSIZE (inner);
2548 unsigned mask = GET_MODE_MASK (inner);
2549
0972012c 2550 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2551 HOST_WIDE_INT splat_val = val;
2552 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2553
 2554 /* Construct the value to be splatted, if possible. If not, return false. */
2555 for (i = 2; i <= copies; i *= 2)
452a7d36 2556 {
66180ff3
PB
2557 HOST_WIDE_INT small_val;
2558 bitsize /= 2;
2559 small_val = splat_val >> bitsize;
2560 mask >>= bitsize;
2561 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2562 return false;
2563 splat_val = small_val;
2564 }
c4ad648e 2565
66180ff3
PB
2566 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2567 if (EASY_VECTOR_15 (splat_val))
2568 ;
2569
2570 /* Also check if we can splat, and then add the result to itself. Do so if
 2571 the value is positive, or if the splat instruction is using OP's mode;
2572 for splat_val < 0, the splat and the add should use the same mode. */
2573 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2574 && (splat_val >= 0 || (step == 1 && copies == 1)))
2575 ;
2576
2577 else
2578 return false;
2579
2580 /* Check if VAL is present in every STEP-th element, and the
2581 other elements are filled with its most significant bit. */
2582 for (i = 0; i < nunits - 1; ++i)
2583 {
2584 HOST_WIDE_INT desired_val;
2585 if (((i + 1) & (step - 1)) == 0)
2586 desired_val = val;
2587 else
2588 desired_val = msb_val;
2589
0972012c 2590 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2591 return false;
452a7d36 2592 }
66180ff3
PB
2593
2594 return true;
452a7d36
HP
2595}
2596
69ef87e2 2597
77ccdfed 2598/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2599 with a vspltisb, vspltish or vspltisw. */
2600
2601bool
2602easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2603{
66180ff3 2604 unsigned step, copies;
d744e06e 2605
66180ff3
PB
2606 if (mode == VOIDmode)
2607 mode = GET_MODE (op);
2608 else if (mode != GET_MODE (op))
2609 return false;
d744e06e 2610
66180ff3
PB
2611 /* Start with a vspltisw. */
2612 step = GET_MODE_NUNITS (mode) / 4;
2613 copies = 1;
2614
2615 if (vspltis_constant (op, step, copies))
2616 return true;
2617
2618 /* Then try with a vspltish. */
2619 if (step == 1)
2620 copies <<= 1;
2621 else
2622 step >>= 1;
2623
2624 if (vspltis_constant (op, step, copies))
2625 return true;
2626
2627 /* And finally a vspltisb. */
2628 if (step == 1)
2629 copies <<= 1;
2630 else
2631 step >>= 1;
2632
2633 if (vspltis_constant (op, step, copies))
2634 return true;
2635
2636 return false;
d744e06e
AH
2637}
2638
66180ff3
PB
2639/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2640 result is OP. Abort if it is not possible. */
d744e06e 2641
f676971a 2642rtx
66180ff3 2643gen_easy_altivec_constant (rtx op)
452a7d36 2644{
66180ff3
PB
2645 enum machine_mode mode = GET_MODE (op);
2646 int nunits = GET_MODE_NUNITS (mode);
2647 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2648 unsigned step = nunits / 4;
2649 unsigned copies = 1;
2650
2651 /* Start with a vspltisw. */
2652 if (vspltis_constant (op, step, copies))
2653 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2654
2655 /* Then try with a vspltish. */
2656 if (step == 1)
2657 copies <<= 1;
2658 else
2659 step >>= 1;
2660
2661 if (vspltis_constant (op, step, copies))
2662 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2663
2664 /* And finally a vspltisb. */
2665 if (step == 1)
2666 copies <<= 1;
2667 else
2668 step >>= 1;
2669
2670 if (vspltis_constant (op, step, copies))
2671 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2672
2673 gcc_unreachable ();
d744e06e
AH
2674}
2675
2676const char *
a2369ed3 2677output_vec_const_move (rtx *operands)
d744e06e
AH
2678{
2679 int cst, cst2;
2680 enum machine_mode mode;
2681 rtx dest, vec;
2682
2683 dest = operands[0];
2684 vec = operands[1];
d744e06e 2685 mode = GET_MODE (dest);
69ef87e2 2686
d744e06e
AH
2687 if (TARGET_ALTIVEC)
2688 {
66180ff3 2689 rtx splat_vec;
d744e06e
AH
2690 if (zero_constant (vec, mode))
2691 return "vxor %0,%0,%0";
37409796 2692
66180ff3
PB
2693 splat_vec = gen_easy_altivec_constant (vec);
2694 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2695 operands[1] = XEXP (splat_vec, 0);
2696 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2697 return "#";
bb8df8a6 2698
66180ff3 2699 switch (GET_MODE (splat_vec))
98ef3137 2700 {
37409796 2701 case V4SImode:
66180ff3 2702 return "vspltisw %0,%1";
c4ad648e 2703
37409796 2704 case V8HImode:
66180ff3 2705 return "vspltish %0,%1";
c4ad648e 2706
37409796 2707 case V16QImode:
66180ff3 2708 return "vspltisb %0,%1";
bb8df8a6 2709
37409796
NS
2710 default:
2711 gcc_unreachable ();
98ef3137 2712 }
69ef87e2
AH
2713 }
2714
37409796 2715 gcc_assert (TARGET_SPE);
bb8df8a6 2716
37409796
NS
2717 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2718 pattern of V1DI, V4HI, and V2SF.
2719
2720 FIXME: We should probably return # and add post reload
2721 splitters for these, but this way is so easy ;-). */
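/* Editorial note: for SPE the 64-bit vector constant is built in a GPR
   with li followed by evmergelo, plus a second li when the two halves
   differ, as emitted by the returns below.  */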
e20dcbef
PB
2722 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2723 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2724 operands[1] = CONST_VECTOR_ELT (vec, 0);
2725 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2726 if (cst == cst2)
2727 return "li %0,%1\n\tevmergelo %0,%0,%0";
2728 else
2729 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2730}
2731
f5027409
RE
2732/* Initialize TARGET of vector PAIRED to VALS. */
2733
2734void
2735paired_expand_vector_init (rtx target, rtx vals)
2736{
2737 enum machine_mode mode = GET_MODE (target);
2738 int n_elts = GET_MODE_NUNITS (mode);
2739 int n_var = 0;
2740 rtx x, new, tmp, constant_op, op1, op2;
2741 int i;
2742
2743 for (i = 0; i < n_elts; ++i)
2744 {
2745 x = XVECEXP (vals, 0, i);
2746 if (!CONSTANT_P (x))
2747 ++n_var;
2748 }
2749 if (n_var == 0)
2750 {
2751 /* Load from constant pool. */
2752 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2753 return;
2754 }
2755
2756 if (n_var == 2)
2757 {
2758 /* The vector is initialized only with non-constants. */
2759 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2760 XVECEXP (vals, 0, 1));
2761
2762 emit_move_insn (target, new);
2763 return;
2764 }
2765
2766 /* One field is non-constant and the other one is a constant. Load the
2767 constant from the constant pool and use ps_merge instruction to
2768 construct the whole vector. */
2769 op1 = XVECEXP (vals, 0, 0);
2770 op2 = XVECEXP (vals, 0, 1);
2771
2772 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2773
2774 tmp = gen_reg_rtx (GET_MODE (constant_op));
2775 emit_move_insn (tmp, constant_op);
2776
2777 if (CONSTANT_P (op1))
2778 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2779 else
2780 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2781
2782 emit_move_insn (target, new);
2783}
2784
e2e95f45
RE
2785void
2786paired_expand_vector_move (rtx operands[])
2787{
2788 rtx op0 = operands[0], op1 = operands[1];
2789
2790 emit_move_insn (op0, op1);
2791}
2792
2793/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2794 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2795 operands for the relation operation COND. This is a recursive
2796 function. */
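/* Editorial note grounded in the cases below: conditions without direct
   hardware support are reduced recursively, e.g. LT is handled as GE with
   the two select operands swapped, and NE is handled as EQ the same way.  */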
2797
2798static void
2799paired_emit_vector_compare (enum rtx_code rcode,
2800 rtx dest, rtx op0, rtx op1,
2801 rtx cc_op0, rtx cc_op1)
2802{
2803 rtx tmp = gen_reg_rtx (V2SFmode);
2804 rtx tmp1, max, min, equal_zero;
2805
2806 gcc_assert (TARGET_PAIRED_FLOAT);
2807 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2808
2809 switch (rcode)
2810 {
2811 case LT:
2812 case LTU:
2813 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2814 return;
2815 case GE:
2816 case GEU:
2817 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2818 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2819 return;
2820 case LE:
2821 case LEU:
2822 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2823 return;
2824 case GT:
2825 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2826 return;
2827 case EQ:
2828 tmp1 = gen_reg_rtx (V2SFmode);
2829 max = gen_reg_rtx (V2SFmode);
2830 min = gen_reg_rtx (V2SFmode);
2831 equal_zero = gen_reg_rtx (V2SFmode);
2832
2833 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2834 emit_insn (gen_selv2sf4
2835 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2836 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2837 emit_insn (gen_selv2sf4
2838 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2839 emit_insn (gen_subv2sf3 (tmp1, min, max));
2840 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2841 return;
2842 case NE:
2843 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2844 return;
2845 case UNLE:
2846 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2847 return;
2848 case UNLT:
2849 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2850 return;
2851 case UNGE:
2852 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2853 return;
2854 case UNGT:
2855 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2856 return;
2857 default:
2858 gcc_unreachable ();
2859 }
2860
2861 return;
2862}
2863
2864/* Emit vector conditional expression.
2865 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2866 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2867
2868int
2869paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2870 rtx cond, rtx cc_op0, rtx cc_op1)
2871{
2872 enum rtx_code rcode = GET_CODE (cond);
2873
2874 if (!TARGET_PAIRED_FLOAT)
2875 return 0;
2876
2877 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2878
2879 return 1;
2880}
2881
7a4eca66
DE
2882/* Initialize vector TARGET to VALS. */
2883
2884void
2885rs6000_expand_vector_init (rtx target, rtx vals)
2886{
2887 enum machine_mode mode = GET_MODE (target);
2888 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2889 int n_elts = GET_MODE_NUNITS (mode);
2890 int n_var = 0, one_var = -1;
2891 bool all_same = true, all_const_zero = true;
2892 rtx x, mem;
2893 int i;
2894
2895 for (i = 0; i < n_elts; ++i)
2896 {
2897 x = XVECEXP (vals, 0, i);
2898 if (!CONSTANT_P (x))
2899 ++n_var, one_var = i;
2900 else if (x != CONST0_RTX (inner_mode))
2901 all_const_zero = false;
2902
2903 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2904 all_same = false;
2905 }
2906
2907 if (n_var == 0)
2908 {
2909 if (mode != V4SFmode && all_const_zero)
2910 {
2911 /* Zero register. */
2912 emit_insn (gen_rtx_SET (VOIDmode, target,
2913 gen_rtx_XOR (mode, target, target)));
2914 return;
2915 }
66180ff3 2916 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2917 {
2918 /* Splat immediate. */
66180ff3 2919 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2920 return;
2921 }
2922 else if (all_same)
2923 ; /* Splat vector element. */
2924 else
2925 {
2926 /* Load from constant pool. */
2927 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2928 return;
2929 }
2930 }
2931
2932 /* Store value to stack temp. Load vector element. Splat. */
2933 if (all_same)
2934 {
2935 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2936 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2937 XVECEXP (vals, 0, 0));
2938 x = gen_rtx_UNSPEC (VOIDmode,
2939 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2940 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2941 gen_rtvec (2,
2942 gen_rtx_SET (VOIDmode,
2943 target, mem),
2944 x)));
2945 x = gen_rtx_VEC_SELECT (inner_mode, target,
2946 gen_rtx_PARALLEL (VOIDmode,
2947 gen_rtvec (1, const0_rtx)));
2948 emit_insn (gen_rtx_SET (VOIDmode, target,
2949 gen_rtx_VEC_DUPLICATE (mode, x)));
2950 return;
2951 }
2952
2953 /* One field is non-constant. Load constant then overwrite
2954 varying field. */
2955 if (n_var == 1)
2956 {
2957 rtx copy = copy_rtx (vals);
2958
57b51d4d 2959 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2960 varying element. */
2961 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2962 rs6000_expand_vector_init (target, copy);
2963
2964 /* Insert variable. */
2965 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2966 return;
2967 }
2968
2969 /* Construct the vector in memory one field at a time
2970 and load the whole vector. */
2971 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2972 for (i = 0; i < n_elts; i++)
2973 emit_move_insn (adjust_address_nv (mem, inner_mode,
2974 i * GET_MODE_SIZE (inner_mode)),
2975 XVECEXP (vals, 0, i));
2976 emit_move_insn (target, mem);
2977}
2978
2979/* Set field ELT of TARGET to VAL. */
2980
2981void
2982rs6000_expand_vector_set (rtx target, rtx val, int elt)
2983{
2984 enum machine_mode mode = GET_MODE (target);
2985 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2986 rtx reg = gen_reg_rtx (mode);
2987 rtx mask, mem, x;
2988 int width = GET_MODE_SIZE (inner_mode);
2989 int i;
2990
2991 /* Load single variable value. */
2992 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2993 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2994 x = gen_rtx_UNSPEC (VOIDmode,
2995 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2996 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2997 gen_rtvec (2,
2998 gen_rtx_SET (VOIDmode,
2999 reg, mem),
3000 x)));
3001
3002 /* Linear sequence. */
3003 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3004 for (i = 0; i < 16; ++i)
3005 XVECEXP (mask, 0, i) = GEN_INT (i);
3006
3007 /* Set permute mask to insert element into target. */
3008 for (i = 0; i < width; ++i)
3009 XVECEXP (mask, 0, elt*width + i)
3010 = GEN_INT (i + 0x10);
3011 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3012 x = gen_rtx_UNSPEC (mode,
3013 gen_rtvec (3, target, reg,
3014 force_reg (V16QImode, x)),
3015 UNSPEC_VPERM);
3016 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3017}
3018
3019/* Extract field ELT from VEC into TARGET. */
3020
3021void
3022rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3023{
3024 enum machine_mode mode = GET_MODE (vec);
3025 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3026 rtx mem, x;
3027
3028 /* Allocate mode-sized buffer. */
3029 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3030
3031 /* Add offset to field within buffer matching vector element. */
3032 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3033
3034 /* Store single field into mode-sized buffer. */
3035 x = gen_rtx_UNSPEC (VOIDmode,
3036 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3037 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3038 gen_rtvec (2,
3039 gen_rtx_SET (VOIDmode,
3040 mem, vec),
3041 x)));
3042 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3043}
3044
0ba1b2ff
AM
3045/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3046 implement ANDing by the mask IN. */
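/* Editorial example, an assumption for illustration: a mask such as
   0x00fff000000fffff contains two separate runs of ones, so a single
   rldicl or rldicr cannot produce it; the routine instead computes the
   shift/mask operands for a rotate-and-clear pair, as traced in the
   comments below.  */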
3047void
a2369ed3 3048build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3049{
3050#if HOST_BITS_PER_WIDE_INT >= 64
3051 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3052 int shift;
3053
37409796 3054 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3055
3056 c = INTVAL (in);
3057 if (c & 1)
3058 {
 3059 /* Assume c is initially something like 0x00fff000000fffff. The idea
3060 is to rotate the word so that the middle ^^^^^^ group of zeros
3061 is at the MS end and can be cleared with an rldicl mask. We then
3062 rotate back and clear off the MS ^^ group of zeros with a
3063 second rldicl. */
3064 c = ~c; /* c == 0xff000ffffff00000 */
3065 lsb = c & -c; /* lsb == 0x0000000000100000 */
3066 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3067 c = ~c; /* c == 0x00fff000000fffff */
3068 c &= -lsb; /* c == 0x00fff00000000000 */
3069 lsb = c & -c; /* lsb == 0x0000100000000000 */
3070 c = ~c; /* c == 0xff000fffffffffff */
3071 c &= -lsb; /* c == 0xff00000000000000 */
3072 shift = 0;
3073 while ((lsb >>= 1) != 0)
3074 shift++; /* shift == 44 on exit from loop */
3075 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3076 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3077 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3078 }
3079 else
0ba1b2ff
AM
3080 {
 3081 /* Assume c is initially something like 0xff000f0000000000. The idea
3082 is to rotate the word so that the ^^^ middle group of zeros
3083 is at the LS end and can be cleared with an rldicr mask. We then
3084 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3085 a second rldicr. */
3086 lsb = c & -c; /* lsb == 0x0000010000000000 */
3087 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3088 c = ~c; /* c == 0x00fff0ffffffffff */
3089 c &= -lsb; /* c == 0x00fff00000000000 */
3090 lsb = c & -c; /* lsb == 0x0000100000000000 */
3091 c = ~c; /* c == 0xff000fffffffffff */
3092 c &= -lsb; /* c == 0xff00000000000000 */
3093 shift = 0;
3094 while ((lsb >>= 1) != 0)
3095 shift++; /* shift == 44 on exit from loop */
3096 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3097 m1 >>= shift; /* m1 == 0x0000000000000fff */
3098 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3099 }
3100
3101 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3102 masks will be all 1's. We are guaranteed more than one transition. */
3103 out[0] = GEN_INT (64 - shift);
3104 out[1] = GEN_INT (m1);
3105 out[2] = GEN_INT (shift);
3106 out[3] = GEN_INT (m2);
3107#else
045572c7
GK
3108 (void)in;
3109 (void)out;
37409796 3110 gcc_unreachable ();
0ba1b2ff 3111#endif
a260abc9
DE
3112}
3113
54b695e7 3114/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3115
3116bool
54b695e7
AH
3117invalid_e500_subreg (rtx op, enum machine_mode mode)
3118{
61c76239
JM
3119 if (TARGET_E500_DOUBLE)
3120 {
17caeff2
JM
3121 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3122 subreg:TI and reg:TF. */
61c76239 3123 if (GET_CODE (op) == SUBREG
17caeff2 3124 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3125 && REG_P (SUBREG_REG (op))
17caeff2 3126 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5
PB
3127 || GET_MODE (SUBREG_REG (op)) == TFmode
3128 || GET_MODE (SUBREG_REG (op)) == DDmode
3129 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3130 return true;
3131
17caeff2
JM
3132 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3133 reg:TI. */
61c76239 3134 if (GET_CODE (op) == SUBREG
4d4447b5
PB
3135 && (mode == DFmode || mode == TFmode
3136 || mode == DDmode || mode == TDmode)
61c76239 3137 && REG_P (SUBREG_REG (op))
17caeff2
JM
3138 && (GET_MODE (SUBREG_REG (op)) == DImode
3139 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3140 return true;
3141 }
54b695e7 3142
61c76239
JM
3143 if (TARGET_SPE
3144 && GET_CODE (op) == SUBREG
3145 && mode == SImode
54b695e7 3146 && REG_P (SUBREG_REG (op))
14502dad 3147 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3148 return true;
3149
3150 return false;
3151}
3152
58182de3 3153/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3154 field is an FP double while the FP fields remain word aligned. */
3155
19d66194 3156unsigned int
fa5b0972
AM
3157rs6000_special_round_type_align (tree type, unsigned int computed,
3158 unsigned int specified)
95727fb8 3159{
fa5b0972 3160 unsigned int align = MAX (computed, specified);
95727fb8 3161 tree field = TYPE_FIELDS (type);
95727fb8 3162
bb8df8a6 3163 /* Skip all non-field decls.  */
85962ac8 3164 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3165 field = TREE_CHAIN (field);
3166
fa5b0972
AM
3167 if (field != NULL && field != type)
3168 {
3169 type = TREE_TYPE (field);
3170 while (TREE_CODE (type) == ARRAY_TYPE)
3171 type = TREE_TYPE (type);
3172
3173 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3174 align = MAX (align, 64);
3175 }
95727fb8 3176
fa5b0972 3177 return align;
95727fb8
AP
3178}
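/* Illustrative example (not from the original file): under this rule a
   record such as

     struct { double d; int i; };

   has its overall alignment raised to 64 bits on AIX, while the double
   member itself stays word aligned within the record, as noted above.  */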
3179
58182de3
GK
3180/* Darwin increases record alignment to the natural alignment of
3181 the first field. */
3182
3183unsigned int
3184darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3185 unsigned int specified)
3186{
3187 unsigned int align = MAX (computed, specified);
3188
3189 if (TYPE_PACKED (type))
3190 return align;
3191
3192 /* Find the first field, looking down into aggregates. */
3193 do {
3194 tree field = TYPE_FIELDS (type);
 3196 /* Skip all non-field decls.  */
3196 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3197 field = TREE_CHAIN (field);
3198 if (! field)
3199 break;
3200 type = TREE_TYPE (field);
3201 while (TREE_CODE (type) == ARRAY_TYPE)
3202 type = TREE_TYPE (type);
3203 } while (AGGREGATE_TYPE_P (type));
3204
3205 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3206 align = MAX (align, TYPE_ALIGN (type));
3207
3208 return align;
3209}
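/* Illustrative example (not from the original file): for a record whose
   first member is itself an aggregate, e.g.

     struct { struct { double d; } inner; char c; };

   the loop above descends through the nested aggregate (and any arrays)
   to the innermost first field, so the record inherits the natural
   alignment of the double.  */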
3210
a4f6c312 3211/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3212
3213int
f676971a 3214small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3215 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3216{
38c1f2d7 3217#if TARGET_ELF
5f59ecb7 3218 rtx sym_ref;
7509c759 3219
d9407988 3220 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3221 return 0;
a54d04b7 3222
f607bc57 3223 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3224 return 0;
3225
88228c4b
MM
3226 if (GET_CODE (op) == SYMBOL_REF)
3227 sym_ref = op;
3228
3229 else if (GET_CODE (op) != CONST
3230 || GET_CODE (XEXP (op, 0)) != PLUS
3231 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3232 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3233 return 0;
3234
88228c4b 3235 else
dbf55e53
MM
3236 {
3237 rtx sum = XEXP (op, 0);
3238 HOST_WIDE_INT summand;
3239
3240 /* We have to be careful here, because it is the referenced address
c4ad648e 3241 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3242 summand = INTVAL (XEXP (sum, 1));
307b599c 3243 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3244 return 0;
dbf55e53
MM
3245
3246 sym_ref = XEXP (sum, 0);
3247 }
88228c4b 3248
20bfcd69 3249 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3250#else
3251 return 0;
3252#endif
7509c759 3253}
46c07df8 3254
3a1f863f 3255/* Return true if either operand is a general purpose register. */
46c07df8 3256
3a1f863f
DE
3257bool
3258gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3259{
3a1f863f
DE
3260 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3261 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3262}
3263
9ebbca7d 3264\f
4d588c14
RH
3265/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3266
f676971a
EC
3267static int
3268constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3269{
9390387d 3270 switch (GET_CODE (op))
9ebbca7d
GK
3271 {
3272 case SYMBOL_REF:
c4501e62
JJ
3273 if (RS6000_SYMBOL_REF_TLS_P (op))
3274 return 0;
3275 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3276 {
3277 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3278 {
3279 *have_sym = 1;
3280 return 1;
3281 }
3282 else
3283 return 0;
3284 }
3285 else if (! strcmp (XSTR (op, 0), toc_label_name))
3286 {
3287 *have_toc = 1;
3288 return 1;
3289 }
3290 else
3291 return 0;
9ebbca7d
GK
3292 case PLUS:
3293 case MINUS:
c1f11548
DE
3294 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3295 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3296 case CONST:
a4f6c312 3297 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3298 case CONST_INT:
a4f6c312 3299 return 1;
9ebbca7d 3300 default:
a4f6c312 3301 return 0;
9ebbca7d
GK
3302 }
3303}
3304
4d588c14 3305static bool
a2369ed3 3306constant_pool_expr_p (rtx op)
9ebbca7d
GK
3307{
3308 int have_sym = 0;
3309 int have_toc = 0;
3310 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3311}
3312
48d72335 3313bool
a2369ed3 3314toc_relative_expr_p (rtx op)
9ebbca7d 3315{
4d588c14
RH
3316 int have_sym = 0;
3317 int have_toc = 0;
3318 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3319}
3320
4d588c14 3321bool
a2369ed3 3322legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3323{
3324 return (TARGET_TOC
3325 && GET_CODE (x) == PLUS
3326 && GET_CODE (XEXP (x, 0)) == REG
3327 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3328 && constant_pool_expr_p (XEXP (x, 1)));
3329}
3330
d04b6e6e
EB
3331static bool
3332legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3333{
3334 return (DEFAULT_ABI == ABI_V4
3335 && !flag_pic && !TARGET_TOC
3336 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3337 && small_data_operand (x, mode));
3338}
3339
60cdabab
DE
3340/* SPE offset addressing is limited to 5-bits worth of double words. */
3341#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
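/* Illustrative values (not part of the original file): the macro accepts
   exactly the offsets 0, 8, 16, ..., 248, i.e. 32 doubleword slots.
   SPE_CONST_OFFSET_OK (0xf8) is true, while SPE_CONST_OFFSET_OK (0x100)
   and SPE_CONST_OFFSET_OK (0x4) are false because they set bits outside
   the 0xf8 field.  */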
3342
76d2b81d
DJ
3343bool
3344rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3345{
3346 unsigned HOST_WIDE_INT offset, extra;
3347
3348 if (GET_CODE (x) != PLUS)
3349 return false;
3350 if (GET_CODE (XEXP (x, 0)) != REG)
3351 return false;
3352 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3353 return false;
60cdabab
DE
3354 if (legitimate_constant_pool_address_p (x))
3355 return true;
4d588c14
RH
3356 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3357 return false;
3358
3359 offset = INTVAL (XEXP (x, 1));
3360 extra = 0;
3361 switch (mode)
3362 {
3363 case V16QImode:
3364 case V8HImode:
3365 case V4SFmode:
3366 case V4SImode:
7a4eca66 3367 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3368 constant offset zero should not occur due to canonicalization. */
3369 return false;
4d588c14
RH
3370
3371 case V4HImode:
3372 case V2SImode:
3373 case V1DImode:
3374 case V2SFmode:
d42a3bae 3375 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3376 constant offset zero should not occur due to canonicalization. */
d42a3bae 3377 if (TARGET_PAIRED_FLOAT)
1a23970d 3378 return false;
4d588c14
RH
3379 /* SPE vector modes. */
3380 return SPE_CONST_OFFSET_OK (offset);
3381
3382 case DFmode:
7393f7f8 3383 case DDmode:
4d4cbc0e
AH
3384 if (TARGET_E500_DOUBLE)
3385 return SPE_CONST_OFFSET_OK (offset);
3386
4d588c14 3387 case DImode:
54b695e7
AH
3388 /* On e500v2, we may have:
3389
3390 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3391
3392 Which gets addressed with evldd instructions. */
3393 if (TARGET_E500_DOUBLE)
3394 return SPE_CONST_OFFSET_OK (offset);
3395
7393f7f8 3396 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3397 extra = 4;
3398 else if (offset & 3)
3399 return false;
3400 break;
3401
3402 case TFmode:
4d4447b5 3403 case TDmode:
17caeff2
JM
3404 if (TARGET_E500_DOUBLE)
3405 return (SPE_CONST_OFFSET_OK (offset)
3406 && SPE_CONST_OFFSET_OK (offset + 8));
3407
4d588c14 3408 case TImode:
7393f7f8 3409 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3410 extra = 12;
3411 else if (offset & 3)
3412 return false;
3413 else
3414 extra = 8;
3415 break;
3416
3417 default:
3418 break;
3419 }
3420
b1917422
AM
3421 offset += 0x8000;
3422 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3423}
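/* Worked example for the final range check above (illustrative, not part
   of the original file): biasing by 0x8000 turns the signed 16-bit
   displacement test into one unsigned compare.  An offset of -0x8000
   becomes 0 and 0x7fff becomes 0xffff, both below 0x10000, while 0x8000
   becomes exactly 0x10000 and is rejected.  The second compare also
   requires offset + extra to stay in range so that the last word of a
   multi-word access remains addressable.  */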
3424
6fb5fa3c 3425bool
a2369ed3 3426legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3427{
3428 rtx op0, op1;
3429
3430 if (GET_CODE (x) != PLUS)
3431 return false;
850e8d3d 3432
4d588c14
RH
3433 op0 = XEXP (x, 0);
3434 op1 = XEXP (x, 1);
3435
bf00cc0f 3436 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3437 replaced with proper base and index regs. */
3438 if (!strict
3439 && reload_in_progress
3440 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3441 && REG_P (op1))
3442 return true;
3443
3444 return (REG_P (op0) && REG_P (op1)
3445 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3446 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3447 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3448 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3449}
3450
48d72335 3451inline bool
a2369ed3 3452legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3453{
3454 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3455}
3456
48d72335 3457bool
4c81e946
FJ
3458macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3459{
c4ad648e 3460 if (!TARGET_MACHO || !flag_pic
9390387d 3461 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3462 return false;
3463 x = XEXP (x, 0);
4c81e946
FJ
3464
3465 if (GET_CODE (x) != LO_SUM)
3466 return false;
3467 if (GET_CODE (XEXP (x, 0)) != REG)
3468 return false;
3469 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3470 return false;
3471 x = XEXP (x, 1);
3472
3473 return CONSTANT_P (x);
3474}
3475
4d588c14 3476static bool
a2369ed3 3477legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3478{
3479 if (GET_CODE (x) != LO_SUM)
3480 return false;
3481 if (GET_CODE (XEXP (x, 0)) != REG)
3482 return false;
3483 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3484 return false;
54b695e7 3485 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3486 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3487 || mode == DDmode || mode == TDmode
17caeff2 3488 || mode == DImode))
f82f556d 3489 return false;
4d588c14
RH
3490 x = XEXP (x, 1);
3491
8622e235 3492 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3493 {
a29077da 3494 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3495 return false;
3496 if (TARGET_TOC)
3497 return false;
3498 if (GET_MODE_NUNITS (mode) != 1)
3499 return false;
5e5f01b9 3500 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3501 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3502 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3503 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3504 return false;
3505
3506 return CONSTANT_P (x);
3507 }
3508
3509 return false;
3510}
3511
3512
9ebbca7d
GK
3513/* Try machine-dependent ways of modifying an illegitimate address
3514 to be legitimate. If we find one, return the new, valid address.
3515 This is used from only one place: `memory_address' in explow.c.
3516
a4f6c312
SS
3517 OLDX is the address as it was before break_out_memory_refs was
3518 called. In some cases it is useful to look at this to decide what
3519 needs to be done.
9ebbca7d 3520
a4f6c312 3521 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3522
a4f6c312
SS
3523 It is always safe for this function to do nothing. It exists to
3524 recognize opportunities to optimize the output.
9ebbca7d
GK
3525
3526 On RS/6000, first check for the sum of a register with a constant
3527 integer that is out of range. If so, generate code to add the
3528 constant with the low-order 16 bits masked to the register and force
3529 this result into another register (this can be done with `cau').
3530 Then generate an address of REG+(CONST&0xffff), allowing for the
3531 possibility of bit 16 being a one.
3532
3533 Then check for the sum of a register and something not constant, try to
3534 load the other things into a register and return the sum. */
4d588c14 3535
9ebbca7d 3536rtx
a2369ed3
DJ
3537rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3538 enum machine_mode mode)
0ac081f6 3539{
c4501e62
JJ
3540 if (GET_CODE (x) == SYMBOL_REF)
3541 {
3542 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3543 if (model != 0)
3544 return rs6000_legitimize_tls_address (x, model);
3545 }
3546
f676971a 3547 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3548 && GET_CODE (XEXP (x, 0)) == REG
3549 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3550 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3551 && !(SPE_VECTOR_MODE (mode)
3552 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3553 || mode == DImode))))
f676971a 3554 {
9ebbca7d
GK
3555 HOST_WIDE_INT high_int, low_int;
3556 rtx sum;
a65c591c
DE
3557 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3558 high_int = INTVAL (XEXP (x, 1)) - low_int;
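      /* For example (illustrative note, not part of the original file):
         a constant of 0x12345 splits into low_int = 0x2345 and
         high_int = 0x10000, while 0x9000 splits into low_int = -0x7000
         and high_int = 0x10000; the XOR/subtract sign-extends the low
         16 bits, so high_int is always a multiple of 0x10000.  */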
9ebbca7d
GK
3559 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3560 GEN_INT (high_int)), 0);
3561 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3562 }
f676971a 3563 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3564 && GET_CODE (XEXP (x, 0)) == REG
3565 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3566 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3567 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3568 || TARGET_POWERPC64
7393f7f8
BE
3569 || (((mode != DImode && mode != DFmode && mode != DDmode)
3570 || TARGET_E500_DOUBLE)
3571 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3572 && (TARGET_POWERPC64 || mode != DImode)
3573 && mode != TImode)
3574 {
3575 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3576 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3577 }
0ac081f6
AH
3578 else if (ALTIVEC_VECTOR_MODE (mode))
3579 {
3580 rtx reg;
3581
3582 /* Make sure both operands are registers. */
3583 if (GET_CODE (x) == PLUS)
9f85ed45 3584 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3585 force_reg (Pmode, XEXP (x, 1)));
3586
3587 reg = force_reg (Pmode, x);
3588 return reg;
3589 }
4d4cbc0e 3590 else if (SPE_VECTOR_MODE (mode)
17caeff2 3591 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3592 || mode == DDmode || mode == TDmode
54b695e7 3593 || mode == DImode)))
a3170dc6 3594 {
54b695e7
AH
3595 if (mode == DImode)
3596 return NULL_RTX;
a3170dc6
AH
3597 /* We accept [reg + reg] and [reg + OFFSET]. */
3598
3599 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3600 {
3601 rtx op1 = XEXP (x, 0);
3602 rtx op2 = XEXP (x, 1);
a3170dc6 3603
c4ad648e 3604 op1 = force_reg (Pmode, op1);
a3170dc6 3605
c4ad648e
AM
3606 if (GET_CODE (op2) != REG
3607 && (GET_CODE (op2) != CONST_INT
3608 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3609 op2 = force_reg (Pmode, op2);
a3170dc6 3610
c4ad648e
AM
3611 return gen_rtx_PLUS (Pmode, op1, op2);
3612 }
a3170dc6
AH
3613
3614 return force_reg (Pmode, x);
3615 }
f1384257
AM
3616 else if (TARGET_ELF
3617 && TARGET_32BIT
3618 && TARGET_NO_TOC
3619 && ! flag_pic
9ebbca7d 3620 && GET_CODE (x) != CONST_INT
f676971a 3621 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3622 && CONSTANT_P (x)
6ac7bf2c
GK
3623 && GET_MODE_NUNITS (mode) == 1
3624 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3625 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3626 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3627 {
3628 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3629 emit_insn (gen_elf_high (reg, x));
3630 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3631 }
ee890fe2
SS
3632 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3633 && ! flag_pic
ab82a49f
AP
3634#if TARGET_MACHO
3635 && ! MACHO_DYNAMIC_NO_PIC_P
3636#endif
ee890fe2 3637 && GET_CODE (x) != CONST_INT
f676971a 3638 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3639 && CONSTANT_P (x)
4d4447b5
PB
3640 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3641 || (mode != DFmode && mode != DDmode))
f676971a 3642 && mode != DImode
ee890fe2
SS
3643 && mode != TImode)
3644 {
3645 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3646 emit_insn (gen_macho_high (reg, x));
3647 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3648 }
f676971a 3649 else if (TARGET_TOC
4d588c14 3650 && constant_pool_expr_p (x)
a9098fd0 3651 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3652 {
3653 return create_TOC_reference (x);
3654 }
3655 else
3656 return NULL_RTX;
3657}
258bfae2 3658
fdbe66f2 3659/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3660 We need to emit DTP-relative relocations. */
3661
fdbe66f2 3662static void
c973d557
JJ
3663rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3664{
3665 switch (size)
3666 {
3667 case 4:
3668 fputs ("\t.long\t", file);
3669 break;
3670 case 8:
3671 fputs (DOUBLE_INT_ASM_OP, file);
3672 break;
3673 default:
37409796 3674 gcc_unreachable ();
c973d557
JJ
3675 }
3676 output_addr_const (file, x);
3677 fputs ("@dtprel+0x8000", file);
3678}
3679
c4501e62
JJ
3680/* Construct the SYMBOL_REF for the tls_get_addr function. */
3681
3682static GTY(()) rtx rs6000_tls_symbol;
3683static rtx
863d938c 3684rs6000_tls_get_addr (void)
c4501e62
JJ
3685{
3686 if (!rs6000_tls_symbol)
3687 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3688
3689 return rs6000_tls_symbol;
3690}
3691
3692/* Construct the SYMBOL_REF for TLS GOT references. */
3693
3694static GTY(()) rtx rs6000_got_symbol;
3695static rtx
863d938c 3696rs6000_got_sym (void)
c4501e62
JJ
3697{
3698 if (!rs6000_got_symbol)
3699 {
3700 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3701 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3702 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3703 }
c4501e62
JJ
3704
3705 return rs6000_got_symbol;
3706}
3707
3708/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3709 this (thread-local) address. */
3710
3711static rtx
a2369ed3 3712rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3713{
3714 rtx dest, insn;
3715
3716 dest = gen_reg_rtx (Pmode);
3717 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3718 {
3719 rtx tlsreg;
3720
3721 if (TARGET_64BIT)
3722 {
3723 tlsreg = gen_rtx_REG (Pmode, 13);
3724 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3725 }
3726 else
3727 {
3728 tlsreg = gen_rtx_REG (Pmode, 2);
3729 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3730 }
3731 emit_insn (insn);
3732 }
3733 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3734 {
3735 rtx tlsreg, tmp;
3736
3737 tmp = gen_reg_rtx (Pmode);
3738 if (TARGET_64BIT)
3739 {
3740 tlsreg = gen_rtx_REG (Pmode, 13);
3741 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3742 }
3743 else
3744 {
3745 tlsreg = gen_rtx_REG (Pmode, 2);
3746 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3747 }
3748 emit_insn (insn);
3749 if (TARGET_64BIT)
3750 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3751 else
3752 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3753 emit_insn (insn);
3754 }
3755 else
3756 {
3757 rtx r3, got, tga, tmp1, tmp2, eqv;
3758
4fed8f8f
AM
3759 /* We currently use relocations like @got@tlsgd for tls, which
3760 means the linker will handle allocation of tls entries, placing
3761 them in the .got section. So use a pointer to the .got section,
3762 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3763 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3764 if (TARGET_64BIT)
972f427b 3765 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3766 else
3767 {
3768 if (flag_pic == 1)
3769 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3770 else
3771 {
3772 rtx gsym = rs6000_got_sym ();
3773 got = gen_reg_rtx (Pmode);
3774 if (flag_pic == 0)
3775 rs6000_emit_move (got, gsym, Pmode);
3776 else
3777 {
e65a3857 3778 rtx tmp3, mem;
c4501e62
JJ
3779 rtx first, last;
3780
c4501e62
JJ
3781 tmp1 = gen_reg_rtx (Pmode);
3782 tmp2 = gen_reg_rtx (Pmode);
3783 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3784 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3785
e65a3857
DE
3786 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3787 emit_move_insn (tmp1,
1de43f85 3788 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3789 emit_move_insn (tmp2, mem);
3790 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3791 last = emit_move_insn (got, tmp3);
bd94cb6e 3792 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3793 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3794 }
3795 }
3796 }
3797
3798 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3799 {
3800 r3 = gen_rtx_REG (Pmode, 3);
3801 if (TARGET_64BIT)
3802 insn = gen_tls_gd_64 (r3, got, addr);
3803 else
3804 insn = gen_tls_gd_32 (r3, got, addr);
3805 start_sequence ();
3806 emit_insn (insn);
3807 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3808 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3809 insn = emit_call_insn (insn);
3810 CONST_OR_PURE_CALL_P (insn) = 1;
3811 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3812 insn = get_insns ();
3813 end_sequence ();
3814 emit_libcall_block (insn, dest, r3, addr);
3815 }
3816 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3817 {
3818 r3 = gen_rtx_REG (Pmode, 3);
3819 if (TARGET_64BIT)
3820 insn = gen_tls_ld_64 (r3, got);
3821 else
3822 insn = gen_tls_ld_32 (r3, got);
3823 start_sequence ();
3824 emit_insn (insn);
3825 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3826 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3827 insn = emit_call_insn (insn);
3828 CONST_OR_PURE_CALL_P (insn) = 1;
3829 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3830 insn = get_insns ();
3831 end_sequence ();
3832 tmp1 = gen_reg_rtx (Pmode);
3833 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3834 UNSPEC_TLSLD);
3835 emit_libcall_block (insn, tmp1, r3, eqv);
3836 if (rs6000_tls_size == 16)
3837 {
3838 if (TARGET_64BIT)
3839 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3840 else
3841 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3842 }
3843 else if (rs6000_tls_size == 32)
3844 {
3845 tmp2 = gen_reg_rtx (Pmode);
3846 if (TARGET_64BIT)
3847 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3848 else
3849 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3850 emit_insn (insn);
3851 if (TARGET_64BIT)
3852 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3853 else
3854 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3855 }
3856 else
3857 {
3858 tmp2 = gen_reg_rtx (Pmode);
3859 if (TARGET_64BIT)
3860 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3861 else
3862 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3863 emit_insn (insn);
3864 insn = gen_rtx_SET (Pmode, dest,
3865 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3866 }
3867 emit_insn (insn);
3868 }
3869 else
3870 {
a7b376ee 3871 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3872 tmp2 = gen_reg_rtx (Pmode);
3873 if (TARGET_64BIT)
3874 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3875 else
3876 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3877 emit_insn (insn);
3878 if (TARGET_64BIT)
3879 insn = gen_tls_tls_64 (dest, tmp2, addr);
3880 else
3881 insn = gen_tls_tls_32 (dest, tmp2, addr);
3882 emit_insn (insn);
3883 }
3884 }
3885
3886 return dest;
3887}
3888
c4501e62
JJ
3889/* Return 1 if X contains a thread-local symbol. */
3890
3891bool
a2369ed3 3892rs6000_tls_referenced_p (rtx x)
c4501e62 3893{
cd413cab
AP
3894 if (! TARGET_HAVE_TLS)
3895 return false;
3896
c4501e62
JJ
3897 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3898}
3899
3900/* Return 1 if *X is a thread-local symbol. This is the same as
3901 rs6000_tls_symbol_ref except for the type of the unused argument. */
3902
9390387d 3903static int
a2369ed3 3904rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3905{
3906 return RS6000_SYMBOL_REF_TLS_P (*x);
3907}
3908
24ea750e
DJ
3909/* The convention appears to be to define this wherever it is used.
3910 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3911 is now used here. */
3912#ifndef REG_MODE_OK_FOR_BASE_P
3913#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3914#endif
3915
3916/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3917 replace the input X, or the original X if no replacement is called for.
3918 The output parameter *WIN is 1 if the calling macro should goto WIN,
3919 0 if it should not.
3920
3921 For RS/6000, we wish to handle large displacements off a base
 3922 register by splitting the addend across an addi/addis and the mem insn.
3923 This cuts number of extra insns needed from 3 to 1.
3924
3925 On Darwin, we use this to generate code for floating point constants.
3926 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3927 The Darwin code is inside #if TARGET_MACHO because only then is
3928 machopic_function_base_name() defined. */
3929rtx
f676971a 3930rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3931 int opnum, int type,
3932 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3933{
f676971a 3934 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3935 if (GET_CODE (x) == PLUS
3936 && GET_CODE (XEXP (x, 0)) == PLUS
3937 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3938 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3939 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3940 {
3941 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3942 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3943 opnum, (enum reload_type)type);
24ea750e
DJ
3944 *win = 1;
3945 return x;
3946 }
3deb2758 3947
24ea750e
DJ
3948#if TARGET_MACHO
3949 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3950 && GET_CODE (x) == LO_SUM
3951 && GET_CODE (XEXP (x, 0)) == PLUS
3952 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3953 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3954 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3955 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3956 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3957 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3958 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3959 {
3960 /* Result of previous invocation of this function on Darwin
6f317ef3 3961 floating point constant. */
24ea750e 3962 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3963 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3964 opnum, (enum reload_type)type);
24ea750e
DJ
3965 *win = 1;
3966 return x;
3967 }
3968#endif
4937d02d
DE
3969
3970 /* Force ld/std non-word aligned offset into base register by wrapping
3971 in offset 0. */
3972 if (GET_CODE (x) == PLUS
3973 && GET_CODE (XEXP (x, 0)) == REG
3974 && REGNO (XEXP (x, 0)) < 32
3975 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3976 && GET_CODE (XEXP (x, 1)) == CONST_INT
3977 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3978 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3979 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3980 && TARGET_POWERPC64)
3981 {
3982 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3983 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3984 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3985 opnum, (enum reload_type) type);
3986 *win = 1;
3987 return x;
3988 }
3989
24ea750e
DJ
3990 if (GET_CODE (x) == PLUS
3991 && GET_CODE (XEXP (x, 0)) == REG
3992 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3993 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3994 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3995 && !SPE_VECTOR_MODE (mode)
17caeff2 3996 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3997 || mode == DDmode || mode == TDmode
54b695e7 3998 || mode == DImode))
78c875e8 3999 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4000 {
4001 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4002 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4003 HOST_WIDE_INT high
c4ad648e 4004 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
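      /* Illustrative trace (not part of the original file): for an
         address (reg + 0x12348) this yields low = 0x2348 and
         high = 0x10000, so the high part can be materialized in a single
         add-immediate-shifted and the memory insn keeps the in-range
         displacement 0x2348, as described in the function comment.  */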
24ea750e
DJ
4005
4006 /* Check for 32-bit overflow. */
4007 if (high + low != val)
c4ad648e 4008 {
24ea750e
DJ
4009 *win = 0;
4010 return x;
4011 }
4012
4013 /* Reload the high part into a base reg; leave the low part
c4ad648e 4014 in the mem directly. */
24ea750e
DJ
4015
4016 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4017 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4018 GEN_INT (high)),
4019 GEN_INT (low));
24ea750e
DJ
4020
4021 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4022 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4023 opnum, (enum reload_type)type);
24ea750e
DJ
4024 *win = 1;
4025 return x;
4026 }
4937d02d 4027
24ea750e 4028 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4029 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4030 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4031#if TARGET_MACHO
4032 && DEFAULT_ABI == ABI_DARWIN
a29077da 4033 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4034#else
4035 && DEFAULT_ABI == ABI_V4
4036 && !flag_pic
4037#endif
7393f7f8 4038 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4039 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4040 without fprs. */
0d8c1c97 4041 && mode != TFmode
7393f7f8 4042 && mode != TDmode
7b5d92b2 4043 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4044 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4045 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4046 {
8308679f 4047#if TARGET_MACHO
a29077da
GK
4048 if (flag_pic)
4049 {
4050 rtx offset = gen_rtx_CONST (Pmode,
4051 gen_rtx_MINUS (Pmode, x,
11abc112 4052 machopic_function_base_sym ()));
a29077da
GK
4053 x = gen_rtx_LO_SUM (GET_MODE (x),
4054 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4055 gen_rtx_HIGH (Pmode, offset)), offset);
4056 }
4057 else
8308679f 4058#endif
a29077da 4059 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4060 gen_rtx_HIGH (Pmode, x), x);
a29077da 4061
24ea750e 4062 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4063 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4064 opnum, (enum reload_type)type);
24ea750e
DJ
4065 *win = 1;
4066 return x;
4067 }
4937d02d 4068
dec1f3aa
DE
4069 /* Reload an offset address wrapped by an AND that represents the
4070 masking of the lower bits. Strip the outer AND and let reload
4071 convert the offset address into an indirect address. */
4072 if (TARGET_ALTIVEC
4073 && ALTIVEC_VECTOR_MODE (mode)
4074 && GET_CODE (x) == AND
4075 && GET_CODE (XEXP (x, 0)) == PLUS
4076 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4077 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4078 && GET_CODE (XEXP (x, 1)) == CONST_INT
4079 && INTVAL (XEXP (x, 1)) == -16)
4080 {
4081 x = XEXP (x, 0);
4082 *win = 1;
4083 return x;
4084 }
4085
24ea750e 4086 if (TARGET_TOC
4d588c14 4087 && constant_pool_expr_p (x)
c1f11548 4088 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4089 {
194c524a 4090 x = create_TOC_reference (x);
24ea750e
DJ
4091 *win = 1;
4092 return x;
4093 }
4094 *win = 0;
4095 return x;
f676971a 4096}
24ea750e 4097
258bfae2
FS
4098/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4099 that is a valid memory address for an instruction.
4100 The MODE argument is the machine mode for the MEM expression
4101 that wants to use this address.
4102
 4103 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4104 refers to a constant pool entry of an address (or the sum of it
4105 plus a constant), a short (16-bit signed) constant plus a register,
4106 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4107 auto-increment. For DFmode, DDmode and DImode with a constant plus
4108 register, we must ensure that both words are addressable or PowerPC64
4109 with offset word aligned.
258bfae2 4110
4d4447b5 4111 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4112 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4113 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4114 during assembly output. */
4115int
a2369ed3 4116rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4117{
850e8d3d
DN
4118 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4119 if (TARGET_ALTIVEC
4120 && ALTIVEC_VECTOR_MODE (mode)
4121 && GET_CODE (x) == AND
4122 && GET_CODE (XEXP (x, 1)) == CONST_INT
4123 && INTVAL (XEXP (x, 1)) == -16)
4124 x = XEXP (x, 0);
4125
c4501e62
JJ
4126 if (RS6000_SYMBOL_REF_TLS_P (x))
4127 return 0;
4d588c14 4128 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4129 return 1;
4130 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4131 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4132 && !SPE_VECTOR_MODE (mode)
429ec7dc 4133 && mode != TFmode
7393f7f8 4134 && mode != TDmode
54b695e7 4135 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4136 && !(TARGET_E500_DOUBLE
4137 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4138 && TARGET_UPDATE
4d588c14 4139 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4140 return 1;
d04b6e6e 4141 if (legitimate_small_data_p (mode, x))
258bfae2 4142 return 1;
4d588c14 4143 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4144 return 1;
4145 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4146 if (! reg_ok_strict
4147 && GET_CODE (x) == PLUS
4148 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4149 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4150 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4151 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4152 return 1;
76d2b81d 4153 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4154 return 1;
4155 if (mode != TImode
76d2b81d 4156 && mode != TFmode
7393f7f8 4157 && mode != TDmode
a3170dc6
AH
4158 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4159 || TARGET_POWERPC64
4d4447b5 4160 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4161 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4162 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4163 return 1;
6fb5fa3c
DB
4164 if (GET_CODE (x) == PRE_MODIFY
4165 && mode != TImode
4166 && mode != TFmode
4167 && mode != TDmode
4168 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4169 || TARGET_POWERPC64
4d4447b5 4170 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4171 && (TARGET_POWERPC64 || mode != DImode)
4172 && !ALTIVEC_VECTOR_MODE (mode)
4173 && !SPE_VECTOR_MODE (mode)
4174 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4175 && !(TARGET_E500_DOUBLE
4176 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4177 && TARGET_UPDATE
4178 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4179 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4180 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4181 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4182 return 1;
4d588c14 4183 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4184 return 1;
4185 return 0;
4186}
4d588c14
RH
4187
4188/* Go to LABEL if ADDR (a legitimate address expression)
4189 has an effect that depends on the machine mode it is used for.
4190
4191 On the RS/6000 this is true of all integral offsets (since AltiVec
4192 modes don't allow them) or is a pre-increment or decrement.
4193
4194 ??? Except that due to conceptual problems in offsettable_address_p
4195 we can't really report the problems of integral offsets. So leave
f676971a 4196 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4197 sub-words of a TFmode operand, which is what we had before. */
4198
4199bool
a2369ed3 4200rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4201{
4202 switch (GET_CODE (addr))
4203 {
4204 case PLUS:
4205 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4206 {
4207 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4208 return val + 12 + 0x8000 >= 0x10000;
4209 }
4210 break;
4211
4212 case LO_SUM:
4213 return true;
4214
6fb5fa3c
DB
4215 case PRE_INC:
4216 case PRE_DEC:
4217 case PRE_MODIFY:
4218 return TARGET_UPDATE;
4d588c14
RH
4219
4220 default:
4221 break;
4222 }
4223
4224 return false;
4225}
d8ecbcdb 4226
d04b6e6e
EB
4227/* More elaborate version of recog's offsettable_memref_p predicate
4228 that works around the ??? note of rs6000_mode_dependent_address.
4229 In particular it accepts
4230
4231 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4232
4233 in 32-bit mode, that the recog predicate rejects. */
4234
4235bool
4236rs6000_offsettable_memref_p (rtx op)
4237{
4238 if (!MEM_P (op))
4239 return false;
4240
4241 /* First mimic offsettable_memref_p. */
4242 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4243 return true;
4244
4245 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4246 the latter predicate knows nothing about the mode of the memory
4247 reference and, therefore, assumes that it is the largest supported
4248 mode (TFmode). As a consequence, legitimate offsettable memory
4249 references are rejected. rs6000_legitimate_offset_address_p contains
4250 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4251 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4252}
4253
d8ecbcdb
AH
4254/* Return number of consecutive hard regs needed starting at reg REGNO
4255 to hold something of mode MODE.
4256 This is ordinarily the length in words of a value of mode MODE
4257 but can be less for certain modes in special long registers.
4258
4259 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4260 scalar instructions. The upper 32 bits are only available to the
4261 SIMD instructions.
4262
4263 POWER and PowerPC GPRs hold 32 bits worth;
 4264 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4265
4266int
4267rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4268{
4269 if (FP_REGNO_P (regno))
4270 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4271
4272 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4273 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4274
4275 if (ALTIVEC_REGNO_P (regno))
4276 return
4277 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4278
8521c414
JM
4279 /* The value returned for SCmode in the E500 double case is 2 for
4280 ABI compatibility; storing an SCmode value in a single register
4281 would require function_arg and rs6000_spe_function_arg to handle
4282 SCmode so as to pass the value correctly in a pair of
4283 registers. */
4284 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4285 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4286
d8ecbcdb
AH
4287 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4288}
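/* Illustrative values (assuming the usual rs6000 unit sizes of 8-byte FP
   words, 4-byte GPR words on 32-bit targets and 16-byte AltiVec words;
   not part of the original file): a DFmode value needs one FPR, the same
   value needs two 32-bit GPRs, and a V4SImode value needs a single
   AltiVec register.  */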
2aa4498c
AH
4289
4290/* Change register usage conditional on target flags. */
4291void
4292rs6000_conditional_register_usage (void)
4293{
4294 int i;
4295
4296 /* Set MQ register fixed (already call_used) if not POWER
4297 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4298 be allocated. */
4299 if (! TARGET_POWER)
4300 fixed_regs[64] = 1;
4301
7c9ac5c0 4302 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4303 if (TARGET_64BIT)
4304 fixed_regs[13] = call_used_regs[13]
4305 = call_really_used_regs[13] = 1;
4306
4307 /* Conditionally disable FPRs. */
4308 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4309 for (i = 32; i < 64; i++)
4310 fixed_regs[i] = call_used_regs[i]
c4ad648e 4311 = call_really_used_regs[i] = 1;
2aa4498c 4312
7c9ac5c0
PH
4313 /* The TOC register is not killed across calls in a way that is
4314 visible to the compiler. */
4315 if (DEFAULT_ABI == ABI_AIX)
4316 call_really_used_regs[2] = 0;
4317
2aa4498c
AH
4318 if (DEFAULT_ABI == ABI_V4
4319 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4320 && flag_pic == 2)
4321 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4322
4323 if (DEFAULT_ABI == ABI_V4
4324 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4325 && flag_pic == 1)
4326 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4327 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4328 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4329
4330 if (DEFAULT_ABI == ABI_DARWIN
4331 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4332 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4333 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4334 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4335
b4db40bf
JJ
4336 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4337 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4338 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4339
2aa4498c
AH
4340 if (TARGET_SPE)
4341 {
4342 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4343 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4344 registers in prologues and epilogues. We no longer use r14
4345 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4346 pool for link-compatibility with older versions of GCC. Once
4347 "old" code has died out, we can return r14 to the allocation
4348 pool. */
4349 fixed_regs[14]
4350 = call_used_regs[14]
4351 = call_really_used_regs[14] = 1;
2aa4498c
AH
4352 }
4353
0db747be 4354 if (!TARGET_ALTIVEC)
2aa4498c
AH
4355 {
4356 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4357 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4358 call_really_used_regs[VRSAVE_REGNO] = 1;
4359 }
4360
0db747be
DE
4361 if (TARGET_ALTIVEC)
4362 global_regs[VSCR_REGNO] = 1;
4363
2aa4498c 4364 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4365 {
4366 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4367 call_used_regs[i] = call_really_used_regs[i] = 1;
4368
4369 /* AIX reserves VR20:31 in non-extended ABI mode. */
4370 if (TARGET_XCOFF)
4371 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4372 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4373 }
2aa4498c 4374}
fb4d4348 4375\f
a4f6c312
SS
 4376/* Try to output insns to set DEST equal to the constant SOURCE if it can
4377 be done in less than N insns. Do all computations in MODE.
4378 Returns the place where the output has been placed if it can be
4379 done and the insns have been emitted. If it would take more than N
 4380 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4381
4382rtx
f676971a 4383rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4384 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4385{
af8cb5c5 4386 rtx result, insn, set;
2bfcf297
DB
4387 HOST_WIDE_INT c0, c1;
4388
37409796 4389 switch (mode)
2bfcf297 4390 {
37409796
NS
4391 case QImode:
4392 case HImode:
2bfcf297 4393 if (dest == NULL)
c4ad648e 4394 dest = gen_reg_rtx (mode);
2bfcf297
DB
4395 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4396 return dest;
bb8df8a6 4397
37409796 4398 case SImode:
b3a13419 4399 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4400
d448860e 4401 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4402 GEN_INT (INTVAL (source)
4403 & (~ (HOST_WIDE_INT) 0xffff))));
4404 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4405 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4406 GEN_INT (INTVAL (source) & 0xffff))));
4407 result = dest;
37409796
NS
4408 break;
4409
4410 case DImode:
4411 switch (GET_CODE (source))
af8cb5c5 4412 {
37409796 4413 case CONST_INT:
af8cb5c5
DE
4414 c0 = INTVAL (source);
4415 c1 = -(c0 < 0);
37409796 4416 break;
bb8df8a6 4417
37409796 4418 case CONST_DOUBLE:
2bfcf297 4419#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4420 c0 = CONST_DOUBLE_LOW (source);
4421 c1 = -(c0 < 0);
2bfcf297 4422#else
af8cb5c5
DE
4423 c0 = CONST_DOUBLE_LOW (source);
4424 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4425#endif
37409796
NS
4426 break;
4427
4428 default:
4429 gcc_unreachable ();
af8cb5c5 4430 }
af8cb5c5
DE
4431
4432 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4433 break;
4434
4435 default:
4436 gcc_unreachable ();
2bfcf297 4437 }
2bfcf297 4438
af8cb5c5
DE
4439 insn = get_last_insn ();
4440 set = single_set (insn);
4441 if (! CONSTANT_P (SET_SRC (set)))
4442 set_unique_reg_note (insn, REG_EQUAL, source);
4443
4444 return result;
2bfcf297
DB
4445}
4446
4447/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 4448 fall back to a straightforward decomposition. We do this to avoid
4449 exponential run times encountered when looking for longer sequences
4450 with rs6000_emit_set_const. */
4451static rtx
a2369ed3 4452rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4453{
4454 if (!TARGET_POWERPC64)
4455 {
4456 rtx operand1, operand2;
4457
4458 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4459 DImode);
d448860e 4460 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4461 DImode);
4462 emit_move_insn (operand1, GEN_INT (c1));
4463 emit_move_insn (operand2, GEN_INT (c2));
4464 }
4465 else
4466 {
bc06712d 4467 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4468
bc06712d 4469 ud1 = c1 & 0xffff;
f921c9c9 4470 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4471#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4472 c2 = c1 >> 32;
2bfcf297 4473#endif
bc06712d 4474 ud3 = c2 & 0xffff;
f921c9c9 4475 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4476
f676971a 4477 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4478 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4479 {
bc06712d 4480 if (ud1 & 0x8000)
b78d48dd 4481 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4482 else
4483 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4484 }
2bfcf297 4485
f676971a 4486 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4487 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4488 {
bc06712d 4489 if (ud2 & 0x8000)
f676971a 4490 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4491 - 0x80000000));
252b88f7 4492 else
bc06712d
TR
4493 emit_move_insn (dest, GEN_INT (ud2 << 16));
4494 if (ud1 != 0)
d448860e
JH
4495 emit_move_insn (copy_rtx (dest),
4496 gen_rtx_IOR (DImode, copy_rtx (dest),
4497 GEN_INT (ud1)));
252b88f7 4498 }
f676971a 4499 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4500 || (ud4 == 0 && ! (ud3 & 0x8000)))
4501 {
4502 if (ud3 & 0x8000)
f676971a 4503 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4504 - 0x80000000));
4505 else
4506 emit_move_insn (dest, GEN_INT (ud3 << 16));
4507
4508 if (ud2 != 0)
d448860e
JH
4509 emit_move_insn (copy_rtx (dest),
4510 gen_rtx_IOR (DImode, copy_rtx (dest),
4511 GEN_INT (ud2)));
4512 emit_move_insn (copy_rtx (dest),
4513 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4514 GEN_INT (16)));
bc06712d 4515 if (ud1 != 0)
d448860e
JH
4516 emit_move_insn (copy_rtx (dest),
4517 gen_rtx_IOR (DImode, copy_rtx (dest),
4518 GEN_INT (ud1)));
bc06712d 4519 }
f676971a 4520 else
bc06712d
TR
4521 {
4522 if (ud4 & 0x8000)
f676971a 4523 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4524 - 0x80000000));
4525 else
4526 emit_move_insn (dest, GEN_INT (ud4 << 16));
4527
4528 if (ud3 != 0)
d448860e
JH
4529 emit_move_insn (copy_rtx (dest),
4530 gen_rtx_IOR (DImode, copy_rtx (dest),
4531 GEN_INT (ud3)));
2bfcf297 4532
d448860e
JH
4533 emit_move_insn (copy_rtx (dest),
4534 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4535 GEN_INT (32)));
bc06712d 4536 if (ud2 != 0)
d448860e
JH
4537 emit_move_insn (copy_rtx (dest),
4538 gen_rtx_IOR (DImode, copy_rtx (dest),
4539 GEN_INT (ud2 << 16)));
bc06712d 4540 if (ud1 != 0)
d448860e
JH
4541 emit_move_insn (copy_rtx (dest),
4542 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4543 }
4544 }
2bfcf297
DB
4545 return dest;
4546}
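/* Illustrative decomposition (not part of the original file): on a
   64-bit target a constant such as 0x1234567890abcdef takes the final
   case above and is built roughly as

     lis   rD, 0x1234
     ori   rD, rD, 0x5678
     sldi  rD, rD, 32
     oris  rD, rD, 0x90ab
     ori   rD, rD, 0xcdef

   i.e. one move plus an IOR for each non-zero 16-bit group, with a
   single shift by 32 in between.  */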
4547
76d2b81d 4548/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4549 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4550
4551static void
4552rs6000_eliminate_indexed_memrefs (rtx operands[2])
4553{
4554 if (GET_CODE (operands[0]) == MEM
4555 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4556 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4557 && ! reload_in_progress)
4558 operands[0]
4559 = replace_equiv_address (operands[0],
4560 copy_addr_to_reg (XEXP (operands[0], 0)));
4561
4562 if (GET_CODE (operands[1]) == MEM
4563 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4564 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4565 && ! reload_in_progress)
4566 operands[1]
4567 = replace_equiv_address (operands[1],
4568 copy_addr_to_reg (XEXP (operands[1], 0)));
4569}
4570
fb4d4348
GK
4571/* Emit a move from SOURCE to DEST in mode MODE. */
4572void
a2369ed3 4573rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4574{
4575 rtx operands[2];
4576 operands[0] = dest;
4577 operands[1] = source;
f676971a 4578
fb4d4348
GK
4579 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4580 if (GET_CODE (operands[1]) == CONST_DOUBLE
4581 && ! FLOAT_MODE_P (mode)
4582 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4583 {
4584 /* FIXME. This should never happen. */
4585 /* Since it seems that it does, do the safe thing and convert
4586 to a CONST_INT. */
2496c7bd 4587 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4588 }
37409796
NS
4589 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4590 || FLOAT_MODE_P (mode)
4591 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4592 || CONST_DOUBLE_LOW (operands[1]) < 0)
4593 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4594 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4595
c9e8cb32
DD
4596 /* Check if GCC is setting up a block move that will end up using FP
4597 registers as temporaries. We must make sure this is acceptable. */
4598 if (GET_CODE (operands[0]) == MEM
4599 && GET_CODE (operands[1]) == MEM
4600 && mode == DImode
41543739
GK
4601 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4602 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4603 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4604 ? 32 : MEM_ALIGN (operands[0])))
4605 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4606 ? 32
41543739
GK
4607 : MEM_ALIGN (operands[1]))))
4608 && ! MEM_VOLATILE_P (operands [0])
4609 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4610 {
41543739
GK
4611 emit_move_insn (adjust_address (operands[0], SImode, 0),
4612 adjust_address (operands[1], SImode, 0));
d448860e
JH
4613 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4614 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4615 return;
4616 }
630d42a0 4617
b3a13419 4618 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4619 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4620 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4621
a3170dc6
AH
4622 if (mode == SFmode && ! TARGET_POWERPC
4623 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4624 && GET_CODE (operands[0]) == MEM)
fb4d4348 4625 {
ffc14f31
GK
4626 int regnum;
4627
4628 if (reload_in_progress || reload_completed)
4629 regnum = true_regnum (operands[1]);
4630 else if (GET_CODE (operands[1]) == REG)
4631 regnum = REGNO (operands[1]);
4632 else
4633 regnum = -1;
f676971a 4634
fb4d4348
GK
4635 /* If operands[1] is a register, on POWER it may have
4636 double-precision data in it, so truncate it to single
4637 precision. */
4638 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4639 {
4640 rtx newreg;
b3a13419 4641 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4642 : gen_reg_rtx (mode));
fb4d4348
GK
4643 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4644 operands[1] = newreg;
4645 }
4646 }
4647
c4501e62
JJ
4648 /* Recognize the case where operand[1] is a reference to thread-local
4649 data and load its address to a register. */
84f52ebd 4650 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4651 {
84f52ebd
RH
4652 enum tls_model model;
4653 rtx tmp = operands[1];
4654 rtx addend = NULL;
4655
4656 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4657 {
4658 addend = XEXP (XEXP (tmp, 0), 1);
4659 tmp = XEXP (XEXP (tmp, 0), 0);
4660 }
4661
4662 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4663 model = SYMBOL_REF_TLS_MODEL (tmp);
4664 gcc_assert (model != 0);
4665
4666 tmp = rs6000_legitimize_tls_address (tmp, model);
4667 if (addend)
4668 {
4669 tmp = gen_rtx_PLUS (mode, tmp, addend);
4670 tmp = force_operand (tmp, operands[0]);
4671 }
4672 operands[1] = tmp;
c4501e62
JJ
4673 }
4674
8f4e6caf
RH
4675 /* Handle the case where reload calls us with an invalid address. */
4676 if (reload_in_progress && mode == Pmode
69ef87e2 4677 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4678 || ! nonimmediate_operand (operands[0], mode)))
4679 goto emit_set;
4680
a9baceb1
GK
4681 /* 128-bit constant floating-point values on Darwin should really be
4682 loaded as two parts. */
8521c414 4683 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4684 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4685 {
4686 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4687 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4688 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4689 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4690 simplify_gen_subreg (imode, operands[1], mode, 0),
4691 imode);
4692 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4693 GET_MODE_SIZE (imode)),
4694 simplify_gen_subreg (imode, operands[1], mode,
4695 GET_MODE_SIZE (imode)),
4696 imode);
a9baceb1
GK
4697 return;
4698 }
4699
fb4d4348
GK
4700 /* FIXME: In the long term, this switch statement should go away
4701 and be replaced by a sequence of tests based on things like
4702 mode == Pmode. */
4703 switch (mode)
4704 {
4705 case HImode:
4706 case QImode:
4707 if (CONSTANT_P (operands[1])
4708 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4709 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4710 break;
4711
06f4e019 4712 case TFmode:
7393f7f8 4713 case TDmode:
76d2b81d
DJ
4714 rs6000_eliminate_indexed_memrefs (operands);
4715 /* fall through */
4716
fb4d4348 4717 case DFmode:
7393f7f8 4718 case DDmode:
fb4d4348 4719 case SFmode:
f676971a 4720 if (CONSTANT_P (operands[1])
fb4d4348 4721 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4722 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4723 break;
f676971a 4724
0ac081f6
AH
4725 case V16QImode:
4726 case V8HImode:
4727 case V4SFmode:
4728 case V4SImode:
a3170dc6
AH
4729 case V4HImode:
4730 case V2SFmode:
4731 case V2SImode:
00a892b8 4732 case V1DImode:
69ef87e2 4733 if (CONSTANT_P (operands[1])
d744e06e 4734 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4735 operands[1] = force_const_mem (mode, operands[1]);
4736 break;
f676971a 4737
fb4d4348 4738 case SImode:
a9098fd0 4739 case DImode:
fb4d4348
GK
4740 /* Use default pattern for address of ELF small data */
4741 if (TARGET_ELF
a9098fd0 4742 && mode == Pmode
f607bc57 4743 && DEFAULT_ABI == ABI_V4
f676971a 4744 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4745 || GET_CODE (operands[1]) == CONST)
4746 && small_data_operand (operands[1], mode))
fb4d4348
GK
4747 {
4748 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4749 return;
4750 }
4751
f607bc57 4752 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4753 && mode == Pmode && mode == SImode
4754 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4755 {
4756 emit_insn (gen_movsi_got (operands[0], operands[1]));
4757 return;
4758 }
4759
ee890fe2 4760 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4761 && TARGET_NO_TOC
4762 && ! flag_pic
a9098fd0 4763 && mode == Pmode
fb4d4348
GK
4764 && CONSTANT_P (operands[1])
4765 && GET_CODE (operands[1]) != HIGH
4766 && GET_CODE (operands[1]) != CONST_INT)
4767 {
b3a13419
ILT
4768 rtx target = (!can_create_pseudo_p ()
4769 ? operands[0]
4770 : gen_reg_rtx (mode));
fb4d4348
GK
4771
4772 /* If this is a function address on -mcall-aixdesc,
4773 convert it to the address of the descriptor. */
4774 if (DEFAULT_ABI == ABI_AIX
4775 && GET_CODE (operands[1]) == SYMBOL_REF
4776 && XSTR (operands[1], 0)[0] == '.')
4777 {
4778 const char *name = XSTR (operands[1], 0);
4779 rtx new_ref;
4780 while (*name == '.')
4781 name++;
4782 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4783 CONSTANT_POOL_ADDRESS_P (new_ref)
4784 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4785 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4786 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4787 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4788 operands[1] = new_ref;
4789 }
7509c759 4790
ee890fe2
SS
4791 if (DEFAULT_ABI == ABI_DARWIN)
4792 {
ab82a49f
AP
4793#if TARGET_MACHO
4794 if (MACHO_DYNAMIC_NO_PIC_P)
4795 {
4796 /* Take care of any required data indirection. */
4797 operands[1] = rs6000_machopic_legitimize_pic_address (
4798 operands[1], mode, operands[0]);
4799 if (operands[0] != operands[1])
4800 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4801 operands[0], operands[1]));
ab82a49f
AP
4802 return;
4803 }
4804#endif
b8a55285
AP
4805 emit_insn (gen_macho_high (target, operands[1]));
4806 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4807 return;
4808 }
4809
fb4d4348
GK
4810 emit_insn (gen_elf_high (target, operands[1]));
4811 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4812 return;
4813 }
4814
a9098fd0
GK
4815 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4816 and we have put it in the TOC, we just need to make a TOC-relative
4817 reference to it. */
4818 if (TARGET_TOC
4819 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4820 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4821 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4822 get_pool_mode (operands[1])))
fb4d4348 4823 {
a9098fd0 4824 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4825 }
a9098fd0
GK
4826 else if (mode == Pmode
4827 && CONSTANT_P (operands[1])
38886f37
AO
4828 && ((GET_CODE (operands[1]) != CONST_INT
4829 && ! easy_fp_constant (operands[1], mode))
4830 || (GET_CODE (operands[1]) == CONST_INT
4831 && num_insns_constant (operands[1], mode) > 2)
4832 || (GET_CODE (operands[0]) == REG
4833 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4834 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4835 && ! legitimate_constant_pool_address_p (operands[1])
4836 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4837 {
 4838 /* Emit a USE operation so that, when expensive optimizations are
 4839 turned on, the constant isn't deleted merely because nothing
 4840 references it. This should only be done for operands that
4841 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4842 This should not be done for operands that contain LABEL_REFs.
4843 For now, we just handle the obvious case. */
4844 if (GET_CODE (operands[1]) != LABEL_REF)
4845 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4846
c859cda6 4847#if TARGET_MACHO
ee890fe2 4848 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4849 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4850 {
ee890fe2
SS
4851 operands[1] =
4852 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4853 operands[0]);
4854 if (operands[0] != operands[1])
4855 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4856 return;
4857 }
c859cda6 4858#endif
ee890fe2 4859
fb4d4348
GK
4860 /* If we are to limit the number of things we put in the TOC and
4861 this is a symbol plus a constant we can add in one insn,
4862 just put the symbol in the TOC and add the constant. Don't do
4863 this if reload is in progress. */
4864 if (GET_CODE (operands[1]) == CONST
4865 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4866 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4867 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4868 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4869 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4870 && ! side_effects_p (operands[0]))
4871 {
a4f6c312
SS
4872 rtx sym =
4873 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4874 rtx other = XEXP (XEXP (operands[1], 0), 1);
4875
a9098fd0
GK
4876 sym = force_reg (mode, sym);
4877 if (mode == SImode)
4878 emit_insn (gen_addsi3 (operands[0], sym, other));
4879 else
4880 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4881 return;
4882 }
4883
a9098fd0 4884 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4885
f676971a 4886 if (TARGET_TOC
4d588c14 4887 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4888 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4889 get_pool_constant (XEXP (operands[1], 0)),
4890 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4891 {
ba4828e0 4892 operands[1]
542a8afa 4893 = gen_const_mem (mode,
c4ad648e 4894 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4895 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4896 }
fb4d4348
GK
4897 }
4898 break;
a9098fd0 4899
fb4d4348 4900 case TImode:
76d2b81d
DJ
4901 rs6000_eliminate_indexed_memrefs (operands);
4902
27dc0551
DE
4903 if (TARGET_POWER)
4904 {
4905 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4906 gen_rtvec (2,
4907 gen_rtx_SET (VOIDmode,
4908 operands[0], operands[1]),
4909 gen_rtx_CLOBBER (VOIDmode,
4910 gen_rtx_SCRATCH (SImode)))));
4911 return;
4912 }
fb4d4348
GK
4913 break;
4914
4915 default:
37409796 4916 gcc_unreachable ();
fb4d4348
GK
4917 }
4918
a9098fd0
GK
4919 /* Above, we may have called force_const_mem which may have returned
4920 an invalid address. If we can, fix this up; otherwise, reload will
4921 have to deal with it. */
8f4e6caf
RH
4922 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4923 operands[1] = validize_mem (operands[1]);
a9098fd0 4924
8f4e6caf 4925 emit_set:
fb4d4348
GK
4926 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4927}
4697a36c 4928\f
2858f73a
GK
4929/* Nonzero if we can use a floating-point register to pass this arg. */
4930#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4931 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4932 && (MODE) != SDmode \
2858f73a
GK
4933 && (CUM)->fregno <= FP_ARG_MAX_REG \
4934 && TARGET_HARD_FLOAT && TARGET_FPRS)
4935
4936/* Nonzero if we can use an AltiVec register to pass this arg. */
4937#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4938 (ALTIVEC_VECTOR_MODE (MODE) \
4939 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4940 && TARGET_ALTIVEC_ABI \
83953138 4941 && (NAMED))
2858f73a 4942
c6e8c921
GK
4943/* Return nonzero if the function value must be returned in memory,
 4944 just as large structures always are. TYPE will be
4945 the data type of the value, and FNTYPE will be the type of the
4946 function doing the returning, or @code{NULL} for libcalls.
4947
4948 The AIX ABI for the RS/6000 specifies that all structures are
4949 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4950 specifies that structures <= 8 bytes are returned in r3/r4, but a
4951 draft put them in memory, and GCC used to implement the draft
df01da37 4952 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4953 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4954 compatibility can change DRAFT_V4_STRUCT_RET to override the
4955 default, and -m switches get the final word. See
4956 rs6000_override_options for more details.
4957
4958 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4959 long double support is enabled. These values are returned in memory.
4960
4961 int_size_in_bytes returns -1 for variable size objects, which go in
4962 memory always. The cast to unsigned makes -1 > 8. */
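/* For example, under the SVR4 ABI with aix_struct_return clear, an 8-byte
   struct is returned in r3/r4, while a 12-byte struct, or any variable-size
   object (int_size_in_bytes gives -1, which the unsigned cast makes huge),
   is returned in memory.  */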
4963
4964static bool
586de218 4965rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4966{
594a51fe
SS
 4967 /* In the darwin64 ABI, try to use registers for larger structs
4968 if possible. */
0b5383eb 4969 if (rs6000_darwin64_abi
594a51fe 4970 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4971 && int_size_in_bytes (type) > 0)
4972 {
4973 CUMULATIVE_ARGS valcum;
4974 rtx valret;
4975
4976 valcum.words = 0;
4977 valcum.fregno = FP_ARG_MIN_REG;
4978 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4979 /* Do a trial code generation as if this were going to be passed
4980 as an argument; if any part goes in memory, we return NULL. */
4981 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4982 if (valret)
4983 return false;
4984 /* Otherwise fall through to more conventional ABI rules. */
4985 }
594a51fe 4986
c6e8c921 4987 if (AGGREGATE_TYPE_P (type)
df01da37 4988 && (aix_struct_return
c6e8c921
GK
4989 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4990 return true;
b693336b 4991
bada2eb8
DE
4992 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4993 modes only exist for GCC vector types if -maltivec. */
4994 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4995 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4996 return false;
4997
b693336b
PB
4998 /* Return synthetic vectors in memory. */
4999 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5000 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5001 {
5002 static bool warned_for_return_big_vectors = false;
5003 if (!warned_for_return_big_vectors)
5004 {
d4ee4d25 5005 warning (0, "GCC vector returned by reference: "
b693336b
PB
5006 "non-standard ABI extension with no compatibility guarantee");
5007 warned_for_return_big_vectors = true;
5008 }
5009 return true;
5010 }
5011
602ea4d3 5012 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5013 return true;
ad630bef 5014
c6e8c921
GK
5015 return false;
5016}
5017
4697a36c
MM
5018/* Initialize a variable CUM of type CUMULATIVE_ARGS
5019 for a call to a function whose data type is FNTYPE.
5020 For a library call, FNTYPE is 0.
5021
5022 For incoming args we set the number of arguments in the prototype large
1c20ae99 5023 enough that we never return a PARALLEL. */
4697a36c
MM
5024
5025void
f676971a 5026init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5027 rtx libname ATTRIBUTE_UNUSED, int incoming,
5028 int libcall, int n_named_args)
4697a36c
MM
5029{
5030 static CUMULATIVE_ARGS zero_cumulative;
5031
5032 *cum = zero_cumulative;
5033 cum->words = 0;
5034 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5035 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5036 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5037 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5038 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5039 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5040 cum->stdarg = fntype
5041 && (TYPE_ARG_TYPES (fntype) != 0
5042 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5043 != void_type_node));
4697a36c 5044
0f6937fe
AM
5045 cum->nargs_prototype = 0;
5046 if (incoming || cum->prototype)
5047 cum->nargs_prototype = n_named_args;
4697a36c 5048
a5c76ee6 5049 /* Check for a longcall attribute. */
3eb4e360
AM
5050 if ((!fntype && rs6000_default_long_calls)
5051 || (fntype
5052 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5053 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5054 cum->call_cookie |= CALL_LONG;
6a4cee5f 5055
4697a36c
MM
5056 if (TARGET_DEBUG_ARG)
5057 {
5058 fprintf (stderr, "\ninit_cumulative_args:");
5059 if (fntype)
5060 {
5061 tree ret_type = TREE_TYPE (fntype);
5062 fprintf (stderr, " ret code = %s,",
5063 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5064 }
5065
6a4cee5f
MM
5066 if (cum->call_cookie & CALL_LONG)
5067 fprintf (stderr, " longcall,");
5068
4697a36c
MM
5069 fprintf (stderr, " proto = %d, nargs = %d\n",
5070 cum->prototype, cum->nargs_prototype);
5071 }
f676971a 5072
c4ad648e
AM
5073 if (fntype
5074 && !TARGET_ALTIVEC
5075 && TARGET_ALTIVEC_ABI
5076 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5077 {
c85ce869 5078 error ("cannot return value in vector register because"
c4ad648e 5079 " altivec instructions are disabled, use -maltivec"
c85ce869 5080 " to enable them");
c4ad648e 5081 }
4697a36c
MM
5082}
5083\f
fe984136
RH
5084/* Return true if TYPE must be passed on the stack and not in registers. */
5085
5086static bool
586de218 5087rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5088{
5089 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5090 return must_pass_in_stack_var_size (mode, type);
5091 else
5092 return must_pass_in_stack_var_size_or_pad (mode, type);
5093}
5094
c229cba9
DE
5095/* If defined, a C expression which determines whether, and in which
5096 direction, to pad out an argument with extra space. The value
5097 should be of type `enum direction': either `upward' to pad above
5098 the argument, `downward' to pad below, or `none' to inhibit
5099 padding.
5100
 5101 For the AIX ABI, structs are always stored left-shifted in their
5102 argument slot. */
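/* For example, when !AGGREGATE_PADDING_FIXED on a big-endian target, a 1-,
   2- or 4-byte argument is padded downward, while a 3-byte struct falls
   through and is padded upward.  */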
5103
9ebbca7d 5104enum direction
586de218 5105function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5106{
6e985040
AM
5107#ifndef AGGREGATE_PADDING_FIXED
5108#define AGGREGATE_PADDING_FIXED 0
5109#endif
5110#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5111#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5112#endif
5113
5114 if (!AGGREGATE_PADDING_FIXED)
5115 {
5116 /* GCC used to pass structures of the same size as integer types as
5117 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5118 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5119 passed padded downward, except that -mstrict-align further
5120 muddied the water in that multi-component structures of 2 and 4
5121 bytes in size were passed padded upward.
5122
5123 The following arranges for best compatibility with previous
5124 versions of gcc, but removes the -mstrict-align dependency. */
5125 if (BYTES_BIG_ENDIAN)
5126 {
5127 HOST_WIDE_INT size = 0;
5128
5129 if (mode == BLKmode)
5130 {
5131 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5132 size = int_size_in_bytes (type);
5133 }
5134 else
5135 size = GET_MODE_SIZE (mode);
5136
5137 if (size == 1 || size == 2 || size == 4)
5138 return downward;
5139 }
5140 return upward;
5141 }
5142
5143 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5144 {
5145 if (type != 0 && AGGREGATE_TYPE_P (type))
5146 return upward;
5147 }
c229cba9 5148
d3704c46
KH
5149 /* Fall back to the default. */
5150 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5151}
5152
b6c9286a 5153/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5154 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5155 PARM_BOUNDARY is used for all arguments.
f676971a 5156
84e9ad15
AM
5157 V.4 wants long longs and doubles to be double word aligned. Just
5158 testing the mode size is a boneheaded way to do this as it means
5159 that other types such as complex int are also double word aligned.
5160 However, we're stuck with this because changing the ABI might break
5161 existing library interfaces.
5162
b693336b
PB
5163 Doubleword align SPE vectors.
5164 Quadword align Altivec vectors.
5165 Quadword align large synthetic vector types. */
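/* For example, under the V.4 ABI a DImode or DFmode argument reports a
   64-bit boundary, an AltiVec V4SImode vector reports 128 bits, and a plain
   SImode argument falls back to PARM_BOUNDARY.  */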
b6c9286a
MM
5166
5167int
b693336b 5168function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5169{
84e9ad15
AM
5170 if (DEFAULT_ABI == ABI_V4
5171 && (GET_MODE_SIZE (mode) == 8
5172 || (TARGET_HARD_FLOAT
5173 && TARGET_FPRS
7393f7f8 5174 && (mode == TFmode || mode == TDmode))))
4ed78545 5175 return 64;
ad630bef
DE
5176 else if (SPE_VECTOR_MODE (mode)
5177 || (type && TREE_CODE (type) == VECTOR_TYPE
5178 && int_size_in_bytes (type) >= 8
5179 && int_size_in_bytes (type) < 16))
e1f83b4d 5180 return 64;
ad630bef
DE
5181 else if (ALTIVEC_VECTOR_MODE (mode)
5182 || (type && TREE_CODE (type) == VECTOR_TYPE
5183 && int_size_in_bytes (type) >= 16))
0ac081f6 5184 return 128;
0b5383eb
DJ
5185 else if (rs6000_darwin64_abi && mode == BLKmode
5186 && type && TYPE_ALIGN (type) > 64)
5187 return 128;
9ebbca7d 5188 else
b6c9286a 5189 return PARM_BOUNDARY;
b6c9286a 5190}
c53bdcf5 5191
294bd182
AM
5192/* For a function parm of MODE and TYPE, return the starting word in
5193 the parameter area. NWORDS of the parameter area are already used. */
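/* For example, in the non-V.4 case the save area starts at word offset 6;
   with NWORDS == 3 and an argument aligned to twice PARM_BOUNDARY
   (align == 1), the result is 3 + (-(6 + 3) & 1) == 4, which puts the
   argument at an even absolute word.  */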
5194
5195static unsigned int
5196rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5197{
5198 unsigned int align;
5199 unsigned int parm_offset;
5200
5201 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5202 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5203 return nwords + (-(parm_offset + nwords) & align);
5204}
5205
c53bdcf5
AM
5206/* Compute the size (in words) of a function argument. */
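/* For example, a 5-byte BLKmode argument occupies (5 + 3) >> 2 == 2 words
   when TARGET_32BIT and (5 + 7) >> 3 == 1 word otherwise.  */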
5207
5208static unsigned long
5209rs6000_arg_size (enum machine_mode mode, tree type)
5210{
5211 unsigned long size;
5212
5213 if (mode != BLKmode)
5214 size = GET_MODE_SIZE (mode);
5215 else
5216 size = int_size_in_bytes (type);
5217
5218 if (TARGET_32BIT)
5219 return (size + 3) >> 2;
5220 else
5221 return (size + 7) >> 3;
5222}
b6c9286a 5223\f
0b5383eb 5224/* Use this to flush pending int fields. */
594a51fe
SS
5225
5226static void
0b5383eb
DJ
5227rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5228 HOST_WIDE_INT bitpos)
594a51fe 5229{
0b5383eb
DJ
5230 unsigned int startbit, endbit;
5231 int intregs, intoffset;
5232 enum machine_mode mode;
594a51fe 5233
0b5383eb
DJ
5234 if (cum->intoffset == -1)
5235 return;
594a51fe 5236
0b5383eb
DJ
5237 intoffset = cum->intoffset;
5238 cum->intoffset = -1;
5239
5240 if (intoffset % BITS_PER_WORD != 0)
5241 {
5242 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5243 MODE_INT, 0);
5244 if (mode == BLKmode)
594a51fe 5245 {
0b5383eb
DJ
5246 /* We couldn't find an appropriate mode, which happens,
5247 e.g., in packed structs when there are 3 bytes to load.
 5248 Move intoffset back to the beginning of the word in this
5249 case. */
5250 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5251 }
594a51fe 5252 }
0b5383eb
DJ
5253
5254 startbit = intoffset & -BITS_PER_WORD;
5255 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5256 intregs = (endbit - startbit) / BITS_PER_WORD;
5257 cum->words += intregs;
5258}
5259
5260/* The darwin64 ABI calls for us to recurse down through structs,
5261 looking for elements passed in registers. Unfortunately, we have
5262 to track int register count here also because of misalignments
5263 in powerpc alignment mode. */
5264
5265static void
5266rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5267 tree type,
5268 HOST_WIDE_INT startbitpos)
5269{
5270 tree f;
5271
5272 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5273 if (TREE_CODE (f) == FIELD_DECL)
5274 {
5275 HOST_WIDE_INT bitpos = startbitpos;
5276 tree ftype = TREE_TYPE (f);
70fb00df
AP
5277 enum machine_mode mode;
5278 if (ftype == error_mark_node)
5279 continue;
5280 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5281
5282 if (DECL_SIZE (f) != 0
5283 && host_integerp (bit_position (f), 1))
5284 bitpos += int_bit_position (f);
5285
5286 /* ??? FIXME: else assume zero offset. */
5287
5288 if (TREE_CODE (ftype) == RECORD_TYPE)
5289 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5290 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5291 {
5292 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5293 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5294 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5295 }
5296 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5297 {
5298 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5299 cum->vregno++;
5300 cum->words += 2;
5301 }
5302 else if (cum->intoffset == -1)
5303 cum->intoffset = bitpos;
5304 }
594a51fe
SS
5305}
5306
4697a36c
MM
5307/* Update the data in CUM to advance over an argument
5308 of mode MODE and data type TYPE.
b2d04ecf
AM
5309 (TYPE is null for libcalls where that information may not be available.)
5310
5311 Note that for args passed by reference, function_arg will be called
5312 with MODE and TYPE set to that of the pointer to the arg, not the arg
5313 itself. */
4697a36c
MM
5314
5315void
f676971a 5316function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5317 tree type, int named, int depth)
4697a36c 5318{
0b5383eb
DJ
5319 int size;
5320
594a51fe
SS
5321 /* Only tick off an argument if we're not recursing. */
5322 if (depth == 0)
5323 cum->nargs_prototype--;
4697a36c 5324
ad630bef
DE
5325 if (TARGET_ALTIVEC_ABI
5326 && (ALTIVEC_VECTOR_MODE (mode)
5327 || (type && TREE_CODE (type) == VECTOR_TYPE
5328 && int_size_in_bytes (type) == 16)))
0ac081f6 5329 {
4ed78545
AM
5330 bool stack = false;
5331
2858f73a 5332 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5333 {
6d0ef01e
HP
5334 cum->vregno++;
5335 if (!TARGET_ALTIVEC)
c85ce869 5336 error ("cannot pass argument in vector register because"
6d0ef01e 5337 " altivec instructions are disabled, use -maltivec"
c85ce869 5338 " to enable them");
4ed78545
AM
5339
5340 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5341 even if it is going to be passed in a vector register.
4ed78545
AM
5342 Darwin does the same for variable-argument functions. */
5343 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5344 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5345 stack = true;
6d0ef01e 5346 }
4ed78545
AM
5347 else
5348 stack = true;
5349
5350 if (stack)
c4ad648e 5351 {
a594a19c 5352 int align;
f676971a 5353
2858f73a
GK
5354 /* Vector parameters must be 16-byte aligned. This places
5355 them at 2 mod 4 in terms of words in 32-bit mode, since
5356 the parameter save area starts at offset 24 from the
5357 stack. In 64-bit mode, they just have to start on an
5358 even word, since the parameter save area is 16-byte
5359 aligned. Space for GPRs is reserved even if the argument
5360 will be passed in memory. */
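	  /* For example, in 32-bit mode with cum->words == 3 the adjustment
	     below is (2 - 3) & 3 == 3, so the vector starts at word 6,
	     which is 2 mod 4 as required.  */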
5361 if (TARGET_32BIT)
4ed78545 5362 align = (2 - cum->words) & 3;
2858f73a
GK
5363 else
5364 align = cum->words & 1;
c53bdcf5 5365 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5366
a594a19c
GK
5367 if (TARGET_DEBUG_ARG)
5368 {
f676971a 5369 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5370 cum->words, align);
5371 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5372 cum->nargs_prototype, cum->prototype,
2858f73a 5373 GET_MODE_NAME (mode));
a594a19c
GK
5374 }
5375 }
0ac081f6 5376 }
a4b0320c 5377 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5378 && !cum->stdarg
5379 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5380 cum->sysv_gregno++;
594a51fe
SS
5381
5382 else if (rs6000_darwin64_abi
5383 && mode == BLKmode
0b5383eb
DJ
5384 && TREE_CODE (type) == RECORD_TYPE
5385 && (size = int_size_in_bytes (type)) > 0)
5386 {
5387 /* Variable sized types have size == -1 and are
5388 treated as if consisting entirely of ints.
5389 Pad to 16 byte boundary if needed. */
5390 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5391 && (cum->words % 2) != 0)
5392 cum->words++;
5393 /* For varargs, we can just go up by the size of the struct. */
5394 if (!named)
5395 cum->words += (size + 7) / 8;
5396 else
5397 {
5398 /* It is tempting to say int register count just goes up by
5399 sizeof(type)/8, but this is wrong in a case such as
5400 { int; double; int; } [powerpc alignment]. We have to
5401 grovel through the fields for these too. */
5402 cum->intoffset = 0;
5403 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5404 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5405 size * BITS_PER_UNIT);
5406 }
5407 }
f607bc57 5408 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5409 {
a3170dc6 5410 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5411 && (mode == SFmode || mode == DFmode
7393f7f8 5412 || mode == DDmode || mode == TDmode
602ea4d3 5413 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5414 {
2d83f070
JJ
5415 /* _Decimal128 must use an even/odd register pair. This assumes
5416 that the register number is odd when fregno is odd. */
5417 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5418 cum->fregno++;
5419
5420 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5421 <= FP_ARG_V4_MAX_REG)
602ea4d3 5422 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5423 else
5424 {
602ea4d3 5425 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5426 if (mode == DFmode || mode == TFmode
5427 || mode == DDmode || mode == TDmode)
c4ad648e 5428 cum->words += cum->words & 1;
c53bdcf5 5429 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5430 }
4697a36c 5431 }
4cc833b7
RH
5432 else
5433 {
b2d04ecf 5434 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5435 int gregno = cum->sysv_gregno;
5436
4ed78545
AM
5437 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5438 (r7,r8) or (r9,r10). As does any other 2 word item such
5439 as complex int due to a historical mistake. */
5440 if (n_words == 2)
5441 gregno += (1 - gregno) & 1;
4cc833b7 5442
4ed78545 5443 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5444 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5445 {
4ed78545
AM
5446 /* Long long and SPE vectors are aligned on the stack.
5447 So are other 2 word items such as complex int due to
5448 a historical mistake. */
4cc833b7
RH
5449 if (n_words == 2)
5450 cum->words += cum->words & 1;
5451 cum->words += n_words;
5452 }
4697a36c 5453
4cc833b7
RH
5454 /* Note: continuing to accumulate gregno past when we've started
5455 spilling to the stack indicates the fact that we've started
5456 spilling to the stack to expand_builtin_saveregs. */
5457 cum->sysv_gregno = gregno + n_words;
5458 }
4697a36c 5459
4cc833b7
RH
5460 if (TARGET_DEBUG_ARG)
5461 {
5462 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5463 cum->words, cum->fregno);
5464 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5465 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5466 fprintf (stderr, "mode = %4s, named = %d\n",
5467 GET_MODE_NAME (mode), named);
5468 }
4697a36c
MM
5469 }
5470 else
4cc833b7 5471 {
b2d04ecf 5472 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5473 int start_words = cum->words;
5474 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5475
294bd182 5476 cum->words = align_words + n_words;
4697a36c 5477
ebb109ad 5478 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5479 && mode != SDmode
a3170dc6 5480 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5481 {
5482 /* _Decimal128 must be passed in an even/odd float register pair.
5483 This assumes that the register number is odd when fregno is
5484 odd. */
5485 if (mode == TDmode && (cum->fregno % 2) == 1)
5486 cum->fregno++;
5487 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5488 }
4cc833b7
RH
5489
5490 if (TARGET_DEBUG_ARG)
5491 {
5492 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5493 cum->words, cum->fregno);
5494 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5495 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5496 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5497 named, align_words - start_words, depth);
4cc833b7
RH
5498 }
5499 }
4697a36c 5500}
a6c9bed4 5501
f82f556d
AH
5502static rtx
5503spe_build_register_parallel (enum machine_mode mode, int gregno)
5504{
17caeff2 5505 rtx r1, r3, r5, r7;
f82f556d 5506
37409796 5507 switch (mode)
f82f556d 5508 {
37409796 5509 case DFmode:
4d4447b5 5510 case DDmode:
54b695e7
AH
5511 r1 = gen_rtx_REG (DImode, gregno);
5512 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5513 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5514
5515 case DCmode:
17caeff2 5516 case TFmode:
4d4447b5 5517 case TDmode:
54b695e7
AH
5518 r1 = gen_rtx_REG (DImode, gregno);
5519 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5520 r3 = gen_rtx_REG (DImode, gregno + 2);
5521 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5522 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5523
17caeff2
JM
5524 case TCmode:
5525 r1 = gen_rtx_REG (DImode, gregno);
5526 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5527 r3 = gen_rtx_REG (DImode, gregno + 2);
5528 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5529 r5 = gen_rtx_REG (DImode, gregno + 4);
5530 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5531 r7 = gen_rtx_REG (DImode, gregno + 6);
5532 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5533 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5534
37409796
NS
5535 default:
5536 gcc_unreachable ();
f82f556d 5537 }
f82f556d 5538}
b78d48dd 5539
f82f556d 5540/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5541static rtx
f676971a 5542rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5543 tree type)
a6c9bed4 5544{
f82f556d
AH
5545 int gregno = cum->sysv_gregno;
5546
 5547 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but doubles
600e1f95 5548 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5
PB
5549 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5550 || mode == DDmode || mode == TDmode
5551 || mode == DCmode || mode == TCmode))
f82f556d 5552 {
b5870bee
AH
5553 int n_words = rs6000_arg_size (mode, type);
5554
f82f556d 5555 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5556 if (mode == DFmode || mode == DDmode)
b5870bee 5557 gregno += (1 - gregno) & 1;
f82f556d 5558
b5870bee
AH
5559 /* Multi-reg args are not split between registers and stack. */
5560 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5561 return NULL_RTX;
5562
5563 return spe_build_register_parallel (mode, gregno);
5564 }
a6c9bed4
AH
5565 if (cum->stdarg)
5566 {
c53bdcf5 5567 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5568
5569 /* SPE vectors are put in odd registers. */
5570 if (n_words == 2 && (gregno & 1) == 0)
5571 gregno += 1;
5572
5573 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5574 {
5575 rtx r1, r2;
5576 enum machine_mode m = SImode;
5577
5578 r1 = gen_rtx_REG (m, gregno);
5579 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5580 r2 = gen_rtx_REG (m, gregno + 1);
5581 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5582 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5583 }
5584 else
b78d48dd 5585 return NULL_RTX;
a6c9bed4
AH
5586 }
5587 else
5588 {
f82f556d
AH
5589 if (gregno <= GP_ARG_MAX_REG)
5590 return gen_rtx_REG (mode, gregno);
a6c9bed4 5591 else
b78d48dd 5592 return NULL_RTX;
a6c9bed4
AH
5593 }
5594}
5595
0b5383eb
DJ
5596/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5597 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5598
0b5383eb 5599static void
bb8df8a6 5600rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5601 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5602{
0b5383eb
DJ
5603 enum machine_mode mode;
5604 unsigned int regno;
5605 unsigned int startbit, endbit;
5606 int this_regno, intregs, intoffset;
5607 rtx reg;
594a51fe 5608
0b5383eb
DJ
5609 if (cum->intoffset == -1)
5610 return;
5611
5612 intoffset = cum->intoffset;
5613 cum->intoffset = -1;
5614
5615 /* If this is the trailing part of a word, try to only load that
5616 much into the register. Otherwise load the whole register. Note
5617 that in the latter case we may pick up unwanted bits. It's not a
 5618 problem at the moment, but we may wish to revisit this. */
5619
5620 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5621 {
0b5383eb
DJ
5622 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5623 MODE_INT, 0);
5624 if (mode == BLKmode)
5625 {
5626 /* We couldn't find an appropriate mode, which happens,
5627 e.g., in packed structs when there are 3 bytes to load.
5628 Back intoffset back to the beginning of the word in this
5629 case. */
5630 intoffset = intoffset & -BITS_PER_WORD;
5631 mode = word_mode;
5632 }
5633 }
5634 else
5635 mode = word_mode;
5636
5637 startbit = intoffset & -BITS_PER_WORD;
5638 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5639 intregs = (endbit - startbit) / BITS_PER_WORD;
5640 this_regno = cum->words + intoffset / BITS_PER_WORD;
5641
5642 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5643 cum->use_stack = 1;
bb8df8a6 5644
0b5383eb
DJ
5645 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5646 if (intregs <= 0)
5647 return;
5648
5649 intoffset /= BITS_PER_UNIT;
5650 do
5651 {
5652 regno = GP_ARG_MIN_REG + this_regno;
5653 reg = gen_rtx_REG (mode, regno);
5654 rvec[(*k)++] =
5655 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5656
5657 this_regno += 1;
5658 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5659 mode = word_mode;
5660 intregs -= 1;
5661 }
5662 while (intregs > 0);
5663}
5664
5665/* Recursive workhorse for the following. */
5666
5667static void
586de218 5668rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5669 HOST_WIDE_INT startbitpos, rtx rvec[],
5670 int *k)
5671{
5672 tree f;
5673
5674 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5675 if (TREE_CODE (f) == FIELD_DECL)
5676 {
5677 HOST_WIDE_INT bitpos = startbitpos;
5678 tree ftype = TREE_TYPE (f);
70fb00df
AP
5679 enum machine_mode mode;
5680 if (ftype == error_mark_node)
5681 continue;
5682 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5683
5684 if (DECL_SIZE (f) != 0
5685 && host_integerp (bit_position (f), 1))
5686 bitpos += int_bit_position (f);
5687
5688 /* ??? FIXME: else assume zero offset. */
5689
5690 if (TREE_CODE (ftype) == RECORD_TYPE)
5691 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5692 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5693 {
0b5383eb
DJ
5694#if 0
5695 switch (mode)
594a51fe 5696 {
0b5383eb
DJ
5697 case SCmode: mode = SFmode; break;
5698 case DCmode: mode = DFmode; break;
5699 case TCmode: mode = TFmode; break;
5700 default: break;
594a51fe 5701 }
0b5383eb
DJ
5702#endif
5703 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5704 rvec[(*k)++]
bb8df8a6 5705 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5706 gen_rtx_REG (mode, cum->fregno++),
5707 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5708 if (mode == TFmode || mode == TDmode)
0b5383eb 5709 cum->fregno++;
594a51fe 5710 }
0b5383eb
DJ
5711 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5712 {
5713 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5714 rvec[(*k)++]
bb8df8a6
EC
5715 = gen_rtx_EXPR_LIST (VOIDmode,
5716 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5717 GEN_INT (bitpos / BITS_PER_UNIT));
5718 }
5719 else if (cum->intoffset == -1)
5720 cum->intoffset = bitpos;
5721 }
5722}
594a51fe 5723
0b5383eb
DJ
5724/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5725 the register(s) to be used for each field and subfield of a struct
5726 being passed by value, along with the offset of where the
5727 register's value may be found in the block. FP fields go in FP
 5728 registers, vector fields go in vector registers, and everything
bb8df8a6 5729 else goes in int registers, packed as in memory.
8ff40a74 5730
0b5383eb
DJ
5731 This code is also used for function return values. RETVAL indicates
5732 whether this is the case.
8ff40a74 5733
a4d05547 5734 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5735 calling convention. */
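/* For example, for a struct whose first field is a double and whose second
   is an int, the PARALLEL built here carries the double in an FP register
   at byte offset 0 and the int, flushed as a word-mode chunk, in a GPR at
   byte offset 8.  */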
594a51fe 5736
0b5383eb 5737static rtx
586de218 5738rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5739 int named, bool retval)
5740{
5741 rtx rvec[FIRST_PSEUDO_REGISTER];
5742 int k = 1, kbase = 1;
5743 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5744 /* This is a copy; modifications are not visible to our caller. */
5745 CUMULATIVE_ARGS copy_cum = *orig_cum;
5746 CUMULATIVE_ARGS *cum = &copy_cum;
5747
5748 /* Pad to 16 byte boundary if needed. */
5749 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5750 && (cum->words % 2) != 0)
5751 cum->words++;
5752
5753 cum->intoffset = 0;
5754 cum->use_stack = 0;
5755 cum->named = named;
5756
5757 /* Put entries into rvec[] for individual FP and vector fields, and
5758 for the chunks of memory that go in int regs. Note we start at
5759 element 1; 0 is reserved for an indication of using memory, and
5760 may or may not be filled in below. */
5761 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5762 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5763
5764 /* If any part of the struct went on the stack put all of it there.
5765 This hack is because the generic code for
5766 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5767 parts of the struct are not at the beginning. */
5768 if (cum->use_stack)
5769 {
5770 if (retval)
5771 return NULL_RTX; /* doesn't go in registers at all */
5772 kbase = 0;
5773 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5774 }
5775 if (k > 1 || cum->use_stack)
5776 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5777 else
5778 return NULL_RTX;
5779}
5780
b78d48dd
FJ
5781/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
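/* For example, a DImode argument that lands in the GPRs comes back as a
   PARALLEL of two SImode registers at byte offsets 0 and 4; if it would
   straddle the last GPR, a leading NULL_RTX element marks the part that
   lives in memory.  */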
5782
5783static rtx
ec6376ab 5784rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5785{
ec6376ab
AM
5786 int n_units;
5787 int i, k;
5788 rtx rvec[GP_ARG_NUM_REG + 1];
5789
5790 if (align_words >= GP_ARG_NUM_REG)
5791 return NULL_RTX;
5792
5793 n_units = rs6000_arg_size (mode, type);
5794
5795 /* Optimize the simple case where the arg fits in one gpr, except in
5796 the case of BLKmode due to assign_parms assuming that registers are
5797 BITS_PER_WORD wide. */
5798 if (n_units == 0
5799 || (n_units == 1 && mode != BLKmode))
5800 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5801
5802 k = 0;
5803 if (align_words + n_units > GP_ARG_NUM_REG)
5804 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5805 using a magic NULL_RTX component.
79773478
AM
5806 This is not strictly correct. Only some of the arg belongs in
5807 memory, not all of it. However, the normal scheme using
5808 function_arg_partial_nregs can result in unusual subregs, eg.
5809 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5810 store the whole arg to memory is often more efficient than code
5811 to store pieces, and we know that space is available in the right
5812 place for the whole arg. */
ec6376ab
AM
5813 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5814
5815 i = 0;
5816 do
36a454e1 5817 {
ec6376ab
AM
5818 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5819 rtx off = GEN_INT (i++ * 4);
5820 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5821 }
ec6376ab
AM
5822 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5823
5824 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5825}
5826
4697a36c
MM
5827/* Determine where to put an argument to a function.
5828 Value is zero to push the argument on the stack,
5829 or a hard register in which to store the argument.
5830
5831 MODE is the argument's machine mode.
5832 TYPE is the data type of the argument (as a tree).
5833 This is null for libcalls where that information may
5834 not be available.
5835 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5836 the preceding args and about the function being called. It is
5837 not modified in this routine.
4697a36c
MM
5838 NAMED is nonzero if this argument is a named parameter
5839 (otherwise it is an extra parameter matching an ellipsis).
5840
5841 On RS/6000 the first eight words of non-FP are normally in registers
5842 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5843 Under V.4, the first 8 FP args are in registers.
5844
5845 If this is floating-point and no prototype is specified, we use
5846 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5847 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5848 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5849 doesn't support PARALLEL anyway.
5850
5851 Note that for args passed by reference, function_arg will be called
5852 with MODE and TYPE set to that of the pointer to the arg, not the arg
5853 itself. */
4697a36c 5854
9390387d 5855rtx
f676971a 5856function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5857 tree type, int named)
4697a36c 5858{
4cc833b7 5859 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5860
a4f6c312
SS
 5861 /* Return a marker indicating whether the CR1 bit that V.4 uses to say
 5862 FP args were passed in registers needs to be set or cleared.
5863 Assume that we don't need the marker for software floating point,
5864 or compiler generated library calls. */
4697a36c
MM
5865 if (mode == VOIDmode)
5866 {
f607bc57 5867 if (abi == ABI_V4
b9599e46 5868 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5869 && (cum->stdarg
5870 || (cum->nargs_prototype < 0
5871 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5872 {
a3170dc6
AH
5873 /* For the SPE, we need to crxor CR6 always. */
5874 if (TARGET_SPE_ABI)
5875 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5876 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5877 return GEN_INT (cum->call_cookie
5878 | ((cum->fregno == FP_ARG_MIN_REG)
5879 ? CALL_V4_SET_FP_ARGS
5880 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5881 }
4697a36c 5882
7509c759 5883 return GEN_INT (cum->call_cookie);
4697a36c
MM
5884 }
5885
0b5383eb
DJ
5886 if (rs6000_darwin64_abi && mode == BLKmode
5887 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5888 {
0b5383eb 5889 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5890 if (rslt != NULL_RTX)
5891 return rslt;
5892 /* Else fall through to usual handling. */
5893 }
5894
2858f73a 5895 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5896 if (TARGET_64BIT && ! cum->prototype)
5897 {
c4ad648e
AM
5898 /* Vector parameters get passed in vector register
5899 and also in GPRs or memory, in absence of prototype. */
5900 int align_words;
5901 rtx slot;
5902 align_words = (cum->words + 1) & ~1;
5903
5904 if (align_words >= GP_ARG_NUM_REG)
5905 {
5906 slot = NULL_RTX;
5907 }
5908 else
5909 {
5910 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5911 }
5912 return gen_rtx_PARALLEL (mode,
5913 gen_rtvec (2,
5914 gen_rtx_EXPR_LIST (VOIDmode,
5915 slot, const0_rtx),
5916 gen_rtx_EXPR_LIST (VOIDmode,
5917 gen_rtx_REG (mode, cum->vregno),
5918 const0_rtx)));
c72d6c26
HP
5919 }
5920 else
5921 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5922 else if (TARGET_ALTIVEC_ABI
5923 && (ALTIVEC_VECTOR_MODE (mode)
5924 || (type && TREE_CODE (type) == VECTOR_TYPE
5925 && int_size_in_bytes (type) == 16)))
0ac081f6 5926 {
2858f73a 5927 if (named || abi == ABI_V4)
a594a19c 5928 return NULL_RTX;
0ac081f6 5929 else
a594a19c
GK
5930 {
5931 /* Vector parameters to varargs functions under AIX or Darwin
5932 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5933 int align, align_words, n_words;
5934 enum machine_mode part_mode;
a594a19c
GK
5935
5936 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5937 2 mod 4 in terms of words in 32-bit mode, since the parameter
5938 save area starts at offset 24 from the stack. In 64-bit mode,
5939 they just have to start on an even word, since the parameter
5940 save area is 16-byte aligned. */
5941 if (TARGET_32BIT)
4ed78545 5942 align = (2 - cum->words) & 3;
2858f73a
GK
5943 else
5944 align = cum->words & 1;
a594a19c
GK
5945 align_words = cum->words + align;
5946
5947 /* Out of registers? Memory, then. */
5948 if (align_words >= GP_ARG_NUM_REG)
5949 return NULL_RTX;
ec6376ab
AM
5950
5951 if (TARGET_32BIT && TARGET_POWERPC64)
5952 return rs6000_mixed_function_arg (mode, type, align_words);
5953
2858f73a
GK
5954 /* The vector value goes in GPRs. Only the part of the
5955 value in GPRs is reported here. */
ec6376ab
AM
5956 part_mode = mode;
5957 n_words = rs6000_arg_size (mode, type);
5958 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5959 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
5960 is either wholly in GPRs or half in GPRs and half not. */
5961 part_mode = DImode;
ec6376ab
AM
5962
5963 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5964 }
0ac081f6 5965 }
f82f556d
AH
5966 else if (TARGET_SPE_ABI && TARGET_SPE
5967 && (SPE_VECTOR_MODE (mode)
18f63bfa 5968 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5969 || mode == DDmode
17caeff2
JM
5970 || mode == DCmode
5971 || mode == TFmode
7393f7f8 5972 || mode == TDmode
17caeff2 5973 || mode == TCmode))))
a6c9bed4 5974 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5975
f607bc57 5976 else if (abi == ABI_V4)
4697a36c 5977 {
a3170dc6 5978 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5979 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5980 || (mode == TFmode && !TARGET_IEEEQUAD)
5981 || mode == DDmode || mode == TDmode))
4cc833b7 5982 {
2d83f070
JJ
5983 /* _Decimal128 must use an even/odd register pair. This assumes
5984 that the register number is odd when fregno is odd. */
5985 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5986 cum->fregno++;
5987
5988 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5989 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5990 return gen_rtx_REG (mode, cum->fregno);
5991 else
b78d48dd 5992 return NULL_RTX;
4cc833b7
RH
5993 }
5994 else
5995 {
b2d04ecf 5996 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5997 int gregno = cum->sysv_gregno;
5998
4ed78545
AM
5999 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6000 (r7,r8) or (r9,r10). As does any other 2 word item such
6001 as complex int due to a historical mistake. */
6002 if (n_words == 2)
6003 gregno += (1 - gregno) & 1;
4cc833b7 6004
4ed78545 6005 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6006 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6007 return NULL_RTX;
ec6376ab
AM
6008
6009 if (TARGET_32BIT && TARGET_POWERPC64)
6010 return rs6000_mixed_function_arg (mode, type,
6011 gregno - GP_ARG_MIN_REG);
6012 return gen_rtx_REG (mode, gregno);
4cc833b7 6013 }
4697a36c 6014 }
4cc833b7
RH
6015 else
6016 {
294bd182 6017 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6018
2d83f070
JJ
6019 /* _Decimal128 must be passed in an even/odd float register pair.
6020 This assumes that the register number is odd when fregno is odd. */
6021 if (mode == TDmode && (cum->fregno % 2) == 1)
6022 cum->fregno++;
6023
2858f73a 6024 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6025 {
ec6376ab
AM
6026 rtx rvec[GP_ARG_NUM_REG + 1];
6027 rtx r;
6028 int k;
c53bdcf5
AM
6029 bool needs_psave;
6030 enum machine_mode fmode = mode;
c53bdcf5
AM
6031 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6032
6033 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6034 {
c53bdcf5
AM
6035 /* Currently, we only ever need one reg here because complex
6036 doubles are split. */
7393f7f8
BE
6037 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6038 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6039
7393f7f8
BE
6040 /* Long double or _Decimal128 split over regs and memory. */
6041 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6042 }
c53bdcf5
AM
6043
6044 /* Do we also need to pass this arg in the parameter save
6045 area? */
6046 needs_psave = (type
6047 && (cum->nargs_prototype <= 0
6048 || (DEFAULT_ABI == ABI_AIX
de17c25f 6049 && TARGET_XL_COMPAT
c53bdcf5
AM
6050 && align_words >= GP_ARG_NUM_REG)));
6051
6052 if (!needs_psave && mode == fmode)
ec6376ab 6053 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6054
ec6376ab 6055 k = 0;
c53bdcf5
AM
6056 if (needs_psave)
6057 {
ec6376ab 6058 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6059 This piece must come first, before the fprs. */
c53bdcf5
AM
6060 if (align_words < GP_ARG_NUM_REG)
6061 {
6062 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6063
6064 if (align_words + n_words > GP_ARG_NUM_REG
6065 || (TARGET_32BIT && TARGET_POWERPC64))
6066 {
6067 /* If this is partially on the stack, then we only
6068 include the portion actually in registers here. */
6069 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6070 rtx off;
79773478
AM
6071 int i = 0;
6072 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6073 /* Not all of the arg fits in gprs. Say that it
6074 goes in memory too, using a magic NULL_RTX
6075 component. Also see comment in
6076 rs6000_mixed_function_arg for why the normal
6077 function_arg_partial_nregs scheme doesn't work
6078 in this case. */
6079 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6080 const0_rtx);
ec6376ab
AM
6081 do
6082 {
6083 r = gen_rtx_REG (rmode,
6084 GP_ARG_MIN_REG + align_words);
2e6c9641 6085 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6086 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6087 }
6088 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6089 }
6090 else
6091 {
6092 /* The whole arg fits in gprs. */
6093 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6094 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6095 }
c53bdcf5 6096 }
ec6376ab
AM
6097 else
6098 /* It's entirely in memory. */
6099 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6100 }
6101
ec6376ab
AM
6102 /* Describe where this piece goes in the fprs. */
6103 r = gen_rtx_REG (fmode, cum->fregno);
6104 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6105
6106 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6107 }
6108 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6109 {
ec6376ab
AM
6110 if (TARGET_32BIT && TARGET_POWERPC64)
6111 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6112
4eeca74f
AM
6113 if (mode == BLKmode)
6114 mode = Pmode;
6115
b2d04ecf
AM
6116 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6117 }
4cc833b7
RH
6118 else
6119 return NULL_RTX;
4697a36c 6120 }
4697a36c
MM
6121}
6122\f
ec6376ab 6123/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6124 the number of bytes passed in registers. For args passed entirely in
6125 registers or entirely in memory, zero. When an arg is described by a
6126 PARALLEL, perhaps using more than one register type, this function
6127 returns the number of bytes used by the first element of the PARALLEL. */
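/* For example, a three-word argument that starts when only two GPR words
   remain reports 2 * (TARGET_32BIT ? 4 : 8) bytes as passed in registers;
   an argument passed entirely in FPRs or entirely in memory reports 0.  */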
4697a36c 6128
78a52f11
RH
6129static int
6130rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6131 tree type, bool named)
4697a36c 6132{
c53bdcf5 6133 int ret = 0;
ec6376ab 6134 int align_words;
c53bdcf5 6135
f607bc57 6136 if (DEFAULT_ABI == ABI_V4)
4697a36c 6137 return 0;
4697a36c 6138
c53bdcf5
AM
6139 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6140 && cum->nargs_prototype >= 0)
6141 return 0;
6142
0b5383eb
DJ
6143 /* In this complicated case we just disable the partial_nregs code. */
6144 if (rs6000_darwin64_abi && mode == BLKmode
6145 && TREE_CODE (type) == RECORD_TYPE
6146 && int_size_in_bytes (type) > 0)
6147 return 0;
6148
294bd182 6149 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6150
79773478
AM
6151 if (USE_FP_FOR_ARG_P (cum, mode, type))
6152 {
fb63c729
AM
6153 /* If we are passing this arg in the fixed parameter save area
6154 (gprs or memory) as well as fprs, then this function should
79773478
AM
6155 return the number of partial bytes passed in the parameter
6156 save area rather than partial bytes passed in fprs. */
6157 if (type
6158 && (cum->nargs_prototype <= 0
6159 || (DEFAULT_ABI == ABI_AIX
6160 && TARGET_XL_COMPAT
6161 && align_words >= GP_ARG_NUM_REG)))
6162 return 0;
6163 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6164 > FP_ARG_MAX_REG + 1)
ac7e839c 6165 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6166 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6167 return 0;
6168 }
6169
ec6376ab
AM
6170 if (align_words < GP_ARG_NUM_REG
6171 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6172 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6173
c53bdcf5 6174 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6175 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6176
c53bdcf5 6177 return ret;
4697a36c
MM
6178}
6179\f
6180/* A C expression that indicates when an argument must be passed by
6181 reference. If nonzero for an argument, a copy of that argument is
6182 made in memory and a pointer to the argument is passed instead of
6183 the argument itself. The pointer is passed in whatever way is
6184 appropriate for passing a pointer to that type.
6185
b2d04ecf
AM
6186 Under V.4, aggregates and long double are passed by reference.
6187
6188 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6189 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6190
6191 As an extension to all ABIs, variable sized types are passed by
6192 reference. */
4697a36c 6193
8cd5a4e0 6194static bool
f676971a 6195rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6196 enum machine_mode mode, const_tree type,
bada2eb8 6197 bool named ATTRIBUTE_UNUSED)
4697a36c 6198{
602ea4d3 6199 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6200 {
6201 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6202 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6203 return 1;
6204 }
6205
6206 if (!type)
6207 return 0;
4697a36c 6208
bada2eb8
DE
6209 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6210 {
6211 if (TARGET_DEBUG_ARG)
6212 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6213 return 1;
6214 }
6215
6216 if (int_size_in_bytes (type) < 0)
6217 {
6218 if (TARGET_DEBUG_ARG)
6219 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6220 return 1;
6221 }
6222
6223 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6224 modes only exist for GCC vector types if -maltivec. */
6225 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6226 {
6227 if (TARGET_DEBUG_ARG)
6228 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6229 return 1;
6230 }
b693336b
PB
6231
6232 /* Pass synthetic vectors in memory. */
bada2eb8 6233 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6234 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6235 {
6236 static bool warned_for_pass_big_vectors = false;
6237 if (TARGET_DEBUG_ARG)
6238 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6239 if (!warned_for_pass_big_vectors)
6240 {
d4ee4d25 6241 warning (0, "GCC vector passed by reference: "
b693336b
PB
6242 "non-standard ABI extension with no compatibility guarantee");
6243 warned_for_pass_big_vectors = true;
6244 }
6245 return 1;
6246 }
6247
b2d04ecf 6248 return 0;
4697a36c 6249}
5985c7a6
FJ
6250
6251static void
2d9db8eb 6252rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6253{
6254 int i;
6255 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6256
6257 if (nregs == 0)
6258 return;
6259
c4ad648e 6260 for (i = 0; i < nregs; i++)
5985c7a6 6261 {
9390387d 6262 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6263 if (reload_completed)
c4ad648e
AM
6264 {
6265 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6266 tem = NULL_RTX;
6267 else
6268 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6269 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6270 }
5985c7a6
FJ
6271 else
6272 tem = replace_equiv_address (tem, XEXP (tem, 0));
6273
37409796 6274 gcc_assert (tem);
5985c7a6
FJ
6275
6276 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6277 }
6278}
4697a36c
MM
6279\f
 6280/* Perform any actions needed for a function that is receiving a
f676971a 6281 variable number of arguments.
4697a36c
MM
6282
6283 CUM is as above.
6284
6285 MODE and TYPE are the mode and type of the current parameter.
6286
6287 PRETEND_SIZE is a variable that should be set to the amount of stack
6288 that must be pushed by the prolog to pretend that our caller pushed
6289 it.
6290
6291 Normally, this macro will push all remaining incoming registers on the
6292 stack and set PRETEND_SIZE to the length of the registers pushed. */
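/* For example, for a V.4 stdarg function with two named integer arguments
   (passed in r3 and r4), first_reg_offset below ends up as 2, so the
   remaining argument GPRs starting at r5 are the ones dumped into the
   register save area.  */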
6293
c6e8c921 6294static void
f676971a 6295setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6296 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6297 int no_rtl)
4697a36c 6298{
4cc833b7
RH
6299 CUMULATIVE_ARGS next_cum;
6300 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6301 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6302 int first_reg_offset;
6303 alias_set_type set;
4697a36c 6304
f31bf321 6305 /* Skip the last named argument. */
d34c5b80 6306 next_cum = *cum;
594a51fe 6307 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6308
f607bc57 6309 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6310 {
5b667039
JJ
6311 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6312
60e2d0ca 6313 if (! no_rtl)
5b667039
JJ
6314 {
6315 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6316 HOST_WIDE_INT offset = 0;
6317
6318 /* Try to optimize the size of the varargs save area.
6319 The ABI requires that ap.reg_save_area is doubleword
6320 aligned, but we don't need to allocate space for all
 6321            the bytes, only those to which we will actually save
6322 anything. */
6323 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6324 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6325 if (TARGET_HARD_FLOAT && TARGET_FPRS
6326 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6327 && cfun->va_list_fpr_size)
6328 {
6329 if (gpr_reg_num)
6330 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6331 * UNITS_PER_FP_WORD;
6332 if (cfun->va_list_fpr_size
6333 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6334 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6335 else
6336 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6337 * UNITS_PER_FP_WORD;
6338 }
6339 if (gpr_reg_num)
6340 {
6341 offset = -((first_reg_offset * reg_size) & ~7);
6342 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6343 {
6344 gpr_reg_num = cfun->va_list_gpr_size;
6345 if (reg_size == 4 && (first_reg_offset & 1))
6346 gpr_reg_num++;
6347 }
6348 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6349 }
6350 else if (fpr_size)
6351 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6352 * UNITS_PER_FP_WORD
6353 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6354
5b667039
JJ
6355 if (gpr_size + fpr_size)
6356 {
6357 rtx reg_save_area
6358 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6359 gcc_assert (GET_CODE (reg_save_area) == MEM);
6360 reg_save_area = XEXP (reg_save_area, 0);
6361 if (GET_CODE (reg_save_area) == PLUS)
6362 {
6363 gcc_assert (XEXP (reg_save_area, 0)
6364 == virtual_stack_vars_rtx);
6365 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6366 offset += INTVAL (XEXP (reg_save_area, 1));
6367 }
6368 else
6369 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6370 }
6371
6372 cfun->machine->varargs_save_offset = offset;
6373 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6374 }
4697a36c 6375 }
60e2d0ca 6376 else
4697a36c 6377 {
d34c5b80 6378 first_reg_offset = next_cum.words;
4cc833b7 6379 save_area = virtual_incoming_args_rtx;
4697a36c 6380
fe984136 6381 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6382 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6383 }
4697a36c 6384
dfafc897 6385 set = get_varargs_alias_set ();
9d30f3c1
JJ
6386 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6387 && cfun->va_list_gpr_size)
4cc833b7 6388 {
9d30f3c1
JJ
6389 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6390
6391 if (va_list_gpr_counter_field)
6392 {
6393 /* V4 va_list_gpr_size counts number of registers needed. */
6394 if (nregs > cfun->va_list_gpr_size)
6395 nregs = cfun->va_list_gpr_size;
6396 }
6397 else
6398 {
6399 /* char * va_list instead counts number of bytes needed. */
6400 if (nregs > cfun->va_list_gpr_size / reg_size)
6401 nregs = cfun->va_list_gpr_size / reg_size;
6402 }
6403
dfafc897 6404 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6405 plus_constant (save_area,
13e2e16e
DE
6406 first_reg_offset * reg_size));
6407 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6408 set_mem_alias_set (mem, set);
8ac61af7 6409 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6410
f676971a 6411 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6412 nregs);
4697a36c
MM
6413 }
6414
4697a36c 6415 /* Save FP registers if needed. */
f607bc57 6416 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6417 && TARGET_HARD_FLOAT && TARGET_FPRS
6418 && ! no_rtl
9d30f3c1
JJ
6419 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6420 && cfun->va_list_fpr_size)
4697a36c 6421 {
9d30f3c1 6422 int fregno = next_cum.fregno, nregs;
9ebbca7d 6423 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6424 rtx lab = gen_label_rtx ();
5b667039
JJ
6425 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6426 * UNITS_PER_FP_WORD);
4697a36c 6427
c4ad648e
AM
6428 emit_jump_insn
6429 (gen_rtx_SET (VOIDmode,
6430 pc_rtx,
6431 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6432 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6433 const0_rtx),
39403d82 6434 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6435 pc_rtx)));
6436
9d30f3c1
JJ
6437 for (nregs = 0;
6438 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6439 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6440 {
5496b36f 6441 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6442 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6443 set_mem_alias_set (mem, set);
94ff898d 6444 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6445 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6446 }
4cc833b7
RH
6447
6448 emit_label (lab);
4697a36c 6449 }
4697a36c 6450}
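
/* Worked example of the offsets above (illustrative, using the usual
   32-bit SVR4 values: reg_size == 4, GP_ARG_NUM_REG == 8,
   UNITS_PER_FP_WORD == 8, and f1 as FP_ARG_MIN_REG).  If the named
   arguments consumed r3-r5 and f1, then first_reg_offset == 3, so the
   unnamed GPRs r6-r10 are dumped at save_area + 12 .. save_area + 31,
   and the first unnamed FPR (f2) is stored at
   off = 8*4 + (2-1)*8 = 40 bytes into the save area.  These numbers only
   show how the formulas combine; the real constants come from rs6000.h.  */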
4697a36c 6451
dfafc897 6452/* Create the va_list data type. */
2c4974b7 6453
c35d187f
RH
6454static tree
6455rs6000_build_builtin_va_list (void)
dfafc897 6456{
64c2816f 6457 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6458
9ebbca7d
GK
6459 /* For AIX, prefer 'char *' because that's what the system
6460 header files like. */
f607bc57 6461 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6462 return build_pointer_type (char_type_node);
dfafc897 6463
f1e639b1 6464 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6465 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6466
f676971a 6467 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6468 unsigned_char_type_node);
f676971a 6469 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6470 unsigned_char_type_node);
64c2816f
DT
6471 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6472 every user file. */
6473 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6474 short_unsigned_type_node);
dfafc897
FS
6475 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6476 ptr_type_node);
6477 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6478 ptr_type_node);
6479
9d30f3c1
JJ
6480 va_list_gpr_counter_field = f_gpr;
6481 va_list_fpr_counter_field = f_fpr;
6482
dfafc897
FS
6483 DECL_FIELD_CONTEXT (f_gpr) = record;
6484 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6485 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6486 DECL_FIELD_CONTEXT (f_ovf) = record;
6487 DECL_FIELD_CONTEXT (f_sav) = record;
6488
bab45a51
FS
6489 TREE_CHAIN (record) = type_decl;
6490 TYPE_NAME (record) = type_decl;
dfafc897
FS
6491 TYPE_FIELDS (record) = f_gpr;
6492 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6493 TREE_CHAIN (f_fpr) = f_res;
6494 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6495 TREE_CHAIN (f_ovf) = f_sav;
6496
6497 layout_type (record);
6498
6499 /* The correct type is an array type of one element. */
6500 return build_array_type (record, build_index_type (size_zero_node));
6501}
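
/* For reference, the record built above matches the layout required by the
   32-bit SVR4 PowerPC ABI.  A sketch of the equivalent C declaration (the
   field names mirror the identifiers used above; this is only an
   illustration, not a declaration GCC emits):  */

typedef struct __va_list_tag_sketch
{
  unsigned char gpr;		/* number of GPR argument slots used (0..8) */
  unsigned char fpr;		/* number of FPR argument slots used (0..8) */
  unsigned short reserved;	/* the named padding discussed above */
  void *overflow_arg_area;	/* arguments that spilled to the stack */
  void *reg_save_area;		/* block built by setup_incoming_varargs */
} va_list_sketch[1];		/* array of one element, as returned above */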
6502
6503/* Implement va_start. */
6504
d7bd8aeb 6505static void
a2369ed3 6506rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6507{
dfafc897 6508 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6509 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6510 tree gpr, fpr, ovf, sav, t;
2c4974b7 6511
dfafc897 6512 /* Only SVR4 needs something special. */
f607bc57 6513 if (DEFAULT_ABI != ABI_V4)
dfafc897 6514 {
e5faf155 6515 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6516 return;
6517 }
6518
973a648b 6519 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6520 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6521 f_res = TREE_CHAIN (f_fpr);
6522 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6523 f_sav = TREE_CHAIN (f_ovf);
6524
872a65b5 6525 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6526 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6527 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6528 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6529 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6530
6531 /* Count number of gp and fp argument registers used. */
4cc833b7 6532 words = current_function_args_info.words;
987732e0
DE
6533 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6534 GP_ARG_NUM_REG);
6535 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6536 FP_ARG_NUM_REG);
dfafc897
FS
6537
6538 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6539 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6540 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6541 words, n_gpr, n_fpr);
dfafc897 6542
9d30f3c1
JJ
6543 if (cfun->va_list_gpr_size)
6544 {
07beea0d 6545 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6546 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6547 TREE_SIDE_EFFECTS (t) = 1;
6548 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6549 }
58c8adc1 6550
9d30f3c1
JJ
6551 if (cfun->va_list_fpr_size)
6552 {
07beea0d 6553 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6554 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6555 TREE_SIDE_EFFECTS (t) = 1;
6556 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6557 }
dfafc897
FS
6558
6559 /* Find the overflow area. */
6560 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6561 if (words != 0)
5be014d5
AP
6562 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6563 size_int (words * UNITS_PER_WORD));
07beea0d 6564 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6565 TREE_SIDE_EFFECTS (t) = 1;
6566 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6567
9d30f3c1
JJ
6568 /* If there were no va_arg invocations, don't set up the register
6569 save area. */
6570 if (!cfun->va_list_gpr_size
6571 && !cfun->va_list_fpr_size
6572 && n_gpr < GP_ARG_NUM_REG
6573 && n_fpr < FP_ARG_V4_MAX_REG)
6574 return;
6575
dfafc897
FS
6576 /* Find the register save area. */
6577 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6578 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6579 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6580 size_int (cfun->machine->varargs_save_offset));
07beea0d 6581 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6582 TREE_SIDE_EFFECTS (t) = 1;
6583 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6584}
6585
6586/* Implement va_arg. */
6587
23a60a04
JM
6588tree
6589rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6590{
cd3ce9b4
JM
6591 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6592 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6593 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6594 tree lab_false, lab_over, addr;
6595 int align;
6596 tree ptrtype = build_pointer_type (type);
7393f7f8 6597 int regalign = 0;
cd3ce9b4 6598
08b0dc1b
RH
6599 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6600 {
6601 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6602 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6603 }
6604
cd3ce9b4
JM
6605 if (DEFAULT_ABI != ABI_V4)
6606 {
08b0dc1b 6607 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6608 {
6609 tree elem_type = TREE_TYPE (type);
6610 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6611 int elem_size = GET_MODE_SIZE (elem_mode);
6612
6613 if (elem_size < UNITS_PER_WORD)
6614 {
23a60a04 6615 tree real_part, imag_part;
cd3ce9b4
JM
6616 tree post = NULL_TREE;
6617
23a60a04
JM
6618 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6619 &post);
6620 /* Copy the value into a temporary, lest the formal temporary
6621 be reused out from under us. */
6622 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6623 append_to_statement_list (post, pre_p);
6624
23a60a04
JM
6625 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6626 post_p);
cd3ce9b4 6627
47a25a46 6628 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6629 }
6630 }
6631
23a60a04 6632 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6633 }
6634
6635 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6636 f_fpr = TREE_CHAIN (f_gpr);
6637 f_res = TREE_CHAIN (f_fpr);
6638 f_ovf = TREE_CHAIN (f_res);
6639 f_sav = TREE_CHAIN (f_ovf);
6640
872a65b5 6641 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6642 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6643 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6644 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6645 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6646
6647 size = int_size_in_bytes (type);
6648 rsize = (size + 3) / 4;
6649 align = 1;
6650
08b0dc1b 6651 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6652 && (TYPE_MODE (type) == SFmode
6653 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6654 || TYPE_MODE (type) == TFmode
6655 || TYPE_MODE (type) == DDmode
6656 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6657 {
6658 /* FP args go in FP registers, if present. */
cd3ce9b4 6659 reg = fpr;
602ea4d3 6660 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6661 sav_ofs = 8*4;
6662 sav_scale = 8;
602ea4d3 6663 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6664 align = 8;
6665 }
6666 else
6667 {
6668 /* Otherwise into GP registers. */
cd3ce9b4
JM
6669 reg = gpr;
6670 n_reg = rsize;
6671 sav_ofs = 0;
6672 sav_scale = 4;
6673 if (n_reg == 2)
6674 align = 8;
6675 }
6676
6677 /* Pull the value out of the saved registers.... */
6678
6679 lab_over = NULL;
6680 addr = create_tmp_var (ptr_type_node, "addr");
6681 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6682
6683 /* AltiVec vectors never go in registers when -mabi=altivec. */
6684 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6685 align = 16;
6686 else
6687 {
6688 lab_false = create_artificial_label ();
6689 lab_over = create_artificial_label ();
6690
6691 /* Long long and SPE vectors are aligned in the registers.
 6692	 So is any other two-gpr item, such as complex int, due to a
 6693	 historical mistake.  */
6694 u = reg;
602ea4d3 6695 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6696 {
7393f7f8 6697 regalign = 1;
cd3ce9b4 6698 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6699 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6700 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6701 }
7393f7f8
BE
6702 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6703 reg number is 0 for f1, so we want to make it odd. */
6704 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6705 {
6706 regalign = 1;
6707 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6708 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6709 }
cd3ce9b4 6710
95674810 6711 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6712 t = build2 (GE_EXPR, boolean_type_node, u, t);
6713 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6714 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6715 gimplify_and_add (t, pre_p);
6716
6717 t = sav;
6718 if (sav_ofs)
5be014d5 6719 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6720
8fb632eb
ZD
6721 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6722 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6723 u = fold_convert (sizetype, u);
6724 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6725 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6726
07beea0d 6727 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6728 gimplify_and_add (t, pre_p);
6729
6730 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6731 gimplify_and_add (t, pre_p);
6732
6733 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6734 append_to_statement_list (t, pre_p);
6735
7393f7f8 6736 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6737 {
6738 /* Ensure that we don't find any more args in regs.
7393f7f8 6739	     Alignment has already taken care of the special cases.  */
07beea0d 6740 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6741 gimplify_and_add (t, pre_p);
6742 }
6743 }
6744
6745 /* ... otherwise out of the overflow area. */
6746
6747 /* Care for on-stack alignment if needed. */
6748 t = ovf;
6749 if (align != 1)
6750 {
5be014d5
AP
6751 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6752 t = fold_convert (sizetype, t);
4a90aeeb 6753 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6754 size_int (-align));
6755 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6756 }
6757 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6758
07beea0d 6759 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6760 gimplify_and_add (u, pre_p);
6761
5be014d5 6762 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6763 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6764 gimplify_and_add (t, pre_p);
6765
6766 if (lab_over)
6767 {
6768 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6769 append_to_statement_list (t, pre_p);
6770 }
6771
0cfbc62b
JM
6772 if (STRICT_ALIGNMENT
6773 && (TYPE_ALIGN (type)
6774 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6775 {
6776 /* The value (of type complex double, for example) may not be
6777 aligned in memory in the saved registers, so copy via a
6778 temporary. (This is the same code as used for SPARC.) */
6779 tree tmp = create_tmp_var (type, "va_arg_tmp");
6780 tree dest_addr = build_fold_addr_expr (tmp);
6781
5039610b
SL
6782 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6783 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6784
6785 gimplify_and_add (copy, pre_p);
6786 addr = dest_addr;
6787 }
6788
08b0dc1b 6789 addr = fold_convert (ptrtype, addr);
872a65b5 6790 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6791}
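
/* Illustrative helper (not GCC code): the overflow-area rounding built as
   GIMPLE above, restated in plain C.  For an argument needing 8-byte
   alignment with ovf == 0x14, the result is 0x18.  */

static unsigned long
round_overflow_area_model (unsigned long ovf, unsigned long align)
{
  return (ovf + align - 1) & -align;	/* same as the BIT_AND_EXPR above */
}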
6792
0ac081f6
AH
6793/* Builtins. */
6794
58646b77
PB
6795static void
6796def_builtin (int mask, const char *name, tree type, int code)
6797{
96038623 6798 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6799 {
6800 if (rs6000_builtin_decls[code])
6801 abort ();
6802
6803 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6804 add_builtin_function (name, type, code, BUILT_IN_MD,
6805 NULL, NULL_TREE);
58646b77
PB
6806 }
6807}
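
/* Sketch of how the tables below feed def_builtin (a simplified version of
   what rs6000_common_init_builtins does later in this file; the type node
   v4sf_ftype_v4sf_v4sf_v4sf is a stand-in chosen per entry in the real
   code, so this fragment is illustrative only and kept out of the build).  */
#if 0
  {
    const struct builtin_description *d;
    size_t i;

    for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
      def_builtin (d->mask, d->name, v4sf_ftype_v4sf_v4sf_v4sf, d->code);
  }
#endif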
0ac081f6 6808
24408032
AH
6809/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6810
2212663f 6811static const struct builtin_description bdesc_3arg[] =
24408032
AH
6812{
6813 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6814 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6815 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6816 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6817 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6818 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6819 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6820 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6821 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6822 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6823 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6824 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6825 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6826 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6827 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6828 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6829 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6830 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6831 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6832 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6833 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6834 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6835 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6836
6837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6852
6853 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6854 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6855 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6856 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6857 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6858 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6859 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6860 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6861 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6862};
2212663f 6863
95385cbb
AH
6864/* DST operations: void foo (void *, const int, const char). */
6865
6866static const struct builtin_description bdesc_dst[] =
6867{
6868 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6869 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6870 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6871 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6872
6873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6877};
6878
2212663f 6879/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6880
a3170dc6 6881static struct builtin_description bdesc_2arg[] =
0ac081f6 6882{
f18c054f
DB
6883 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6884 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6885 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6886 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6887 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6888 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6889 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6890 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6891 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6892 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6893 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6894 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6895 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6896 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6897 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6898 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6899 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6900 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6901 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6902 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6903 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6904 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6905 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6906 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6907 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6908 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6909 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6910 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6911 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6912 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6913 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6914 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6915 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6916 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6917 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6918 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6919 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6920 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6921 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6922 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6923 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6924 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6925 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6926 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6927 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6928 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6929 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6930 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6931 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6932 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6933 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6934 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6935 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6936 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6937 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6938 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6939 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6940 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6941 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6942 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6943 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6944 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6945 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6946 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6947 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6948 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6949 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6950 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6951 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6952 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6953 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6954 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6955 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6957 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6958 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6959 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6968 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6969 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6970 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6971 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6972 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6973 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6974 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6975 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6976 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6977 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6978 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6979 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6980 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6981 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6982 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6983 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6984 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6985 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6986 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6987 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6988 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6989 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6990 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6991 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6992 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6993 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6994
58646b77
PB
6995 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6996 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6997 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6998 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6999 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7000 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7001 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7002 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7003 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7004 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7005 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7006 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7007 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7009 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7010 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7011 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7012 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7013 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7014 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7015 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7016 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7017 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7018 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7019 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7020 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7021 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7022 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7023 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7024 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7025 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7026 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7027 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7028 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7029 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7030 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7031 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7032 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7033 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7034 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7051 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7052 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7053 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7054 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7055 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7056 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7057 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7058 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7059 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7060 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7061 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7062 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7063 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7064 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7065 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7066 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7067 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7068 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7069 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7070 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7076 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7122
96038623
DE
7123 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7124 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7125 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7126 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7127 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7128 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7129 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7130 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7131 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7132 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7133
a3170dc6
AH
 7134 /* Placeholder; leave as the first SPE builtin.  */
7135 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7136 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7137 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7138 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7139 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7140 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7141 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7142 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7143 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7144 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7145 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7146 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7147 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7148 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7149 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7150 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7151 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7152 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7153 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7154 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7155 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7156 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7157 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7158 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7159 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7160 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7161 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7162 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7163 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7164 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7165 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7166 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7167 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7168 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7169 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7170 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7171 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7172 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7173 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7174 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7175 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7176 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7177 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7178 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7179 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7180 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7181 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7182 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7183 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7184 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7185 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7186 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7187 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7188 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7189 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7190 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7191 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7192 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7193 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7194 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7195 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7196 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7197 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7198 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7199 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7200 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7201 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7202 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7203 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7204 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7205 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7206 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7207 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7208 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7209 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7210 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7211 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7212 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7213 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7214 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7215 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7216 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7217 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7218 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7219 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7220 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7221 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7222 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7223 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7224 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7225 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7226 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7227 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7228 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7229 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7230 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7231 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7232 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7233 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7234 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7235 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7236 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7237 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7238 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7239 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7240 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7241 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7242 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7243 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7244
7245 /* SPE binary operations expecting a 5-bit unsigned literal. */
7246 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7247
7248 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7249 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7250 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7251 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7252 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7253 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7254 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7255 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7256 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7257 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7258 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7259 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7260 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7261 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7262 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7263 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7264 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7265 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7266 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7267 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7268 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7269 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7270 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7271 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7272 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7273 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7274
7275 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7276 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7277};
7278
7279/* AltiVec predicates. */
7280
7281struct builtin_description_predicates
7282{
7283 const unsigned int mask;
7284 const enum insn_code icode;
7285 const char *opcode;
7286 const char *const name;
7287 const enum rs6000_builtins code;
7288};
7289
7290static const struct builtin_description_predicates bdesc_altivec_preds[] =
7291{
7292 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7293 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7304 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7305
7306 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7307 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7308 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7309};
24408032 7310
a3170dc6
AH
7311/* SPE predicates. */
7312static struct builtin_description bdesc_spe_predicates[] =
7313{
7314 /* Place-holder. Leave as first. */
7315 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7316 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7317 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7318 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7319 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7320 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7321 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7322 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7323 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7324 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7325 /* Place-holder. Leave as last. */
7326 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7327};
7328
7329/* SPE evsel predicates. */
7330static struct builtin_description bdesc_spe_evsel[] =
7331{
7332 /* Place-holder. Leave as first. */
7333 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7334 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7335 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7336 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7337 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7338 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7339 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7340 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7341 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7342 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7343 /* Place-holder. Leave as last. */
7344 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7345};
7346
96038623
DE
7347/* PAIRED predicates. */
7348static const struct builtin_description bdesc_paired_preds[] =
7349{
7350 /* Place-holder. Leave as first. */
7351 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7352 /* Place-holder. Leave as last. */
7353 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7354};
7355
b6d08ca1 7356/* ABS* operations. */
100c4561
AH
7357
7358static const struct builtin_description bdesc_abs[] =
7359{
7360 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7361 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7362 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7363 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7364 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7365 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7366 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7367};
7368
617e0e1d
DB
7369/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7370 foo (VECa). */
24408032 7371
a3170dc6 7372static struct builtin_description bdesc_1arg[] =
2212663f 7373{
617e0e1d
DB
7374 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7375 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7377 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7378 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7379 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7380 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7381 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7382 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7383 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7384 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7385 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7386 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7387 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7388 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7389 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7390 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7391
58646b77
PB
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7411
a3170dc6
AH
7412 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7413 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7414 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7415 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7416 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7417 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7418 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7419 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7420 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7421 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7422 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7423 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7424 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7425 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7426 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7427 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7428 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7429 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7430 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7431 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7432 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7433 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7434 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7435 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7436 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7437 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7438 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7439 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7440 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7441 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7442
7443 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7444 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7445
7446 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7447 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7448 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7449 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7450 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7451};
7452
7453static rtx
5039610b 7454rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7455{
7456 rtx pat;
5039610b 7457 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7458 rtx op0 = expand_normal (arg0);
2212663f
DB
7459 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7460 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7461
0559cc77
DE
7462 if (icode == CODE_FOR_nothing)
7463 /* Builtin not supported on this processor. */
7464 return 0;
7465
20e26713
AH
7467 /* If we got invalid arguments, bail out before generating bad rtl. */
7467 if (arg0 == error_mark_node)
9a171fcd 7468 return const0_rtx;
20e26713 7469
0559cc77
DE
7470 if (icode == CODE_FOR_altivec_vspltisb
7471 || icode == CODE_FOR_altivec_vspltish
7472 || icode == CODE_FOR_altivec_vspltisw
7473 || icode == CODE_FOR_spe_evsplatfi
7474 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7475 {
7476 /* Only allow 5-bit *signed* literals. */
b44140e7 7477 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7478 || INTVAL (op0) > 15
7479 || INTVAL (op0) < -16)
b44140e7
AH
7480 {
7481 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7482 return const0_rtx;
b44140e7 7483 }
b44140e7
AH
7484 }
7485
c62f2db5 7486 if (target == 0
2212663f
DB
7487 || GET_MODE (target) != tmode
7488 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7489 target = gen_reg_rtx (tmode);
7490
7491 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7492 op0 = copy_to_mode_reg (mode0, op0);
7493
7494 pat = GEN_FCN (icode) (target, op0);
7495 if (! pat)
7496 return 0;
7497 emit_insn (pat);
0ac081f6 7498
2212663f
DB
7499 return target;
7500}
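/* Hedged usage sketch for the 5-bit *signed* literal path above.  This is
   illustrative user-level code, not part of this file; it assumes
   <altivec.h>-style vector types and a compiler configured for AltiVec:

     vector signed int a = __builtin_altivec_vspltisw (-16);    in range
     vector signed int b = __builtin_altivec_vspltisw (15);     in range
     vector signed int c = __builtin_altivec_vspltisw (16);     rejected with
         "argument 1 must be a 5-bit signed literal"

   Only compile-time constants in [-16, 15] survive; anything else takes the
   error path and the expander returns const0_rtx.  */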
ae4b4a02 7501
100c4561 7502static rtx
5039610b 7503altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7504{
7505 rtx pat, scratch1, scratch2;
5039610b 7506 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7507 rtx op0 = expand_normal (arg0);
100c4561
AH
7508 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7509 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7510
7511 /* If we have invalid arguments, bail out before generating bad rtl. */
7512 if (arg0 == error_mark_node)
9a171fcd 7513 return const0_rtx;
100c4561
AH
7514
7515 if (target == 0
7516 || GET_MODE (target) != tmode
7517 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7518 target = gen_reg_rtx (tmode);
7519
7520 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7521 op0 = copy_to_mode_reg (mode0, op0);
7522
7523 scratch1 = gen_reg_rtx (mode0);
7524 scratch2 = gen_reg_rtx (mode0);
7525
7526 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7527 if (! pat)
7528 return 0;
7529 emit_insn (pat);
7530
7531 return target;
7532}
7533
0ac081f6 7534static rtx
5039610b 7535rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7536{
7537 rtx pat;
5039610b
SL
7538 tree arg0 = CALL_EXPR_ARG (exp, 0);
7539 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7540 rtx op0 = expand_normal (arg0);
7541 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7542 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7543 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7544 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7545
0559cc77
DE
7546 if (icode == CODE_FOR_nothing)
7547 /* Builtin not supported on this processor. */
7548 return 0;
7549
20e26713
AH
7550 /* If we got invalid arguments, bail out before generating bad rtl. */
7551 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7552 return const0_rtx;
20e26713 7553
0559cc77
DE
7554 if (icode == CODE_FOR_altivec_vcfux
7555 || icode == CODE_FOR_altivec_vcfsx
7556 || icode == CODE_FOR_altivec_vctsxs
7557 || icode == CODE_FOR_altivec_vctuxs
7558 || icode == CODE_FOR_altivec_vspltb
7559 || icode == CODE_FOR_altivec_vsplth
7560 || icode == CODE_FOR_altivec_vspltw
7561 || icode == CODE_FOR_spe_evaddiw
7562 || icode == CODE_FOR_spe_evldd
7563 || icode == CODE_FOR_spe_evldh
7564 || icode == CODE_FOR_spe_evldw
7565 || icode == CODE_FOR_spe_evlhhesplat
7566 || icode == CODE_FOR_spe_evlhhossplat
7567 || icode == CODE_FOR_spe_evlhhousplat
7568 || icode == CODE_FOR_spe_evlwhe
7569 || icode == CODE_FOR_spe_evlwhos
7570 || icode == CODE_FOR_spe_evlwhou
7571 || icode == CODE_FOR_spe_evlwhsplat
7572 || icode == CODE_FOR_spe_evlwwsplat
7573 || icode == CODE_FOR_spe_evrlwi
7574 || icode == CODE_FOR_spe_evslwi
7575 || icode == CODE_FOR_spe_evsrwis
f5119d10 7576 || icode == CODE_FOR_spe_evsubifw
0559cc77 7577 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7578 {
7579 /* Only allow 5-bit unsigned literals. */
8bb418a3 7580 STRIP_NOPS (arg1);
b44140e7
AH
7581 if (TREE_CODE (arg1) != INTEGER_CST
7582 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7583 {
7584 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7585 return const0_rtx;
b44140e7 7586 }
b44140e7
AH
7587 }
7588
c62f2db5 7589 if (target == 0
0ac081f6
AH
7590 || GET_MODE (target) != tmode
7591 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7592 target = gen_reg_rtx (tmode);
7593
7594 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7595 op0 = copy_to_mode_reg (mode0, op0);
7596 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7597 op1 = copy_to_mode_reg (mode1, op1);
7598
7599 pat = GEN_FCN (icode) (target, op0, op1);
7600 if (! pat)
7601 return 0;
7602 emit_insn (pat);
7603
7604 return target;
7605}
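/* Hedged usage sketch for the 5-bit *unsigned* literal check above
   (user-level code, not from this file; the vsplt* signature is assumed
   from the bdesc tables):

     vector signed int v, w, x;
     w = __builtin_altivec_vspltw (v, 3);     constant with no bits above
                                              the low five: passes the check
     x = __builtin_altivec_vspltw (v, 37);    rejected with
         "argument 2 must be a 5-bit unsigned literal"

   The same check guards the vcf*/vct* conversions and the SPE load and
   shift forms listed in the icode test above.  */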
6525c0e7 7606
ae4b4a02 7607static rtx
f676971a 7608altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7609 tree exp, rtx target)
ae4b4a02
AH
7610{
7611 rtx pat, scratch;
5039610b
SL
7612 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7613 tree arg0 = CALL_EXPR_ARG (exp, 1);
7614 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7615 rtx op0 = expand_normal (arg0);
7616 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7617 enum machine_mode tmode = SImode;
7618 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7619 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7620 int cr6_form_int;
7621
7622 if (TREE_CODE (cr6_form) != INTEGER_CST)
7623 {
7624 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7625 return const0_rtx;
ae4b4a02
AH
7626 }
7627 else
7628 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7629
37409796 7630 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7631
7632 /* If we have invalid arguments, bail out before generating bad rtl. */
7633 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7634 return const0_rtx;
ae4b4a02
AH
7635
7636 if (target == 0
7637 || GET_MODE (target) != tmode
7638 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7639 target = gen_reg_rtx (tmode);
7640
7641 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7642 op0 = copy_to_mode_reg (mode0, op0);
7643 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7644 op1 = copy_to_mode_reg (mode1, op1);
7645
7646 scratch = gen_reg_rtx (mode0);
7647
7648 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7649 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7650 if (! pat)
7651 return 0;
7652 emit_insn (pat);
7653
7654 /* The vec_any* and vec_all* predicates use the same opcodes for two
7655 different operations, but the bits in CR6 will be different
7656 depending on what information we want. So we have to play tricks
7657 with CR6 to get the right bits out.
7658
7659 If you think this is disgusting, look at the specs for the
7660 AltiVec predicates. */
7661
c4ad648e
AM
7662 switch (cr6_form_int)
7663 {
7664 case 0:
7665 emit_insn (gen_cr6_test_for_zero (target));
7666 break;
7667 case 1:
7668 emit_insn (gen_cr6_test_for_zero_reverse (target));
7669 break;
7670 case 2:
7671 emit_insn (gen_cr6_test_for_lt (target));
7672 break;
7673 case 3:
7674 emit_insn (gen_cr6_test_for_lt_reverse (target));
7675 break;
7676 default:
7677 error ("argument 1 of __builtin_altivec_predicate is out of range");
7678 break;
7679 }
ae4b4a02
AH
7680
7681 return target;
7682}
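/* Illustrative note on the cr6_form selector decoded above.  How the
   <altivec.h> predicate intrinsics map onto these selector values is an
   assumption about the front end, not something defined in this file,
   but roughly:

     vec_all_eq (a, b)  ->  __builtin_altivec_vcmpequw_p (2, a, b)
                            (case 2: gen_cr6_test_for_lt, "all elements true")
     vec_any_eq (a, b)  ->  __builtin_altivec_vcmpequw_p (1, a, b)
                            (case 1: gen_cr6_test_for_zero_reverse,
                             "not all elements false")

   so a single vcmpequw. compare serves both the all- and any- forms; only
   the CR6 bit read back afterwards differs.  */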
7683
96038623
DE
7684static rtx
7685paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7686{
7687 rtx pat, addr;
7688 tree arg0 = CALL_EXPR_ARG (exp, 0);
7689 tree arg1 = CALL_EXPR_ARG (exp, 1);
7690 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7691 enum machine_mode mode0 = Pmode;
7692 enum machine_mode mode1 = Pmode;
7693 rtx op0 = expand_normal (arg0);
7694 rtx op1 = expand_normal (arg1);
7695
7696 if (icode == CODE_FOR_nothing)
7697 /* Builtin not supported on this processor. */
7698 return 0;
7699
7700 /* If we got invalid arguments, bail out before generating bad rtl. */
7701 if (arg0 == error_mark_node || arg1 == error_mark_node)
7702 return const0_rtx;
7703
7704 if (target == 0
7705 || GET_MODE (target) != tmode
7706 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7707 target = gen_reg_rtx (tmode);
7708
7709 op1 = copy_to_mode_reg (mode1, op1);
7710
7711 if (op0 == const0_rtx)
7712 {
7713 addr = gen_rtx_MEM (tmode, op1);
7714 }
7715 else
7716 {
7717 op0 = copy_to_mode_reg (mode0, op0);
7718 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7719 }
7720
7721 pat = GEN_FCN (icode) (target, addr);
7722
7723 if (! pat)
7724 return 0;
7725 emit_insn (pat);
7726
7727 return target;
7728}
7729
b4a62fa0 7730static rtx
5039610b 7731altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7732{
7733 rtx pat, addr;
5039610b
SL
7734 tree arg0 = CALL_EXPR_ARG (exp, 0);
7735 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7736 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7737 enum machine_mode mode0 = Pmode;
7738 enum machine_mode mode1 = Pmode;
84217346
MD
7739 rtx op0 = expand_normal (arg0);
7740 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7741
7742 if (icode == CODE_FOR_nothing)
7743 /* Builtin not supported on this processor. */
7744 return 0;
7745
7747 /* If we got invalid arguments, bail out before generating bad rtl. */
7747 if (arg0 == error_mark_node || arg1 == error_mark_node)
7748 return const0_rtx;
7749
7750 if (target == 0
7751 || GET_MODE (target) != tmode
7752 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7753 target = gen_reg_rtx (tmode);
7754
f676971a 7755 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7756
7757 if (op0 == const0_rtx)
7758 {
7759 addr = gen_rtx_MEM (tmode, op1);
7760 }
7761 else
7762 {
7763 op0 = copy_to_mode_reg (mode0, op0);
7764 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7765 }
7766
7767 pat = GEN_FCN (icode) (target, addr);
7768
7769 if (! pat)
7770 return 0;
7771 emit_insn (pat);
7772
7773 return target;
7774}
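/* Sketch of the address selection above (illustrative; the vec_ld spelling
   is an <altivec.h> assumption, while the LVX dispatch itself appears in
   altivec_expand_builtin below).  The lv* builtins take an
   (offset, pointer) pair, and the MEM is built in one of two shapes:

     vec_ld (0, p)    ->  (mem:V4SI (reg Rp))                     literal zero
     vec_ld (off, p)  ->  (mem:V4SI (plus (reg Roff) (reg Rp)))   otherwise

   i.e. a literal zero offset is dropped, while any other offset operand is
   forced into a register and added to the base before the load pattern is
   emitted.  */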
7775
61bea3b0 7776static rtx
5039610b 7777spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7778{
5039610b
SL
7779 tree arg0 = CALL_EXPR_ARG (exp, 0);
7780 tree arg1 = CALL_EXPR_ARG (exp, 1);
7781 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7782 rtx op0 = expand_normal (arg0);
7783 rtx op1 = expand_normal (arg1);
7784 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7785 rtx pat;
7786 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7787 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7788 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7789
7790 /* Invalid arguments. Bail before doing anything stupid! */
7791 if (arg0 == error_mark_node
7792 || arg1 == error_mark_node
7793 || arg2 == error_mark_node)
7794 return const0_rtx;
7795
7796 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7797 op0 = copy_to_mode_reg (mode2, op0);
7798 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7799 op1 = copy_to_mode_reg (mode0, op1);
7800 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7801 op2 = copy_to_mode_reg (mode1, op2);
7802
7803 pat = GEN_FCN (icode) (op1, op2, op0);
7804 if (pat)
7805 emit_insn (pat);
7806 return NULL_RTX;
7807}
7808
96038623
DE
7809static rtx
7810paired_expand_stv_builtin (enum insn_code icode, tree exp)
7811{
7812 tree arg0 = CALL_EXPR_ARG (exp, 0);
7813 tree arg1 = CALL_EXPR_ARG (exp, 1);
7814 tree arg2 = CALL_EXPR_ARG (exp, 2);
7815 rtx op0 = expand_normal (arg0);
7816 rtx op1 = expand_normal (arg1);
7817 rtx op2 = expand_normal (arg2);
7818 rtx pat, addr;
7819 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7820 enum machine_mode mode1 = Pmode;
7821 enum machine_mode mode2 = Pmode;
7822
7823 /* Invalid arguments. Bail before doing anything stupid! */
7824 if (arg0 == error_mark_node
7825 || arg1 == error_mark_node
7826 || arg2 == error_mark_node)
7827 return const0_rtx;
7828
7829 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7830 op0 = copy_to_mode_reg (tmode, op0);
7831
7832 op2 = copy_to_mode_reg (mode2, op2);
7833
7834 if (op1 == const0_rtx)
7835 {
7836 addr = gen_rtx_MEM (tmode, op2);
7837 }
7838 else
7839 {
7840 op1 = copy_to_mode_reg (mode1, op1);
7841 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7842 }
7843
7844 pat = GEN_FCN (icode) (addr, op0);
7845 if (pat)
7846 emit_insn (pat);
7847 return NULL_RTX;
7848}
7849
6525c0e7 7850static rtx
5039610b 7851altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7852{
5039610b
SL
7853 tree arg0 = CALL_EXPR_ARG (exp, 0);
7854 tree arg1 = CALL_EXPR_ARG (exp, 1);
7855 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7856 rtx op0 = expand_normal (arg0);
7857 rtx op1 = expand_normal (arg1);
7858 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7859 rtx pat, addr;
7860 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7861 enum machine_mode mode1 = Pmode;
7862 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7863
7864 /* Invalid arguments. Bail before doing anything stupid! */
7865 if (arg0 == error_mark_node
7866 || arg1 == error_mark_node
7867 || arg2 == error_mark_node)
9a171fcd 7868 return const0_rtx;
6525c0e7 7869
b4a62fa0
SB
7870 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7871 op0 = copy_to_mode_reg (tmode, op0);
7872
f676971a 7873 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7874
7875 if (op1 == const0_rtx)
7876 {
7877 addr = gen_rtx_MEM (tmode, op2);
7878 }
7879 else
7880 {
7881 op1 = copy_to_mode_reg (mode1, op1);
7882 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7883 }
6525c0e7 7884
b4a62fa0 7885 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7886 if (pat)
7887 emit_insn (pat);
7888 return NULL_RTX;
7889}
7890
2212663f 7891static rtx
5039610b 7892rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7893{
7894 rtx pat;
5039610b
SL
7895 tree arg0 = CALL_EXPR_ARG (exp, 0);
7896 tree arg1 = CALL_EXPR_ARG (exp, 1);
7897 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7898 rtx op0 = expand_normal (arg0);
7899 rtx op1 = expand_normal (arg1);
7900 rtx op2 = expand_normal (arg2);
2212663f
DB
7901 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7902 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7903 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7904 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7905
774b5662
DE
7906 if (icode == CODE_FOR_nothing)
7907 /* Builtin not supported on this processor. */
7908 return 0;
7909
20e26713
AH
7910 /* If we got invalid arguments, bail out before generating bad rtl. */
7911 if (arg0 == error_mark_node
7912 || arg1 == error_mark_node
7913 || arg2 == error_mark_node)
9a171fcd 7914 return const0_rtx;
20e26713 7915
aba5fb01
NS
7916 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7917 || icode == CODE_FOR_altivec_vsldoi_v4si
7918 || icode == CODE_FOR_altivec_vsldoi_v8hi
7919 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7920 {
7921 /* Only allow 4-bit unsigned literals. */
8bb418a3 7922 STRIP_NOPS (arg2);
b44140e7
AH
7923 if (TREE_CODE (arg2) != INTEGER_CST
7924 || TREE_INT_CST_LOW (arg2) & ~0xf)
7925 {
7926 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7927 return const0_rtx;
b44140e7 7928 }
b44140e7
AH
7929 }
7930
c62f2db5 7931 if (target == 0
2212663f
DB
7932 || GET_MODE (target) != tmode
7933 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7934 target = gen_reg_rtx (tmode);
7935
7936 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7937 op0 = copy_to_mode_reg (mode0, op0);
7938 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7939 op1 = copy_to_mode_reg (mode1, op1);
7940 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7941 op2 = copy_to_mode_reg (mode2, op2);
7942
49e39588
RE
7943 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
7944 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
7945 else
7946 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
7947 if (! pat)
7948 return 0;
7949 emit_insn (pat);
7950
7951 return target;
7952}
92898235 7953
3a9b8c7e 7954/* Expand the lvx builtins. */
0ac081f6 7955static rtx
a2369ed3 7956altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7957{
5039610b 7958 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7959 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7960 tree arg0;
7961 enum machine_mode tmode, mode0;
7c3abc73 7962 rtx pat, op0;
3a9b8c7e 7963 enum insn_code icode;
92898235 7964
0ac081f6
AH
7965 switch (fcode)
7966 {
f18c054f 7967 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7968 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7969 break;
f18c054f 7970 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7971 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7972 break;
7973 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7974 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7975 break;
7976 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7977 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7978 break;
7979 default:
7980 *expandedp = false;
7981 return NULL_RTX;
7982 }
0ac081f6 7983
3a9b8c7e 7984 *expandedp = true;
f18c054f 7985
5039610b 7986 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7987 op0 = expand_normal (arg0);
3a9b8c7e
AH
7988 tmode = insn_data[icode].operand[0].mode;
7989 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7990
3a9b8c7e
AH
7991 if (target == 0
7992 || GET_MODE (target) != tmode
7993 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7994 target = gen_reg_rtx (tmode);
24408032 7995
3a9b8c7e
AH
7996 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7997 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7998
3a9b8c7e
AH
7999 pat = GEN_FCN (icode) (target, op0);
8000 if (! pat)
8001 return 0;
8002 emit_insn (pat);
8003 return target;
8004}
f18c054f 8005
3a9b8c7e
AH
8006/* Expand the stvx builtins. */
8007static rtx
f676971a 8008altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8009 bool *expandedp)
3a9b8c7e 8010{
5039610b 8011 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8012 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8013 tree arg0, arg1;
8014 enum machine_mode mode0, mode1;
7c3abc73 8015 rtx pat, op0, op1;
3a9b8c7e 8016 enum insn_code icode;
f18c054f 8017
3a9b8c7e
AH
8018 switch (fcode)
8019 {
8020 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8021 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8022 break;
8023 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8024 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8025 break;
8026 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8027 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8028 break;
8029 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8030 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8031 break;
8032 default:
8033 *expandedp = false;
8034 return NULL_RTX;
8035 }
24408032 8036
5039610b
SL
8037 arg0 = CALL_EXPR_ARG (exp, 0);
8038 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8039 op0 = expand_normal (arg0);
8040 op1 = expand_normal (arg1);
3a9b8c7e
AH
8041 mode0 = insn_data[icode].operand[0].mode;
8042 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8043
3a9b8c7e
AH
8044 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8045 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8046 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8047 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8048
3a9b8c7e
AH
8049 pat = GEN_FCN (icode) (op0, op1);
8050 if (pat)
8051 emit_insn (pat);
f18c054f 8052
3a9b8c7e
AH
8053 *expandedp = true;
8054 return NULL_RTX;
8055}
f18c054f 8056
3a9b8c7e
AH
8057/* Expand the dst builtins. */
8058static rtx
f676971a 8059altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8060 bool *expandedp)
3a9b8c7e 8061{
5039610b 8062 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8063 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8064 tree arg0, arg1, arg2;
8065 enum machine_mode mode0, mode1, mode2;
7c3abc73 8066 rtx pat, op0, op1, op2;
586de218 8067 const struct builtin_description *d;
a3170dc6 8068 size_t i;
f18c054f 8069
3a9b8c7e 8070 *expandedp = false;
f18c054f 8071
3a9b8c7e 8072 /* Handle DST variants. */
586de218 8073 d = bdesc_dst;
3a9b8c7e
AH
8074 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8075 if (d->code == fcode)
8076 {
5039610b
SL
8077 arg0 = CALL_EXPR_ARG (exp, 0);
8078 arg1 = CALL_EXPR_ARG (exp, 1);
8079 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8080 op0 = expand_normal (arg0);
8081 op1 = expand_normal (arg1);
8082 op2 = expand_normal (arg2);
3a9b8c7e
AH
8083 mode0 = insn_data[d->icode].operand[0].mode;
8084 mode1 = insn_data[d->icode].operand[1].mode;
8085 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8086
3a9b8c7e
AH
8087 /* Invalid arguments, bail out before generating bad rtl. */
8088 if (arg0 == error_mark_node
8089 || arg1 == error_mark_node
8090 || arg2 == error_mark_node)
8091 return const0_rtx;
f18c054f 8092
86e7df90 8093 *expandedp = true;
8bb418a3 8094 STRIP_NOPS (arg2);
3a9b8c7e
AH
8095 if (TREE_CODE (arg2) != INTEGER_CST
8096 || TREE_INT_CST_LOW (arg2) & ~0x3)
8097 {
9e637a26 8098 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8099 return const0_rtx;
8100 }
f18c054f 8101
3a9b8c7e 8102 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8103 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8104 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8105 op1 = copy_to_mode_reg (mode1, op1);
24408032 8106
3a9b8c7e
AH
8107 pat = GEN_FCN (d->icode) (op0, op1, op2);
8108 if (pat != 0)
8109 emit_insn (pat);
f18c054f 8110
3a9b8c7e
AH
8111 return NULL_RTX;
8112 }
f18c054f 8113
3a9b8c7e
AH
8114 return NULL_RTX;
8115}
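/* Hedged usage sketch for the 2-bit literal check above (user-level code,
   not from this file; the vec_dst spelling is an <altivec.h> assumption):

     vec_dst (p, ctl, 3);     tag 0..3: accepted
     vec_dst (p, ctl, 7);     rejected: "argument to ... must be a 2-bit
                              unsigned literal"

   The data-stream tag is the last operand and must be a compile-time
   constant with no bits outside the low two.  */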
24408032 8116
7a4eca66
DE
8117/* Expand vec_init builtin. */
8118static rtx
5039610b 8119altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8120{
8121 enum machine_mode tmode = TYPE_MODE (type);
8122 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8123 int i, n_elt = GET_MODE_NUNITS (tmode);
8124 rtvec v = rtvec_alloc (n_elt);
8125
8126 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8127 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8128
5039610b 8129 for (i = 0; i < n_elt; ++i)
7a4eca66 8130 {
5039610b 8131 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8132 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8133 }
8134
7a4eca66
DE
8135 if (!target || !register_operand (target, tmode))
8136 target = gen_reg_rtx (tmode);
8137
8138 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8139 return target;
8140}
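/* Worked sketch of what the loop above assembles for the V4SI variant
   (illustrative RTL only; register names invented):

     four SImode call arguments a, b, c, d
       ->  (parallel:V4SI [ (reg a) (reg b) (reg c) (reg d) ])

   Each argument is expanded and then reinterpreted as the inner element
   mode via gen_lowpart, and the resulting PARALLEL is handed to
   rs6000_expand_vector_init (defined elsewhere in this file) to be lowered
   into actual vector-construction code.  */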
8141
8142/* Return the integer constant in ARG. Constrain it to be in the range
8143 of the subparts of VEC_TYPE; issue an error if not. */
8144
8145static int
8146get_element_number (tree vec_type, tree arg)
8147{
8148 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8149
8150 if (!host_integerp (arg, 1)
8151 || (elt = tree_low_cst (arg, 1), elt > max))
8152 {
8153 error ("selector must be an integer constant in the range 0..%wi", max);
8154 return 0;
8155 }
8156
8157 return elt;
8158}
8159
8160/* Expand vec_set builtin. */
8161static rtx
5039610b 8162altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8163{
8164 enum machine_mode tmode, mode1;
8165 tree arg0, arg1, arg2;
8166 int elt;
8167 rtx op0, op1;
8168
5039610b
SL
8169 arg0 = CALL_EXPR_ARG (exp, 0);
8170 arg1 = CALL_EXPR_ARG (exp, 1);
8171 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8172
8173 tmode = TYPE_MODE (TREE_TYPE (arg0));
8174 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8175 gcc_assert (VECTOR_MODE_P (tmode));
8176
8177 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8178 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8179 elt = get_element_number (TREE_TYPE (arg0), arg2);
8180
8181 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8182 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8183
8184 op0 = force_reg (tmode, op0);
8185 op1 = force_reg (mode1, op1);
8186
8187 rs6000_expand_vector_set (op0, op1, elt);
8188
8189 return op0;
8190}
8191
8192/* Expand vec_ext builtin. */
8193static rtx
5039610b 8194altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8195{
8196 enum machine_mode tmode, mode0;
8197 tree arg0, arg1;
8198 int elt;
8199 rtx op0;
8200
5039610b
SL
8201 arg0 = CALL_EXPR_ARG (exp, 0);
8202 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8203
84217346 8204 op0 = expand_normal (arg0);
7a4eca66
DE
8205 elt = get_element_number (TREE_TYPE (arg0), arg1);
8206
8207 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8208 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8209 gcc_assert (VECTOR_MODE_P (mode0));
8210
8211 op0 = force_reg (mode0, op0);
8212
8213 if (optimize || !target || !register_operand (target, tmode))
8214 target = gen_reg_rtx (tmode);
8215
8216 rs6000_expand_vector_extract (target, op0, elt);
8217
8218 return target;
8219}
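/* Hedged usage sketch of the selector handling shared by the vec_set and
   vec_ext expanders above (user-level code, not from this file; the builtin
   spelling is assumed from its enum name):

     vector int v;
     int a = __builtin_vec_ext_v4si (v, 2);    selector within 0..3: accepted
     int b = __builtin_vec_ext_v4si (v, 9);    get_element_number reports
         "selector must be an integer constant in the range 0..3"

   The selector must be a non-negative integer constant; it is checked
   against TYPE_VECTOR_SUBPARTS before rs6000_expand_vector_extract is
   called.  */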
8220
3a9b8c7e
AH
8221/* Expand the builtin in EXP and store the result in TARGET. Store
8222 true in *EXPANDEDP if we found a builtin to expand. */
8223static rtx
a2369ed3 8224altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8225{
586de218
KG
8226 const struct builtin_description *d;
8227 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8228 size_t i;
8229 enum insn_code icode;
5039610b 8230 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8231 tree arg0;
8232 rtx op0, pat;
8233 enum machine_mode tmode, mode0;
3a9b8c7e 8234 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8235
58646b77
PB
8236 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8237 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8238 {
8239 *expandedp = true;
ea40ba9c 8240 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8241 return const0_rtx;
8242 }
8243
3a9b8c7e
AH
8244 target = altivec_expand_ld_builtin (exp, target, expandedp);
8245 if (*expandedp)
8246 return target;
0ac081f6 8247
3a9b8c7e
AH
8248 target = altivec_expand_st_builtin (exp, target, expandedp);
8249 if (*expandedp)
8250 return target;
8251
8252 target = altivec_expand_dst_builtin (exp, target, expandedp);
8253 if (*expandedp)
8254 return target;
8255
8256 *expandedp = true;
95385cbb 8257
3a9b8c7e
AH
8258 switch (fcode)
8259 {
6525c0e7 8260 case ALTIVEC_BUILTIN_STVX:
5039610b 8261 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8262 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8263 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8264 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8265 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8266 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8267 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8268 case ALTIVEC_BUILTIN_STVXL:
5039610b 8269 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8270
95385cbb
AH
8271 case ALTIVEC_BUILTIN_MFVSCR:
8272 icode = CODE_FOR_altivec_mfvscr;
8273 tmode = insn_data[icode].operand[0].mode;
8274
8275 if (target == 0
8276 || GET_MODE (target) != tmode
8277 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8278 target = gen_reg_rtx (tmode);
f676971a 8279
95385cbb 8280 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8281 if (! pat)
8282 return 0;
8283 emit_insn (pat);
95385cbb
AH
8284 return target;
8285
8286 case ALTIVEC_BUILTIN_MTVSCR:
8287 icode = CODE_FOR_altivec_mtvscr;
5039610b 8288 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8289 op0 = expand_normal (arg0);
95385cbb
AH
8290 mode0 = insn_data[icode].operand[0].mode;
8291
8292 /* If we got invalid arguments, bail out before generating bad rtl. */
8293 if (arg0 == error_mark_node)
9a171fcd 8294 return const0_rtx;
95385cbb
AH
8295
8296 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8297 op0 = copy_to_mode_reg (mode0, op0);
8298
8299 pat = GEN_FCN (icode) (op0);
8300 if (pat)
8301 emit_insn (pat);
8302 return NULL_RTX;
3a9b8c7e 8303
95385cbb
AH
8304 case ALTIVEC_BUILTIN_DSSALL:
8305 emit_insn (gen_altivec_dssall ());
8306 return NULL_RTX;
8307
8308 case ALTIVEC_BUILTIN_DSS:
8309 icode = CODE_FOR_altivec_dss;
5039610b 8310 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8311 STRIP_NOPS (arg0);
84217346 8312 op0 = expand_normal (arg0);
95385cbb
AH
8313 mode0 = insn_data[icode].operand[0].mode;
8314
8315 /* If we got invalid arguments, bail out before generating bad rtl. */
8316 if (arg0 == error_mark_node)
9a171fcd 8317 return const0_rtx;
95385cbb 8318
b44140e7
AH
8319 if (TREE_CODE (arg0) != INTEGER_CST
8320 || TREE_INT_CST_LOW (arg0) & ~0x3)
8321 {
8322 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8323 return const0_rtx;
b44140e7
AH
8324 }
8325
95385cbb
AH
8326 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8327 op0 = copy_to_mode_reg (mode0, op0);
8328
8329 emit_insn (gen_altivec_dss (op0));
0ac081f6 8330 return NULL_RTX;
7a4eca66
DE
8331
8332 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8333 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8334 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8335 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8336 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8337
8338 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8339 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8340 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8341 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8342 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8343
8344 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8345 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8346 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8347 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8348 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8349
8350 default:
8351 break;
8352 /* Fall through. */
0ac081f6 8353 }
24408032 8354
100c4561 8355 /* Expand abs* operations. */
586de218 8356 d = bdesc_abs;
ca7558fc 8357 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8358 if (d->code == fcode)
5039610b 8359 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8360
ae4b4a02 8361 /* Expand the AltiVec predicates. */
586de218 8362 dp = bdesc_altivec_preds;
ca7558fc 8363 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8364 if (dp->code == fcode)
c4ad648e 8365 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8366 exp, target);
ae4b4a02 8367
6525c0e7
AH
8368 /* LV* are funky. We initialized them differently. */
8369 switch (fcode)
8370 {
8371 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8372 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8373 exp, target);
6525c0e7 8374 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8375 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8376 exp, target);
6525c0e7 8377 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8378 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8379 exp, target);
6525c0e7 8380 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8381 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8382 exp, target);
6525c0e7 8383 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8384 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8385 exp, target);
6525c0e7 8386 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8387 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8388 exp, target);
6525c0e7 8389 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8390 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8391 exp, target);
6525c0e7
AH
8392 default:
8393 break;
8394 /* Fall through. */
8395 }
95385cbb 8396
92898235 8397 *expandedp = false;
0ac081f6
AH
8398 return NULL_RTX;
8399}
8400
96038623
DE
8401/* Expand the builtin in EXP and store the result in TARGET. Store
8402 true in *EXPANDEDP if we found a builtin to expand. */
8403static rtx
8404paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8405{
8406 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8407 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8408 const struct builtin_description *d;
96038623
DE
8409 size_t i;
8410
8411 *expandedp = true;
8412
8413 switch (fcode)
8414 {
8415 case PAIRED_BUILTIN_STX:
8416 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8417 case PAIRED_BUILTIN_LX:
8418 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8419 default:
8420 break;
8421 /* Fall through. */
8422 }
8423
8424 /* Expand the paired predicates. */
23a651fc 8425 d = bdesc_paired_preds;
96038623
DE
8426 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8427 if (d->code == fcode)
8428 return paired_expand_predicate_builtin (d->icode, exp, target);
8429
8430 *expandedp = false;
8431 return NULL_RTX;
8432}
8433
a3170dc6
AH
8434/* Binops that need to be initialized manually, but can be expanded
8435 automatically by rs6000_expand_binop_builtin. */
8436static struct builtin_description bdesc_2arg_spe[] =
8437{
8438 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8439 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8440 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8441 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8442 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8443 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8444 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8445 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8446 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8447 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8448 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8449 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8450 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8451 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8452 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8453 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8454 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8455 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8456 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8457 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8458 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8459 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8460};
8461
8462/* Expand the builtin in EXP and store the result in TARGET. Store
8463 true in *EXPANDEDP if we found a builtin to expand.
8464
8465 This expands the SPE builtins that are not simple unary and binary
8466 operations. */
8467static rtx
a2369ed3 8468spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8469{
5039610b 8470 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8471 tree arg1, arg0;
8472 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8473 enum insn_code icode;
8474 enum machine_mode tmode, mode0;
8475 rtx pat, op0;
8476 struct builtin_description *d;
8477 size_t i;
8478
8479 *expandedp = true;
8480
8481 /* Syntax check for a 5-bit unsigned immediate. */
8482 switch (fcode)
8483 {
8484 case SPE_BUILTIN_EVSTDD:
8485 case SPE_BUILTIN_EVSTDH:
8486 case SPE_BUILTIN_EVSTDW:
8487 case SPE_BUILTIN_EVSTWHE:
8488 case SPE_BUILTIN_EVSTWHO:
8489 case SPE_BUILTIN_EVSTWWE:
8490 case SPE_BUILTIN_EVSTWWO:
5039610b 8491 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8492 if (TREE_CODE (arg1) != INTEGER_CST
8493 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8494 {
8495 error ("argument 2 must be a 5-bit unsigned literal");
8496 return const0_rtx;
8497 }
8498 break;
8499 default:
8500 break;
8501 }
8502
00332c9f
AH
8503 /* The evsplat*i instructions are not quite generic. */
8504 switch (fcode)
8505 {
8506 case SPE_BUILTIN_EVSPLATFI:
8507 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8508 exp, target);
00332c9f
AH
8509 case SPE_BUILTIN_EVSPLATI:
8510 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8511 exp, target);
00332c9f
AH
8512 default:
8513 break;
8514 }
8515
a3170dc6
AH
8516 d = (struct builtin_description *) bdesc_2arg_spe;
8517 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8518 if (d->code == fcode)
5039610b 8519 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8520
8521 d = (struct builtin_description *) bdesc_spe_predicates;
8522 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8523 if (d->code == fcode)
5039610b 8524 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8525
8526 d = (struct builtin_description *) bdesc_spe_evsel;
8527 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8528 if (d->code == fcode)
5039610b 8529 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8530
8531 switch (fcode)
8532 {
8533 case SPE_BUILTIN_EVSTDDX:
5039610b 8534 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8535 case SPE_BUILTIN_EVSTDHX:
5039610b 8536 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8537 case SPE_BUILTIN_EVSTDWX:
5039610b 8538 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8539 case SPE_BUILTIN_EVSTWHEX:
5039610b 8540 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8541 case SPE_BUILTIN_EVSTWHOX:
5039610b 8542 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8543 case SPE_BUILTIN_EVSTWWEX:
5039610b 8544 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8545 case SPE_BUILTIN_EVSTWWOX:
5039610b 8546 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8547 case SPE_BUILTIN_EVSTDD:
5039610b 8548 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8549 case SPE_BUILTIN_EVSTDH:
5039610b 8550 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8551 case SPE_BUILTIN_EVSTDW:
5039610b 8552 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8553 case SPE_BUILTIN_EVSTWHE:
5039610b 8554 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8555 case SPE_BUILTIN_EVSTWHO:
5039610b 8556 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8557 case SPE_BUILTIN_EVSTWWE:
5039610b 8558 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8559 case SPE_BUILTIN_EVSTWWO:
5039610b 8560 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8561 case SPE_BUILTIN_MFSPEFSCR:
8562 icode = CODE_FOR_spe_mfspefscr;
8563 tmode = insn_data[icode].operand[0].mode;
8564
8565 if (target == 0
8566 || GET_MODE (target) != tmode
8567 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8568 target = gen_reg_rtx (tmode);
f676971a 8569
a3170dc6
AH
8570 pat = GEN_FCN (icode) (target);
8571 if (! pat)
8572 return 0;
8573 emit_insn (pat);
8574 return target;
8575 case SPE_BUILTIN_MTSPEFSCR:
8576 icode = CODE_FOR_spe_mtspefscr;
5039610b 8577 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8578 op0 = expand_normal (arg0);
a3170dc6
AH
8579 mode0 = insn_data[icode].operand[0].mode;
8580
8581 if (arg0 == error_mark_node)
8582 return const0_rtx;
8583
8584 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8585 op0 = copy_to_mode_reg (mode0, op0);
8586
8587 pat = GEN_FCN (icode) (op0);
8588 if (pat)
8589 emit_insn (pat);
8590 return NULL_RTX;
8591 default:
8592 break;
8593 }
8594
8595 *expandedp = false;
8596 return NULL_RTX;
8597}
8598
96038623
DE
8599static rtx
8600paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8601{
8602 rtx pat, scratch, tmp;
8603 tree form = CALL_EXPR_ARG (exp, 0);
8604 tree arg0 = CALL_EXPR_ARG (exp, 1);
8605 tree arg1 = CALL_EXPR_ARG (exp, 2);
8606 rtx op0 = expand_normal (arg0);
8607 rtx op1 = expand_normal (arg1);
8608 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8609 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8610 int form_int;
8611 enum rtx_code code;
8612
8613 if (TREE_CODE (form) != INTEGER_CST)
8614 {
8615 error ("argument 1 of __builtin_paired_predicate must be a constant");
8616 return const0_rtx;
8617 }
8618 else
8619 form_int = TREE_INT_CST_LOW (form);
8620
8621 gcc_assert (mode0 == mode1);
8622
8623 if (arg0 == error_mark_node || arg1 == error_mark_node)
8624 return const0_rtx;
8625
8626 if (target == 0
8627 || GET_MODE (target) != SImode
8628 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8629 target = gen_reg_rtx (SImode);
8630 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8631 op0 = copy_to_mode_reg (mode0, op0);
8632 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8633 op1 = copy_to_mode_reg (mode1, op1);
8634
8635 scratch = gen_reg_rtx (CCFPmode);
8636
8637 pat = GEN_FCN (icode) (scratch, op0, op1);
8638 if (!pat)
8639 return const0_rtx;
8640
8641 emit_insn (pat);
8642
8643 switch (form_int)
8644 {
8645 /* LT bit. */
8646 case 0:
8647 code = LT;
8648 break;
8649 /* GT bit. */
8650 case 1:
8651 code = GT;
8652 break;
8653 /* EQ bit. */
8654 case 2:
8655 code = EQ;
8656 break;
8657 /* UN bit. */
8658 case 3:
8659 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8660 return target;
8661 default:
8662 error ("argument 1 of __builtin_paired_predicate is out of range");
8663 return const0_rtx;
8664 }
8665
8666 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8667 emit_move_insn (target, tmp);
8668 return target;
8669}
8670
a3170dc6 8671static rtx
5039610b 8672spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8673{
8674 rtx pat, scratch, tmp;
5039610b
SL
8675 tree form = CALL_EXPR_ARG (exp, 0);
8676 tree arg0 = CALL_EXPR_ARG (exp, 1);
8677 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8678 rtx op0 = expand_normal (arg0);
8679 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8680 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8681 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8682 int form_int;
8683 enum rtx_code code;
8684
8685 if (TREE_CODE (form) != INTEGER_CST)
8686 {
8687 error ("argument 1 of __builtin_spe_predicate must be a constant");
8688 return const0_rtx;
8689 }
8690 else
8691 form_int = TREE_INT_CST_LOW (form);
8692
37409796 8693 gcc_assert (mode0 == mode1);
a3170dc6
AH
8694
8695 if (arg0 == error_mark_node || arg1 == error_mark_node)
8696 return const0_rtx;
8697
8698 if (target == 0
8699 || GET_MODE (target) != SImode
8700 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8701 target = gen_reg_rtx (SImode);
8702
8703 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8704 op0 = copy_to_mode_reg (mode0, op0);
8705 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8706 op1 = copy_to_mode_reg (mode1, op1);
8707
8708 scratch = gen_reg_rtx (CCmode);
8709
8710 pat = GEN_FCN (icode) (scratch, op0, op1);
8711 if (! pat)
8712 return const0_rtx;
8713 emit_insn (pat);
8714
8715 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8716 _lower_. We use one compare, but look in different bits of the
8717 CR for each variant.
8718
8719 There are 2 elements in each SPE simd type (upper/lower). The CR
8720 bits are set as follows:
8721
8722 BIT 0 | BIT 1 | BIT 2 | BIT 3
8723 U | L | (U | L) | (U & L)
8724
8725 So, for an "all" relationship, BIT 3 would be set.
8726 For an "any" relationship, BIT 2 would be set. Etc.
8727
8728 Following traditional nomenclature, these bits map to:
8729
8730 BIT 0 | BIT 1 | BIT 2 | BIT 3
8731 LT | GT | EQ | OV
8732
8733 Later, we will generate rtl to look in the OV, EQ, LT or GT bit
 (for the all, any, upper and lower variant respectively).
8734 */
8735
8736 switch (form_int)
8737 {
8738 /* All variant. OV bit. */
8739 case 0:
8740 /* We need to get to the OV bit, which is the ORDERED bit. We
8741 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8742 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8743 So let's just use another pattern. */
8744 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8745 return target;
8746 /* Any variant. EQ bit. */
8747 case 1:
8748 code = EQ;
8749 break;
8750 /* Upper variant. LT bit. */
8751 case 2:
8752 code = LT;
8753 break;
8754 /* Lower variant. GT bit. */
8755 case 3:
8756 code = GT;
8757 break;
8758 default:
8759 error ("argument 1 of __builtin_spe_predicate is out of range");
8760 return const0_rtx;
8761 }
8762
8763 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8764 emit_move_insn (target, tmp);
8765
8766 return target;
8767}
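/* A usage sketch (hedged): the SPE predicate builtins registered from
   bdesc_spe_predicates have type int (int, v2si, v2si) or
   int (int, v2sf, v2sf), so callers pass the form constant first.
   Assuming the usual "__builtin_spe_ev..." naming (__builtin_spe_evcmpgts
   is assumed here), something like

     __ev64_opaque__ a, b;
     int all_gt = __builtin_spe_evcmpgts (0, a, b);   -- form 0: OV bit
     int any_gt = __builtin_spe_evcmpgts (1, a, b);   -- form 1: EQ bit

   is expanded above into one vector compare that sets a CCmode scratch
   register, followed by a move of the selected CR bit into the SImode
   target.  */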
8768
8769/* The evsel builtins look like this:
8770
8771 e = __builtin_spe_evsel_OP (a, b, c, d);
8772
8773 and work like this:
8774
8775 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8776 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8777*/
8778
8779static rtx
5039610b 8780spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8781{
8782 rtx pat, scratch;
5039610b
SL
8783 tree arg0 = CALL_EXPR_ARG (exp, 0);
8784 tree arg1 = CALL_EXPR_ARG (exp, 1);
8785 tree arg2 = CALL_EXPR_ARG (exp, 2);
8786 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8787 rtx op0 = expand_normal (arg0);
8788 rtx op1 = expand_normal (arg1);
8789 rtx op2 = expand_normal (arg2);
8790 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8791 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8792 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8793
37409796 8794 gcc_assert (mode0 == mode1);
a3170dc6
AH
8795
8796 if (arg0 == error_mark_node || arg1 == error_mark_node
8797 || arg2 == error_mark_node || arg3 == error_mark_node)
8798 return const0_rtx;
8799
8800 if (target == 0
8801 || GET_MODE (target) != mode0
8802 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8803 target = gen_reg_rtx (mode0);
8804
8805 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8806 op0 = copy_to_mode_reg (mode0, op0);
8807 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8808 op1 = copy_to_mode_reg (mode0, op1);
8809 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8810 op2 = copy_to_mode_reg (mode0, op2);
8811 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8812 op3 = copy_to_mode_reg (mode0, op3);
8813
8814 /* Generate the compare. */
8815 scratch = gen_reg_rtx (CCmode);
8816 pat = GEN_FCN (icode) (scratch, op0, op1);
8817 if (! pat)
8818 return const0_rtx;
8819 emit_insn (pat);
8820
8821 if (mode0 == V2SImode)
8822 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8823 else
8824 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8825
8826 return target;
8827}
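/* A sketch of the expansion performed above (hedged; V2SImode case):

     e = __builtin_spe_evsel_OP (a, b, c, d);

   becomes, roughly,

     (set (reg:CC scratch) (compare_OP a b))      -- GEN_FCN (icode)
     (set e (spe_evsel c d scratch))              -- gen_spe_evsel

   i.e. one compare of A against B, then a per-element select between C
   and D driven by the resulting condition register field.  The V2SFmode
   case uses gen_spe_evsel_fs instead.  */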
8828
0ac081f6
AH
8829/* Expand an expression EXP that calls a built-in function,
8830 with result going to TARGET if that's convenient
8831 (and in mode MODE if that's convenient).
8832 SUBTARGET may be used as the target for computing one of EXP's operands.
8833 IGNORE is nonzero if the value is to be ignored. */
8834
8835static rtx
a2369ed3 8836rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8837 enum machine_mode mode ATTRIBUTE_UNUSED,
8838 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8839{
5039610b 8840 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8841 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8842 const struct builtin_description *d;
92898235
AH
8843 size_t i;
8844 rtx ret;
8845 bool success;
f676971a 8846
9c78b944
DE
8847 if (fcode == RS6000_BUILTIN_RECIP)
8848 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8849
8850 if (fcode == RS6000_BUILTIN_RECIPF)
8851 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8852
8853 if (fcode == RS6000_BUILTIN_RSQRTF)
8854 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8855
7ccf35ed
DN
8856 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8857 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8858 {
8859 int icode = (int) CODE_FOR_altivec_lvsr;
8860 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8861 enum machine_mode mode = insn_data[icode].operand[1].mode;
8862 tree arg;
8863 rtx op, addr, pat;
8864
37409796 8865 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8866
5039610b 8867 arg = CALL_EXPR_ARG (exp, 0);
37409796 8868 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8869 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8870 addr = memory_address (mode, op);
8871 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8872 op = addr;
8873 else
8874 {
8875 /* For the load case we need to negate the address. */
8876 op = gen_reg_rtx (GET_MODE (addr));
8877 emit_insn (gen_rtx_SET (VOIDmode, op,
8878 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8879 }
7ccf35ed
DN
8880 op = gen_rtx_MEM (mode, op);
8881
8882 if (target == 0
8883 || GET_MODE (target) != tmode
8884 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8885 target = gen_reg_rtx (tmode);
8886
8887 /*pat = gen_altivec_lvsr (target, op);*/
8888 pat = GEN_FCN (icode) (target, op);
8889 if (!pat)
8890 return 0;
8891 emit_insn (pat);
8892
8893 return target;
8894 }
5039610b
SL
8895
8896 /* FIXME: There's got to be a nicer way to handle this case than
8897 constructing a new CALL_EXPR. */
f57d17f1
TM
8898 if (fcode == ALTIVEC_BUILTIN_VCFUX
8899 || fcode == ALTIVEC_BUILTIN_VCFSX)
8900 {
5039610b
SL
8901 if (call_expr_nargs (exp) == 1)
8902 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8903 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8904 }
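/* A hedged illustration of the fixup above: a call written with a single
   operand, e.g.

     __builtin_altivec_vcfux (v)

   is rebuilt here as

     __builtin_altivec_vcfux (v, 0)

   with integer_zero_node supplied as the missing scale operand.  */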
7ccf35ed 8905
0ac081f6 8906 if (TARGET_ALTIVEC)
92898235
AH
8907 {
8908 ret = altivec_expand_builtin (exp, target, &success);
8909
a3170dc6
AH
8910 if (success)
8911 return ret;
8912 }
8913 if (TARGET_SPE)
8914 {
8915 ret = spe_expand_builtin (exp, target, &success);
8916
92898235
AH
8917 if (success)
8918 return ret;
8919 }
96038623
DE
8920 if (TARGET_PAIRED_FLOAT)
8921 {
8922 ret = paired_expand_builtin (exp, target, &success);
8923
8924 if (success)
8925 return ret;
8926 }
92898235 8927
96038623 8928 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 8929
37409796
NS
8930 /* Handle simple unary operations. */
8931 d = (struct builtin_description *) bdesc_1arg;
8932 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8933 if (d->code == fcode)
5039610b 8934 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8935
37409796
NS
8936 /* Handle simple binary operations. */
8937 d = (struct builtin_description *) bdesc_2arg;
8938 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8939 if (d->code == fcode)
5039610b 8940 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8941
37409796 8942 /* Handle simple ternary operations. */
586de218 8943 d = bdesc_3arg;
37409796
NS
8944 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8945 if (d->code == fcode)
5039610b 8946 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8947
37409796 8948 gcc_unreachable ();
0ac081f6
AH
8949}
8950
7c62e993
PB
8951static tree
8952build_opaque_vector_type (tree node, int nunits)
8953{
8954 node = copy_node (node);
8955 TYPE_MAIN_VARIANT (node) = node;
8956 return build_vector_type (node, nunits);
8957}
8958
0ac081f6 8959static void
863d938c 8960rs6000_init_builtins (void)
0ac081f6 8961{
4a5eab38
PB
8962 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8963 V2SF_type_node = build_vector_type (float_type_node, 2);
8964 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8965 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8966 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8967 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8968 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8969
8970 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8971 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8972 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8973
7c62e993
PB
8974 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8975 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8976 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8977 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8978
8bb418a3
ZL
8979 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8980 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8981 'vector unsigned short'. */
8982
8dd16ecc
NS
8983 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8984 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8985 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8986 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8987
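/* A hedged illustration of why the distinct copies above matter:

     __vector __bool char    m;
     __vector unsigned char  u;

   declare two different types even though both are 16 unsigned QImode
   elements, which is what lets the C++ front end overload the AltiVec
   interfaces on them; likewise 'vector pixel' stays distinct from
   'vector unsigned short'.  */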
58646b77
PB
8988 long_integer_type_internal_node = long_integer_type_node;
8989 long_unsigned_type_internal_node = long_unsigned_type_node;
8990 intQI_type_internal_node = intQI_type_node;
8991 uintQI_type_internal_node = unsigned_intQI_type_node;
8992 intHI_type_internal_node = intHI_type_node;
8993 uintHI_type_internal_node = unsigned_intHI_type_node;
8994 intSI_type_internal_node = intSI_type_node;
8995 uintSI_type_internal_node = unsigned_intSI_type_node;
8996 float_type_internal_node = float_type_node;
8997 void_type_internal_node = void_type_node;
8998
8bb418a3
ZL
8999 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9000 get_identifier ("__bool char"),
9001 bool_char_type_node));
9002 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9003 get_identifier ("__bool short"),
9004 bool_short_type_node));
9005 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9006 get_identifier ("__bool int"),
9007 bool_int_type_node));
9008 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9009 get_identifier ("__pixel"),
9010 pixel_type_node));
9011
4a5eab38
PB
9012 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9013 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9014 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9015 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9016
9017 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9018 get_identifier ("__vector unsigned char"),
9019 unsigned_V16QI_type_node));
9020 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9021 get_identifier ("__vector signed char"),
9022 V16QI_type_node));
9023 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9024 get_identifier ("__vector __bool char"),
9025 bool_V16QI_type_node));
9026
9027 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9028 get_identifier ("__vector unsigned short"),
9029 unsigned_V8HI_type_node));
9030 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9031 get_identifier ("__vector signed short"),
9032 V8HI_type_node));
9033 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9034 get_identifier ("__vector __bool short"),
9035 bool_V8HI_type_node));
9036
9037 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9038 get_identifier ("__vector unsigned int"),
9039 unsigned_V4SI_type_node));
9040 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9041 get_identifier ("__vector signed int"),
9042 V4SI_type_node));
9043 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9044 get_identifier ("__vector __bool int"),
9045 bool_V4SI_type_node));
9046
9047 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9048 get_identifier ("__vector float"),
9049 V4SF_type_node));
9050 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9051 get_identifier ("__vector __pixel"),
9052 pixel_V8HI_type_node));
9053
96038623
DE
9054 if (TARGET_PAIRED_FLOAT)
9055 paired_init_builtins ();
a3170dc6 9056 if (TARGET_SPE)
3fdaa45a 9057 spe_init_builtins ();
0ac081f6
AH
9058 if (TARGET_ALTIVEC)
9059 altivec_init_builtins ();
96038623 9060 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9061 rs6000_common_init_builtins ();
9c78b944
DE
9062 if (TARGET_PPC_GFXOPT)
9063 {
9064 tree ftype = build_function_type_list (float_type_node,
9065 float_type_node,
9066 float_type_node,
9067 NULL_TREE);
9068 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9069 RS6000_BUILTIN_RECIPF);
9070
9071 ftype = build_function_type_list (float_type_node,
9072 float_type_node,
9073 NULL_TREE);
9074 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9075 RS6000_BUILTIN_RSQRTF);
9076 }
9077 if (TARGET_POPCNTB)
9078 {
9079 tree ftype = build_function_type_list (double_type_node,
9080 double_type_node,
9081 double_type_node,
9082 NULL_TREE);
9083 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9084 RS6000_BUILTIN_RECIP);
9085
9086 }
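/* A usage sketch (hedged): the builtins defined just above have the C
   signatures

     float  __builtin_recipdivf (float, float);    -- MASK_PPC_GFXOPT
     float  __builtin_rsqrtf (float);              -- MASK_PPC_GFXOPT
     double __builtin_recipdiv (double, double);   -- MASK_POPCNTB

   and are expanded in rs6000_expand_builtin via CODE_FOR_recipsf3,
   CODE_FOR_rsqrtsf2 and CODE_FOR_recipdf3 respectively.  */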
69ca3549
DE
9087
9088#if TARGET_XCOFF
9089 /* AIX libm provides clog as __clog. */
9090 if (built_in_decls [BUILT_IN_CLOG])
9091 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9092#endif
0ac081f6
AH
9093}
9094
a3170dc6
AH
9095/* Search through a set of builtins and enable the mask bits.
9096 DESC is an array of builtins.
b6d08ca1 9097 SIZE is the total number of builtins.
a3170dc6
AH
9098 START is the builtin enum at which to start.
9099 END is the builtin enum at which to end. */
0ac081f6 9100static void
a2369ed3 9101enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9102 enum rs6000_builtins start,
a2369ed3 9103 enum rs6000_builtins end)
a3170dc6
AH
9104{
9105 int i;
9106
9107 for (i = 0; i < size; ++i)
9108 if (desc[i].code == start)
9109 break;
9110
9111 if (i == size)
9112 return;
9113
9114 for (; i < size; ++i)
9115 {
9116 /* Flip all the bits on. */
9117 desc[i].mask = target_flags;
9118 if (desc[i].code == end)
9119 break;
9120 }
9121}
9122
9123static void
863d938c 9124spe_init_builtins (void)
0ac081f6 9125{
a3170dc6
AH
9126 tree endlink = void_list_node;
9127 tree puint_type_node = build_pointer_type (unsigned_type_node);
9128 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9129 struct builtin_description *d;
0ac081f6
AH
9130 size_t i;
9131
a3170dc6
AH
9132 tree v2si_ftype_4_v2si
9133 = build_function_type
3fdaa45a
AH
9134 (opaque_V2SI_type_node,
9135 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9136 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9137 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9138 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9139 endlink)))));
9140
9141 tree v2sf_ftype_4_v2sf
9142 = build_function_type
3fdaa45a
AH
9143 (opaque_V2SF_type_node,
9144 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9145 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9146 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9147 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9148 endlink)))));
9149
9150 tree int_ftype_int_v2si_v2si
9151 = build_function_type
9152 (integer_type_node,
9153 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9154 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9155 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9156 endlink))));
9157
9158 tree int_ftype_int_v2sf_v2sf
9159 = build_function_type
9160 (integer_type_node,
9161 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9162 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9163 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9164 endlink))));
9165
9166 tree void_ftype_v2si_puint_int
9167 = build_function_type (void_type_node,
3fdaa45a 9168 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9169 tree_cons (NULL_TREE, puint_type_node,
9170 tree_cons (NULL_TREE,
9171 integer_type_node,
9172 endlink))));
9173
9174 tree void_ftype_v2si_puint_char
9175 = build_function_type (void_type_node,
3fdaa45a 9176 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9177 tree_cons (NULL_TREE, puint_type_node,
9178 tree_cons (NULL_TREE,
9179 char_type_node,
9180 endlink))));
9181
9182 tree void_ftype_v2si_pv2si_int
9183 = build_function_type (void_type_node,
3fdaa45a 9184 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9185 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9186 tree_cons (NULL_TREE,
9187 integer_type_node,
9188 endlink))));
9189
9190 tree void_ftype_v2si_pv2si_char
9191 = build_function_type (void_type_node,
3fdaa45a 9192 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9193 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9194 tree_cons (NULL_TREE,
9195 char_type_node,
9196 endlink))));
9197
9198 tree void_ftype_int
9199 = build_function_type (void_type_node,
9200 tree_cons (NULL_TREE, integer_type_node, endlink));
9201
9202 tree int_ftype_void
36e8d515 9203 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9204
9205 tree v2si_ftype_pv2si_int
3fdaa45a 9206 = build_function_type (opaque_V2SI_type_node,
6035d635 9207 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9208 tree_cons (NULL_TREE, integer_type_node,
9209 endlink)));
9210
9211 tree v2si_ftype_puint_int
3fdaa45a 9212 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9213 tree_cons (NULL_TREE, puint_type_node,
9214 tree_cons (NULL_TREE, integer_type_node,
9215 endlink)));
9216
9217 tree v2si_ftype_pushort_int
3fdaa45a 9218 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9219 tree_cons (NULL_TREE, pushort_type_node,
9220 tree_cons (NULL_TREE, integer_type_node,
9221 endlink)));
9222
00332c9f
AH
9223 tree v2si_ftype_signed_char
9224 = build_function_type (opaque_V2SI_type_node,
9225 tree_cons (NULL_TREE, signed_char_type_node,
9226 endlink));
9227
a3170dc6
AH
9228 /* The initialization of the simple binary and unary builtins is
9229 done in rs6000_common_init_builtins, but we have to enable the
9230 mask bits here manually because we have run out of `target_flags'
9231 bits. We really need to redesign this mask business. */
9232
9233 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9234 ARRAY_SIZE (bdesc_2arg),
9235 SPE_BUILTIN_EVADDW,
9236 SPE_BUILTIN_EVXOR);
9237 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9238 ARRAY_SIZE (bdesc_1arg),
9239 SPE_BUILTIN_EVABS,
9240 SPE_BUILTIN_EVSUBFUSIAAW);
9241 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9242 ARRAY_SIZE (bdesc_spe_predicates),
9243 SPE_BUILTIN_EVCMPEQ,
9244 SPE_BUILTIN_EVFSTSTLT);
9245 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9246 ARRAY_SIZE (bdesc_spe_evsel),
9247 SPE_BUILTIN_EVSEL_CMPGTS,
9248 SPE_BUILTIN_EVSEL_FSTSTEQ);
9249
36252949
AH
9250 (*lang_hooks.decls.pushdecl)
9251 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9252 opaque_V2SI_type_node));
9253
a3170dc6 9254 /* Initialize irregular SPE builtins. */
f676971a 9255
a3170dc6
AH
9256 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9257 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9258 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9259 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9260 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9261 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9262 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9263 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9264 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9265 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9266 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9267 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9268 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9269 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9270 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9271 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9272 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9273 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9274
9275 /* Loads. */
9276 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9277 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9278 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9279 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9280 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9281 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9282 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9283 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9284 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9285 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9286 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9287 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9288 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9289 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9290 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9291 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9292 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9293 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9294 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9295 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9296 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9297 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9298
9299 /* Predicates. */
9300 d = (struct builtin_description *) bdesc_spe_predicates;
9301 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9302 {
9303 tree type;
9304
9305 switch (insn_data[d->icode].operand[1].mode)
9306 {
9307 case V2SImode:
9308 type = int_ftype_int_v2si_v2si;
9309 break;
9310 case V2SFmode:
9311 type = int_ftype_int_v2sf_v2sf;
9312 break;
9313 default:
37409796 9314 gcc_unreachable ();
a3170dc6
AH
9315 }
9316
9317 def_builtin (d->mask, d->name, type, d->code);
9318 }
9319
9320 /* Evsel predicates. */
9321 d = (struct builtin_description *) bdesc_spe_evsel;
9322 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9323 {
9324 tree type;
9325
9326 switch (insn_data[d->icode].operand[1].mode)
9327 {
9328 case V2SImode:
9329 type = v2si_ftype_4_v2si;
9330 break;
9331 case V2SFmode:
9332 type = v2sf_ftype_4_v2sf;
9333 break;
9334 default:
37409796 9335 gcc_unreachable ();
a3170dc6
AH
9336 }
9337
9338 def_builtin (d->mask, d->name, type, d->code);
9339 }
9340}
9341
96038623
DE
9342static void
9343paired_init_builtins (void)
9344{
23a651fc 9345 const struct builtin_description *d;
96038623
DE
9346 size_t i;
9347 tree endlink = void_list_node;
9348
9349 tree int_ftype_int_v2sf_v2sf
9350 = build_function_type
9351 (integer_type_node,
9352 tree_cons (NULL_TREE, integer_type_node,
9353 tree_cons (NULL_TREE, V2SF_type_node,
9354 tree_cons (NULL_TREE, V2SF_type_node,
9355 endlink))));
9356 tree pcfloat_type_node =
9357 build_pointer_type (build_qualified_type
9358 (float_type_node, TYPE_QUAL_CONST));
9359
9360 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9361 long_integer_type_node,
9362 pcfloat_type_node,
9363 NULL_TREE);
9364 tree void_ftype_v2sf_long_pcfloat =
9365 build_function_type_list (void_type_node,
9366 V2SF_type_node,
9367 long_integer_type_node,
9368 pcfloat_type_node,
9369 NULL_TREE);
9370
9371
9372 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9373 PAIRED_BUILTIN_LX);
9374
9375
9376 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9377 PAIRED_BUILTIN_STX);
9378
9379 /* Predicates. */
23a651fc 9380 d = bdesc_paired_preds;
96038623
DE
9381 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9382 {
9383 tree type;
9384
9385 switch (insn_data[d->icode].operand[1].mode)
9386 {
9387 case V2SFmode:
9388 type = int_ftype_int_v2sf_v2sf;
9389 break;
9390 default:
9391 gcc_unreachable ();
9392 }
9393
9394 def_builtin (d->mask, d->name, type, d->code);
9395 }
9396}
9397
a3170dc6 9398static void
863d938c 9399altivec_init_builtins (void)
a3170dc6 9400{
586de218
KG
9401 const struct builtin_description *d;
9402 const struct builtin_description_predicates *dp;
a3170dc6 9403 size_t i;
7a4eca66
DE
9404 tree ftype;
9405
a3170dc6
AH
9406 tree pfloat_type_node = build_pointer_type (float_type_node);
9407 tree pint_type_node = build_pointer_type (integer_type_node);
9408 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9409 tree pchar_type_node = build_pointer_type (char_type_node);
9410
9411 tree pvoid_type_node = build_pointer_type (void_type_node);
9412
0dbc3651
ZW
9413 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9414 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9415 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9416 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9417
9418 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9419
58646b77
PB
9420 tree int_ftype_opaque
9421 = build_function_type_list (integer_type_node,
9422 opaque_V4SI_type_node, NULL_TREE);
9423
9424 tree opaque_ftype_opaque_int
9425 = build_function_type_list (opaque_V4SI_type_node,
9426 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9427 tree opaque_ftype_opaque_opaque_int
9428 = build_function_type_list (opaque_V4SI_type_node,
9429 opaque_V4SI_type_node, opaque_V4SI_type_node,
9430 integer_type_node, NULL_TREE);
9431 tree int_ftype_int_opaque_opaque
9432 = build_function_type_list (integer_type_node,
9433 integer_type_node, opaque_V4SI_type_node,
9434 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9435 tree int_ftype_int_v4si_v4si
9436 = build_function_type_list (integer_type_node,
9437 integer_type_node, V4SI_type_node,
9438 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9439 tree v4sf_ftype_pcfloat
9440 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9441 tree void_ftype_pfloat_v4sf
b4de2f7d 9442 = build_function_type_list (void_type_node,
a3170dc6 9443 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9444 tree v4si_ftype_pcint
9445 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9446 tree void_ftype_pint_v4si
b4de2f7d
AH
9447 = build_function_type_list (void_type_node,
9448 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9449 tree v8hi_ftype_pcshort
9450 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9451 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9452 = build_function_type_list (void_type_node,
9453 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9454 tree v16qi_ftype_pcchar
9455 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9456 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9457 = build_function_type_list (void_type_node,
9458 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9459 tree void_ftype_v4si
b4de2f7d 9460 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9461 tree v8hi_ftype_void
9462 = build_function_type (V8HI_type_node, void_list_node);
9463 tree void_ftype_void
9464 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9465 tree void_ftype_int
9466 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9467
58646b77
PB
9468 tree opaque_ftype_long_pcvoid
9469 = build_function_type_list (opaque_V4SI_type_node,
9470 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9471 tree v16qi_ftype_long_pcvoid
a3170dc6 9472 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9473 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9474 tree v8hi_ftype_long_pcvoid
a3170dc6 9475 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9476 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9477 tree v4si_ftype_long_pcvoid
a3170dc6 9478 = build_function_type_list (V4SI_type_node,
b4a62fa0 9479 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9480
58646b77
PB
9481 tree void_ftype_opaque_long_pvoid
9482 = build_function_type_list (void_type_node,
9483 opaque_V4SI_type_node, long_integer_type_node,
9484 pvoid_type_node, NULL_TREE);
b4a62fa0 9485 tree void_ftype_v4si_long_pvoid
b4de2f7d 9486 = build_function_type_list (void_type_node,
b4a62fa0 9487 V4SI_type_node, long_integer_type_node,
b4de2f7d 9488 pvoid_type_node, NULL_TREE);
b4a62fa0 9489 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9490 = build_function_type_list (void_type_node,
b4a62fa0 9491 V16QI_type_node, long_integer_type_node,
b4de2f7d 9492 pvoid_type_node, NULL_TREE);
b4a62fa0 9493 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9494 = build_function_type_list (void_type_node,
b4a62fa0 9495 V8HI_type_node, long_integer_type_node,
b4de2f7d 9496 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9497 tree int_ftype_int_v8hi_v8hi
9498 = build_function_type_list (integer_type_node,
9499 integer_type_node, V8HI_type_node,
9500 V8HI_type_node, NULL_TREE);
9501 tree int_ftype_int_v16qi_v16qi
9502 = build_function_type_list (integer_type_node,
9503 integer_type_node, V16QI_type_node,
9504 V16QI_type_node, NULL_TREE);
9505 tree int_ftype_int_v4sf_v4sf
9506 = build_function_type_list (integer_type_node,
9507 integer_type_node, V4SF_type_node,
9508 V4SF_type_node, NULL_TREE);
9509 tree v4si_ftype_v4si
9510 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9511 tree v8hi_ftype_v8hi
9512 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9513 tree v16qi_ftype_v16qi
9514 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9515 tree v4sf_ftype_v4sf
9516 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9517 tree void_ftype_pcvoid_int_int
a3170dc6 9518 = build_function_type_list (void_type_node,
0dbc3651 9519 pcvoid_type_node, integer_type_node,
8bb418a3 9520 integer_type_node, NULL_TREE);
8bb418a3 9521
0dbc3651
ZW
9522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9523 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9525 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9526 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9527 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9528 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9529 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9530 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9531 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9532 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9533 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9534 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9535 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9536 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9537 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9538 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9539 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9540 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9541 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9542 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9543 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9544 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9545 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9546 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9547 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9548 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9549 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9550 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9551 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9552 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9553 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9554 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9555 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9556 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9557 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9558 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9559 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9560 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9561 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9562 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9563 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9564 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9565 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9566 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9567 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9568
9569 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9570
9571 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9572 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9573 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9574 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9575 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9576 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9577 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9578 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9579 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9580 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9581
a3170dc6 9582 /* Add the DST variants. */
586de218 9583 d = bdesc_dst;
a3170dc6 9584 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9585 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9586
9587 /* Initialize the predicates. */
586de218 9588 dp = bdesc_altivec_preds;
a3170dc6
AH
9589 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9590 {
9591 enum machine_mode mode1;
9592 tree type;
58646b77
PB
9593 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9594 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9595
58646b77
PB
9596 if (is_overloaded)
9597 mode1 = VOIDmode;
9598 else
9599 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9600
9601 switch (mode1)
9602 {
58646b77
PB
9603 case VOIDmode:
9604 type = int_ftype_int_opaque_opaque;
9605 break;
a3170dc6
AH
9606 case V4SImode:
9607 type = int_ftype_int_v4si_v4si;
9608 break;
9609 case V8HImode:
9610 type = int_ftype_int_v8hi_v8hi;
9611 break;
9612 case V16QImode:
9613 type = int_ftype_int_v16qi_v16qi;
9614 break;
9615 case V4SFmode:
9616 type = int_ftype_int_v4sf_v4sf;
9617 break;
9618 default:
37409796 9619 gcc_unreachable ();
a3170dc6 9620 }
f676971a 9621
a3170dc6
AH
9622 def_builtin (dp->mask, dp->name, type, dp->code);
9623 }
9624
9625 /* Initialize the abs* operators. */
586de218 9626 d = bdesc_abs;
a3170dc6
AH
9627 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9628 {
9629 enum machine_mode mode0;
9630 tree type;
9631
9632 mode0 = insn_data[d->icode].operand[0].mode;
9633
9634 switch (mode0)
9635 {
9636 case V4SImode:
9637 type = v4si_ftype_v4si;
9638 break;
9639 case V8HImode:
9640 type = v8hi_ftype_v8hi;
9641 break;
9642 case V16QImode:
9643 type = v16qi_ftype_v16qi;
9644 break;
9645 case V4SFmode:
9646 type = v4sf_ftype_v4sf;
9647 break;
9648 default:
37409796 9649 gcc_unreachable ();
a3170dc6 9650 }
f676971a 9651
a3170dc6
AH
9652 def_builtin (d->mask, d->name, type, d->code);
9653 }
7ccf35ed 9654
13c62176
DN
9655 if (TARGET_ALTIVEC)
9656 {
9657 tree decl;
9658
9659 /* Initialize target builtin that implements
9660 targetm.vectorize.builtin_mask_for_load. */
9661
c79efc4d
RÁE
9662 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9663 v16qi_ftype_long_pcvoid,
9664 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9665 BUILT_IN_MD, NULL, NULL_TREE);
9666 TREE_READONLY (decl) = 1;
13c62176
DN
9667 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9668 altivec_builtin_mask_for_load = decl;
13c62176 9669 }
7a4eca66
DE
9670
9671 /* Access to the vec_init patterns. */
9672 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9673 integer_type_node, integer_type_node,
9674 integer_type_node, NULL_TREE);
9675 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9676 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9677
9678 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9679 short_integer_type_node,
9680 short_integer_type_node,
9681 short_integer_type_node,
9682 short_integer_type_node,
9683 short_integer_type_node,
9684 short_integer_type_node,
9685 short_integer_type_node, NULL_TREE);
9686 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9687 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9688
9689 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9690 char_type_node, char_type_node,
9691 char_type_node, char_type_node,
9692 char_type_node, char_type_node,
9693 char_type_node, char_type_node,
9694 char_type_node, char_type_node,
9695 char_type_node, char_type_node,
9696 char_type_node, char_type_node,
9697 char_type_node, NULL_TREE);
9698 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9699 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9700
9701 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9702 float_type_node, float_type_node,
9703 float_type_node, NULL_TREE);
9704 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9705 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9706
9707 /* Access to the vec_set patterns. */
9708 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9709 intSI_type_node,
9710 integer_type_node, NULL_TREE);
9711 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9712 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9713
9714 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9715 intHI_type_node,
9716 integer_type_node, NULL_TREE);
9717 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9718 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9719
9720 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9721 intQI_type_node,
9722 integer_type_node, NULL_TREE);
9723 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9724 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9725
9726 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9727 float_type_node,
9728 integer_type_node, NULL_TREE);
9729 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9730 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9731
9732 /* Access to the vec_extract patterns. */
9733 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9734 integer_type_node, NULL_TREE);
9735 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9736 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9737
9738 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9739 integer_type_node, NULL_TREE);
9740 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9741 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9742
9743 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9744 integer_type_node, NULL_TREE);
9745 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9746 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9747
9748 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9749 integer_type_node, NULL_TREE);
9750 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9751 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
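/* A usage sketch (hedged; element-index semantics assumed): the
   vec_init/vec_set/vec_ext builtins registered above can be used as

     __vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);
     v = __builtin_vec_set_v4si (v, 42, 0);        -- set element 0
     int e0 = __builtin_vec_ext_v4si (v, 0);       -- extract element 0

   matching the function types built here (vector, scalar value, element
   number for set; vector, element number for ext).  */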
a3170dc6
AH
9752}
9753
9754static void
863d938c 9755rs6000_common_init_builtins (void)
a3170dc6 9756{
586de218 9757 const struct builtin_description *d;
a3170dc6
AH
9758 size_t i;
9759
96038623
DE
9760 tree v2sf_ftype_v2sf_v2sf_v2sf
9761 = build_function_type_list (V2SF_type_node,
9762 V2SF_type_node, V2SF_type_node,
9763 V2SF_type_node, NULL_TREE);
9764
a3170dc6
AH
9765 tree v4sf_ftype_v4sf_v4sf_v16qi
9766 = build_function_type_list (V4SF_type_node,
9767 V4SF_type_node, V4SF_type_node,
9768 V16QI_type_node, NULL_TREE);
9769 tree v4si_ftype_v4si_v4si_v16qi
9770 = build_function_type_list (V4SI_type_node,
9771 V4SI_type_node, V4SI_type_node,
9772 V16QI_type_node, NULL_TREE);
9773 tree v8hi_ftype_v8hi_v8hi_v16qi
9774 = build_function_type_list (V8HI_type_node,
9775 V8HI_type_node, V8HI_type_node,
9776 V16QI_type_node, NULL_TREE);
9777 tree v16qi_ftype_v16qi_v16qi_v16qi
9778 = build_function_type_list (V16QI_type_node,
9779 V16QI_type_node, V16QI_type_node,
9780 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9781 tree v4si_ftype_int
9782 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9783 tree v8hi_ftype_int
9784 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9785 tree v16qi_ftype_int
9786 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9787 tree v8hi_ftype_v16qi
9788 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9789 tree v4sf_ftype_v4sf
9790 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9791
9792 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9793 = build_function_type_list (opaque_V2SI_type_node,
9794 opaque_V2SI_type_node,
9795 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9796
96038623 9797 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9798 = build_function_type_list (opaque_V2SF_type_node,
9799 opaque_V2SF_type_node,
9800 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9801
96038623
DE
9802 tree v2sf_ftype_v2sf_v2sf
9803 = build_function_type_list (V2SF_type_node,
9804 V2SF_type_node,
9805 V2SF_type_node, NULL_TREE);
9806
9807
a3170dc6 9808 tree v2si_ftype_int_int
2abe3e28 9809 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9810 integer_type_node, integer_type_node,
9811 NULL_TREE);
9812
58646b77
PB
9813 tree opaque_ftype_opaque
9814 = build_function_type_list (opaque_V4SI_type_node,
9815 opaque_V4SI_type_node, NULL_TREE);
9816
a3170dc6 9817 tree v2si_ftype_v2si
2abe3e28
AH
9818 = build_function_type_list (opaque_V2SI_type_node,
9819 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9820
96038623 9821 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9822 = build_function_type_list (opaque_V2SF_type_node,
9823 opaque_V2SF_type_node, NULL_TREE);
f676971a 9824
96038623
DE
9825 tree v2sf_ftype_v2sf
9826 = build_function_type_list (V2SF_type_node,
9827 V2SF_type_node, NULL_TREE);
9828
a3170dc6 9829 tree v2sf_ftype_v2si
2abe3e28
AH
9830 = build_function_type_list (opaque_V2SF_type_node,
9831 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9832
9833 tree v2si_ftype_v2sf
2abe3e28
AH
9834 = build_function_type_list (opaque_V2SI_type_node,
9835 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9836
9837 tree v2si_ftype_v2si_char
2abe3e28
AH
9838 = build_function_type_list (opaque_V2SI_type_node,
9839 opaque_V2SI_type_node,
9840 char_type_node, NULL_TREE);
a3170dc6
AH
9841
9842 tree v2si_ftype_int_char
2abe3e28 9843 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9844 integer_type_node, char_type_node, NULL_TREE);
9845
9846 tree v2si_ftype_char
2abe3e28
AH
9847 = build_function_type_list (opaque_V2SI_type_node,
9848 char_type_node, NULL_TREE);
a3170dc6
AH
9849
9850 tree int_ftype_int_int
9851 = build_function_type_list (integer_type_node,
9852 integer_type_node, integer_type_node,
9853 NULL_TREE);
95385cbb 9854
58646b77
PB
9855 tree opaque_ftype_opaque_opaque
9856 = build_function_type_list (opaque_V4SI_type_node,
9857 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9858 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9859 = build_function_type_list (V4SI_type_node,
9860 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9861 tree v4sf_ftype_v4si_int
b4de2f7d 9862 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9863 V4SI_type_node, integer_type_node, NULL_TREE);
9864 tree v4si_ftype_v4sf_int
b4de2f7d 9865 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9866 V4SF_type_node, integer_type_node, NULL_TREE);
9867 tree v4si_ftype_v4si_int
b4de2f7d 9868 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9869 V4SI_type_node, integer_type_node, NULL_TREE);
9870 tree v8hi_ftype_v8hi_int
b4de2f7d 9871 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9872 V8HI_type_node, integer_type_node, NULL_TREE);
9873 tree v16qi_ftype_v16qi_int
b4de2f7d 9874 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9875 V16QI_type_node, integer_type_node, NULL_TREE);
9876 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9877 = build_function_type_list (V16QI_type_node,
9878 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9879 integer_type_node, NULL_TREE);
9880 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9881 = build_function_type_list (V8HI_type_node,
9882 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9883 integer_type_node, NULL_TREE);
9884 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9885 = build_function_type_list (V4SI_type_node,
9886 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9887 integer_type_node, NULL_TREE);
9888 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9889 = build_function_type_list (V4SF_type_node,
9890 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9891 integer_type_node, NULL_TREE);
0ac081f6 9892 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9893 = build_function_type_list (V4SF_type_node,
9894 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9895 tree opaque_ftype_opaque_opaque_opaque
9896 = build_function_type_list (opaque_V4SI_type_node,
9897 opaque_V4SI_type_node, opaque_V4SI_type_node,
9898 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9899 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9900 = build_function_type_list (V4SF_type_node,
9901 V4SF_type_node, V4SF_type_node,
9902 V4SI_type_node, NULL_TREE);
2212663f 9903 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9904 = build_function_type_list (V4SF_type_node,
9905 V4SF_type_node, V4SF_type_node,
9906 V4SF_type_node, NULL_TREE);
f676971a 9907 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9908 = build_function_type_list (V4SI_type_node,
9909 V4SI_type_node, V4SI_type_node,
9910 V4SI_type_node, NULL_TREE);
0ac081f6 9911 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9912 = build_function_type_list (V8HI_type_node,
9913 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9914 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9915 = build_function_type_list (V8HI_type_node,
9916 V8HI_type_node, V8HI_type_node,
9917 V8HI_type_node, NULL_TREE);
c4ad648e 9918 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9919 = build_function_type_list (V4SI_type_node,
9920 V8HI_type_node, V8HI_type_node,
9921 V4SI_type_node, NULL_TREE);
c4ad648e 9922 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9923 = build_function_type_list (V4SI_type_node,
9924 V16QI_type_node, V16QI_type_node,
9925 V4SI_type_node, NULL_TREE);
0ac081f6 9926 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9927 = build_function_type_list (V16QI_type_node,
9928 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9929 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9930 = build_function_type_list (V4SI_type_node,
9931 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9932 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9933 = build_function_type_list (V8HI_type_node,
9934 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9935 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9936 = build_function_type_list (V4SI_type_node,
9937 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9938 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9939 = build_function_type_list (V8HI_type_node,
9940 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9941 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9942 = build_function_type_list (V16QI_type_node,
9943 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9944 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9945 = build_function_type_list (V4SI_type_node,
9946 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9947 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9948 = build_function_type_list (V4SI_type_node,
9949 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9950 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9951 = build_function_type_list (V4SI_type_node,
9952 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9953 tree v4si_ftype_v8hi
9954 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9955 tree int_ftype_v4si_v4si
9956 = build_function_type_list (integer_type_node,
9957 V4SI_type_node, V4SI_type_node, NULL_TREE);
9958 tree int_ftype_v4sf_v4sf
9959 = build_function_type_list (integer_type_node,
9960 V4SF_type_node, V4SF_type_node, NULL_TREE);
9961 tree int_ftype_v16qi_v16qi
9962 = build_function_type_list (integer_type_node,
9963 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9964 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9965 = build_function_type_list (integer_type_node,
9966 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9967
6f317ef3 9968 /* Add the simple ternary operators. */
586de218 9969 d = bdesc_3arg;
ca7558fc 9970 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9971 {
2212663f
DB
9972 enum machine_mode mode0, mode1, mode2, mode3;
9973 tree type;
58646b77
PB
9974 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9975 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9976
58646b77
PB
9977 if (is_overloaded)
9978 {
9979 mode0 = VOIDmode;
9980 mode1 = VOIDmode;
9981 mode2 = VOIDmode;
9982 mode3 = VOIDmode;
9983 }
9984 else
9985 {
9986 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9987 continue;
f676971a 9988
58646b77
PB
9989 mode0 = insn_data[d->icode].operand[0].mode;
9990 mode1 = insn_data[d->icode].operand[1].mode;
9991 mode2 = insn_data[d->icode].operand[2].mode;
9992 mode3 = insn_data[d->icode].operand[3].mode;
9993 }
bb8df8a6 9994
2212663f
DB
9995 /* When all four are of the same mode. */
9996 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9997 {
9998 switch (mode0)
9999 {
58646b77
PB
10000 case VOIDmode:
10001 type = opaque_ftype_opaque_opaque_opaque;
10002 break;
617e0e1d
DB
10003 case V4SImode:
10004 type = v4si_ftype_v4si_v4si_v4si;
10005 break;
2212663f
DB
10006 case V4SFmode:
10007 type = v4sf_ftype_v4sf_v4sf_v4sf;
10008 break;
10009 case V8HImode:
10010 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10011 break;
2212663f
DB
10012 case V16QImode:
10013 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10014 break;
96038623
DE
10015 case V2SFmode:
10016 type = v2sf_ftype_v2sf_v2sf_v2sf;
10017 break;
2212663f 10018 default:
37409796 10019 gcc_unreachable ();
2212663f
DB
10020 }
10021 }
10022 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10023 {
2212663f
DB
10024 switch (mode0)
10025 {
10026 case V4SImode:
10027 type = v4si_ftype_v4si_v4si_v16qi;
10028 break;
10029 case V4SFmode:
10030 type = v4sf_ftype_v4sf_v4sf_v16qi;
10031 break;
10032 case V8HImode:
10033 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10034 break;
2212663f
DB
10035 case V16QImode:
10036 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10037 break;
2212663f 10038 default:
37409796 10039 gcc_unreachable ();
2212663f
DB
10040 }
10041 }
f676971a 10042 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10043 && mode3 == V4SImode)
24408032 10044 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10045 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10046 && mode3 == V4SImode)
24408032 10047 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10048 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10049 && mode3 == V4SImode)
24408032
AH
10050 type = v4sf_ftype_v4sf_v4sf_v4si;
10051
a7b376ee 10052 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10053 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10054 && mode3 == QImode)
b9e4e5d1 10055 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10056
a7b376ee 10057 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10058 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10059 && mode3 == QImode)
b9e4e5d1 10060 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10061
a7b376ee 10062 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10063 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10064 && mode3 == QImode)
b9e4e5d1 10065 type = v4si_ftype_v4si_v4si_int;
24408032 10066
a7b376ee 10067 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10068 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10069 && mode3 == QImode)
b9e4e5d1 10070 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10071
2212663f 10072 else
37409796 10073 gcc_unreachable ();
2212663f
DB
10074
10075 def_builtin (d->mask, d->name, type, d->code);
10076 }
10077
0ac081f6 10078 /* Add the simple binary operators. */
00b960c7 10079 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10080 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10081 {
10082 enum machine_mode mode0, mode1, mode2;
10083 tree type;
58646b77
PB
10084 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10085 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10086
58646b77
PB
10087 if (is_overloaded)
10088 {
10089 mode0 = VOIDmode;
10090 mode1 = VOIDmode;
10091 mode2 = VOIDmode;
10092 }
10093 else
bb8df8a6 10094 {
58646b77
PB
10095 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10096 continue;
f676971a 10097
58646b77
PB
10098 mode0 = insn_data[d->icode].operand[0].mode;
10099 mode1 = insn_data[d->icode].operand[1].mode;
10100 mode2 = insn_data[d->icode].operand[2].mode;
10101 }
0ac081f6
AH
10102
10103 /* When all three operands are of the same mode. */
10104 if (mode0 == mode1 && mode1 == mode2)
10105 {
10106 switch (mode0)
10107 {
58646b77
PB
10108 case VOIDmode:
10109 type = opaque_ftype_opaque_opaque;
10110 break;
0ac081f6
AH
10111 case V4SFmode:
10112 type = v4sf_ftype_v4sf_v4sf;
10113 break;
10114 case V4SImode:
10115 type = v4si_ftype_v4si_v4si;
10116 break;
10117 case V16QImode:
10118 type = v16qi_ftype_v16qi_v16qi;
10119 break;
10120 case V8HImode:
10121 type = v8hi_ftype_v8hi_v8hi;
10122 break;
a3170dc6
AH
10123 case V2SImode:
10124 type = v2si_ftype_v2si_v2si;
10125 break;
96038623
DE
10126 case V2SFmode:
10127 if (TARGET_PAIRED_FLOAT)
10128 type = v2sf_ftype_v2sf_v2sf;
10129 else
10130 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10131 break;
10132 case SImode:
10133 type = int_ftype_int_int;
10134 break;
0ac081f6 10135 default:
37409796 10136 gcc_unreachable ();
0ac081f6
AH
10137 }
10138 }
10139
10140 /* A few other combos we really don't want to do manually. */
10141
10142 /* vint, vfloat, vfloat. */
10143 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10144 type = v4si_ftype_v4sf_v4sf;
10145
10146 /* vshort, vchar, vchar. */
10147 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10148 type = v8hi_ftype_v16qi_v16qi;
10149
10150 /* vint, vshort, vshort. */
10151 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10152 type = v4si_ftype_v8hi_v8hi;
10153
10154 /* vshort, vint, vint. */
10155 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10156 type = v8hi_ftype_v4si_v4si;
10157
10158 /* vchar, vshort, vshort. */
10159 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10160 type = v16qi_ftype_v8hi_v8hi;
10161
10162 /* vint, vchar, vint. */
10163 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10164 type = v4si_ftype_v16qi_v4si;
10165
fa066a23
AH
10166 /* vint, vchar, vchar. */
10167 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10168 type = v4si_ftype_v16qi_v16qi;
10169
0ac081f6
AH
10170 /* vint, vshort, vint. */
10171 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10172 type = v4si_ftype_v8hi_v4si;
f676971a 10173
a7b376ee 10174 /* vint, vint, 5-bit literal. */
2212663f 10175 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10176 type = v4si_ftype_v4si_int;
f676971a 10177
a7b376ee 10178 /* vshort, vshort, 5-bit literal. */
2212663f 10179 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10180 type = v8hi_ftype_v8hi_int;
f676971a 10181
a7b376ee 10182 /* vchar, vchar, 5-bit literal. */
2212663f 10183 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10184 type = v16qi_ftype_v16qi_int;
0ac081f6 10185
a7b376ee 10186 /* vfloat, vint, 5-bit literal. */
617e0e1d 10187 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10188 type = v4sf_ftype_v4si_int;
f676971a 10189
a7b376ee 10190 /* vint, vfloat, 5-bit literal. */
617e0e1d 10191 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10192 type = v4si_ftype_v4sf_int;
617e0e1d 10193
a3170dc6
AH
10194 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10195 type = v2si_ftype_int_int;
10196
10197 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10198 type = v2si_ftype_v2si_char;
10199
10200 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10201 type = v2si_ftype_int_char;
10202
37409796 10203 else
0ac081f6 10204 {
37409796
NS
10205 /* int, x, x. */
10206 gcc_assert (mode0 == SImode);
0ac081f6
AH
10207 switch (mode1)
10208 {
10209 case V4SImode:
10210 type = int_ftype_v4si_v4si;
10211 break;
10212 case V4SFmode:
10213 type = int_ftype_v4sf_v4sf;
10214 break;
10215 case V16QImode:
10216 type = int_ftype_v16qi_v16qi;
10217 break;
10218 case V8HImode:
10219 type = int_ftype_v8hi_v8hi;
10220 break;
10221 default:
37409796 10222 gcc_unreachable ();
0ac081f6
AH
10223 }
10224 }
10225
2212663f
DB
10226 def_builtin (d->mask, d->name, type, d->code);
10227 }
24408032 10228
2212663f
DB
10229 /* Add the simple unary operators. */
10230 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10231 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10232 {
10233 enum machine_mode mode0, mode1;
10234 tree type;
58646b77
PB
10235 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10236 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10237
10238 if (is_overloaded)
10239 {
10240 mode0 = VOIDmode;
10241 mode1 = VOIDmode;
10242 }
10243 else
10244 {
10245 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10246 continue;
bb8df8a6 10247
58646b77
PB
10248 mode0 = insn_data[d->icode].operand[0].mode;
10249 mode1 = insn_data[d->icode].operand[1].mode;
10250 }
2212663f
DB
10251
10252 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10253 type = v4si_ftype_int;
2212663f 10254 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10255 type = v8hi_ftype_int;
2212663f 10256 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10257 type = v16qi_ftype_int;
58646b77
PB
10258 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10259 type = opaque_ftype_opaque;
617e0e1d
DB
10260 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10261 type = v4sf_ftype_v4sf;
20e26713
AH
10262 else if (mode0 == V8HImode && mode1 == V16QImode)
10263 type = v8hi_ftype_v16qi;
10264 else if (mode0 == V4SImode && mode1 == V8HImode)
10265 type = v4si_ftype_v8hi;
a3170dc6
AH
10266 else if (mode0 == V2SImode && mode1 == V2SImode)
10267 type = v2si_ftype_v2si;
10268 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10269 {
10270 if (TARGET_PAIRED_FLOAT)
10271 type = v2sf_ftype_v2sf;
10272 else
10273 type = v2sf_ftype_v2sf_spe;
10274 }
a3170dc6
AH
10275 else if (mode0 == V2SFmode && mode1 == V2SImode)
10276 type = v2sf_ftype_v2si;
10277 else if (mode0 == V2SImode && mode1 == V2SFmode)
10278 type = v2si_ftype_v2sf;
10279 else if (mode0 == V2SImode && mode1 == QImode)
10280 type = v2si_ftype_char;
2212663f 10281 else
37409796 10282 gcc_unreachable ();
2212663f 10283
0ac081f6
AH
10284 def_builtin (d->mask, d->name, type, d->code);
10285 }
10286}
10287
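/* Worked example (illustrative only): an AltiVec fused multiply-add
   builtin whose insn pattern has four V4SFmode operands falls into the
   "all four are of the same mode" case above and is registered with
   v4sf_ftype_v4sf_v4sf_v4sf; a permute-style builtin whose last
   operand is a V16QImode selector instead matches the mode3 ==
   V16QImode branch and gets v4sf_ftype_v4sf_v4sf_v16qi.  */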
c15c90bb
ZW
10288static void
10289rs6000_init_libfuncs (void)
10290{
602ea4d3
JJ
10291 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10292 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10293 {
602ea4d3
JJ
10294 /* AIX library routines for float->int conversion. */
10295 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10296 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10297 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10298 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10299 }
c15c90bb 10300
602ea4d3 10301 if (!TARGET_IEEEQUAD)
98c41d98 10302 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10303 if (!TARGET_XL_COMPAT)
10304 {
10305 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10306 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10307 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10308 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10309
17caeff2 10310 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10311 {
10312 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10313 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10314 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10315 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10316 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10317 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10318 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10319
10320 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10321 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10322 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10323 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10324 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10325 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10326 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10327 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10328 }
b26941b4
JM
10329
10330 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10331 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10332 }
10333 else
10334 {
10335 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10336 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10337 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10338 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10339 }
c9034561 10340 else
c15c90bb 10341 {
c9034561 10342 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10343
10344 set_optab_libfunc (add_optab, TFmode, "_q_add");
10345 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10346 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10347 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10348 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10349 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10350 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10351
c9034561
ZW
10352 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10353 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10354 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10355 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10356 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10357 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10358
85363ca0
ZW
10359 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10360 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10361 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10362 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10363 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10364 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10365 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10366 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10367 }
10368}
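/* Worked example (illustrative only): under the !TARGET_IEEEQUAD,
   !TARGET_XL_COMPAT defaults above, a TFmode (IBM extended double)
   addition is emitted as a call to __gcc_qadd; with TARGET_XL_COMPAT
   the XL-compatible _xlqadd is called instead, while the
   TARGET_IEEEQUAD branch registers the 32-bit SVR4 _q_add routine.  */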
fba73eb1
DE
10369
10370\f
10371/* Expand a block clear operation, and return 1 if successful. Return 0
10372 if we should let the compiler generate normal code.
10373
10374 operands[0] is the destination
10375 operands[1] is the length
57e84f18 10376 operands[3] is the alignment */
fba73eb1
DE
10377
10378int
10379expand_block_clear (rtx operands[])
10380{
10381 rtx orig_dest = operands[0];
10382 rtx bytes_rtx = operands[1];
57e84f18 10383 rtx align_rtx = operands[3];
5514620a
GK
10384 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10385 HOST_WIDE_INT align;
10386 HOST_WIDE_INT bytes;
fba73eb1
DE
10387 int offset;
10388 int clear_bytes;
5514620a 10389 int clear_step;
fba73eb1
DE
10390
10391 /* If this is not a fixed size clear, just call memset */
10392 if (! constp)
10393 return 0;
10394
37409796
NS
10395 /* This must be a fixed size alignment */
10396 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10397 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10398
10399 /* Anything to clear? */
10400 bytes = INTVAL (bytes_rtx);
10401 if (bytes <= 0)
10402 return 1;
10403
5514620a
GK
10404 /* Use the builtin memset after a point, to avoid huge code bloat.
10405 When optimize_size, avoid any significant code bloat; calling
10406 memset is about 4 instructions, so allow for one instruction to
10407 load zero and three to do clearing. */
10408 if (TARGET_ALTIVEC && align >= 128)
10409 clear_step = 16;
10410 else if (TARGET_POWERPC64 && align >= 32)
10411 clear_step = 8;
21d818ff
NF
10412 else if (TARGET_SPE && align >= 64)
10413 clear_step = 8;
5514620a
GK
10414 else
10415 clear_step = 4;
fba73eb1 10416
5514620a
GK
10417 if (optimize_size && bytes > 3 * clear_step)
10418 return 0;
10419 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10420 return 0;
10421
10422 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10423 {
fba73eb1
DE
10424 enum machine_mode mode = BLKmode;
10425 rtx dest;
f676971a 10426
5514620a
GK
10427 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10428 {
10429 clear_bytes = 16;
10430 mode = V4SImode;
10431 }
21d818ff
NF
10432 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10433 {
10434 clear_bytes = 8;
10435 mode = V2SImode;
10436 }
5514620a 10437 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10438 /* 64-bit loads and stores require word-aligned
10439 displacements. */
10440 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10441 {
10442 clear_bytes = 8;
10443 mode = DImode;
fba73eb1 10444 }
5514620a 10445 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10446 { /* move 4 bytes */
10447 clear_bytes = 4;
10448 mode = SImode;
fba73eb1 10449 }
ec53fc93 10450 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10451 { /* move 2 bytes */
10452 clear_bytes = 2;
10453 mode = HImode;
fba73eb1
DE
10454 }
10455 else /* move 1 byte at a time */
10456 {
10457 clear_bytes = 1;
10458 mode = QImode;
fba73eb1 10459 }
f676971a 10460
fba73eb1 10461 dest = adjust_address (orig_dest, mode, offset);
f676971a 10462
5514620a 10463 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10464 }
10465
10466 return 1;
10467}
10468
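/* Minimal standalone sketch (not part of rs6000.c proper; the helper
   name and parameters are made up for illustration): a re-statement of
   the chunk-size and inline-expansion thresholds expand_block_clear
   applies above, assuming the common non-SPE case.  Handy for checking
   by hand whether a given (bytes, align) pair is cleared inline or
   left to the memset fallback.  */

static int
would_clear_inline (long bytes, int align_bits, int altivec_p,
                    int powerpc64_p, int optimize_size_p)
{
  int clear_step;

  if (altivec_p && align_bits >= 128)
    clear_step = 16;            /* one V4SImode store per iteration */
  else if (powerpc64_p && align_bits >= 32)
    clear_step = 8;             /* DImode stores */
  else
    clear_step = 4;             /* SImode stores */

  /* Mirrors the 3 * clear_step / 8 * clear_step cut-offs above.  */
  if (optimize_size_p)
    return bytes <= 3 * clear_step;
  return bytes <= 8 * clear_step;
}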
35aff10b 10469\f
7e69e155
MM
10470/* Expand a block move operation, and return 1 if successful. Return 0
10471 if we should let the compiler generate normal code.
10472
10473 operands[0] is the destination
10474 operands[1] is the source
10475 operands[2] is the length
10476 operands[3] is the alignment */
10477
3933e0e1
MM
10478#define MAX_MOVE_REG 4
10479
7e69e155 10480int
a2369ed3 10481expand_block_move (rtx operands[])
7e69e155 10482{
b6c9286a
MM
10483 rtx orig_dest = operands[0];
10484 rtx orig_src = operands[1];
7e69e155 10485 rtx bytes_rtx = operands[2];
7e69e155 10486 rtx align_rtx = operands[3];
3933e0e1 10487 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10488 int align;
3933e0e1
MM
10489 int bytes;
10490 int offset;
7e69e155 10491 int move_bytes;
cabfd258
GK
10492 rtx stores[MAX_MOVE_REG];
10493 int num_reg = 0;
7e69e155 10494
3933e0e1 10495 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10496 if (! constp)
3933e0e1
MM
10497 return 0;
10498
37409796
NS
10499 /* This must be a fixed size alignment */
10500 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10501 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10502
7e69e155 10503 /* Anything to move? */
3933e0e1
MM
10504 bytes = INTVAL (bytes_rtx);
10505 if (bytes <= 0)
7e69e155
MM
10506 return 1;
10507
ea9982a8 10508 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10509 reg_parm_stack_space. */
ea9982a8 10510 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10511 return 0;
10512
cabfd258 10513 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10514 {
cabfd258 10515 union {
70128ad9 10516 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10517 rtx (*mov) (rtx, rtx);
cabfd258
GK
10518 } gen_func;
10519 enum machine_mode mode = BLKmode;
10520 rtx src, dest;
f676971a 10521
5514620a
GK
10522 /* Altivec first, since it will be faster than a string move
10523 when it applies, and usually not significantly larger. */
10524 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10525 {
10526 move_bytes = 16;
10527 mode = V4SImode;
10528 gen_func.mov = gen_movv4si;
10529 }
21d818ff
NF
10530 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10531 {
10532 move_bytes = 8;
10533 mode = V2SImode;
10534 gen_func.mov = gen_movv2si;
10535 }
5514620a 10536 else if (TARGET_STRING
cabfd258
GK
10537 && bytes > 24 /* move up to 32 bytes at a time */
10538 && ! fixed_regs[5]
10539 && ! fixed_regs[6]
10540 && ! fixed_regs[7]
10541 && ! fixed_regs[8]
10542 && ! fixed_regs[9]
10543 && ! fixed_regs[10]
10544 && ! fixed_regs[11]
10545 && ! fixed_regs[12])
7e69e155 10546 {
cabfd258 10547 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10548 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10549 }
10550 else if (TARGET_STRING
10551 && bytes > 16 /* move up to 24 bytes at a time */
10552 && ! fixed_regs[5]
10553 && ! fixed_regs[6]
10554 && ! fixed_regs[7]
10555 && ! fixed_regs[8]
10556 && ! fixed_regs[9]
10557 && ! fixed_regs[10])
10558 {
10559 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10560 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10561 }
10562 else if (TARGET_STRING
10563 && bytes > 8 /* move up to 16 bytes at a time */
10564 && ! fixed_regs[5]
10565 && ! fixed_regs[6]
10566 && ! fixed_regs[7]
10567 && ! fixed_regs[8])
10568 {
10569 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10570 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10571 }
10572 else if (bytes >= 8 && TARGET_POWERPC64
10573 /* 64-bit loads and stores require word-aligned
10574 displacements. */
fba73eb1 10575 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10576 {
10577 move_bytes = 8;
10578 mode = DImode;
10579 gen_func.mov = gen_movdi;
10580 }
10581 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10582 { /* move up to 8 bytes at a time */
10583 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10584 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10585 }
cd7d9ca4 10586 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10587 { /* move 4 bytes */
10588 move_bytes = 4;
10589 mode = SImode;
10590 gen_func.mov = gen_movsi;
10591 }
ec53fc93 10592 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10593 { /* move 2 bytes */
10594 move_bytes = 2;
10595 mode = HImode;
10596 gen_func.mov = gen_movhi;
10597 }
10598 else if (TARGET_STRING && bytes > 1)
10599 { /* move up to 4 bytes at a time */
10600 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10601 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10602 }
10603 else /* move 1 byte at a time */
10604 {
10605 move_bytes = 1;
10606 mode = QImode;
10607 gen_func.mov = gen_movqi;
10608 }
f676971a 10609
cabfd258
GK
10610 src = adjust_address (orig_src, mode, offset);
10611 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10612
10613 if (mode != BLKmode)
cabfd258
GK
10614 {
10615 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10616
cabfd258
GK
10617 emit_insn ((*gen_func.mov) (tmp_reg, src));
10618 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10619 }
3933e0e1 10620
cabfd258
GK
10621 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10622 {
10623 int i;
10624 for (i = 0; i < num_reg; i++)
10625 emit_insn (stores[i]);
10626 num_reg = 0;
10627 }
35aff10b 10628
cabfd258 10629 if (mode == BLKmode)
7e69e155 10630 {
70128ad9 10631 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10632 patterns require zero offset. */
10633 if (!REG_P (XEXP (src, 0)))
b6c9286a 10634 {
cabfd258
GK
10635 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10636 src = replace_equiv_address (src, src_reg);
b6c9286a 10637 }
cabfd258 10638 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10639
cabfd258 10640 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10641 {
cabfd258
GK
10642 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10643 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10644 }
cabfd258 10645 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10646
70128ad9 10647 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10648 GEN_INT (move_bytes & 31),
10649 align_rtx));
7e69e155 10650 }
7e69e155
MM
10651 }
10652
10653 return 1;
10654}
10655
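/* Minimal standalone sketch (not part of rs6000.c proper; the helper
   name and parameters are made up for illustration): the per-iteration
   chunk size expand_block_move settles on above when the string
   instructions, AltiVec and SPE paths are all unavailable and
   STRICT_ALIGNMENT is in effect, so a chunk never exceeds the known
   alignment.  */

static int
move_chunk_bytes (long bytes_left, int align_bits, int powerpc64_p)
{
  if (bytes_left >= 8 && powerpc64_p && align_bits >= 64)
    return 8;                   /* one DImode load/store pair */
  if (bytes_left >= 4 && align_bits >= 32)
    return 4;                   /* SImode */
  if (bytes_left >= 2 && align_bits >= 16)
    return 2;                   /* HImode */
  return 1;                     /* QImode */
}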
d62294f5 10656\f
9caa3eb2
DE
10657/* Return a string to perform a load_multiple operation.
10658 operands[0] is the vector.
10659 operands[1] is the source address.
10660 operands[2] is the first destination register. */
10661
10662const char *
a2369ed3 10663rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10664{
10665 /* We have to handle the case where the pseudo used to contain the address
10666 is assigned to one of the output registers. */
10667 int i, j;
10668 int words = XVECLEN (operands[0], 0);
10669 rtx xop[10];
10670
10671 if (XVECLEN (operands[0], 0) == 1)
10672 return "{l|lwz} %2,0(%1)";
10673
10674 for (i = 0; i < words; i++)
10675 if (refers_to_regno_p (REGNO (operands[2]) + i,
10676 REGNO (operands[2]) + i + 1, operands[1], 0))
10677 {
10678 if (i == words-1)
10679 {
10680 xop[0] = GEN_INT (4 * (words-1));
10681 xop[1] = operands[1];
10682 xop[2] = operands[2];
10683 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10684 return "";
10685 }
10686 else if (i == 0)
10687 {
10688 xop[0] = GEN_INT (4 * (words-1));
10689 xop[1] = operands[1];
10690 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10691 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10692 return "";
10693 }
10694 else
10695 {
10696 for (j = 0; j < words; j++)
10697 if (j != i)
10698 {
10699 xop[0] = GEN_INT (j * 4);
10700 xop[1] = operands[1];
10701 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10702 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10703 }
10704 xop[0] = GEN_INT (i * 4);
10705 xop[1] = operands[1];
10706 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10707 return "";
10708 }
10709 }
10710
10711 return "{lsi|lswi} %2,%1,%N0";
10712}
10713
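/* Worked example (illustrative only): for a three-word load_multiple
   whose address register does not overlap any destination register,
   the default template "{lsi|lswi} %2,%1,%N0" above emits a single
   lswi of %N0 = 3 * 4 = 12 bytes.  When the address register is also
   one of the destinations, the branches above reorder the loads so
   that the overlapping register is filled last, after it has served
   as the base address for the other words.  */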
9878760c 10714\f
a4f6c312
SS
10715/* A validation routine: say whether CODE, a condition code, and MODE
10716 match. The other alternatives either don't make sense or should
10717 never be generated. */
39a10a29 10718
48d72335 10719void
a2369ed3 10720validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10721{
37409796
NS
10722 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10723 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10724 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10725
10726 /* These don't make sense. */
37409796
NS
10727 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10728 || mode != CCUNSmode);
39a10a29 10729
37409796
NS
10730 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10731 || mode == CCUNSmode);
39a10a29 10732
37409796
NS
10733 gcc_assert (mode == CCFPmode
10734 || (code != ORDERED && code != UNORDERED
10735 && code != UNEQ && code != LTGT
10736 && code != UNGT && code != UNLT
10737 && code != UNGE && code != UNLE));
f676971a
EC
10738
10739 /* These should never be generated except for
bc9ec0e0 10740 flag_finite_math_only. */
37409796
NS
10741 gcc_assert (mode != CCFPmode
10742 || flag_finite_math_only
10743 || (code != LE && code != GE
10744 && code != UNEQ && code != LTGT
10745 && code != UNGT && code != UNLT));
39a10a29
GK
10746
10747 /* These are invalid; the information is not there. */
37409796 10748 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10749}
10750
9878760c
RK
10751\f
10752/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10753 mask required to convert the result of a rotate insn into a shift
b1765bde 10754 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10755
10756int
a2369ed3 10757includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10758{
e2c953b6
DE
10759 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10760
10761 shift_mask <<= INTVAL (shiftop);
9878760c 10762
b1765bde 10763 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10764}
10765
10766/* Similar, but for right shift. */
10767
10768int
a2369ed3 10769includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10770{
a7653a2c 10771 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10772
10773 shift_mask >>= INTVAL (shiftop);
10774
b1765bde 10775 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10776}
10777
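/* Worked example (illustrative only): with SHIFTOP = 8,
   includes_lshift_p builds shift_mask = ~0 << 8, so ANDOP passes only
   if its low eight bits are clear.  ANDOP = 0x0000ff00 is accepted,
   while ANDOP = 0x0000ff80 is rejected because bit 7 could only come
   from the bits the rotate wraps around, which a plain shift left
   discards.  */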
c5059423
AM
10778/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10779 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10780 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10781
10782int
a2369ed3 10783includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10784{
c5059423
AM
10785 if (GET_CODE (andop) == CONST_INT)
10786 {
02071907 10787 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10788
c5059423 10789 c = INTVAL (andop);
02071907 10790 if (c == 0 || c == ~0)
c5059423 10791 return 0;
e2c953b6 10792
02071907 10793 shift_mask = ~0;
c5059423
AM
10794 shift_mask <<= INTVAL (shiftop);
10795
b6d08ca1 10796 /* Find the least significant one bit. */
c5059423
AM
10797 lsb = c & -c;
10798
10799 /* It must coincide with the LSB of the shift mask. */
10800 if (-lsb != shift_mask)
10801 return 0;
e2c953b6 10802
c5059423
AM
10803 /* Invert to look for the next transition (if any). */
10804 c = ~c;
10805
10806 /* Remove the low group of ones (originally low group of zeros). */
10807 c &= -lsb;
10808
10809 /* Again find the lsb, and check we have all 1's above. */
10810 lsb = c & -c;
10811 return c == -lsb;
10812 }
10813 else if (GET_CODE (andop) == CONST_DOUBLE
10814 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10815 {
02071907
AM
10816 HOST_WIDE_INT low, high, lsb;
10817 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10818
10819 low = CONST_DOUBLE_LOW (andop);
10820 if (HOST_BITS_PER_WIDE_INT < 64)
10821 high = CONST_DOUBLE_HIGH (andop);
10822
10823 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10824 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10825 return 0;
10826
10827 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10828 {
02071907 10829 shift_mask_high = ~0;
c5059423
AM
10830 if (INTVAL (shiftop) > 32)
10831 shift_mask_high <<= INTVAL (shiftop) - 32;
10832
10833 lsb = high & -high;
10834
10835 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10836 return 0;
10837
10838 high = ~high;
10839 high &= -lsb;
10840
10841 lsb = high & -high;
10842 return high == -lsb;
10843 }
10844
02071907 10845 shift_mask_low = ~0;
c5059423
AM
10846 shift_mask_low <<= INTVAL (shiftop);
10847
10848 lsb = low & -low;
10849
10850 if (-lsb != shift_mask_low)
10851 return 0;
10852
10853 if (HOST_BITS_PER_WIDE_INT < 64)
10854 high = ~high;
10855 low = ~low;
10856 low &= -lsb;
10857
10858 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10859 {
10860 lsb = high & -high;
10861 return high == -lsb;
10862 }
10863
10864 lsb = low & -low;
10865 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10866 }
10867 else
10868 return 0;
10869}
e2c953b6 10870
c5059423
AM
10871/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10872 to perform a left shift. It must have SHIFTOP or more least
c1207243 10873 significant 0's, with the remainder of the word 1's. */
e2c953b6 10874
c5059423 10875int
a2369ed3 10876includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10877{
e2c953b6 10878 if (GET_CODE (andop) == CONST_INT)
c5059423 10879 {
02071907 10880 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10881
02071907 10882 shift_mask = ~0;
c5059423
AM
10883 shift_mask <<= INTVAL (shiftop);
10884 c = INTVAL (andop);
10885
c1207243 10886 /* Find the least significant one bit. */
c5059423
AM
10887 lsb = c & -c;
10888
10889 /* It must be covered by the shift mask.
a4f6c312 10890 This test also rejects c == 0. */
c5059423
AM
10891 if ((lsb & shift_mask) == 0)
10892 return 0;
10893
10894 /* Check we have all 1's above the transition, and reject all 1's. */
10895 return c == -lsb && lsb != 1;
10896 }
10897 else if (GET_CODE (andop) == CONST_DOUBLE
10898 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10899 {
02071907 10900 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10901
10902 low = CONST_DOUBLE_LOW (andop);
10903
10904 if (HOST_BITS_PER_WIDE_INT < 64)
10905 {
02071907 10906 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10907
10908 high = CONST_DOUBLE_HIGH (andop);
10909
10910 if (low == 0)
10911 {
02071907 10912 shift_mask_high = ~0;
c5059423
AM
10913 if (INTVAL (shiftop) > 32)
10914 shift_mask_high <<= INTVAL (shiftop) - 32;
10915
10916 lsb = high & -high;
10917
10918 if ((lsb & shift_mask_high) == 0)
10919 return 0;
10920
10921 return high == -lsb;
10922 }
10923 if (high != ~0)
10924 return 0;
10925 }
10926
02071907 10927 shift_mask_low = ~0;
c5059423
AM
10928 shift_mask_low <<= INTVAL (shiftop);
10929
10930 lsb = low & -low;
10931
10932 if ((lsb & shift_mask_low) == 0)
10933 return 0;
10934
10935 return low == -lsb && lsb != 1;
10936 }
e2c953b6 10937 else
c5059423 10938 return 0;
9878760c 10939}
35068b43 10940
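/* Worked example (illustrative only) for includes_rldic_lshift_p with
   a CONST_INT: take SHIFTOP = 4 and ANDOP = 0x0ff0.  The lowest set
   bit is lsb = 0x10, and -lsb equals ~0 << 4, so the mask begins
   exactly at bit 4.  Complementing and clearing the low run of ones
   leaves ...f000, whose lowest set bit again satisfies c == -lsb, so
   the function returns 1: four trailing zeros, a run of ones, then
   zeros, exactly the shape rldic can produce.  ANDOP = 0x0f70 fails
   the final check because of the hole at bit 7.  */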
11ac38b2
DE
10941/* Return 1 if operands will generate a valid arguments to rlwimi
10942instruction for insert with right shift in 64-bit mode. The mask may
10943not start on the first bit or stop on the last bit because wrap-around
10944effects of instruction do not correspond to semantics of RTL insn. */
10945
10946int
10947insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10948{
429ec7dc
DE
10949 if (INTVAL (startop) > 32
10950 && INTVAL (startop) < 64
10951 && INTVAL (sizeop) > 1
10952 && INTVAL (sizeop) + INTVAL (startop) < 64
10953 && INTVAL (shiftop) > 0
10954 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10955 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10956 return 1;
10957
10958 return 0;
10959}
10960
35068b43 10961/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10962 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10963
10964int
a2369ed3 10965registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10966{
10967 /* We might have been passed a SUBREG. */
f676971a 10968 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10969 return 0;
f676971a 10970
90f81f99
AP
10971 /* We might have been passed non floating point registers. */
10972 if (!FP_REGNO_P (REGNO (reg1))
10973 || !FP_REGNO_P (REGNO (reg2)))
10974 return 0;
35068b43
RK
10975
10976 return (REGNO (reg1) == REGNO (reg2) - 1);
10977}
10978
a4f6c312
SS
10979/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10980 addr1 and addr2 must be in consecutive memory locations
10981 (addr2 == addr1 + 8). */
35068b43
RK
10982
10983int
90f81f99 10984mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10985{
90f81f99 10986 rtx addr1, addr2;
bb8df8a6
EC
10987 unsigned int reg1, reg2;
10988 int offset1, offset2;
35068b43 10989
90f81f99
AP
10990 /* The mems cannot be volatile. */
10991 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10992 return 0;
f676971a 10993
90f81f99
AP
10994 addr1 = XEXP (mem1, 0);
10995 addr2 = XEXP (mem2, 0);
10996
35068b43
RK
10997 /* Extract an offset (if used) from the first addr. */
10998 if (GET_CODE (addr1) == PLUS)
10999 {
11000 /* If not a REG, return zero. */
11001 if (GET_CODE (XEXP (addr1, 0)) != REG)
11002 return 0;
11003 else
11004 {
c4ad648e 11005 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11006 /* The offset must be constant! */
11007 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11008 return 0;
11009 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11010 }
11011 }
11012 else if (GET_CODE (addr1) != REG)
11013 return 0;
11014 else
11015 {
11016 reg1 = REGNO (addr1);
11017 /* This was a simple (mem (reg)) expression. Offset is 0. */
11018 offset1 = 0;
11019 }
11020
bb8df8a6
EC
11021 /* And now for the second addr. */
11022 if (GET_CODE (addr2) == PLUS)
11023 {
11024 /* If not a REG, return zero. */
11025 if (GET_CODE (XEXP (addr2, 0)) != REG)
11026 return 0;
11027 else
11028 {
11029 reg2 = REGNO (XEXP (addr2, 0));
11030 /* The offset must be constant. */
11031 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11032 return 0;
11033 offset2 = INTVAL (XEXP (addr2, 1));
11034 }
11035 }
11036 else if (GET_CODE (addr2) != REG)
35068b43 11037 return 0;
bb8df8a6
EC
11038 else
11039 {
11040 reg2 = REGNO (addr2);
11041 /* This was a simple (mem (reg)) expression. Offset is 0. */
11042 offset2 = 0;
11043 }
35068b43 11044
bb8df8a6
EC
11045 /* Both of these must have the same base register. */
11046 if (reg1 != reg2)
35068b43
RK
11047 return 0;
11048
11049 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11050 if (offset2 != offset1 + 8)
35068b43
RK
11051 return 0;
11052
11053 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11054 instructions. */
11055 return 1;
11056}
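/* Worked example (illustrative only): mems_ok_for_quad_peep accepts
   the pair (mem (plus r3 8)) and (mem (plus r3 16)) -- same base
   register, second offset exactly 8 larger, neither volatile -- so a
   peephole can fuse the two adjacent FP accesses into one lfq/stfq.
   (mem (plus r3 8)) paired with (mem (plus r4 16)) is rejected at the
   base-register check.  */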
9878760c
RK
11057\f
11058/* Return the register class of a scratch register needed to copy IN into
11059 or out of a register in CLASS in MODE. If it can be done directly,
11060 NO_REGS is returned. */
11061
11062enum reg_class
3c4774e0
R
11063rs6000_secondary_reload_class (enum reg_class class,
11064 enum machine_mode mode ATTRIBUTE_UNUSED,
11065 rtx in)
9878760c 11066{
5accd822 11067 int regno;
9878760c 11068
ab82a49f
AP
11069 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11070#if TARGET_MACHO
c4ad648e 11071 && MACHOPIC_INDIRECT
ab82a49f 11072#endif
c4ad648e 11073 ))
46fad5b7
DJ
11074 {
11075 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11076 other than BASE_REGS for TARGET_ELF. So indicate that a
11077 register from BASE_REGS is needed as an intermediate
11078 register.
f676971a 11079
46fad5b7
DJ
11080 On Darwin, pic addresses require a load from memory, which
11081 needs a base register. */
11082 if (class != BASE_REGS
c4ad648e
AM
11083 && (GET_CODE (in) == SYMBOL_REF
11084 || GET_CODE (in) == HIGH
11085 || GET_CODE (in) == LABEL_REF
11086 || GET_CODE (in) == CONST))
11087 return BASE_REGS;
46fad5b7 11088 }
e7b7998a 11089
5accd822
DE
11090 if (GET_CODE (in) == REG)
11091 {
11092 regno = REGNO (in);
11093 if (regno >= FIRST_PSEUDO_REGISTER)
11094 {
11095 regno = true_regnum (in);
11096 if (regno >= FIRST_PSEUDO_REGISTER)
11097 regno = -1;
11098 }
11099 }
11100 else if (GET_CODE (in) == SUBREG)
11101 {
11102 regno = true_regnum (in);
11103 if (regno >= FIRST_PSEUDO_REGISTER)
11104 regno = -1;
11105 }
11106 else
11107 regno = -1;
11108
9878760c
RK
11109 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11110 into anything. */
11111 if (class == GENERAL_REGS || class == BASE_REGS
11112 || (regno >= 0 && INT_REGNO_P (regno)))
11113 return NO_REGS;
11114
11115 /* Constants, memory, and FP registers can go into FP registers. */
11116 if ((regno == -1 || FP_REGNO_P (regno))
11117 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
11118 return NO_REGS;
11119
0ac081f6
AH
11120 /* Memory, and AltiVec registers can go into AltiVec registers. */
11121 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11122 && class == ALTIVEC_REGS)
11123 return NO_REGS;
11124
9878760c
RK
11125 /* We can copy among the CR registers. */
11126 if ((class == CR_REGS || class == CR0_REGS)
11127 && regno >= 0 && CR_REGNO_P (regno))
11128 return NO_REGS;
11129
11130 /* Otherwise, we need GENERAL_REGS. */
11131 return GENERAL_REGS;
11132}
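/* Worked examples (illustrative only): copying a SYMBOL_REF into
   ALTIVEC_REGS on a TARGET_ELF target returns BASE_REGS above, since
   the symbolic address must first be formed in a base register;
   copying a floating-point register into ALTIVEC_REGS matches none of
   the direct cases and falls through to GENERAL_REGS; a copy between
   two CR registers, or a copy of anything into GENERAL_REGS, needs no
   scratch and returns NO_REGS.  */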
11133\f
11134/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11135 know this is a valid comparison.
9878760c
RK
11136
11137 SCC_P is 1 if this is for an scc. That means that %D will have been
11138 used instead of %C, so the bits will be in different places.
11139
b4ac57ab 11140 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11141
11142int
a2369ed3 11143ccr_bit (rtx op, int scc_p)
9878760c
RK
11144{
11145 enum rtx_code code = GET_CODE (op);
11146 enum machine_mode cc_mode;
11147 int cc_regnum;
11148 int base_bit;
9ebbca7d 11149 rtx reg;
9878760c 11150
ec8e098d 11151 if (!COMPARISON_P (op))
9878760c
RK
11152 return -1;
11153
9ebbca7d
GK
11154 reg = XEXP (op, 0);
11155
37409796 11156 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11157
11158 cc_mode = GET_MODE (reg);
11159 cc_regnum = REGNO (reg);
11160 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11161
39a10a29 11162 validate_condition_mode (code, cc_mode);
c5defebb 11163
b7053a3f
GK
11164 /* When generating a sCOND operation, only positive conditions are
11165 allowed. */
37409796
NS
11166 gcc_assert (!scc_p
11167 || code == EQ || code == GT || code == LT || code == UNORDERED
11168 || code == GTU || code == LTU);
f676971a 11169
9878760c
RK
11170 switch (code)
11171 {
11172 case NE:
11173 return scc_p ? base_bit + 3 : base_bit + 2;
11174 case EQ:
11175 return base_bit + 2;
1c882ea4 11176 case GT: case GTU: case UNLE:
9878760c 11177 return base_bit + 1;
1c882ea4 11178 case LT: case LTU: case UNGE:
9878760c 11179 return base_bit;
1c882ea4
GK
11180 case ORDERED: case UNORDERED:
11181 return base_bit + 3;
9878760c
RK
11182
11183 case GE: case GEU:
39a10a29 11184 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11185 unordered position. So test that bit. For integer, this is ! LT
11186 unless this is an scc insn. */
39a10a29 11187 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11188
11189 case LE: case LEU:
39a10a29 11190 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11191
9878760c 11192 default:
37409796 11193 gcc_unreachable ();
9878760c
RK
11194 }
11195}
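/* Worked example (illustrative only): for a comparison whose result
   sits in CR field 6 (cc_regnum - CR0_REGNO == 6), base_bit is 24, so
   ccr_bit returns 24 for LT, 25 for GT, 26 for EQ and 27 for
   UNORDERED.  With SCC_P set, NE and the GE/LE family return
   base_bit + 3 = 27 instead, because the scc sequence has already
   cror'ed the wanted condition into the unordered bit.  */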
1ff7789b 11196\f
8d30c4ee 11197/* Return the GOT register. */
1ff7789b 11198
9390387d 11199rtx
a2369ed3 11200rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11201{
a4f6c312
SS
11202 /* The second flow pass currently (June 1999) can't update
11203 regs_ever_live without disturbing other parts of the compiler, so
11204 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11205 if (!can_create_pseudo_p ()
11206 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11207 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11208
8d30c4ee 11209 current_function_uses_pic_offset_table = 1;
3cb999d8 11210
1ff7789b
MM
11211 return pic_offset_table_rtx;
11212}
a7df97e6 11213\f
e2500fed
GK
11214/* Function to init struct machine_function.
11215 This will be called, via a pointer variable,
11216 from push_function_context. */
a7df97e6 11217
e2500fed 11218static struct machine_function *
863d938c 11219rs6000_init_machine_status (void)
a7df97e6 11220{
e2500fed 11221 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11222}
9878760c 11223\f
0ba1b2ff
AM
11224/* These macros test for integers and extract the low-order bits. */
11225#define INT_P(X) \
11226((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11227 && GET_MODE (X) == VOIDmode)
11228
11229#define INT_LOWPART(X) \
11230 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11231
11232int
a2369ed3 11233extract_MB (rtx op)
0ba1b2ff
AM
11234{
11235 int i;
11236 unsigned long val = INT_LOWPART (op);
11237
11238 /* If the high bit is zero, the value is the first 1 bit we find
11239 from the left. */
11240 if ((val & 0x80000000) == 0)
11241 {
37409796 11242 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11243
11244 i = 1;
11245 while (((val <<= 1) & 0x80000000) == 0)
11246 ++i;
11247 return i;
11248 }
11249
11250 /* If the high bit is set and the low bit is not, or the mask is all
11251 1's, the value is zero. */
11252 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11253 return 0;
11254
11255 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11256 from the right. */
11257 i = 31;
11258 while (((val >>= 1) & 1) != 0)
11259 --i;
11260
11261 return i;
11262}
11263
11264int
a2369ed3 11265extract_ME (rtx op)
0ba1b2ff
AM
11266{
11267 int i;
11268 unsigned long val = INT_LOWPART (op);
11269
11270 /* If the low bit is zero, the value is the first 1 bit we find from
11271 the right. */
11272 if ((val & 1) == 0)
11273 {
37409796 11274 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11275
11276 i = 30;
11277 while (((val >>= 1) & 1) == 0)
11278 --i;
11279
11280 return i;
11281 }
11282
11283 /* If the low bit is set and the high bit is not, or the mask is all
11284 1's, the value is 31. */
11285 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11286 return 31;
11287
11288 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11289 from the left. */
11290 i = 0;
11291 while (((val <<= 1) & 0x80000000) != 0)
11292 ++i;
11293
11294 return i;
11295}
11296
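/* Minimal standalone sketch (not part of rs6000.c proper; the function
   name is made up for illustration): extract_MB and extract_ME above
   recover the rlwinm mask-begin/mask-end fields, numbering bits from
   the most-significant end as the ISA does.  For the simple contiguous
   (non-wrap-around) case the computation reduces to the two scans
   below; the example mask 0x00ffff00 yields MB = 8 and ME = 23.  */

static void
mb_me_example (void)
{
  unsigned long mask = 0x00ffff00;
  int mb = 0, me = 31;

  /* First set bit scanning from the left (high-order) end.  */
  while (((mask << mb) & 0x80000000) == 0)
    mb++;                       /* ends with mb == 8 */

  /* First set bit scanning from the right (low-order) end.  */
  while (((mask >> (31 - me)) & 1) == 0)
    me--;                       /* ends with me == 23 */
}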
c4501e62
JJ
11297/* Locate some local-dynamic symbol still in use by this function
11298 so that we can print its name in some tls_ld pattern. */
11299
11300static const char *
863d938c 11301rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11302{
11303 rtx insn;
11304
11305 if (cfun->machine->some_ld_name)
11306 return cfun->machine->some_ld_name;
11307
11308 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11309 if (INSN_P (insn)
11310 && for_each_rtx (&PATTERN (insn),
11311 rs6000_get_some_local_dynamic_name_1, 0))
11312 return cfun->machine->some_ld_name;
11313
37409796 11314 gcc_unreachable ();
c4501e62
JJ
11315}
11316
11317/* Helper function for rs6000_get_some_local_dynamic_name. */
11318
11319static int
a2369ed3 11320rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11321{
11322 rtx x = *px;
11323
11324 if (GET_CODE (x) == SYMBOL_REF)
11325 {
11326 const char *str = XSTR (x, 0);
11327 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11328 {
11329 cfun->machine->some_ld_name = str;
11330 return 1;
11331 }
11332 }
11333
11334 return 0;
11335}
11336
85b776df
AM
11337/* Write out a function code label. */
11338
11339void
11340rs6000_output_function_entry (FILE *file, const char *fname)
11341{
11342 if (fname[0] != '.')
11343 {
11344 switch (DEFAULT_ABI)
11345 {
11346 default:
37409796 11347 gcc_unreachable ();
85b776df
AM
11348
11349 case ABI_AIX:
11350 if (DOT_SYMBOLS)
11351 putc ('.', file);
11352 else
11353 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11354 break;
11355
11356 case ABI_V4:
11357 case ABI_DARWIN:
11358 break;
11359 }
11360 }
11361 if (TARGET_AIX)
11362 RS6000_OUTPUT_BASENAME (file, fname);
11363 else
11364 assemble_name (file, fname);
11365}
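/* Worked example (illustrative only): under DEFAULT_ABI == ABI_AIX
   with DOT_SYMBOLS, the code entry label for a function foo is written
   as ".foo"; without dot symbols an internal "L."-prefixed label is
   used instead, and the V4 and Darwin ABIs emit the name unchanged.  */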
11366
9878760c
RK
11367/* Print an operand. Recognize special options, documented below. */
11368
38c1f2d7 11369#if TARGET_ELF
d9407988 11370#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11371#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11372#else
11373#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11374#define SMALL_DATA_REG 0
ba5e43aa
MM
11375#endif
11376
9878760c 11377void
a2369ed3 11378print_operand (FILE *file, rtx x, int code)
9878760c
RK
11379{
11380 int i;
a260abc9 11381 HOST_WIDE_INT val;
0ba1b2ff 11382 unsigned HOST_WIDE_INT uval;
9878760c
RK
11383
11384 switch (code)
11385 {
a8b3aeda 11386 case '.':
a85d226b
RK
11387 /* Write out an instruction after the call which may be replaced
11388 with glue code by the loader. This depends on the AIX version. */
11389 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11390 return;
11391
81eace42
GK
11392 /* %a is output_address. */
11393
9854d9ed
RK
11394 case 'A':
11395 /* If X is a constant integer whose low-order 5 bits are zero,
11396 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11397 in the AIX assembler where "sri" with a zero shift count
20e26713 11398 writes a trash instruction. */
9854d9ed 11399 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11400 putc ('l', file);
9854d9ed 11401 else
76229ac8 11402 putc ('r', file);
9854d9ed
RK
11403 return;
11404
11405 case 'b':
e2c953b6
DE
11406 /* If constant, low-order 16 bits of constant, unsigned.
11407 Otherwise, write normally. */
11408 if (INT_P (x))
11409 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11410 else
11411 print_operand (file, x, 0);
cad12a8d
RK
11412 return;
11413
a260abc9
DE
11414 case 'B':
11415 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11416 for 64-bit mask direction. */
9390387d 11417 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11418 return;
a260abc9 11419
81eace42
GK
11420 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11421 output_operand. */
11422
423c1189
AH
11423 case 'c':
11424 /* X is a CR register. Print the number of the GT bit of the CR. */
11425 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11426 output_operand_lossage ("invalid %%c value");
11427 else
11428 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11429 return;
11430
11431 case 'D':
cef6b86c 11432 /* Like 'J' but get to the GT bit only. */
37409796 11433 gcc_assert (GET_CODE (x) == REG);
423c1189 11434
cef6b86c
EB
11435 /* Bit 1 is GT bit. */
11436 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11437
cef6b86c
EB
11438 /* Add one for shift count in rlinm for scc. */
11439 fprintf (file, "%d", i + 1);
423c1189
AH
11440 return;
11441
9854d9ed 11442 case 'E':
39a10a29 11443 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11444 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11445 output_operand_lossage ("invalid %%E value");
78fbdbf7 11446 else
39a10a29 11447 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11448 return;
9854d9ed
RK
11449
11450 case 'f':
11451 /* X is a CR register. Print the shift count needed to move it
11452 to the high-order four bits. */
11453 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11454 output_operand_lossage ("invalid %%f value");
11455 else
9ebbca7d 11456 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11457 return;
11458
11459 case 'F':
11460 /* Similar, but print the count for the rotate in the opposite
11461 direction. */
11462 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11463 output_operand_lossage ("invalid %%F value");
11464 else
9ebbca7d 11465 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11466 return;
11467
11468 case 'G':
11469 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11470 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11471 if (GET_CODE (x) != CONST_INT)
11472 output_operand_lossage ("invalid %%G value");
11473 else if (INTVAL (x) >= 0)
76229ac8 11474 putc ('z', file);
9854d9ed 11475 else
76229ac8 11476 putc ('m', file);
9854d9ed 11477 return;
e2c953b6 11478
9878760c 11479 case 'h':
a4f6c312
SS
11480 /* If constant, output low-order five bits. Otherwise, write
11481 normally. */
9878760c 11482 if (INT_P (x))
5f59ecb7 11483 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11484 else
11485 print_operand (file, x, 0);
11486 return;
11487
64305719 11488 case 'H':
a4f6c312
SS
11489 /* If constant, output low-order six bits. Otherwise, write
11490 normally. */
64305719 11491 if (INT_P (x))
5f59ecb7 11492 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11493 else
11494 print_operand (file, x, 0);
11495 return;
11496
9854d9ed
RK
11497 case 'I':
11498 /* Print `i' if this is a constant, else nothing. */
9878760c 11499 if (INT_P (x))
76229ac8 11500 putc ('i', file);
9878760c
RK
11501 return;
11502
9854d9ed
RK
11503 case 'j':
11504 /* Write the bit number in CCR for jump. */
11505 i = ccr_bit (x, 0);
11506 if (i == -1)
11507 output_operand_lossage ("invalid %%j code");
9878760c 11508 else
9854d9ed 11509 fprintf (file, "%d", i);
9878760c
RK
11510 return;
11511
9854d9ed
RK
11512 case 'J':
11513 /* Similar, but add one for shift count in rlinm for scc and pass
11514 scc flag to `ccr_bit'. */
11515 i = ccr_bit (x, 1);
11516 if (i == -1)
11517 output_operand_lossage ("invalid %%J code");
11518 else
a0466a68
RK
11519 /* If we want bit 31, write a shift count of zero, not 32. */
11520 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11521 return;
11522
9854d9ed
RK
11523 case 'k':
11524 /* X must be a constant. Write the 1's complement of the
11525 constant. */
9878760c 11526 if (! INT_P (x))
9854d9ed 11527 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11528 else
11529 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11530 return;
11531
81eace42 11532 case 'K':
9ebbca7d
GK
11533 /* X must be a symbolic constant on ELF. Write an
11534 expression suitable for an 'addi' that adds in the low 16
11535 bits of the MEM. */
11536 if (GET_CODE (x) != CONST)
11537 {
11538 print_operand_address (file, x);
11539 fputs ("@l", file);
11540 }
11541 else
11542 {
11543 if (GET_CODE (XEXP (x, 0)) != PLUS
11544 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11545 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11546 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11547 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11548 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11549 fputs ("@l", file);
ed8d2920
MM
11550 /* For GNU as, there must be a non-alphanumeric character
11551 between 'l' and the number. The '-' is added by
11552 print_operand() already. */
11553 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11554 fputs ("+", file);
9ebbca7d
GK
11555 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11556 }
81eace42
GK
11557 return;
11558
11559 /* %l is output_asm_label. */
9ebbca7d 11560
9854d9ed
RK
11561 case 'L':
11562 /* Write second word of DImode or DFmode reference. Works on register
11563 or non-indexed memory only. */
11564 if (GET_CODE (x) == REG)
fb5c67a7 11565 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11566 else if (GET_CODE (x) == MEM)
11567 {
11568 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11569 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11570 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11571 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11572 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11573 UNITS_PER_WORD));
6fb5fa3c
DB
11574 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11575 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11576 UNITS_PER_WORD));
9854d9ed 11577 else
d7624dc0
RK
11578 output_address (XEXP (adjust_address_nv (x, SImode,
11579 UNITS_PER_WORD),
11580 0));
ed8908e7 11581
ba5e43aa 11582 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11583 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11584 reg_names[SMALL_DATA_REG]);
9854d9ed 11585 }
9878760c 11586 return;
f676971a 11587
9878760c
RK
11588 case 'm':
11589 /* MB value for a mask operand. */
b1765bde 11590 if (! mask_operand (x, SImode))
9878760c
RK
11591 output_operand_lossage ("invalid %%m value");
11592
0ba1b2ff 11593 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11594 return;
11595
11596 case 'M':
11597 /* ME value for a mask operand. */
b1765bde 11598 if (! mask_operand (x, SImode))
a260abc9 11599 output_operand_lossage ("invalid %%M value");
9878760c 11600
0ba1b2ff 11601 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11602 return;
11603
81eace42
GK
11604 /* %n outputs the negative of its operand. */
11605
9878760c
RK
11606 case 'N':
11607 /* Write the number of elements in the vector times 4. */
11608 if (GET_CODE (x) != PARALLEL)
11609 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11610 else
11611 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11612 return;
11613
11614 case 'O':
11615 /* Similar, but subtract 1 first. */
11616 if (GET_CODE (x) != PARALLEL)
1427100a 11617 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11618 else
11619 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11620 return;
11621
9854d9ed
RK
11622 case 'p':
11623 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11624 if (! INT_P (x)
2bfcf297 11625 || INT_LOWPART (x) < 0
9854d9ed
RK
11626 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11627 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11628 else
11629 fprintf (file, "%d", i);
9854d9ed
RK
11630 return;
11631
9878760c
RK
11632 case 'P':
11633 /* The operand must be an indirect memory reference. The result
8bb418a3 11634 is the register name. */
9878760c
RK
11635 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11636 || REGNO (XEXP (x, 0)) >= 32)
11637 output_operand_lossage ("invalid %%P value");
e2c953b6 11638 else
fb5c67a7 11639 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11640 return;
11641
dfbdccdb
GK
11642 case 'q':
11643 /* This outputs the logical code corresponding to a boolean
11644 expression. The expression may have one or both operands
39a10a29 11645 negated (if one, only the first one). For condition register
c4ad648e
AM
11646 logical operations, it will also treat the negated
11647 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11648 {
63bc1d05 11649 const char *const *t = 0;
dfbdccdb
GK
11650 const char *s;
11651 enum rtx_code code = GET_CODE (x);
11652 static const char * const tbl[3][3] = {
11653 { "and", "andc", "nor" },
11654 { "or", "orc", "nand" },
11655 { "xor", "eqv", "xor" } };
11656
11657 if (code == AND)
11658 t = tbl[0];
11659 else if (code == IOR)
11660 t = tbl[1];
11661 else if (code == XOR)
11662 t = tbl[2];
11663 else
11664 output_operand_lossage ("invalid %%q value");
11665
11666 if (GET_CODE (XEXP (x, 0)) != NOT)
11667 s = t[0];
11668 else
11669 {
11670 if (GET_CODE (XEXP (x, 1)) == NOT)
11671 s = t[2];
11672 else
11673 s = t[1];
11674 }
f676971a 11675
dfbdccdb
GK
11676 fputs (s, file);
11677 }
11678 return;
11679
2c4a9cff
DE
11680 case 'Q':
11681 if (TARGET_MFCRF)
3b6ce0af 11682 fputc (',', file);
5efb1046 11683 /* FALLTHRU */
2c4a9cff
DE
11684 else
11685 return;
11686
9854d9ed
RK
11687 case 'R':
11688 /* X is a CR register. Print the mask for `mtcrf'. */
11689 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11690 output_operand_lossage ("invalid %%R value");
11691 else
9ebbca7d 11692 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11693 return;
9854d9ed
RK
11694
11695 case 's':
11696 /* Low 5 bits of 32 - value */
11697 if (! INT_P (x))
11698 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11699 else
11700 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11701 return;
9854d9ed 11702
a260abc9 11703 case 'S':
0ba1b2ff 11704 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11705 CONST_INT 32-bit mask is considered sign-extended so any
11706 transition must occur within the CONST_INT, not on the boundary. */
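      /* As a worked example, the mask 0xff (low eight bits set) takes the
	 "Clear Left" path below and prints 56, the position of its first
	 set bit in PowerPC big-endian bit numbering.  */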
1990cd79 11707 if (! mask64_operand (x, DImode))
a260abc9
DE
11708 output_operand_lossage ("invalid %%S value");
11709
0ba1b2ff 11710 uval = INT_LOWPART (x);
a260abc9 11711
0ba1b2ff 11712 if (uval & 1) /* Clear Left */
a260abc9 11713 {
f099d360
GK
11714#if HOST_BITS_PER_WIDE_INT > 64
11715 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11716#endif
0ba1b2ff 11717 i = 64;
a260abc9 11718 }
0ba1b2ff 11719 else /* Clear Right */
a260abc9 11720 {
0ba1b2ff 11721 uval = ~uval;
f099d360
GK
11722#if HOST_BITS_PER_WIDE_INT > 64
11723 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11724#endif
0ba1b2ff 11725 i = 63;
a260abc9 11726 }
0ba1b2ff
AM
11727 while (uval != 0)
11728 --i, uval >>= 1;
37409796 11729 gcc_assert (i >= 0);
0ba1b2ff
AM
11730 fprintf (file, "%d", i);
11731 return;
a260abc9 11732
a3170dc6
AH
11733 case 't':
11734 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11735 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11736
11737 /* Bit 3 is OV bit. */
11738 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11739
11740 /* If we want bit 31, write a shift count of zero, not 32. */
11741 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11742 return;
11743
cccf3bdc
DE
11744 case 'T':
11745 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11746 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11747 && REGNO (x) != CTR_REGNO))
cccf3bdc 11748 output_operand_lossage ("invalid %%T value");
1de43f85 11749 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11750 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11751 else
11752 fputs ("ctr", file);
11753 return;
11754
9854d9ed 11755 case 'u':
802a0058 11756 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11757 if (! INT_P (x))
11758 output_operand_lossage ("invalid %%u value");
e2c953b6 11759 else
f676971a 11760 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11761 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11762 return;
11763
802a0058
MM
11764 case 'v':
11765 /* High-order 16 bits of constant for use in signed operand. */
11766 if (! INT_P (x))
11767 output_operand_lossage ("invalid %%v value");
e2c953b6 11768 else
134c32f6
DE
11769 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11770 (INT_LOWPART (x) >> 16) & 0xffff);
11771 return;
802a0058 11772
9854d9ed
RK
11773 case 'U':
11774 /* Print `u' if this has an auto-increment or auto-decrement. */
11775 if (GET_CODE (x) == MEM
11776 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11777 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11778 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11779 putc ('u', file);
9854d9ed 11780 return;
9878760c 11781
e0cd0770
JC
11782 case 'V':
11783 /* Print the trap code for this operand. */
11784 switch (GET_CODE (x))
11785 {
11786 case EQ:
11787 fputs ("eq", file); /* 4 */
11788 break;
11789 case NE:
11790 fputs ("ne", file); /* 24 */
11791 break;
11792 case LT:
11793 fputs ("lt", file); /* 16 */
11794 break;
11795 case LE:
11796 fputs ("le", file); /* 20 */
11797 break;
11798 case GT:
11799 fputs ("gt", file); /* 8 */
11800 break;
11801 case GE:
11802 fputs ("ge", file); /* 12 */
11803 break;
11804 case LTU:
11805 fputs ("llt", file); /* 2 */
11806 break;
11807 case LEU:
11808 fputs ("lle", file); /* 6 */
11809 break;
11810 case GTU:
11811 fputs ("lgt", file); /* 1 */
11812 break;
11813 case GEU:
11814 fputs ("lge", file); /* 5 */
11815 break;
11816 default:
37409796 11817 gcc_unreachable ();
e0cd0770
JC
11818 }
11819 break;
11820
9854d9ed
RK
11821 case 'w':
11822 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11823 normally. */
11824 if (INT_P (x))
f676971a 11825 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11826 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11827 else
11828 print_operand (file, x, 0);
9878760c
RK
11829 return;
11830
9854d9ed 11831 case 'W':
e2c953b6 11832 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11833 val = (GET_CODE (x) == CONST_INT
11834 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11835
11836 if (val < 0)
11837 i = -1;
9854d9ed 11838 else
e2c953b6
DE
11839 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11840 if ((val <<= 1) < 0)
11841 break;
11842
11843#if HOST_BITS_PER_WIDE_INT == 32
11844 if (GET_CODE (x) == CONST_INT && i >= 0)
11845 i += 32; /* zero-extend high-part was all 0's */
11846 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11847 {
11848 val = CONST_DOUBLE_LOW (x);
11849
37409796
NS
11850 gcc_assert (val);
11851 if (val < 0)
e2c953b6
DE
11852 --i;
11853 else
11854 for ( ; i < 64; i++)
11855 if ((val <<= 1) < 0)
11856 break;
11857 }
11858#endif
11859
11860 fprintf (file, "%d", i + 1);
9854d9ed 11861 return;
9878760c 11862
9854d9ed
RK
11863 case 'X':
11864 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11865 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11866 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11867 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11868 putc ('x', file);
9854d9ed 11869 return;
9878760c 11870
9854d9ed
RK
11871 case 'Y':
11872 /* Like 'L', for third word of TImode */
11873 if (GET_CODE (x) == REG)
fb5c67a7 11874 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11875 else if (GET_CODE (x) == MEM)
9878760c 11876 {
9854d9ed
RK
11877 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11878 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11879 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11880 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11881 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11882 else
d7624dc0 11883 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11884 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11885 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11886 reg_names[SMALL_DATA_REG]);
9878760c
RK
11887 }
11888 return;
f676971a 11889
9878760c 11890 case 'z':
b4ac57ab
RS
11891 /* X is a SYMBOL_REF. Write out the name preceded by a
11892 period and without any trailing data in brackets. Used for function
4d30c363
MM
11893 names. If we are configured for System V (or the embedded ABI) on
11894 the PowerPC, do not emit the period, since those systems do not use
11895 TOCs and the like. */
37409796 11896 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11897
c4ad648e
AM
11898 /* Mark the decl as referenced so that cgraph will output the
11899 function. */
9bf6462a 11900 if (SYMBOL_REF_DECL (x))
c4ad648e 11901 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11902
85b776df 11903 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11904 if (TARGET_MACHO)
11905 {
11906 const char *name = XSTR (x, 0);
a031e781 11907#if TARGET_MACHO
3b48085e 11908 if (MACHOPIC_INDIRECT
11abc112
MM
11909 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11910 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11911#endif
11912 assemble_name (file, name);
11913 }
85b776df 11914 else if (!DOT_SYMBOLS)
9739c90c 11915 assemble_name (file, XSTR (x, 0));
85b776df
AM
11916 else
11917 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11918 return;
11919
9854d9ed
RK
11920 case 'Z':
11921 /* Like 'L', for last word of TImode. */
11922 if (GET_CODE (x) == REG)
fb5c67a7 11923 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11924 else if (GET_CODE (x) == MEM)
11925 {
11926 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11927 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11928 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11929 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11930 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11931 else
d7624dc0 11932 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11933 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11934 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11935 reg_names[SMALL_DATA_REG]);
9854d9ed 11936 }
5c23c401 11937 return;
0ac081f6 11938
a3170dc6 11939 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11940 case 'y':
11941 {
11942 rtx tmp;
11943
37409796 11944 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11945
11946 tmp = XEXP (x, 0);
11947
90d3ff1c 11948 /* Ugly hack because %y is overloaded. */
8ef65e3d 11949 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11950 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11951 || GET_MODE (x) == TFmode
11952 || GET_MODE (x) == TImode))
a3170dc6
AH
11953 {
11954 /* Handle [reg]. */
11955 if (GET_CODE (tmp) == REG)
11956 {
11957 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11958 break;
11959 }
11960 /* Handle [reg+UIMM]. */
11961 else if (GET_CODE (tmp) == PLUS &&
11962 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11963 {
11964 int x;
11965
37409796 11966 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11967
11968 x = INTVAL (XEXP (tmp, 1));
11969 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11970 break;
11971 }
11972
11973 /* Fall through. Must be [reg+reg]. */
11974 }
850e8d3d
DN
11975 if (TARGET_ALTIVEC
11976 && GET_CODE (tmp) == AND
11977 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11978 && INTVAL (XEXP (tmp, 1)) == -16)
11979 tmp = XEXP (tmp, 0);
0ac081f6 11980 if (GET_CODE (tmp) == REG)
c62f2db5 11981 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11982 else
0ac081f6 11983 {
37409796 11984 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11985 && REG_P (XEXP (tmp, 0))
11986 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11987
0ac081f6
AH
11988 if (REGNO (XEXP (tmp, 0)) == 0)
11989 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11990 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11991 else
11992 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11993 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11994 }
0ac081f6
AH
11995 break;
11996 }
f676971a 11997
9878760c
RK
11998 case 0:
11999 if (GET_CODE (x) == REG)
12000 fprintf (file, "%s", reg_names[REGNO (x)]);
12001 else if (GET_CODE (x) == MEM)
12002 {
12003 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12004 know the width from the mode. */
12005 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12006 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12007 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12008 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12009 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12010 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12011 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12012 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12013 else
a54d04b7 12014 output_address (XEXP (x, 0));
9878760c
RK
12015 }
12016 else
a54d04b7 12017 output_addr_const (file, x);
a85d226b 12018 return;
9878760c 12019
c4501e62
JJ
12020 case '&':
12021 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12022 return;
12023
9878760c
RK
12024 default:
12025 output_operand_lossage ("invalid %%xn code");
12026 }
12027}
12028\f
12029/* Print the address of an operand. */
12030
12031void
a2369ed3 12032print_operand_address (FILE *file, rtx x)
9878760c
RK
12033{
12034 if (GET_CODE (x) == REG)
4697a36c 12035 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12036 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12037 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12038 {
12039 output_addr_const (file, x);
ba5e43aa 12040 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12041 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12042 reg_names[SMALL_DATA_REG]);
37409796
NS
12043 else
12044 gcc_assert (!TARGET_TOC);
9878760c
RK
12045 }
12046 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12047 {
9024f4b8 12048 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12049 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12050 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12051 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12052 else
4697a36c
MM
12053 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12054 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12055 }
12056 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12057 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12058 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12059#if TARGET_ELF
12060 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12061 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12062 {
12063 output_addr_const (file, XEXP (x, 1));
12064 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12065 }
c859cda6
DJ
12066#endif
12067#if TARGET_MACHO
12068 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12069 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12070 {
12071 fprintf (file, "lo16(");
12072 output_addr_const (file, XEXP (x, 1));
12073 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12074 }
3cb999d8 12075#endif
4d588c14 12076 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12077 {
2bfcf297 12078 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12079 {
2bfcf297
DB
12080 rtx contains_minus = XEXP (x, 1);
12081 rtx minus, symref;
12082 const char *name;
f676971a 12083
9ebbca7d 12084 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12085 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12086 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12087 contains_minus = XEXP (contains_minus, 0);
12088
2bfcf297
DB
12089 minus = XEXP (contains_minus, 0);
12090 symref = XEXP (minus, 0);
12091 XEXP (contains_minus, 0) = symref;
12092 if (TARGET_ELF)
12093 {
12094 char *newname;
12095
12096 name = XSTR (symref, 0);
12097 newname = alloca (strlen (name) + sizeof ("@toc"));
12098 strcpy (newname, name);
12099 strcat (newname, "@toc");
12100 XSTR (symref, 0) = newname;
12101 }
12102 output_addr_const (file, XEXP (x, 1));
12103 if (TARGET_ELF)
12104 XSTR (symref, 0) = name;
9ebbca7d
GK
12105 XEXP (contains_minus, 0) = minus;
12106 }
12107 else
12108 output_addr_const (file, XEXP (x, 1));
12109
12110 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12111 }
9878760c 12112 else
37409796 12113 gcc_unreachable ();
9878760c
RK
12114}
12115\f
88cad84b 12116/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12117 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12118 is defined. It also needs to handle DI-mode objects on 64-bit
12119 targets. */
12120
12121static bool
a2369ed3 12122rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12123{
f4f4921e 12124#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12125 /* Special handling for SI values. */
84dcde01 12126 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12127 {
301d03af 12128 static int recurse = 0;
f676971a 12129
301d03af
RS
12130 /* For -mrelocatable, we mark all addresses that need to be fixed up
12131 in the .fixup section. */
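      /* Concretely, the code below emits for each such address roughly:
	     .LCPn:	.long	(<address>)@fixup
			.section ".fixup","aw"
			.align	2
			.long	.LCPn
			.previous
	 where <address> is the constant being assembled; the exact label
	 spelling is target-dependent.  */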
12132 if (TARGET_RELOCATABLE
d6b5193b
RS
12133 && in_section != toc_section
12134 && in_section != text_section
4325ca90 12135 && !unlikely_text_section_p (in_section)
301d03af
RS
12136 && !recurse
12137 && GET_CODE (x) != CONST_INT
12138 && GET_CODE (x) != CONST_DOUBLE
12139 && CONSTANT_P (x))
12140 {
12141 char buf[256];
12142
12143 recurse = 1;
12144 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12145 fixuplabelno++;
12146 ASM_OUTPUT_LABEL (asm_out_file, buf);
12147 fprintf (asm_out_file, "\t.long\t(");
12148 output_addr_const (asm_out_file, x);
12149 fprintf (asm_out_file, ")@fixup\n");
12150 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12151 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12152 fprintf (asm_out_file, "\t.long\t");
12153 assemble_name (asm_out_file, buf);
12154 fprintf (asm_out_file, "\n\t.previous\n");
12155 recurse = 0;
12156 return true;
12157 }
12158 /* Remove initial .'s to turn a -mcall-aixdesc function
12159 address into the address of the descriptor, not the function
12160 itself. */
12161 else if (GET_CODE (x) == SYMBOL_REF
12162 && XSTR (x, 0)[0] == '.'
12163 && DEFAULT_ABI == ABI_AIX)
12164 {
12165 const char *name = XSTR (x, 0);
12166 while (*name == '.')
12167 name++;
12168
12169 fprintf (asm_out_file, "\t.long\t%s\n", name);
12170 return true;
12171 }
12172 }
f4f4921e 12173#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12174 return default_assemble_integer (x, size, aligned_p);
12175}
93638d7a
AM
12176
12177#ifdef HAVE_GAS_HIDDEN
12178/* Emit an assembler directive to set symbol visibility for DECL to
12179 VISIBILITY_TYPE. */
12180
5add3202 12181static void
a2369ed3 12182rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12183{
93638d7a
AM
12184 /* Functions need to have their entry point symbol visibility set as
12185 well as their descriptor symbol visibility. */
85b776df
AM
12186 if (DEFAULT_ABI == ABI_AIX
12187 && DOT_SYMBOLS
12188 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12189 {
25fdb4dc 12190 static const char * const visibility_types[] = {
c4ad648e 12191 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12192 };
12193
12194 const char *name, *type;
93638d7a
AM
12195
12196 name = ((* targetm.strip_name_encoding)
12197 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12198 type = visibility_types[vis];
93638d7a 12199
25fdb4dc
RH
12200 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12201 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12202 }
25fdb4dc
RH
12203 else
12204 default_assemble_visibility (decl, vis);
93638d7a
AM
12205}
12206#endif
301d03af 12207\f
39a10a29 12208enum rtx_code
a2369ed3 12209rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12210{
 12211 /* Reversal of FP compares needs care -- an ordered compare
12212 becomes an unordered compare and vice versa. */
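  /* For example, when NaNs must be honored GT reverses to UNLE rather
     than to LE, so unordered operands still take the intended arm.  */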
f676971a 12213 if (mode == CCFPmode
bc9ec0e0
GK
12214 && (!flag_finite_math_only
12215 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12216 || code == UNEQ || code == LTGT))
bab6226b 12217 return reverse_condition_maybe_unordered (code);
39a10a29 12218 else
bab6226b 12219 return reverse_condition (code);
39a10a29
GK
12220}
12221
39a10a29
GK
12222/* Generate a compare for CODE. Return a brand-new rtx that
12223 represents the result of the compare. */
a4f6c312 12224
39a10a29 12225static rtx
a2369ed3 12226rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12227{
12228 enum machine_mode comp_mode;
12229 rtx compare_result;
12230
12231 if (rs6000_compare_fp_p)
12232 comp_mode = CCFPmode;
12233 else if (code == GTU || code == LTU
c4ad648e 12234 || code == GEU || code == LEU)
39a10a29 12235 comp_mode = CCUNSmode;
60934f9c
NS
12236 else if ((code == EQ || code == NE)
12237 && GET_CODE (rs6000_compare_op0) == SUBREG
12238 && GET_CODE (rs6000_compare_op1) == SUBREG
12239 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12240 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12241 /* These are unsigned values, perhaps there will be a later
12242 ordering compare that can be shared with this one.
12243 Unfortunately we cannot detect the signedness of the operands
12244 for non-subregs. */
12245 comp_mode = CCUNSmode;
39a10a29
GK
12246 else
12247 comp_mode = CCmode;
12248
12249 /* First, the compare. */
12250 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12251
cef6b86c 12252 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12253 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12254 && rs6000_compare_fp_p)
a3170dc6 12255 {
64022b5d 12256 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12257 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12258
12259 if (op_mode == VOIDmode)
12260 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12261
cef6b86c
EB
12262 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12263 This explains the following mess. */
423c1189 12264
a3170dc6
AH
12265 switch (code)
12266 {
423c1189 12267 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12268 switch (op_mode)
12269 {
12270 case SFmode:
12271 cmp = flag_unsafe_math_optimizations
12272 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12273 rs6000_compare_op1)
12274 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12275 rs6000_compare_op1);
12276 break;
12277
12278 case DFmode:
12279 cmp = flag_unsafe_math_optimizations
12280 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12281 rs6000_compare_op1)
12282 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12283 rs6000_compare_op1);
12284 break;
12285
17caeff2
JM
12286 case TFmode:
12287 cmp = flag_unsafe_math_optimizations
12288 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12289 rs6000_compare_op1)
12290 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12291 rs6000_compare_op1);
12292 break;
12293
37409796
NS
12294 default:
12295 gcc_unreachable ();
12296 }
a3170dc6 12297 break;
bb8df8a6 12298
423c1189 12299 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12300 switch (op_mode)
12301 {
12302 case SFmode:
12303 cmp = flag_unsafe_math_optimizations
12304 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12305 rs6000_compare_op1)
12306 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12307 rs6000_compare_op1);
12308 break;
bb8df8a6 12309
37409796
NS
12310 case DFmode:
12311 cmp = flag_unsafe_math_optimizations
12312 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12313 rs6000_compare_op1)
12314 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12315 rs6000_compare_op1);
12316 break;
12317
17caeff2
JM
12318 case TFmode:
12319 cmp = flag_unsafe_math_optimizations
12320 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12321 rs6000_compare_op1)
12322 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12323 rs6000_compare_op1);
12324 break;
12325
37409796
NS
12326 default:
12327 gcc_unreachable ();
12328 }
a3170dc6 12329 break;
bb8df8a6 12330
423c1189 12331 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12332 switch (op_mode)
12333 {
12334 case SFmode:
12335 cmp = flag_unsafe_math_optimizations
12336 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12337 rs6000_compare_op1)
12338 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12339 rs6000_compare_op1);
12340 break;
bb8df8a6 12341
37409796
NS
12342 case DFmode:
12343 cmp = flag_unsafe_math_optimizations
12344 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12345 rs6000_compare_op1)
12346 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12347 rs6000_compare_op1);
12348 break;
12349
17caeff2
JM
12350 case TFmode:
12351 cmp = flag_unsafe_math_optimizations
12352 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12353 rs6000_compare_op1)
12354 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12355 rs6000_compare_op1);
12356 break;
12357
37409796
NS
12358 default:
12359 gcc_unreachable ();
12360 }
a3170dc6 12361 break;
4d4cbc0e 12362 default:
37409796 12363 gcc_unreachable ();
a3170dc6
AH
12364 }
12365
12366 /* Synthesize LE and GE from LT/GT || EQ. */
12367 if (code == LE || code == GE || code == LEU || code == GEU)
12368 {
a3170dc6
AH
12369 emit_insn (cmp);
12370
12371 switch (code)
12372 {
12373 case LE: code = LT; break;
12374 case GE: code = GT; break;
12375 case LEU: code = LT; break;
12376 case GEU: code = GT; break;
37409796 12377 default: gcc_unreachable ();
a3170dc6
AH
12378 }
12379
a3170dc6
AH
12380 compare_result2 = gen_reg_rtx (CCFPmode);
12381
12382 /* Do the EQ. */
37409796
NS
12383 switch (op_mode)
12384 {
12385 case SFmode:
12386 cmp = flag_unsafe_math_optimizations
12387 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12388 rs6000_compare_op1)
12389 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12390 rs6000_compare_op1);
12391 break;
12392
12393 case DFmode:
12394 cmp = flag_unsafe_math_optimizations
12395 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12396 rs6000_compare_op1)
12397 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12398 rs6000_compare_op1);
12399 break;
12400
17caeff2
JM
12401 case TFmode:
12402 cmp = flag_unsafe_math_optimizations
12403 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12404 rs6000_compare_op1)
12405 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12406 rs6000_compare_op1);
12407 break;
12408
37409796
NS
12409 default:
12410 gcc_unreachable ();
12411 }
a3170dc6
AH
12412 emit_insn (cmp);
12413
a3170dc6 12414 /* OR them together. */
64022b5d
AH
12415 or_result = gen_reg_rtx (CCFPmode);
12416 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12417 compare_result2);
a3170dc6
AH
12418 compare_result = or_result;
12419 code = EQ;
12420 }
12421 else
12422 {
a3170dc6 12423 if (code == NE || code == LTGT)
a3170dc6 12424 code = NE;
423c1189
AH
12425 else
12426 code = EQ;
a3170dc6
AH
12427 }
12428
12429 emit_insn (cmp);
12430 }
12431 else
de17c25f
DE
12432 {
12433 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12434 CLOBBERs to match cmptf_internal2 pattern. */
12435 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12436 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12437 && !TARGET_IEEEQUAD
de17c25f
DE
12438 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12439 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12440 gen_rtvec (9,
12441 gen_rtx_SET (VOIDmode,
12442 compare_result,
12443 gen_rtx_COMPARE (comp_mode,
12444 rs6000_compare_op0,
12445 rs6000_compare_op1)),
12446 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12447 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12448 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12449 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12450 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12451 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12452 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12453 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12454 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12455 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12456 {
12457 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12458 comp_mode = CCEQmode;
12459 compare_result = gen_reg_rtx (CCEQmode);
12460 if (TARGET_64BIT)
12461 emit_insn (gen_stack_protect_testdi (compare_result,
12462 rs6000_compare_op0, op1));
12463 else
12464 emit_insn (gen_stack_protect_testsi (compare_result,
12465 rs6000_compare_op0, op1));
12466 }
de17c25f
DE
12467 else
12468 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12469 gen_rtx_COMPARE (comp_mode,
12470 rs6000_compare_op0,
12471 rs6000_compare_op1)));
12472 }
f676971a 12473
ca5adc63 12474 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12475 under flag_finite_math_only we don't bother. */
39a10a29 12476 if (rs6000_compare_fp_p
e7108df9 12477 && !flag_finite_math_only
8ef65e3d 12478 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12479 && (code == LE || code == GE
12480 || code == UNEQ || code == LTGT
12481 || code == UNGT || code == UNLT))
12482 {
12483 enum rtx_code or1, or2;
12484 rtx or1_rtx, or2_rtx, compare2_rtx;
12485 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12486
39a10a29
GK
12487 switch (code)
12488 {
12489 case LE: or1 = LT; or2 = EQ; break;
12490 case GE: or1 = GT; or2 = EQ; break;
12491 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12492 case LTGT: or1 = LT; or2 = GT; break;
12493 case UNGT: or1 = UNORDERED; or2 = GT; break;
12494 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12495 default: gcc_unreachable ();
39a10a29
GK
12496 }
12497 validate_condition_mode (or1, comp_mode);
12498 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12499 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12500 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12501 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12502 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12503 const_true_rtx);
12504 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12505
12506 compare_result = or_result;
12507 code = EQ;
12508 }
12509
12510 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12511
1c563bed 12512 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12513}
12514
12515
12516/* Emit the RTL for an sCOND pattern. */
12517
12518void
a2369ed3 12519rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12520{
12521 rtx condition_rtx;
12522 enum machine_mode op_mode;
b7053a3f 12523 enum rtx_code cond_code;
39a10a29
GK
12524
12525 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12526 cond_code = GET_CODE (condition_rtx);
12527
8ef65e3d 12528 if (rs6000_compare_fp_p
423c1189
AH
12529 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12530 {
12531 rtx t;
12532
12533 PUT_MODE (condition_rtx, SImode);
12534 t = XEXP (condition_rtx, 0);
12535
37409796 12536 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12537
12538 if (cond_code == NE)
64022b5d 12539 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12540
64022b5d 12541 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12542 return;
12543 }
12544
b7053a3f
GK
12545 if (cond_code == NE
12546 || cond_code == GE || cond_code == LE
12547 || cond_code == GEU || cond_code == LEU
12548 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12549 {
12550 rtx not_result = gen_reg_rtx (CCEQmode);
12551 rtx not_op, rev_cond_rtx;
12552 enum machine_mode cc_mode;
f676971a 12553
b7053a3f
GK
12554 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12555
1c563bed 12556 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12557 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12558 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12559 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12560 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12561 }
39a10a29
GK
12562
12563 op_mode = GET_MODE (rs6000_compare_op0);
12564 if (op_mode == VOIDmode)
12565 op_mode = GET_MODE (rs6000_compare_op1);
12566
12567 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12568 {
12569 PUT_MODE (condition_rtx, DImode);
12570 convert_move (result, condition_rtx, 0);
12571 }
12572 else
12573 {
12574 PUT_MODE (condition_rtx, SImode);
12575 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12576 }
12577}
12578
39a10a29
GK
12579/* Emit a branch of kind CODE to location LOC. */
12580
12581void
a2369ed3 12582rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12583{
12584 rtx condition_rtx, loc_ref;
12585
12586 condition_rtx = rs6000_generate_compare (code);
12587 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12588 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12589 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12590 loc_ref, pc_rtx)));
12591}
12592
12a4e8c5
GK
12593/* Return the string to output a conditional branch to LABEL, which is
12594 the operand number of the label, or -1 if the branch is really a
f676971a 12595 conditional return.
12a4e8c5
GK
12596
12597 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12598 condition code register and its mode specifies what kind of
12599 comparison we made.
12600
a0ab749a 12601 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12602
12603 INSN is the insn. */
12604
12605char *
a2369ed3 12606output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12607{
12608 static char string[64];
12609 enum rtx_code code = GET_CODE (op);
12610 rtx cc_reg = XEXP (op, 0);
12611 enum machine_mode mode = GET_MODE (cc_reg);
12612 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12613 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12614 int really_reversed = reversed ^ need_longbranch;
12615 char *s = string;
12616 const char *ccode;
12617 const char *pred;
12618 rtx note;
12619
39a10a29
GK
12620 validate_condition_mode (code, mode);
12621
12622 /* Work out which way this really branches. We could use
12623 reverse_condition_maybe_unordered here always but this
12624 makes the resulting assembler clearer. */
12a4e8c5 12625 if (really_reversed)
de40e1df
DJ
12626 {
 12627 /* Reversal of FP compares needs care -- an ordered compare
12628 becomes an unordered compare and vice versa. */
12629 if (mode == CCFPmode)
12630 code = reverse_condition_maybe_unordered (code);
12631 else
12632 code = reverse_condition (code);
12633 }
12a4e8c5 12634
8ef65e3d 12635 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12636 {
12637 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12638 to the GT bit. */
37409796
NS
12639 switch (code)
12640 {
12641 case EQ:
12642 /* Opposite of GT. */
12643 code = GT;
12644 break;
12645
12646 case NE:
12647 code = UNLE;
12648 break;
12649
12650 default:
12651 gcc_unreachable ();
12652 }
a3170dc6
AH
12653 }
12654
39a10a29 12655 switch (code)
12a4e8c5
GK
12656 {
12657 /* Not all of these are actually distinct opcodes, but
12658 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12659 case NE: case LTGT:
12660 ccode = "ne"; break;
12661 case EQ: case UNEQ:
12662 ccode = "eq"; break;
f676971a 12663 case GE: case GEU:
50a0b056 12664 ccode = "ge"; break;
f676971a 12665 case GT: case GTU: case UNGT:
50a0b056 12666 ccode = "gt"; break;
f676971a 12667 case LE: case LEU:
50a0b056 12668 ccode = "le"; break;
f676971a 12669 case LT: case LTU: case UNLT:
50a0b056 12670 ccode = "lt"; break;
12a4e8c5
GK
12671 case UNORDERED: ccode = "un"; break;
12672 case ORDERED: ccode = "nu"; break;
12673 case UNGE: ccode = "nl"; break;
12674 case UNLE: ccode = "ng"; break;
12675 default:
37409796 12676 gcc_unreachable ();
12a4e8c5 12677 }
f676971a
EC
12678
12679 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12680 The old mnemonics don't have a way to specify this information. */
f4857b9b 12681 pred = "";
12a4e8c5
GK
12682 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12683 if (note != NULL_RTX)
12684 {
12685 /* PROB is the difference from 50%. */
12686 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12687
12688 /* Only hint for highly probable/improbable branches on newer
12689 cpus as static prediction overrides processor dynamic
12690 prediction. For older cpus we may as well always hint, but
12691 assume not taken for branches that are very close to 50% as a
12692 mispredicted taken branch is more expensive than a
f676971a 12693 mispredicted not-taken branch. */
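      /* With new-style mnemonics the hint becomes a "+" or "-" suffix on
	 the branch (e.g. "beq+" for predicted taken, "beq-" for predicted
	 not taken); see the sprintf calls further down.  */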
ec507f2d 12694 if (rs6000_always_hint
2c9e13f3
JH
12695 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12696 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12697 {
12698 if (abs (prob) > REG_BR_PROB_BASE / 20
12699 && ((prob > 0) ^ need_longbranch))
c4ad648e 12700 pred = "+";
f4857b9b
AM
12701 else
12702 pred = "-";
12703 }
12a4e8c5 12704 }
12a4e8c5
GK
12705
12706 if (label == NULL)
94a54f47 12707 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12708 else
94a54f47 12709 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12710
37c67319 12711 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12712 Assume they'd only be the first character.... */
37c67319
GK
12713 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12714 *s++ = '%';
94a54f47 12715 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12716
12717 if (label != NULL)
12718 {
12719 /* If the branch distance was too far, we may have to use an
12720 unconditional branch to go the distance. */
12721 if (need_longbranch)
44518ddd 12722 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12723 else
12724 s += sprintf (s, ",%s", label);
12725 }
12726
12727 return string;
12728}
50a0b056 12729
64022b5d 12730/* Return the string to flip the GT bit on a CR. */
423c1189 12731char *
64022b5d 12732output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12733{
12734 static char string[64];
12735 int a, b;
12736
37409796
NS
12737 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12738 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12739
64022b5d
AH
12740 /* GT bit. */
12741 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12742 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12743
12744 sprintf (string, "crnot %d,%d", a, b);
12745 return string;
12746}
12747
21213b4c
DP
12748/* Return insn index for the vector compare instruction for given CODE,
12749 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
12750 not available. */
12751
12752static int
94ff898d 12753get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12754 enum machine_mode dest_mode,
12755 enum machine_mode op_mode)
12756{
12757 if (!TARGET_ALTIVEC)
12758 return INSN_NOT_AVAILABLE;
12759
12760 switch (code)
12761 {
12762 case EQ:
12763 if (dest_mode == V16QImode && op_mode == V16QImode)
12764 return UNSPEC_VCMPEQUB;
12765 if (dest_mode == V8HImode && op_mode == V8HImode)
12766 return UNSPEC_VCMPEQUH;
12767 if (dest_mode == V4SImode && op_mode == V4SImode)
12768 return UNSPEC_VCMPEQUW;
12769 if (dest_mode == V4SImode && op_mode == V4SFmode)
12770 return UNSPEC_VCMPEQFP;
12771 break;
12772 case GE:
12773 if (dest_mode == V4SImode && op_mode == V4SFmode)
12774 return UNSPEC_VCMPGEFP;
12775 case GT:
12776 if (dest_mode == V16QImode && op_mode == V16QImode)
12777 return UNSPEC_VCMPGTSB;
12778 if (dest_mode == V8HImode && op_mode == V8HImode)
12779 return UNSPEC_VCMPGTSH;
12780 if (dest_mode == V4SImode && op_mode == V4SImode)
12781 return UNSPEC_VCMPGTSW;
12782 if (dest_mode == V4SImode && op_mode == V4SFmode)
12783 return UNSPEC_VCMPGTFP;
12784 break;
12785 case GTU:
12786 if (dest_mode == V16QImode && op_mode == V16QImode)
12787 return UNSPEC_VCMPGTUB;
12788 if (dest_mode == V8HImode && op_mode == V8HImode)
12789 return UNSPEC_VCMPGTUH;
12790 if (dest_mode == V4SImode && op_mode == V4SImode)
12791 return UNSPEC_VCMPGTUW;
12792 break;
12793 default:
12794 break;
12795 }
12796 return INSN_NOT_AVAILABLE;
12797}
12798
12799/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12800 DMODE is expected destination mode. This is a recursive function. */
12801
12802static rtx
12803rs6000_emit_vector_compare (enum rtx_code rcode,
12804 rtx op0, rtx op1,
12805 enum machine_mode dmode)
12806{
12807 int vec_cmp_insn;
12808 rtx mask;
12809 enum machine_mode dest_mode;
12810 enum machine_mode op_mode = GET_MODE (op1);
12811
37409796
NS
12812 gcc_assert (TARGET_ALTIVEC);
12813 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12814
 12815 /* Floating point vector compare instructions use destination V4SImode.
12816 Move destination to appropriate mode later. */
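  /* (The AltiVec FP compares produce an all-ones/all-zeros mask per
     element, so the comparison result is naturally an integer vector;
     it is moved to the requested mode at the end of this function.)  */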
12817 if (dmode == V4SFmode)
12818 dest_mode = V4SImode;
12819 else
12820 dest_mode = dmode;
12821
12822 mask = gen_reg_rtx (dest_mode);
12823 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12824
12825 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12826 {
12827 bool swap_operands = false;
12828 bool try_again = false;
12829 switch (rcode)
12830 {
12831 case LT:
12832 rcode = GT;
12833 swap_operands = true;
12834 try_again = true;
12835 break;
12836 case LTU:
12837 rcode = GTU;
12838 swap_operands = true;
12839 try_again = true;
12840 break;
12841 case NE:
370df7db
JC
12842 case UNLE:
12843 case UNLT:
12844 case UNGE:
12845 case UNGT:
12846 /* Invert condition and try again.
12847 e.g., A != B becomes ~(A==B). */
21213b4c 12848 {
370df7db 12849 enum rtx_code rev_code;
21213b4c 12850 enum insn_code nor_code;
d1123cde 12851 rtx eq_rtx;
370df7db
JC
12852
12853 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12854 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12855 dest_mode);
94ff898d 12856
166cdb08 12857 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12858 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12859 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12860
12861 if (dmode != dest_mode)
12862 {
12863 rtx temp = gen_reg_rtx (dest_mode);
12864 convert_move (temp, mask, 0);
12865 return temp;
12866 }
12867 return mask;
12868 }
12869 break;
12870 case GE:
12871 case GEU:
12872 case LE:
12873 case LEU:
12874 /* Try GT/GTU/LT/LTU OR EQ */
12875 {
12876 rtx c_rtx, eq_rtx;
12877 enum insn_code ior_code;
12878 enum rtx_code new_code;
12879
37409796
NS
12880 switch (rcode)
12881 {
12882 case GE:
12883 new_code = GT;
12884 break;
12885
12886 case GEU:
12887 new_code = GTU;
12888 break;
12889
12890 case LE:
12891 new_code = LT;
12892 break;
12893
12894 case LEU:
12895 new_code = LTU;
12896 break;
12897
12898 default:
12899 gcc_unreachable ();
12900 }
21213b4c
DP
12901
12902 c_rtx = rs6000_emit_vector_compare (new_code,
12903 op0, op1, dest_mode);
12904 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12905 dest_mode);
12906
166cdb08 12907 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12908 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12909 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12910 if (dmode != dest_mode)
12911 {
12912 rtx temp = gen_reg_rtx (dest_mode);
12913 convert_move (temp, mask, 0);
12914 return temp;
12915 }
12916 return mask;
12917 }
12918 break;
12919 default:
37409796 12920 gcc_unreachable ();
21213b4c
DP
12921 }
12922
12923 if (try_again)
12924 {
12925 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12926 /* You only get two chances. */
12927 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12928 }
12929
12930 if (swap_operands)
12931 {
12932 rtx tmp;
12933 tmp = op0;
12934 op0 = op1;
12935 op1 = tmp;
12936 }
12937 }
12938
915167f5
GK
12939 emit_insn (gen_rtx_SET (VOIDmode, mask,
12940 gen_rtx_UNSPEC (dest_mode,
12941 gen_rtvec (2, op0, op1),
12942 vec_cmp_insn)));
21213b4c
DP
12943 if (dmode != dest_mode)
12944 {
12945 rtx temp = gen_reg_rtx (dest_mode);
12946 convert_move (temp, mask, 0);
12947 return temp;
12948 }
12949 return mask;
12950}
12951
12952/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
 12953 valid insn doesn't exist for given mode. */
12954
12955static int
12956get_vsel_insn (enum machine_mode mode)
12957{
12958 switch (mode)
12959 {
12960 case V4SImode:
12961 return UNSPEC_VSEL4SI;
12962 break;
12963 case V4SFmode:
12964 return UNSPEC_VSEL4SF;
12965 break;
12966 case V8HImode:
12967 return UNSPEC_VSEL8HI;
12968 break;
12969 case V16QImode:
12970 return UNSPEC_VSEL16QI;
12971 break;
12972 default:
12973 return INSN_NOT_AVAILABLE;
12974 break;
12975 }
12976 return INSN_NOT_AVAILABLE;
12977}
12978
12979/* Emit vector select insn where DEST is destination using
12980 operands OP1, OP2 and MASK. */
12981
12982static void
12983rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12984{
12985 rtx t, temp;
12986 enum machine_mode dest_mode = GET_MODE (dest);
12987 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12988
12989 temp = gen_reg_rtx (dest_mode);
94ff898d 12990
bb8df8a6 12991 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 12992 select op2. */
915167f5
GK
12993 t = gen_rtx_SET (VOIDmode, temp,
12994 gen_rtx_UNSPEC (dest_mode,
12995 gen_rtvec (3, op2, op1, mask),
12996 vsel_insn_index));
21213b4c
DP
12997 emit_insn (t);
12998 emit_move_insn (dest, temp);
12999 return;
13000}
13001
94ff898d 13002/* Emit vector conditional expression.
21213b4c
DP
13003 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13004 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13005
13006int
13007rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13008 rtx cond, rtx cc_op0, rtx cc_op1)
13009{
13010 enum machine_mode dest_mode = GET_MODE (dest);
13011 enum rtx_code rcode = GET_CODE (cond);
13012 rtx mask;
13013
13014 if (!TARGET_ALTIVEC)
13015 return 0;
13016
13017 /* Get the vector mask for the given relational operations. */
13018 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13019
13020 rs6000_emit_vector_select (dest, op1, op2, mask);
13021
13022 return 1;
13023}
13024
50a0b056
GK
 13025/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
13026 operands of the last comparison is nonzero/true, FALSE_COND if it
13027 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13028
50a0b056 13029int
a2369ed3 13030rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13031{
13032 enum rtx_code code = GET_CODE (op);
13033 rtx op0 = rs6000_compare_op0;
13034 rtx op1 = rs6000_compare_op1;
13035 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13036 enum machine_mode compare_mode = GET_MODE (op0);
13037 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13038 rtx temp;
add2402e 13039 bool is_against_zero;
50a0b056 13040
a3c9585f 13041 /* These modes should always match. */
a3170dc6
AH
13042 if (GET_MODE (op1) != compare_mode
13043 /* In the isel case however, we can use a compare immediate, so
13044 op1 may be a small constant. */
13045 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13046 return 0;
178c3eff 13047 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13048 return 0;
178c3eff 13049 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13050 return 0;
13051
50a0b056 13052 /* First, work out if the hardware can do this at all, or
a3c9585f 13053 if it's too slow.... */
50a0b056 13054 if (! rs6000_compare_fp_p)
a3170dc6
AH
13055 {
13056 if (TARGET_ISEL)
13057 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13058 return 0;
13059 }
8ef65e3d 13060 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13061 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13062 return 0;
50a0b056 13063
add2402e 13064 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13065
add2402e
GK
13066 /* A floating-point subtract might overflow, underflow, or produce
13067 an inexact result, thus changing the floating-point flags, so it
13068 can't be generated if we care about that. It's safe if one side
13069 of the construct is zero, since then no subtract will be
13070 generated. */
ebb109ad 13071 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13072 && flag_trapping_math && ! is_against_zero)
13073 return 0;
13074
50a0b056
GK
13075 /* Eliminate half of the comparisons by switching operands, this
13076 makes the remaining code simpler. */
13077 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13078 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13079 {
13080 code = reverse_condition_maybe_unordered (code);
13081 temp = true_cond;
13082 true_cond = false_cond;
13083 false_cond = temp;
13084 }
13085
13086 /* UNEQ and LTGT take four instructions for a comparison with zero,
13087 it'll probably be faster to use a branch here too. */
bc9ec0e0 13088 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13089 return 0;
f676971a 13090
50a0b056
GK
13091 if (GET_CODE (op1) == CONST_DOUBLE)
13092 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13093
b6d08ca1 13094 /* We're going to try to implement comparisons by performing
50a0b056
GK
13095 a subtract, then comparing against zero. Unfortunately,
13096 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13097 know that the operand is finite and the comparison
50a0b056 13098 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13099 if (HONOR_INFINITIES (compare_mode)
50a0b056 13100 && code != GT && code != UNGE
045572c7 13101 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13102 /* Constructs of the form (a OP b ? a : b) are safe. */
13103 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13104 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13105 && ! rtx_equal_p (op1, true_cond))))
13106 return 0;
add2402e 13107
50a0b056
GK
13108 /* At this point we know we can use fsel. */
13109
13110 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13111 if (! is_against_zero)
13112 {
13113 temp = gen_reg_rtx (compare_mode);
13114 emit_insn (gen_rtx_SET (VOIDmode, temp,
13115 gen_rtx_MINUS (compare_mode, op0, op1)));
13116 op0 = temp;
13117 op1 = CONST0_RTX (compare_mode);
13118 }
50a0b056
GK
13119
13120 /* If we don't care about NaNs we can reduce some of the comparisons
13121 down to faster ones. */
bc9ec0e0 13122 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13123 switch (code)
13124 {
13125 case GT:
13126 code = LE;
13127 temp = true_cond;
13128 true_cond = false_cond;
13129 false_cond = temp;
13130 break;
13131 case UNGE:
13132 code = GE;
13133 break;
13134 case UNEQ:
13135 code = EQ;
13136 break;
13137 default:
13138 break;
13139 }
13140
13141 /* Now, reduce everything down to a GE. */
13142 switch (code)
13143 {
13144 case GE:
13145 break;
13146
13147 case LE:
3148ad6d
DJ
13148 temp = gen_reg_rtx (compare_mode);
13149 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13150 op0 = temp;
13151 break;
13152
13153 case ORDERED:
3148ad6d
DJ
13154 temp = gen_reg_rtx (compare_mode);
13155 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13156 op0 = temp;
13157 break;
13158
13159 case EQ:
3148ad6d 13160 temp = gen_reg_rtx (compare_mode);
f676971a 13161 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13162 gen_rtx_NEG (compare_mode,
13163 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13164 op0 = temp;
13165 break;
13166
13167 case UNGE:
bc9ec0e0 13168 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13169 temp = gen_reg_rtx (result_mode);
50a0b056 13170 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13171 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13172 gen_rtx_GE (VOIDmode,
13173 op0, op1),
13174 true_cond, false_cond)));
bc9ec0e0
GK
13175 false_cond = true_cond;
13176 true_cond = temp;
50a0b056 13177
3148ad6d
DJ
13178 temp = gen_reg_rtx (compare_mode);
13179 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13180 op0 = temp;
13181 break;
13182
13183 case GT:
bc9ec0e0 13184 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13185 temp = gen_reg_rtx (result_mode);
50a0b056 13186 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13187 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13188 gen_rtx_GE (VOIDmode,
13189 op0, op1),
13190 true_cond, false_cond)));
bc9ec0e0
GK
13191 true_cond = false_cond;
13192 false_cond = temp;
50a0b056 13193
3148ad6d
DJ
13194 temp = gen_reg_rtx (compare_mode);
13195 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13196 op0 = temp;
13197 break;
13198
13199 default:
37409796 13200 gcc_unreachable ();
50a0b056
GK
13201 }
13202
13203 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13204 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13205 gen_rtx_GE (VOIDmode,
13206 op0, op1),
13207 true_cond, false_cond)));
13208 return 1;
13209}
13210
a3170dc6
AH
13211/* Same as above, but for ints (isel). */
13212
13213static int
a2369ed3 13214rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13215{
13216 rtx condition_rtx, cr;
13217
 13218 /* All isel implementations thus far are 32-bit. */
13219 if (GET_MODE (rs6000_compare_op0) != SImode)
13220 return 0;
13221
13222 /* We still have to do the compare, because isel doesn't do a
13223 compare, it just looks at the CRx bits set by a previous compare
13224 instruction. */
13225 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13226 cr = XEXP (condition_rtx, 0);
13227
13228 if (GET_MODE (cr) == CCmode)
13229 emit_insn (gen_isel_signed (dest, condition_rtx,
13230 true_cond, false_cond, cr));
13231 else
13232 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13233 true_cond, false_cond, cr));
13234
13235 return 1;
13236}
13237
13238const char *
a2369ed3 13239output_isel (rtx *operands)
a3170dc6
AH
13240{
13241 enum rtx_code code;
13242
13243 code = GET_CODE (operands[1]);
13244 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13245 {
13246 PUT_CODE (operands[1], reverse_condition (code));
13247 return "isel %0,%3,%2,%j1";
13248 }
13249 else
13250 return "isel %0,%2,%3,%j1";
13251}
13252
50a0b056 13253void
a2369ed3 13254rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13255{
13256 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13257 enum rtx_code c;
50a0b056 13258 rtx target;
5dc8d536
AH
13259
13260 if (code == SMAX || code == SMIN)
13261 c = GE;
13262 else
13263 c = GEU;
13264
50a0b056 13265 if (code == SMAX || code == UMAX)
f676971a 13266 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13267 op0, op1, mode, 0);
13268 else
f676971a 13269 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13270 op1, op0, mode, 0);
37409796 13271 gcc_assert (target);
50a0b056
GK
13272 if (target != dest)
13273 emit_move_insn (dest, target);
13274}
46c07df8 13275
915167f5
GK
13276/* Emit instructions to perform a load-reserved/store-conditional operation.
13277 The operation performed is an atomic
13278 (set M (CODE:MODE M OP))
13279 If not NULL, BEFORE is atomically set to M before the operation, and
13280 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13281 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13282 Either OP or M may be wrapped in a NOT operation. */
13283
13284void
13285rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13286 rtx m, rtx op, rtx before_param, rtx after_param,
13287 bool sync_p)
13288{
13289 enum machine_mode used_mode;
13290 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13291 rtx used_m;
13292 rtvec vec;
13293 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13294 rtx shift = NULL_RTX;
bb8df8a6 13295
915167f5
GK
13296 if (sync_p)
13297 emit_insn (gen_memory_barrier ());
bb8df8a6 13298
915167f5
GK
13299 if (GET_CODE (m) == NOT)
13300 used_m = XEXP (m, 0);
13301 else
13302 used_m = m;
13303
13304 /* If this is smaller than SImode, we'll have to use SImode with
13305 adjustments. */
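  /* (For example, a QImode atomic operation is widened to an SImode
     operation on the containing aligned word: the byte is shifted into
     place, only those bits are updated, and BEFORE/AFTER are shifted
     back down at the end of this function.)  */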
13306 if (mode == QImode || mode == HImode)
13307 {
13308 rtx newop, oldop;
13309
13310 if (MEM_ALIGN (used_m) >= 32)
13311 {
13312 int ishift = 0;
13313 if (BYTES_BIG_ENDIAN)
13314 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13315
915167f5 13316 shift = GEN_INT (ishift);
c75c6d11 13317 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13318 }
13319 else
13320 {
13321 rtx addrSI, aligned_addr;
a9c9d3fa 13322 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13323
c75c6d11
JJ
13324 addrSI = gen_lowpart_common (SImode,
13325 force_reg (Pmode, XEXP (used_m, 0)));
13326 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13327 shift = gen_reg_rtx (SImode);
13328
13329 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13330 GEN_INT (shift_mask)));
13331 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13332
13333 aligned_addr = expand_binop (Pmode, and_optab,
13334 XEXP (used_m, 0),
13335 GEN_INT (-4), NULL_RTX,
13336 1, OPTAB_LIB_WIDEN);
13337 used_m = change_address (used_m, SImode, aligned_addr);
13338 set_mem_align (used_m, 32);
915167f5 13339 }
c75c6d11
JJ
13340 /* It's safe to keep the old alias set of USED_M, because
13341 the operation is atomic and only affects the original
13342 USED_M. */
13343 if (GET_CODE (m) == NOT)
13344 m = gen_rtx_NOT (SImode, used_m);
13345 else
13346 m = used_m;
915167f5
GK
13347
13348 if (GET_CODE (op) == NOT)
13349 {
13350 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13351 oldop = gen_rtx_NOT (SImode, oldop);
13352 }
13353 else
13354 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13355
915167f5
GK
13356 switch (code)
13357 {
13358 case IOR:
13359 case XOR:
13360 newop = expand_binop (SImode, and_optab,
13361 oldop, GEN_INT (imask), NULL_RTX,
13362 1, OPTAB_LIB_WIDEN);
13363 emit_insn (gen_ashlsi3 (newop, newop, shift));
13364 break;
13365
13366 case AND:
13367 newop = expand_binop (SImode, ior_optab,
13368 oldop, GEN_INT (~imask), NULL_RTX,
13369 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13370 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13371 break;
13372
13373 case PLUS:
9f0076e5 13374 case MINUS:
915167f5
GK
13375 {
13376 rtx mask;
bb8df8a6 13377
915167f5
GK
13378 newop = expand_binop (SImode, and_optab,
13379 oldop, GEN_INT (imask), NULL_RTX,
13380 1, OPTAB_LIB_WIDEN);
13381 emit_insn (gen_ashlsi3 (newop, newop, shift));
13382
13383 mask = gen_reg_rtx (SImode);
13384 emit_move_insn (mask, GEN_INT (imask));
13385 emit_insn (gen_ashlsi3 (mask, mask, shift));
13386
9f0076e5
DE
13387 if (code == PLUS)
13388 newop = gen_rtx_PLUS (SImode, m, newop);
13389 else
13390 newop = gen_rtx_MINUS (SImode, m, newop);
13391 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13392 newop = gen_rtx_IOR (SImode, newop,
13393 gen_rtx_AND (SImode,
13394 gen_rtx_NOT (SImode, mask),
13395 m));
13396 break;
13397 }
13398
13399 default:
13400 gcc_unreachable ();
13401 }
13402
a9c9d3fa
GK
13403 if (GET_CODE (m) == NOT)
13404 {
13405 rtx mask, xorm;
13406
13407 mask = gen_reg_rtx (SImode);
13408 emit_move_insn (mask, GEN_INT (imask));
13409 emit_insn (gen_ashlsi3 (mask, mask, shift));
13410
13411 xorm = gen_rtx_XOR (SImode, used_m, mask);
13412 /* Depending on the value of 'op', the XOR or the operation might
13413 be simplified away. */
13414 newop = simplify_gen_binary (code, SImode, xorm, newop);
13415 }
915167f5
GK
13416 op = newop;
13417 used_mode = SImode;
13418 before = gen_reg_rtx (used_mode);
13419 after = gen_reg_rtx (used_mode);
13420 }
13421 else
13422 {
13423 used_mode = mode;
13424 before = before_param;
13425 after = after_param;
13426
13427 if (before == NULL_RTX)
13428 before = gen_reg_rtx (used_mode);
13429 if (after == NULL_RTX)
13430 after = gen_reg_rtx (used_mode);
13431 }
bb8df8a6 13432
9f0076e5
DE
13433 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13434 && used_mode != mode)
915167f5
GK
13435 the_op = op; /* Computed above. */
13436 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13437 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13438 else
13439 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13440
13441 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13442 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13443 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13444 gen_rtx_UNSPEC (used_mode,
13445 gen_rtvec (1, the_op),
13446 UNSPEC_SYNC_OP));
915167f5
GK
13447 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13448
9f0076e5 13449 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13450 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13451 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13452 else
13453 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13454 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13455
13456 /* Shift and mask the return values properly. */
13457 if (used_mode != mode && before_param)
13458 {
13459 emit_insn (gen_lshrsi3 (before, before, shift));
13460 convert_move (before_param, before, 1);
13461 }
13462
13463 if (used_mode != mode && after_param)
13464 {
13465 emit_insn (gen_lshrsi3 (after, after, shift));
13466 convert_move (after_param, after, 1);
13467 }
13468
13469 /* The previous sequence will end with a branch that's dependent on
13470 the conditional store, so placing an isync will ensure that no
13471 other instructions (especially, no load or store instructions)
13472 can start before the atomic operation completes. */
13473 if (sync_p)
13474 emit_insn (gen_isync ());
13475}
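/* Illustrative sketch only: the word/subword arithmetic that
   rs6000_emit_sync above performs for QImode operands on a big-endian
   target, written as plain C.  The byte at offset B within an aligned
   32-bit word sits at bit position (3 - B) * 8, hence the XOR with
   0x18; MASK plays the role of IMASK shifted into place.  All names
   here are local to the sketch, not compiler interfaces.  */
static unsigned int
merge_subword_sketch (unsigned int word, unsigned int byte_val,
                      unsigned long addr)
{
  unsigned int shift = (((unsigned int) addr & 3) * 8) ^ 0x18;
  unsigned int mask = 0xffu << shift;
  return (word & ~mask) | ((byte_val << shift) & mask);
}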
13476
b52110d4
DE
13477/* A subroutine of the atomic operation splitters. Jump to LABEL if
13478 COND is true. Mark the jump as unlikely to be taken. */
13479
13480static void
13481emit_unlikely_jump (rtx cond, rtx label)
13482{
13483 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13484 rtx x;
13485
13486 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13487 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13488 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13489}
13490
13491/* A subroutine of the atomic operation splitters. Emit a load-locked
13492 instruction in MODE. */
13493
13494static void
13495emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13496{
13497 rtx (*fn) (rtx, rtx) = NULL;
13498 if (mode == SImode)
13499 fn = gen_load_locked_si;
13500 else if (mode == DImode)
13501 fn = gen_load_locked_di;
13502 emit_insn (fn (reg, mem));
13503}
13504
13505/* A subroutine of the atomic operation splitters. Emit a store-conditional
13506 instruction in MODE. */
13507
13508static void
13509emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13510{
13511 rtx (*fn) (rtx, rtx, rtx) = NULL;
13512 if (mode == SImode)
13513 fn = gen_store_conditional_si;
13514 else if (mode == DImode)
13515 fn = gen_store_conditional_di;
13516
9f0076e5 13517 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13518 if (PPC405_ERRATUM77)
13519 emit_insn (gen_memory_barrier ());
13520
13521 emit_insn (fn (res, mem, val));
13522}
13523
ea2c620c 13524/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13525 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13526 operand of the binary operator. BEFORE and AFTER are optional locations to
13527 return the value of MEM either before or after the operation. SCRATCH is
13528 a scratch register. */
13529
13530void
13531rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13532 rtx before, rtx after, rtx scratch)
13533{
13534 enum machine_mode mode = GET_MODE (mem);
13535 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13536
13537 emit_insn (gen_memory_barrier ());
13538
13539 label = gen_label_rtx ();
13540 emit_label (label);
13541 label = gen_rtx_LABEL_REF (VOIDmode, label);
13542
13543 if (before == NULL_RTX)
13544 before = scratch;
13545 emit_load_locked (mode, before, mem);
13546
13547 if (code == NOT)
13548 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13549 else if (code == AND)
13550 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13551 else
13552 x = gen_rtx_fmt_ee (code, mode, before, val);
13553
13554 if (after != NULL_RTX)
13555 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13556 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13557
13558 emit_store_conditional (mode, cond, mem, scratch);
13559
13560 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13561 emit_unlikely_jump (x, label);
13562
13563 emit_insn (gen_isync ());
13564}
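/* Illustrative sketch only: the loop structure produced by
   rs6000_split_atomic_op above, written as ordinary C.  The GCC builtin
   __sync_bool_compare_and_swap stands in for the lwarx/stwcx. pair;
   PLUS is used as the example operation.  */
static unsigned int
atomic_op_sketch (unsigned int *mem, unsigned int val,
                  unsigned int *before_out, unsigned int *after_out)
{
  unsigned int before, after;
  do
    {
      before = *mem;            /* load-locked (lwarx)  */
      after = before + val;     /* the CODE operation  */
    }
  while (!__sync_bool_compare_and_swap (mem, before, after));
                                /* store-conditional (stwcx.) + retry  */
  if (before_out)
    *before_out = before;
  if (after_out)
    *after_out = after;
  return after;
}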
13565
b52110d4
DE
13566/* Expand an atomic compare and swap operation. MEM is the memory on which
13567 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13568 value to be stored. SCRATCH is a scratch GPR. */
13569
13570void
13571rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13572 rtx scratch)
13573{
13574 enum machine_mode mode = GET_MODE (mem);
13575 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13576
13577 emit_insn (gen_memory_barrier ());
13578
13579 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13580 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13581 emit_label (XEXP (label1, 0));
13582
13583 emit_load_locked (mode, retval, mem);
13584
13585 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13586 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13587
13588 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13589 emit_unlikely_jump (x, label2);
13590
13591 emit_move_insn (scratch, newval);
13592 emit_store_conditional (mode, cond, mem, scratch);
13593
13594 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13595 emit_unlikely_jump (x, label1);
13596
13597 emit_insn (gen_isync ());
13598 emit_label (XEXP (label2, 0));
13599}
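/* Illustrative sketch only: the observable behavior of the
   compare-and-swap sequence split above, shown single-threaded; the
   real sequence gets its atomicity from the lwarx/stwcx. retry loop.  */
static unsigned int
cas_sketch (unsigned int *mem, unsigned int oldval, unsigned int newval)
{
  unsigned int retval = *mem;   /* load-locked  */
  if (retval == oldval)
    *mem = newval;              /* store-conditional  */
  return retval;                /* RETVAL always holds the old contents  */
}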
13600
13601/* Expand an atomic test and set operation. MEM is the memory on which
13602 to operate. VAL is the value to be stored. SCRATCH is a scratch GPR. */
13603
13604void
13605rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13606{
13607 enum machine_mode mode = GET_MODE (mem);
13608 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13609
13610 emit_insn (gen_memory_barrier ());
13611
13612 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13613 emit_label (XEXP (label, 0));
13614
13615 emit_load_locked (mode, retval, mem);
13616 emit_move_insn (scratch, val);
13617 emit_store_conditional (mode, cond, mem, scratch);
13618
13619 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13620 emit_unlikely_jump (x, label);
13621
13622 emit_insn (gen_isync ());
13623}
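/* Illustrative sketch only: the lock-test-and-set sequence above is an
   atomic exchange; its single-threaded equivalent is simply:  */
static unsigned int
exchange_sketch (unsigned int *mem, unsigned int val)
{
  unsigned int retval = *mem;   /* load-locked  */
  *mem = val;                   /* store-conditional  */
  return retval;
}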
13624
9fc75b97
DE
13625void
13626rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13627{
13628 enum machine_mode mode = GET_MODE (mem);
13629 rtx addrSI, align, wdst, shift, mask;
13630 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13631 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13632
13633 /* Shift amount for subword relative to aligned word. */
13634 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13635 shift = gen_reg_rtx (SImode);
13636 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13637 GEN_INT (shift_mask)));
13638 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13639
13640 /* Shift and mask old value into position within word. */
13641 oldval = convert_modes (SImode, mode, oldval, 1);
13642 oldval = expand_binop (SImode, and_optab,
13643 oldval, GEN_INT (imask), NULL_RTX,
13644 1, OPTAB_LIB_WIDEN);
13645 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13646
13647 /* Shift and mask new value into position within word. */
13648 newval = convert_modes (SImode, mode, newval, 1);
13649 newval = expand_binop (SImode, and_optab,
13650 newval, GEN_INT (imask), NULL_RTX,
13651 1, OPTAB_LIB_WIDEN);
13652 emit_insn (gen_ashlsi3 (newval, newval, shift));
13653
13654 /* Mask for insertion. */
13655 mask = gen_reg_rtx (SImode);
13656 emit_move_insn (mask, GEN_INT (imask));
13657 emit_insn (gen_ashlsi3 (mask, mask, shift));
13658
13659 /* Address of aligned word containing subword. */
13660 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13661 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13662 mem = change_address (mem, SImode, align);
13663 set_mem_align (mem, 32);
13664 MEM_VOLATILE_P (mem) = 1;
13665
13666 wdst = gen_reg_rtx (SImode);
13667 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13668 oldval, newval, mem));
13669
13670 emit_move_insn (dst, gen_lowpart (mode, wdst));
13671}
13672
13673void
13674rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13675 rtx oldval, rtx newval, rtx mem,
13676 rtx scratch)
13677{
13678 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13679
13680 emit_insn (gen_memory_barrier ());
13681 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13682 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13683 emit_label (XEXP (label1, 0));
13684
13685 emit_load_locked (SImode, scratch, mem);
13686
13687 /* Mask subword within loaded value for comparison with oldval.
13688 Use UNSPEC_AND to avoid clobber. */
13689 emit_insn (gen_rtx_SET (SImode, dest,
13690 gen_rtx_UNSPEC (SImode,
13691 gen_rtvec (2, scratch, mask),
13692 UNSPEC_AND)));
13693
13694 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13695 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13696
13697 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13698 emit_unlikely_jump (x, label2);
13699
13700 /* Clear subword within loaded value for insertion of new value. */
13701 emit_insn (gen_rtx_SET (SImode, scratch,
13702 gen_rtx_AND (SImode,
13703 gen_rtx_NOT (SImode, mask), scratch)));
13704 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13705 emit_store_conditional (SImode, cond, mem, scratch);
13706
13707 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13708 emit_unlikely_jump (x, label1);
13709
13710 emit_insn (gen_isync ());
13711 emit_label (XEXP (label2, 0));
13712}
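/* Illustrative sketch only: the masked compare-and-insert done by the
   subword compare-and-swap split above.  MASK selects the byte or
   halfword within the aligned word; OLDVAL and NEWVAL are assumed to
   have been shifted and masked into position by the expander, as in
   rs6000_expand_compare_and_swapqhi.  */
static unsigned int
casqhi_sketch (unsigned int *wordp, unsigned int mask,
               unsigned int oldval, unsigned int newval)
{
  unsigned int word = *wordp;                   /* load-locked  */
  unsigned int dest = word & mask;              /* masked subword  */
  if (dest == oldval)
    *wordp = (word & ~mask) | newval;           /* insert + store-cond.  */
  return dest;
}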
13713
13714
b52110d4 13715 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13716 multi-register moves. It will emit at most one instruction for
13717 each register that is accessed; that is, it won't emit li/lis pairs
13718 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13719 register. */
46c07df8 13720
46c07df8 13721void
a9baceb1 13722rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13723{
a9baceb1
GK
13724 /* The register number of the first register being moved. */
13725 int reg;
13726 /* The mode that is to be moved. */
13727 enum machine_mode mode;
13728 /* The mode that the move is being done in, and its size. */
13729 enum machine_mode reg_mode;
13730 int reg_mode_size;
13731 /* The number of registers that will be moved. */
13732 int nregs;
13733
13734 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13735 mode = GET_MODE (dst);
c8b622ff 13736 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13737 if (FP_REGNO_P (reg))
7393f7f8 13738 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13739 else if (ALTIVEC_REGNO_P (reg))
13740 reg_mode = V16QImode;
4d4447b5
PB
13741 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13742 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13743 else
13744 reg_mode = word_mode;
13745 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13746
37409796 13747 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13748
a9baceb1
GK
13749 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13750 {
13751 /* Move register range backwards, if we might have destructive
13752 overlap. */
13753 int i;
13754 for (i = nregs - 1; i >= 0; i--)
f676971a 13755 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13756 simplify_gen_subreg (reg_mode, dst, mode,
13757 i * reg_mode_size),
13758 simplify_gen_subreg (reg_mode, src, mode,
13759 i * reg_mode_size)));
13760 }
46c07df8
HP
13761 else
13762 {
a9baceb1
GK
13763 int i;
13764 int j = -1;
13765 bool used_update = false;
46c07df8 13766
c1e55850 13767 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13768 {
13769 rtx breg;
3a1f863f 13770
a9baceb1
GK
13771 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13772 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13773 {
13774 rtx delta_rtx;
a9baceb1 13775 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13776 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13777 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13778 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13779 emit_insn (TARGET_32BIT
13780 ? gen_addsi3 (breg, breg, delta_rtx)
13781 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13782 src = replace_equiv_address (src, breg);
3a1f863f 13783 }
d04b6e6e 13784 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13785 {
13e2e16e 13786 rtx basereg;
c1e55850
GK
13787 basereg = gen_rtx_REG (Pmode, reg);
13788 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13789 src = replace_equiv_address (src, basereg);
c1e55850 13790 }
3a1f863f 13791
0423421f
AM
13792 breg = XEXP (src, 0);
13793 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13794 breg = XEXP (breg, 0);
13795
13796 /* If the base register we are using to address memory is
13797 also a destination reg, then change that register last. */
13798 if (REG_P (breg)
13799 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13800 && REGNO (breg) < REGNO (dst) + nregs)
13801 j = REGNO (breg) - REGNO (dst);
c4ad648e 13802 }
46c07df8 13803
a9baceb1 13804 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13805 {
13806 rtx breg;
13807
a9baceb1
GK
13808 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13809 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13810 {
13811 rtx delta_rtx;
a9baceb1 13812 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13813 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13814 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13815 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13816
13817 /* We have to update the breg before doing the store.
13818 Use store with update, if available. */
13819
13820 if (TARGET_UPDATE)
13821 {
a9baceb1 13822 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13823 emit_insn (TARGET_32BIT
13824 ? (TARGET_POWERPC64
13825 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13826 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13827 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13828 used_update = true;
3a1f863f
DE
13829 }
13830 else
a9baceb1
GK
13831 emit_insn (TARGET_32BIT
13832 ? gen_addsi3 (breg, breg, delta_rtx)
13833 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13834 dst = replace_equiv_address (dst, breg);
3a1f863f 13835 }
37409796 13836 else
d04b6e6e 13837 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13838 }
13839
46c07df8 13840 for (i = 0; i < nregs; i++)
f676971a 13841 {
3a1f863f
DE
13842 /* Calculate index to next subword. */
13843 ++j;
f676971a 13844 if (j == nregs)
3a1f863f 13845 j = 0;
46c07df8 13846
112cdef5 13847 /* If the compiler already emitted the move of the first word by
a9baceb1 13848 a store with update, there is no need to do anything. */
3a1f863f 13849 if (j == 0 && used_update)
a9baceb1 13850 continue;
f676971a 13851
a9baceb1
GK
13852 emit_insn (gen_rtx_SET (VOIDmode,
13853 simplify_gen_subreg (reg_mode, dst, mode,
13854 j * reg_mode_size),
13855 simplify_gen_subreg (reg_mode, src, mode,
13856 j * reg_mode_size)));
3a1f863f 13857 }
46c07df8
HP
13858 }
13859}
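/* Illustrative sketch only: the overlap rule used by
   rs6000_split_multireg_move above, in plain C.  When the source
   register range starts below the destination range, the copy runs
   from the highest word down so no source word is clobbered before it
   has been read; otherwise it runs forward.  */
static void
copy_words_sketch (unsigned int *dst, const unsigned int *src, int nregs)
{
  int i;
  if (dst > src)
    for (i = nregs - 1; i >= 0; i--)
      dst[i] = src[i];
  else
    for (i = 0; i < nregs; i++)
      dst[i] = src[i];
}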
13860
12a4e8c5 13861\f
a4f6c312
SS
13862/* This page contains routines that are used to determine what the
13863 function prologue and epilogue code will do and write them out. */
9878760c 13864
a4f6c312
SS
13865/* Return the first fixed-point register that is required to be
13866 saved. 32 if none. */
9878760c
RK
13867
13868int
863d938c 13869first_reg_to_save (void)
9878760c
RK
13870{
13871 int first_reg;
13872
13873 /* Find lowest numbered live register. */
13874 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13875 if (df_regs_ever_live_p (first_reg)
a38d360d 13876 && (! call_used_regs[first_reg]
1db02437 13877 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13878 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13879 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13880 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13881 break;
13882
ee890fe2 13883#if TARGET_MACHO
93638d7a
AM
13884 if (flag_pic
13885 && current_function_uses_pic_offset_table
13886 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13887 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13888#endif
13889
9878760c
RK
13890 return first_reg;
13891}
13892
13893/* Similar, for FP regs. */
13894
13895int
863d938c 13896first_fp_reg_to_save (void)
9878760c
RK
13897{
13898 int first_reg;
13899
13900 /* Find lowest numbered live register. */
13901 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13902 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13903 break;
13904
13905 return first_reg;
13906}
00b960c7
AH
13907
13908/* Similar, for AltiVec regs. */
13909
13910static int
863d938c 13911first_altivec_reg_to_save (void)
00b960c7
AH
13912{
13913 int i;
13914
13915 /* Stack frame remains as is unless we are in AltiVec ABI. */
13916 if (! TARGET_ALTIVEC_ABI)
13917 return LAST_ALTIVEC_REGNO + 1;
13918
22fa69da 13919 /* On Darwin, the unwind routines are compiled without
982afe02 13920 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13921 altivec registers when necessary. */
13922 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13923 && ! TARGET_ALTIVEC)
13924 return FIRST_ALTIVEC_REGNO + 20;
13925
00b960c7
AH
13926 /* Find lowest numbered live register. */
13927 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13928 if (df_regs_ever_live_p (i))
00b960c7
AH
13929 break;
13930
13931 return i;
13932}
13933
13934/* Return a 32-bit mask of the AltiVec registers we need to set in
13935 VRSAVE. Bit n of the return value is 1 if Vn is live. Bits are
13936 numbered big-endian, so bit 0 is the MSB of the 32-bit word. */
13937
13938static unsigned int
863d938c 13939compute_vrsave_mask (void)
00b960c7
AH
13940{
13941 unsigned int i, mask = 0;
13942
22fa69da 13943 /* On Darwin, the unwind routines are compiled without
982afe02 13944 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13945 call-saved altivec registers when necessary. */
13946 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13947 && ! TARGET_ALTIVEC)
13948 mask |= 0xFFF;
13949
00b960c7
AH
13950 /* First, find out if we use _any_ altivec registers. */
13951 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13952 if (df_regs_ever_live_p (i))
00b960c7
AH
13953 mask |= ALTIVEC_REG_BIT (i);
13954
13955 if (mask == 0)
13956 return mask;
13957
00b960c7
AH
13958 /* Next, remove the argument registers from the set. These must
13959 be in the VRSAVE mask set by the caller, so we don't need to add
13960 them in again. More importantly, the mask we compute here is
13961 used to generate CLOBBERs in the set_vrsave insn, and we do not
13962 wish the argument registers to die. */
a6cf80f2 13963 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13964 mask &= ~ALTIVEC_REG_BIT (i);
13965
13966 /* Similarly, remove the return value from the set. */
13967 {
13968 bool yes = false;
13969 diddle_return_value (is_altivec_return_reg, &yes);
13970 if (yes)
13971 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13972 }
13973
13974 return mask;
13975}
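/* Illustrative sketch only: the bit layout assumed by
   compute_vrsave_mask above.  Vector register N is represented by bit
   (31 - N) of the VRSAVE word, so V0 maps to the most significant bit;
   this mirrors what ALTIVEC_REG_BIT is expected to produce.  */
static unsigned int
vrsave_bit_sketch (int vr)      /* VR in the range 0..31.  */
{
  return 0x80000000u >> vr;
}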
13976
d62294f5 13977/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13978 size of prologues/epilogues by calling our own save/restore-the-world
13979 routines. */
d62294f5
FJ
13980
13981static void
f57fe068
AM
13982compute_save_world_info (rs6000_stack_t *info_ptr)
13983{
13984 info_ptr->world_save_p = 1;
13985 info_ptr->world_save_p
13986 = (WORLD_SAVE_P (info_ptr)
13987 && DEFAULT_ABI == ABI_DARWIN
13988 && ! (current_function_calls_setjmp && flag_exceptions)
13989 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13990 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13991 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13992 && info_ptr->cr_save_p);
f676971a 13993
d62294f5
FJ
13994 /* This will not work in conjunction with sibcalls. Make sure there
13995 are none. (This check is expensive, but seldom executed.) */
f57fe068 13996 if (WORLD_SAVE_P (info_ptr))
f676971a 13997 {
d62294f5
FJ
13998 rtx insn;
13999 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14000 if ( GET_CODE (insn) == CALL_INSN
14001 && SIBLING_CALL_P (insn))
14002 {
14003 info_ptr->world_save_p = 0;
14004 break;
14005 }
d62294f5 14006 }
f676971a 14007
f57fe068 14008 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14009 {
14010 /* Even if we're not touching VRsave, make sure there's room on the
14011 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14012 will attempt to save it. */
d62294f5
FJ
14013 info_ptr->vrsave_size = 4;
14014
14015 /* "Save" the VRsave register too if we're saving the world. */
14016 if (info_ptr->vrsave_mask == 0)
c4ad648e 14017 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14018
14019 /* Because the Darwin register save/restore routines only handle
c4ad648e 14020 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14021 check. */
37409796
NS
14022 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14023 && (info_ptr->first_altivec_reg_save
14024 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14025 }
f676971a 14026 return;
d62294f5
FJ
14027}
14028
14029
00b960c7 14030static void
a2369ed3 14031is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14032{
14033 bool *yes = (bool *) xyes;
14034 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14035 *yes = true;
14036}
14037
4697a36c
MM
14038\f
14039/* Calculate the stack information for the current function. This is
14040 complicated by having two separate calling sequences, the AIX calling
14041 sequence and the V.4 calling sequence.
14042
592696dd 14043 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14044 32-bit 64-bit
4697a36c 14045 SP----> +---------------------------------------+
a260abc9 14046 | back chain to caller | 0 0
4697a36c 14047 +---------------------------------------+
a260abc9 14048 | saved CR | 4 8 (8-11)
4697a36c 14049 +---------------------------------------+
a260abc9 14050 | saved LR | 8 16
4697a36c 14051 +---------------------------------------+
a260abc9 14052 | reserved for compilers | 12 24
4697a36c 14053 +---------------------------------------+
a260abc9 14054 | reserved for binders | 16 32
4697a36c 14055 +---------------------------------------+
a260abc9 14056 | saved TOC pointer | 20 40
4697a36c 14057 +---------------------------------------+
a260abc9 14058 | Parameter save area (P) | 24 48
4697a36c 14059 +---------------------------------------+
a260abc9 14060 | Alloca space (A) | 24+P etc.
802a0058 14061 +---------------------------------------+
a7df97e6 14062 | Local variable space (L) | 24+P+A
4697a36c 14063 +---------------------------------------+
a7df97e6 14064 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14065 +---------------------------------------+
00b960c7
AH
14066 | Save area for AltiVec registers (W) | 24+P+A+L+X
14067 +---------------------------------------+
14068 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14069 +---------------------------------------+
14070 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14071 +---------------------------------------+
00b960c7
AH
14072 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14073 +---------------------------------------+
14074 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14075 +---------------------------------------+
14076 old SP->| back chain to caller's caller |
14077 +---------------------------------------+
14078
5376a30c
KR
14079 The required alignment for AIX configurations is two words (i.e., 8
14080 or 16 bytes).
14081
14082
4697a36c
MM
14083 V.4 stack frames look like:
14084
14085 SP----> +---------------------------------------+
14086 | back chain to caller | 0
14087 +---------------------------------------+
5eb387b8 14088 | caller's saved LR | 4
4697a36c
MM
14089 +---------------------------------------+
14090 | Parameter save area (P) | 8
14091 +---------------------------------------+
a7df97e6 14092 | Alloca space (A) | 8+P
f676971a 14093 +---------------------------------------+
a7df97e6 14094 | Varargs save area (V) | 8+P+A
f676971a 14095 +---------------------------------------+
a7df97e6 14096 | Local variable space (L) | 8+P+A+V
f676971a 14097 +---------------------------------------+
a7df97e6 14098 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14099 +---------------------------------------+
00b960c7
AH
14100 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14101 +---------------------------------------+
14102 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14103 +---------------------------------------+
14104 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14105 +---------------------------------------+
c4ad648e
AM
14106 | SPE: area for 64-bit GP registers |
14107 +---------------------------------------+
14108 | SPE alignment padding |
14109 +---------------------------------------+
00b960c7 14110 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14111 +---------------------------------------+
00b960c7 14112 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14113 +---------------------------------------+
00b960c7 14114 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14115 +---------------------------------------+
14116 old SP->| back chain to caller's caller |
14117 +---------------------------------------+
b6c9286a 14118
5376a30c
KR
14119 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14120 given. (But note below and in sysv4.h that we require only 8 and
14121 may round up the size of our stack frame anyways. The historical
14122 reason is early versions of powerpc-linux which didn't properly
14123 align the stack at program startup. A happy side-effect is that
14124 -mno-eabi libraries can be used with -meabi programs.)
14125
50d440bc 14126 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14127 the stack alignment requirements may differ. If -mno-eabi is not
14128 given, the required stack alignment is 8 bytes; if -mno-eabi is
14129 given, the required alignment is 16 bytes. (But see V.4 comment
14130 above.) */
4697a36c 14131
61b2fbe7
MM
14132#ifndef ABI_STACK_BOUNDARY
14133#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14134#endif
14135
d1d0c603 14136static rs6000_stack_t *
863d938c 14137rs6000_stack_info (void)
4697a36c 14138{
022123e6 14139 static rs6000_stack_t info;
4697a36c 14140 rs6000_stack_t *info_ptr = &info;
327e5343 14141 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14142 int ehrd_size;
64045029 14143 int save_align;
8070c91a 14144 int first_gp;
44688022 14145 HOST_WIDE_INT non_fixed_size;
4697a36c 14146
022123e6 14147 memset (&info, 0, sizeof (info));
4697a36c 14148
c19de7aa
AH
14149 if (TARGET_SPE)
14150 {
14151 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14152 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14153 cfun->machine->insn_chain_scanned_p
14154 = spe_func_has_64bit_regs_p () + 1;
14155 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14156 }
14157
a4f6c312 14158 /* Select which calling sequence. */
178274da 14159 info_ptr->abi = DEFAULT_ABI;
9878760c 14160
a4f6c312 14161 /* Calculate which registers need to be saved & save area size. */
4697a36c 14162 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14163 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14164 even if it currently looks like we won't. Reload may need it to
14165 get at a constant; if so, it will have already created a constant
14166 pool entry for it. */
2bfcf297 14167 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14168 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14169 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14170 && current_function_uses_const_pool
1db02437 14171 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14172 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14173 else
8070c91a
DJ
14174 first_gp = info_ptr->first_gp_reg_save;
14175
14176 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14177
a3170dc6
AH
14178 /* For the SPE, we have an additional upper 32-bits on each GPR.
14179 Ideally we should save the entire 64-bits only when the upper
14180 half is used in SIMD instructions. Since we only record
14181 registers live (not the size they are used in), this proves
14182 difficult because we'd have to traverse the instruction chain at
14183 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14184 so we opt to save all the GPRs in 64 bits if even one register
14185 gets used in 64 bits. Otherwise, all the registers in the frame
14186 get saved in 32 bits.
a3170dc6 14187
c19de7aa 14188 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14189 traditional GP save area will be empty. */
c19de7aa 14190 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14191 info_ptr->gp_size = 0;
14192
4697a36c
MM
14193 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14194 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14195
00b960c7
AH
14196 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14197 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14198 - info_ptr->first_altivec_reg_save);
14199
592696dd 14200 /* Does this function call anything? */
71f123ca
FS
14201 info_ptr->calls_p = (! current_function_is_leaf
14202 || cfun->machine->ra_needs_full_frame);
b6c9286a 14203
a4f6c312 14204 /* Determine if we need to save the link register. */
022123e6
AM
14205 if ((DEFAULT_ABI == ABI_AIX
14206 && current_function_profile
14207 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14208#ifdef TARGET_RELOCATABLE
14209 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14210#endif
14211 || (info_ptr->first_fp_reg_save != 64
14212 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14213 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14214 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14215 || info_ptr->calls_p
14216 || rs6000_ra_ever_killed ())
4697a36c
MM
14217 {
14218 info_ptr->lr_save_p = 1;
1de43f85 14219 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14220 }
14221
9ebbca7d 14222 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14223 if (df_regs_ever_live_p (CR2_REGNO)
14224 || df_regs_ever_live_p (CR3_REGNO)
14225 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14226 {
14227 info_ptr->cr_save_p = 1;
178274da 14228 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14229 info_ptr->cr_size = reg_size;
14230 }
14231
83720594
RH
14232 /* If the current function calls __builtin_eh_return, then we need
14233 to allocate stack space for registers that will hold data for
14234 the exception handler. */
14235 if (current_function_calls_eh_return)
14236 {
14237 unsigned int i;
14238 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14239 continue;
a3170dc6
AH
14240
14241 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14242 ehrd_size = i * (TARGET_SPE_ABI
14243 && info_ptr->spe_64bit_regs_used != 0
14244 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14245 }
14246 else
14247 ehrd_size = 0;
14248
592696dd 14249 /* Determine various sizes. */
4697a36c
MM
14250 info_ptr->reg_size = reg_size;
14251 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14252 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14253 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14254 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14255 if (FRAME_GROWS_DOWNWARD)
14256 info_ptr->vars_size
5b667039
JJ
14257 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14258 + info_ptr->parm_size,
7d5175e1 14259 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14260 - (info_ptr->fixed_size + info_ptr->vars_size
14261 + info_ptr->parm_size);
00b960c7 14262
c19de7aa 14263 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14264 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14265 else
14266 info_ptr->spe_gp_size = 0;
14267
4d774ff8
HP
14268 if (TARGET_ALTIVEC_ABI)
14269 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14270 else
4d774ff8
HP
14271 info_ptr->vrsave_mask = 0;
14272
14273 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14274 info_ptr->vrsave_size = 4;
14275 else
14276 info_ptr->vrsave_size = 0;
b6c9286a 14277
d62294f5
FJ
14278 compute_save_world_info (info_ptr);
14279
592696dd 14280 /* Calculate the offsets. */
178274da 14281 switch (DEFAULT_ABI)
4697a36c 14282 {
b6c9286a 14283 case ABI_NONE:
24d304eb 14284 default:
37409796 14285 gcc_unreachable ();
b6c9286a
MM
14286
14287 case ABI_AIX:
ee890fe2 14288 case ABI_DARWIN:
b6c9286a
MM
14289 info_ptr->fp_save_offset = - info_ptr->fp_size;
14290 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14291
14292 if (TARGET_ALTIVEC_ABI)
14293 {
14294 info_ptr->vrsave_save_offset
14295 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14296
982afe02 14297 /* Align stack so vector save area is on a quadword boundary.
9278121c 14298 The padding goes above the vectors. */
00b960c7
AH
14299 if (info_ptr->altivec_size != 0)
14300 info_ptr->altivec_padding_size
9278121c 14301 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14302 else
14303 info_ptr->altivec_padding_size = 0;
14304
14305 info_ptr->altivec_save_offset
14306 = info_ptr->vrsave_save_offset
14307 - info_ptr->altivec_padding_size
14308 - info_ptr->altivec_size;
9278121c
GK
14309 gcc_assert (info_ptr->altivec_size == 0
14310 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14311
14312 /* Adjust for AltiVec case. */
14313 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14314 }
14315 else
14316 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14317 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14318 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14319 break;
14320
14321 case ABI_V4:
b6c9286a
MM
14322 info_ptr->fp_save_offset = - info_ptr->fp_size;
14323 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14324 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14325
c19de7aa 14326 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14327 {
14328 /* Align stack so SPE GPR save area is aligned on a
14329 double-word boundary. */
14330 if (info_ptr->spe_gp_size != 0)
14331 info_ptr->spe_padding_size
14332 = 8 - (-info_ptr->cr_save_offset % 8);
14333 else
14334 info_ptr->spe_padding_size = 0;
14335
14336 info_ptr->spe_gp_save_offset
14337 = info_ptr->cr_save_offset
14338 - info_ptr->spe_padding_size
14339 - info_ptr->spe_gp_size;
14340
14341 /* Adjust for SPE case. */
022123e6 14342 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14343 }
a3170dc6 14344 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14345 {
14346 info_ptr->vrsave_save_offset
14347 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14348
14349 /* Align stack so vector save area is on a quadword boundary. */
14350 if (info_ptr->altivec_size != 0)
14351 info_ptr->altivec_padding_size
14352 = 16 - (-info_ptr->vrsave_save_offset % 16);
14353 else
14354 info_ptr->altivec_padding_size = 0;
14355
14356 info_ptr->altivec_save_offset
14357 = info_ptr->vrsave_save_offset
14358 - info_ptr->altivec_padding_size
14359 - info_ptr->altivec_size;
14360
14361 /* Adjust for AltiVec case. */
022123e6 14362 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14363 }
14364 else
022123e6
AM
14365 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14366 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14367 info_ptr->lr_save_offset = reg_size;
14368 break;
4697a36c
MM
14369 }
14370
64045029 14371 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14372 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14373 + info_ptr->gp_size
14374 + info_ptr->altivec_size
14375 + info_ptr->altivec_padding_size
a3170dc6
AH
14376 + info_ptr->spe_gp_size
14377 + info_ptr->spe_padding_size
00b960c7
AH
14378 + ehrd_size
14379 + info_ptr->cr_size
022123e6 14380 + info_ptr->vrsave_size,
64045029 14381 save_align);
00b960c7 14382
44688022 14383 non_fixed_size = (info_ptr->vars_size
ff381587 14384 + info_ptr->parm_size
5b667039 14385 + info_ptr->save_size);
ff381587 14386
44688022
AM
14387 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14388 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14389
14390 /* Determine if we need to allocate any stack frame:
14391
a4f6c312
SS
14392 For AIX we need to push the stack if a frame pointer is needed
14393 (because the stack might be dynamically adjusted), if we are
14394 debugging, if we make calls, or if the sum of fp_save, gp_save,
14395 and local variables are more than the space needed to save all
14396 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14397 + 18*8 = 288 (GPR13 reserved).
ff381587 14398
a4f6c312
SS
14399 For V.4 we don't have the stack cushion that AIX uses, but assume
14400 that the debugger can handle stackless frames. */
ff381587
MM
14401
14402 if (info_ptr->calls_p)
14403 info_ptr->push_p = 1;
14404
178274da 14405 else if (DEFAULT_ABI == ABI_V4)
44688022 14406 info_ptr->push_p = non_fixed_size != 0;
ff381587 14407
178274da
AM
14408 else if (frame_pointer_needed)
14409 info_ptr->push_p = 1;
14410
14411 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14412 info_ptr->push_p = 1;
14413
ff381587 14414 else
44688022 14415 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14416
a4f6c312 14417 /* Zero offsets if we're not saving those registers. */
8dda1a21 14418 if (info_ptr->fp_size == 0)
4697a36c
MM
14419 info_ptr->fp_save_offset = 0;
14420
8dda1a21 14421 if (info_ptr->gp_size == 0)
4697a36c
MM
14422 info_ptr->gp_save_offset = 0;
14423
00b960c7
AH
14424 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14425 info_ptr->altivec_save_offset = 0;
14426
14427 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14428 info_ptr->vrsave_save_offset = 0;
14429
c19de7aa
AH
14430 if (! TARGET_SPE_ABI
14431 || info_ptr->spe_64bit_regs_used == 0
14432 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14433 info_ptr->spe_gp_save_offset = 0;
14434
c81fc13e 14435 if (! info_ptr->lr_save_p)
4697a36c
MM
14436 info_ptr->lr_save_offset = 0;
14437
c81fc13e 14438 if (! info_ptr->cr_save_p)
4697a36c
MM
14439 info_ptr->cr_save_offset = 0;
14440
14441 return info_ptr;
14442}
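/* Illustrative sketch only: the "round up to a multiple" operation that
   RS6000_ALIGN is assumed to perform on the sizes combined in
   rs6000_stack_info above (see rs6000.h for the real macro).  For
   example, rounding a 100-byte save area to a 16-byte boundary gives
   112.  */
static int
round_up_sketch (int size, int align)
{
  return (size + align - 1) / align * align;
}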
14443
c19de7aa
AH
14444/* Return true if the current function uses any GPRs in 64-bit SIMD
14445 mode. */
14446
14447static bool
863d938c 14448spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14449{
14450 rtx insns, insn;
14451
14452 /* Functions that save and restore all the call-saved registers will
14453 need to save/restore the registers in 64-bits. */
14454 if (current_function_calls_eh_return
14455 || current_function_calls_setjmp
14456 || current_function_has_nonlocal_goto)
14457 return true;
14458
14459 insns = get_insns ();
14460
14461 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14462 {
14463 if (INSN_P (insn))
14464 {
14465 rtx i;
14466
b5a5beb9
AH
14467 /* FIXME: This should be implemented with attributes...
14468
14469 (set_attr "spe64" "true")....then,
14470 if (get_spe64(insn)) return true;
14471
14472 It's the only reliable way to do the stuff below. */
14473
c19de7aa 14474 i = PATTERN (insn);
f82f556d
AH
14475 if (GET_CODE (i) == SET)
14476 {
14477 enum machine_mode mode = GET_MODE (SET_SRC (i));
14478
14479 if (SPE_VECTOR_MODE (mode))
14480 return true;
4d4447b5
PB
14481 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14482 || mode == DDmode || mode == TDmode))
f82f556d
AH
14483 return true;
14484 }
c19de7aa
AH
14485 }
14486 }
14487
14488 return false;
14489}
14490
d1d0c603 14491static void
a2369ed3 14492debug_stack_info (rs6000_stack_t *info)
9878760c 14493{
d330fd93 14494 const char *abi_string;
24d304eb 14495
c81fc13e 14496 if (! info)
4697a36c
MM
14497 info = rs6000_stack_info ();
14498
14499 fprintf (stderr, "\nStack information for function %s:\n",
14500 ((current_function_decl && DECL_NAME (current_function_decl))
14501 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14502 : "<unknown>"));
14503
24d304eb
RK
14504 switch (info->abi)
14505 {
b6c9286a
MM
14506 default: abi_string = "Unknown"; break;
14507 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14508 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14509 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14510 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14511 }
14512
14513 fprintf (stderr, "\tABI = %5s\n", abi_string);
14514
00b960c7
AH
14515 if (TARGET_ALTIVEC_ABI)
14516 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14517
a3170dc6
AH
14518 if (TARGET_SPE_ABI)
14519 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14520
4697a36c
MM
14521 if (info->first_gp_reg_save != 32)
14522 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14523
14524 if (info->first_fp_reg_save != 64)
14525 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14526
00b960c7
AH
14527 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14528 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14529 info->first_altivec_reg_save);
14530
4697a36c
MM
14531 if (info->lr_save_p)
14532 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14533
4697a36c
MM
14534 if (info->cr_save_p)
14535 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14536
00b960c7
AH
14537 if (info->vrsave_mask)
14538 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14539
4697a36c
MM
14540 if (info->push_p)
14541 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14542
14543 if (info->calls_p)
14544 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14545
4697a36c
MM
14546 if (info->gp_save_offset)
14547 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14548
14549 if (info->fp_save_offset)
14550 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14551
00b960c7
AH
14552 if (info->altivec_save_offset)
14553 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14554 info->altivec_save_offset);
14555
a3170dc6
AH
14556 if (info->spe_gp_save_offset)
14557 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14558 info->spe_gp_save_offset);
14559
00b960c7
AH
14560 if (info->vrsave_save_offset)
14561 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14562 info->vrsave_save_offset);
14563
4697a36c
MM
14564 if (info->lr_save_offset)
14565 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14566
14567 if (info->cr_save_offset)
14568 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14569
14570 if (info->varargs_save_offset)
14571 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14572
14573 if (info->total_size)
d1d0c603
JJ
14574 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14575 info->total_size);
4697a36c 14576
4697a36c 14577 if (info->vars_size)
d1d0c603
JJ
14578 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14579 info->vars_size);
4697a36c
MM
14580
14581 if (info->parm_size)
14582 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14583
14584 if (info->fixed_size)
14585 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14586
14587 if (info->gp_size)
14588 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14589
a3170dc6
AH
14590 if (info->spe_gp_size)
14591 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14592
4697a36c
MM
14593 if (info->fp_size)
14594 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14595
00b960c7
AH
14596 if (info->altivec_size)
14597 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14598
14599 if (info->vrsave_size)
14600 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14601
14602 if (info->altivec_padding_size)
14603 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14604 info->altivec_padding_size);
14605
a3170dc6
AH
14606 if (info->spe_padding_size)
14607 fprintf (stderr, "\tspe_padding_size = %5d\n",
14608 info->spe_padding_size);
14609
4697a36c
MM
14610 if (info->cr_size)
14611 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14612
14613 if (info->save_size)
14614 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14615
14616 if (info->reg_size != 4)
14617 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14618
14619 fprintf (stderr, "\n");
9878760c 14620}
71f123ca
FS
14621
14622rtx
a2369ed3 14623rs6000_return_addr (int count, rtx frame)
71f123ca 14624{
a4f6c312
SS
14625 /* Currently we don't optimize very well between prolog and body
14626 code and for PIC code the code can be actually quite bad, so
14627 don't try to be too clever here. */
f1384257 14628 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14629 {
14630 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14631
14632 return
14633 gen_rtx_MEM
14634 (Pmode,
14635 memory_address
14636 (Pmode,
14637 plus_constant (copy_to_reg
14638 (gen_rtx_MEM (Pmode,
14639 memory_address (Pmode, frame))),
14640 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14641 }
14642
8c29550d 14643 cfun->machine->ra_need_lr = 1;
1de43f85 14644 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14645}
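/* Illustrative sketch only: the load built by rs6000_return_addr above
   when a full frame is required -- follow the back chain stored at
   FRAME, then read the saved LR within the caller's frame.  The
   constant 8 is purely illustrative; the real displacement is
   RETURN_ADDRESS_OFFSET.  */
static void *
return_addr_sketch (void **frame)
{
  char *callers_frame = *(char **) frame;       /* back chain to caller  */
  return *(void **) (callers_frame + 8);        /* saved LR slot  */
}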
14646
5e1bf043
DJ
14647/* Say whether a function is a candidate for sibcall handling or not.
14648 We do not allow indirect calls to be optimized into sibling calls.
14649 Also, we can't do it if there are any vector parameters; there's
14650 nowhere to put the VRsave code so it works; note that functions with
14651 vector parameters are required to have a prototype, so the argument
14652 type info must be available here. (The tail recursion case can work
14653 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14654static bool
a2369ed3 14655rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14656{
14657 tree type;
4977bab6 14658 if (decl)
5e1bf043
DJ
14659 {
14660 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14661 {
4977bab6 14662 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14663 type; type = TREE_CHAIN (type))
14664 {
c15b529f 14665 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14666 return false;
5e1bf043 14667 }
c4ad648e 14668 }
5e1bf043 14669 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14670 || ((*targetm.binds_local_p) (decl)
14671 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14672 {
4977bab6 14673 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14674
14675 if (!lookup_attribute ("longcall", attr_list)
14676 || lookup_attribute ("shortcall", attr_list))
4977bab6 14677 return true;
2bcc50d0 14678 }
5e1bf043 14679 }
4977bab6 14680 return false;
5e1bf043
DJ
14681}
14682
e7e64a25
AS
14683/* NULL if INSN is valid within a low-overhead loop.
14684 Otherwise return why doloop cannot be applied.
9419649c
DE
14685 PowerPC uses the COUNT register for branch on table instructions. */
14686
e7e64a25 14687static const char *
3101faab 14688rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14689{
14690 if (CALL_P (insn))
e7e64a25 14691 return "Function call in the loop.";
9419649c
DE
14692
14693 if (JUMP_P (insn)
14694 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14695 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14696 return "Computed branch in the loop.";
9419649c 14697
e7e64a25 14698 return NULL;
9419649c
DE
14699}
14700
71f123ca 14701static int
863d938c 14702rs6000_ra_ever_killed (void)
71f123ca
FS
14703{
14704 rtx top;
5e1bf043
DJ
14705 rtx reg;
14706 rtx insn;
71f123ca 14707
dd292d0a 14708 if (current_function_is_thunk)
71f123ca 14709 return 0;
eb0424da 14710
36f7e964
AH
14711 /* regs_ever_live has LR marked as used if any sibcalls are present,
14712 but this should not force saving and restoring in the
14713 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14714 clobbers LR, so that is inappropriate. */
36f7e964 14715
5e1bf043
DJ
14716 /* Also, the prologue can generate a store into LR that
14717 doesn't really count, like this:
36f7e964 14718
5e1bf043
DJ
14719 move LR->R0
14720 bcl to set PIC register
14721 move LR->R31
14722 move R0->LR
36f7e964
AH
14723
14724 When we're called from the epilogue, we need to avoid counting
14725 this as a store. */
f676971a 14726
71f123ca
FS
14727 push_topmost_sequence ();
14728 top = get_insns ();
14729 pop_topmost_sequence ();
1de43f85 14730 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14731
5e1bf043
DJ
14732 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14733 {
14734 if (INSN_P (insn))
14735 {
022123e6
AM
14736 if (CALL_P (insn))
14737 {
14738 if (!SIBLING_CALL_P (insn))
14739 return 1;
14740 }
1de43f85 14741 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14742 return 1;
36f7e964
AH
14743 else if (set_of (reg, insn) != NULL_RTX
14744 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14745 return 1;
14746 }
14747 }
14748 return 0;
71f123ca 14749}
4697a36c 14750\f
9ebbca7d 14751/* Emit instructions needed to load the TOC register.
c7ca610e 14752 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14753 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14754
14755void
a2369ed3 14756rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14757{
6fb5fa3c 14758 rtx dest;
1db02437 14759 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14760
7f970b70 14761 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14762 {
7f970b70 14763 char buf[30];
e65a3857 14764 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14765
14766 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14767 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14768 if (flag_pic == 2)
14769 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14770 else
14771 got = rs6000_got_sym ();
14772 tmp1 = tmp2 = dest;
14773 if (!fromprolog)
14774 {
14775 tmp1 = gen_reg_rtx (Pmode);
14776 tmp2 = gen_reg_rtx (Pmode);
14777 }
6fb5fa3c
DB
14778 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14779 emit_move_insn (tmp1,
1de43f85 14780 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14781 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14782 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14783 }
14784 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14785 {
6fb5fa3c 14786 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14787 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14788 }
14789 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14790 {
14791 char buf[30];
20b71b17
AM
14792 rtx temp0 = (fromprolog
14793 ? gen_rtx_REG (Pmode, 0)
14794 : gen_reg_rtx (Pmode));
20b71b17 14795
20b71b17
AM
14796 if (fromprolog)
14797 {
ccbca5e4 14798 rtx symF, symL;
38c1f2d7 14799
20b71b17
AM
14800 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14801 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14802
20b71b17
AM
14803 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14804 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14805
6fb5fa3c
DB
14806 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14807 emit_move_insn (dest,
1de43f85 14808 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14809 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14810 }
14811 else
20b71b17
AM
14812 {
14813 rtx tocsym;
20b71b17
AM
14814
14815 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14816 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14817 emit_move_insn (dest,
1de43f85 14818 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14819 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14820 }
6fb5fa3c 14821 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14822 }
20b71b17
AM
14823 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14824 {
14825 /* This is for AIX code running in non-PIC ELF32. */
14826 char buf[30];
14827 rtx realsym;
14828 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14829 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14830
6fb5fa3c
DB
14831 emit_insn (gen_elf_high (dest, realsym));
14832 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14833 }
37409796 14834 else
9ebbca7d 14835 {
37409796 14836 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14837
9ebbca7d 14838 if (TARGET_32BIT)
6fb5fa3c 14839 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14840 else
6fb5fa3c 14841 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14842 }
14843}
14844
d1d0c603
JJ
14845/* Emit instructions to restore the link register after determining where
14846 its value has been stored. */
14847
14848void
14849rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14850{
14851 rs6000_stack_t *info = rs6000_stack_info ();
14852 rtx operands[2];
14853
14854 operands[0] = source;
14855 operands[1] = scratch;
14856
14857 if (info->lr_save_p)
14858 {
14859 rtx frame_rtx = stack_pointer_rtx;
14860 HOST_WIDE_INT sp_offset = 0;
14861 rtx tmp;
14862
14863 if (frame_pointer_needed
14864 || current_function_calls_alloca
14865 || info->total_size > 32767)
14866 {
0be76840 14867 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14868 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14869 frame_rtx = operands[1];
14870 }
14871 else if (info->push_p)
14872 sp_offset = info->total_size;
14873
14874 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14875 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14876 emit_move_insn (tmp, operands[0]);
14877 }
14878 else
1de43f85 14879 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14880}
14881
4862826d 14882static GTY(()) alias_set_type set = -1;
f103e34d 14883
4862826d 14884alias_set_type
863d938c 14885get_TOC_alias_set (void)
9ebbca7d 14886{
f103e34d
GK
14887 if (set == -1)
14888 set = new_alias_set ();
14889 return set;
f676971a 14890}
9ebbca7d 14891
c1207243 14892/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14893 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14894 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14895#if TARGET_ELF
3c9eb5f4 14896static int
f676971a 14897uses_TOC (void)
9ebbca7d 14898{
c4501e62 14899 rtx insn;
38c1f2d7 14900
c4501e62
JJ
14901 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14902 if (INSN_P (insn))
14903 {
14904 rtx pat = PATTERN (insn);
14905 int i;
9ebbca7d 14906
f676971a 14907 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14908 for (i = 0; i < XVECLEN (pat, 0); i++)
14909 {
14910 rtx sub = XVECEXP (pat, 0, i);
14911 if (GET_CODE (sub) == USE)
14912 {
14913 sub = XEXP (sub, 0);
14914 if (GET_CODE (sub) == UNSPEC
14915 && XINT (sub, 1) == UNSPEC_TOC)
14916 return 1;
14917 }
14918 }
14919 }
14920 return 0;
9ebbca7d 14921}
c954844a 14922#endif
38c1f2d7 14923
9ebbca7d 14924rtx
f676971a 14925create_TOC_reference (rtx symbol)
9ebbca7d 14926{
b3a13419 14927 if (!can_create_pseudo_p ())
6fb5fa3c 14928 df_set_regs_ever_live (TOC_REGISTER, true);
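 /* The reference built below has the form
      (plus (reg TOC_REGISTER)
            (const (minus (symbol_ref SYMBOL) (symbol_ref toc_label_name))))
    i.e. the TOC pointer plus the symbol's displacement from the TOC base
    label. */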
f676971a 14929 return gen_rtx_PLUS (Pmode,
a8a05998 14930 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14931 gen_rtx_CONST (Pmode,
14932 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14933 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14934}
38c1f2d7 14935
fc4767bb
JJ
14936/* If _Unwind_* has been called from within the same module,
 14937 the TOC register is not guaranteed to be saved to 40(1) on function
14938 entry. Save it there in that case. */
c7ca610e 14939
9ebbca7d 14940void
863d938c 14941rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14942{
14943 rtx mem;
14944 rtx stack_top = gen_reg_rtx (Pmode);
14945 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14946 rtx opcode = gen_reg_rtx (SImode);
14947 rtx tocompare = gen_reg_rtx (SImode);
14948 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14949
8308679f 14950 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14951 emit_move_insn (stack_top, mem);
14952
8308679f
DE
14953 mem = gen_frame_mem (Pmode,
14954 gen_rtx_PLUS (Pmode, stack_top,
14955 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14956 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14957 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14958 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14959 : 0xE8410028, SImode));
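 /* 0x80410014 and 0xE8410028 are the encodings of "lwz r2,20(r1)" and
    "ld r2,40(r1)" respectively: the TOC reload that follows a
    cross-module call.  If that instruction is found at the saved return
    address, the TOC save slot is assumed to be valid already and the
    store below is skipped. */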
9ebbca7d 14960
fc4767bb 14961 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14962 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14963 no_toc_save_needed);
9ebbca7d 14964
8308679f
DE
14965 mem = gen_frame_mem (Pmode,
14966 gen_rtx_PLUS (Pmode, stack_top,
14967 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14968 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14969 emit_label (no_toc_save_needed);
9ebbca7d 14970}
38c1f2d7 14971\f
0be76840
DE
14972/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14973 and the change to the stack pointer. */
ba4828e0 14974
9ebbca7d 14975static void
863d938c 14976rs6000_emit_stack_tie (void)
9ebbca7d 14977{
0be76840
DE
14978 rtx mem = gen_frame_mem (BLKmode,
14979 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14980
9ebbca7d
GK
14981 emit_insn (gen_stack_tie (mem));
14982}
38c1f2d7 14983
9ebbca7d
GK
14984/* Emit the correct code for allocating stack space, as insns.
14985 If COPY_R12, make sure a copy of the old frame is left in r12.
14986 The generated code may use hard register 0 as a temporary. */
14987
14988static void
a2369ed3 14989rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14990{
9ebbca7d
GK
14991 rtx insn;
14992 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14993 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14994 rtx todec = gen_int_mode (-size, Pmode);
14995
14996 if (INTVAL (todec) != -size)
14997 {
d4ee4d25 14998 warning (0, "stack frame too large");
61168ff1
RS
14999 emit_insn (gen_trap ());
15000 return;
15001 }
a157febd
GK
15002
15003 if (current_function_limit_stack)
15004 {
15005 if (REG_P (stack_limit_rtx)
f676971a 15006 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15007 && REGNO (stack_limit_rtx) <= 31)
15008 {
5b71a4e7 15009 emit_insn (TARGET_32BIT
9ebbca7d
GK
15010 ? gen_addsi3 (tmp_reg,
15011 stack_limit_rtx,
15012 GEN_INT (size))
15013 : gen_adddi3 (tmp_reg,
15014 stack_limit_rtx,
15015 GEN_INT (size)));
5b71a4e7 15016
9ebbca7d
GK
15017 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15018 const0_rtx));
a157febd
GK
15019 }
15020 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15021 && TARGET_32BIT
f607bc57 15022 && DEFAULT_ABI == ABI_V4)
a157febd 15023 {
9ebbca7d 15024 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15025 gen_rtx_PLUS (Pmode,
15026 stack_limit_rtx,
9ebbca7d 15027 GEN_INT (size)));
5b71a4e7 15028
9ebbca7d
GK
15029 emit_insn (gen_elf_high (tmp_reg, toload));
15030 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15031 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15032 const0_rtx));
a157febd
GK
15033 }
15034 else
d4ee4d25 15035 warning (0, "stack limit expression is not supported");
a157febd
GK
15036 }
15037
9ebbca7d
GK
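 /* Keep a copy of the incoming stack pointer in r12; the caller may be
    using r12 as its frame scratch register, and the non-update path
    below needs the old value to store the back chain word. */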
15038 if (copy_r12 || ! TARGET_UPDATE)
15039 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15040
38c1f2d7
MM
15041 if (TARGET_UPDATE)
15042 {
9ebbca7d 15043 if (size > 32767)
38c1f2d7 15044 {
9ebbca7d 15045 /* Need a note here so that try_split doesn't get confused. */
9390387d 15046 if (get_last_insn () == NULL_RTX)
2e040219 15047 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15048 insn = emit_move_insn (tmp_reg, todec);
15049 try_split (PATTERN (insn), insn, 0);
15050 todec = tmp_reg;
38c1f2d7 15051 }
5b71a4e7
DE
15052
15053 insn = emit_insn (TARGET_32BIT
15054 ? gen_movsi_update (stack_reg, stack_reg,
15055 todec, stack_reg)
c4ad648e 15056 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15057 todec, stack_reg));
38c1f2d7
MM
15058 }
15059 else
15060 {
5b71a4e7
DE
15061 insn = emit_insn (TARGET_32BIT
15062 ? gen_addsi3 (stack_reg, stack_reg, todec)
15063 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
15064 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15065 gen_rtx_REG (Pmode, 12));
15066 }
f676971a 15067
9ebbca7d 15068 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15069 REG_NOTES (insn) =
9ebbca7d 15070 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15071 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15072 gen_rtx_PLUS (Pmode, stack_reg,
15073 GEN_INT (-size))),
15074 REG_NOTES (insn));
15075}
15076
a4f6c312
SS
15077/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15078 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15079 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15080 deduce these equivalences by itself so it wasn't necessary to hold
15081 its hand so much. */
9ebbca7d
GK
15082
15083static void
f676971a 15084rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15085 rtx reg2, rtx rreg)
9ebbca7d
GK
15086{
15087 rtx real, temp;
15088
e56c4463
JL
15089 /* copy_rtx will not make unique copies of registers, so we need to
15090 ensure we don't have unwanted sharing here. */
15091 if (reg == reg2)
15092 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15093
15094 if (reg == rreg)
15095 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15096
9ebbca7d
GK
15097 real = copy_rtx (PATTERN (insn));
15098
89e7058f
AH
15099 if (reg2 != NULL_RTX)
15100 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15101
15102 real = replace_rtx (real, reg,
9ebbca7d
GK
15103 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15104 STACK_POINTER_REGNUM),
15105 GEN_INT (val)));
f676971a 15106
9ebbca7d
GK
15107 /* We expect that 'real' is either a SET or a PARALLEL containing
15108 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15109 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15110
15111 if (GET_CODE (real) == SET)
15112 {
15113 rtx set = real;
f676971a 15114
9ebbca7d
GK
15115 temp = simplify_rtx (SET_SRC (set));
15116 if (temp)
15117 SET_SRC (set) = temp;
15118 temp = simplify_rtx (SET_DEST (set));
15119 if (temp)
15120 SET_DEST (set) = temp;
15121 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15122 {
9ebbca7d
GK
15123 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15124 if (temp)
15125 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15126 }
38c1f2d7 15127 }
37409796 15128 else
9ebbca7d
GK
15129 {
15130 int i;
37409796
NS
15131
15132 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15133 for (i = 0; i < XVECLEN (real, 0); i++)
15134 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15135 {
15136 rtx set = XVECEXP (real, 0, i);
f676971a 15137
9ebbca7d
GK
15138 temp = simplify_rtx (SET_SRC (set));
15139 if (temp)
15140 SET_SRC (set) = temp;
15141 temp = simplify_rtx (SET_DEST (set));
15142 if (temp)
15143 SET_DEST (set) = temp;
15144 if (GET_CODE (SET_DEST (set)) == MEM)
15145 {
15146 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15147 if (temp)
15148 XEXP (SET_DEST (set), 0) = temp;
15149 }
15150 RTX_FRAME_RELATED_P (set) = 1;
15151 }
15152 }
c19de7aa
AH
15153
15154 if (TARGET_SPE)
15155 real = spe_synthesize_frame_save (real);
15156
9ebbca7d
GK
15157 RTX_FRAME_RELATED_P (insn) = 1;
15158 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15159 real,
15160 REG_NOTES (insn));
38c1f2d7
MM
15161}
15162
c19de7aa
AH
15163/* Given an SPE frame note, return a PARALLEL of SETs with the
15164 original note, plus a synthetic register save. */
15165
15166static rtx
a2369ed3 15167spe_synthesize_frame_save (rtx real)
c19de7aa
AH
15168{
15169 rtx synth, offset, reg, real2;
15170
15171 if (GET_CODE (real) != SET
15172 || GET_MODE (SET_SRC (real)) != V2SImode)
15173 return real;
15174
 15175 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
15176 frame related note. The parallel contains a set of the register
41f3a930 15177 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
15178 This is so we can differentiate between 64-bit and 32-bit saves.
15179 Words cannot describe this nastiness. */
15180
37409796
NS
15181 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
15182 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
15183 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
15184
15185 /* Transform:
15186 (set (mem (plus (reg x) (const y)))
15187 (reg z))
15188 into:
15189 (set (mem (plus (reg x) (const y+4)))
41f3a930 15190 (reg z+1200))
c19de7aa
AH
15191 */
15192
15193 real2 = copy_rtx (real);
15194 PUT_MODE (SET_DEST (real2), SImode);
15195 reg = SET_SRC (real2);
15196 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
15197 synth = copy_rtx (real2);
15198
15199 if (BYTES_BIG_ENDIAN)
15200 {
15201 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
15202 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
15203 }
15204
15205 reg = SET_SRC (synth);
41f3a930 15206
c19de7aa 15207 synth = replace_rtx (synth, reg,
41f3a930 15208 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
15209
15210 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
15211 synth = replace_rtx (synth, offset,
15212 GEN_INT (INTVAL (offset)
15213 + (BYTES_BIG_ENDIAN ? 0 : 4)));
15214
15215 RTX_FRAME_RELATED_P (synth) = 1;
15216 RTX_FRAME_RELATED_P (real2) = 1;
15217 if (BYTES_BIG_ENDIAN)
15218 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
15219 else
15220 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
15221
15222 return real;
15223}
15224
00b960c7
AH
15225/* Returns an insn that has a vrsave set operation with the
15226 appropriate CLOBBERs. */
15227
15228static rtx
a2369ed3 15229generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15230{
15231 int nclobs, i;
15232 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15233 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15234
a004eb82
AH
15235 clobs[0]
15236 = gen_rtx_SET (VOIDmode,
15237 vrsave,
15238 gen_rtx_UNSPEC_VOLATILE (SImode,
15239 gen_rtvec (2, reg, vrsave),
3aca4bff 15240 UNSPECV_SET_VRSAVE));
00b960c7
AH
15241
15242 nclobs = 1;
15243
9aa86737
AH
15244 /* We need to clobber the registers in the mask so the scheduler
15245 does not move sets to VRSAVE before sets of AltiVec registers.
15246
15247 However, if the function receives nonlocal gotos, reload will set
15248 all call saved registers live. We will end up with:
15249
15250 (set (reg 999) (mem))
15251 (parallel [ (set (reg vrsave) (unspec blah))
15252 (clobber (reg 999))])
15253
15254 The clobber will cause the store into reg 999 to be dead, and
15255 flow will attempt to delete an epilogue insn. In this case, we
15256 need an unspec use/set of the register. */
00b960c7
AH
15257
15258 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15259 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15260 {
15261 if (!epiloguep || call_used_regs [i])
15262 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15263 gen_rtx_REG (V4SImode, i));
15264 else
15265 {
15266 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15267
15268 clobs[nclobs++]
a004eb82
AH
15269 = gen_rtx_SET (VOIDmode,
15270 reg,
15271 gen_rtx_UNSPEC (V4SImode,
15272 gen_rtvec (1, reg), 27));
9aa86737
AH
15273 }
15274 }
00b960c7
AH
15275
15276 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15277
15278 for (i = 0; i < nclobs; ++i)
15279 XVECEXP (insn, 0, i) = clobs[i];
15280
15281 return insn;
15282}
15283
89e7058f
AH
15284/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15285 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15286
15287static void
f676971a 15288emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15289 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15290{
15291 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15292 rtx replacea, replaceb;
15293
15294 int_rtx = GEN_INT (offset);
15295
15296 /* Some cases that need register indexed addressing. */
15297 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4447b5 15298 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
a3170dc6
AH
15299 || (TARGET_SPE_ABI
15300 && SPE_VECTOR_MODE (mode)
15301 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15302 {
 15303 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15304 flow path of instructions in the prologue. */
89e7058f
AH
15305 offset_rtx = gen_rtx_REG (Pmode, 11);
15306 emit_move_insn (offset_rtx, int_rtx);
15307
15308 replacea = offset_rtx;
15309 replaceb = int_rtx;
15310 }
15311 else
15312 {
15313 offset_rtx = int_rtx;
15314 replacea = NULL_RTX;
15315 replaceb = NULL_RTX;
15316 }
15317
15318 reg = gen_rtx_REG (mode, regno);
15319 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15320 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15321
15322 insn = emit_move_insn (mem, reg);
15323
15324 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15325}
15326
a3170dc6
AH
15327/* Emit an offset memory reference suitable for a frame store, while
15328 converting to a valid addressing mode. */
15329
15330static rtx
a2369ed3 15331gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15332{
15333 rtx int_rtx, offset_rtx;
15334
15335 int_rtx = GEN_INT (offset);
15336
4d4cbc0e 15337 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4d4447b5 15338 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
a3170dc6
AH
15339 {
15340 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15341 emit_move_insn (offset_rtx, int_rtx);
15342 }
15343 else
15344 offset_rtx = int_rtx;
15345
0be76840 15346 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15347}
15348
6d0a8091
DJ
15349/* Look for user-defined global regs. We should not save and restore these,
 15350 and cannot use stmw/lmw if any of them fall within the range those instructions would cover. */
15351
15352static bool
15353no_global_regs_above (int first_greg)
15354{
15355 int i;
15356 for (i = 0; i < 32 - first_greg; i++)
15357 if (global_regs[first_greg + i])
15358 return false;
15359 return true;
15360}
15361
699c914a
MS
15362#ifndef TARGET_FIX_AND_CONTINUE
15363#define TARGET_FIX_AND_CONTINUE 0
15364#endif
15365
52ff33d0
NF
15366/* Determine whether the gp REG is really used. */
15367
15368static bool
15369rs6000_reg_live_or_pic_offset_p (int reg)
15370{
6fb5fa3c 15371 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15372 && (!call_used_regs[reg]
15373 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15374 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15375 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15376 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15377 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15378}
15379
9ebbca7d
GK
15380/* Emit function prologue as insns. */
15381
9878760c 15382void
863d938c 15383rs6000_emit_prologue (void)
9878760c 15384{
4697a36c 15385 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15386 enum machine_mode reg_mode = Pmode;
327e5343 15387 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15388 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15389 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15390 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15391 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15392 rtx insn;
15393 int saving_FPRs_inline;
15394 int using_store_multiple;
15395 HOST_WIDE_INT sp_offset = 0;
f676971a 15396
699c914a
MS
15397 if (TARGET_FIX_AND_CONTINUE)
15398 {
15399 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15400 address by modifying the first 5 instructions of the function
699c914a
MS
15401 to branch to the overriding function. This is necessary to
15402 permit function pointers that point to the old function to
15403 actually forward to the new function. */
15404 emit_insn (gen_nop ());
15405 emit_insn (gen_nop ());
de2ab0ca 15406 emit_insn (gen_nop ());
699c914a
MS
15407 emit_insn (gen_nop ());
15408 emit_insn (gen_nop ());
15409 }
15410
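 /* Under the SPE ABI the upper halves of the GPRs are live, so saves and
    restores are done as 64-bit V2SImode values and each register slot is
    8 bytes even though the target is 32-bit. */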
15411 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15412 {
15413 reg_mode = V2SImode;
15414 reg_size = 8;
15415 }
a3170dc6 15416
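 /* stmw only exists in 32-bit mode, cannot save the 64-bit SPE register
    halves, is only worthwhile when at least two registers need saving,
    and must not cover any register the user has made a global register. */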
9ebbca7d 15417 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15418 && (!TARGET_SPE_ABI
15419 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15420 && info->first_gp_reg_save < 31
15421 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15422 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15423 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15424 || current_function_calls_eh_return
8c29550d 15425 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15426
15427 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15428 if (! WORLD_SAVE_P (info)
15429 && info->push_p
acd0b319
AM
15430 && (DEFAULT_ABI == ABI_V4
15431 || current_function_calls_eh_return))
9ebbca7d
GK
15432 {
15433 if (info->total_size < 32767)
15434 sp_offset = info->total_size;
15435 else
15436 frame_reg_rtx = frame_ptr_rtx;
f676971a 15437 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15438 (frame_reg_rtx != sp_reg_rtx
15439 && (info->cr_save_p
15440 || info->lr_save_p
15441 || info->first_fp_reg_save < 64
15442 || info->first_gp_reg_save < 32
15443 )));
15444 if (frame_reg_rtx != sp_reg_rtx)
15445 rs6000_emit_stack_tie ();
15446 }
15447
d62294f5 15448 /* Handle world saves specially here. */
f57fe068 15449 if (WORLD_SAVE_P (info))
d62294f5
FJ
15450 {
15451 int i, j, sz;
15452 rtx treg;
15453 rtvec p;
22fa69da 15454 rtx reg0;
d62294f5
FJ
15455
15456 /* save_world expects lr in r0. */
22fa69da 15457 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15458 if (info->lr_save_p)
c4ad648e 15459 {
22fa69da 15460 insn = emit_move_insn (reg0,
1de43f85 15461 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15462 RTX_FRAME_RELATED_P (insn) = 1;
15463 }
d62294f5
FJ
15464
15465 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15466 assumptions about the offsets of various bits of the stack
992d08b1 15467 frame. */
37409796
NS
15468 gcc_assert (info->gp_save_offset == -220
15469 && info->fp_save_offset == -144
15470 && info->lr_save_offset == 8
15471 && info->cr_save_offset == 4
15472 && info->push_p
15473 && info->lr_save_p
15474 && (!current_function_calls_eh_return
15475 || info->ehrd_offset == -432)
15476 && info->vrsave_save_offset == -224
22fa69da 15477 && info->altivec_save_offset == -416);
d62294f5
FJ
15478
15479 treg = gen_rtx_REG (SImode, 11);
15480 emit_move_insn (treg, GEN_INT (-info->total_size));
15481
15482 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15483 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15484
15485 /* Preserve CR2 for save_world prologues */
22fa69da 15486 sz = 5;
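 /* Five fixed elements: the LR clobber, the use of *save_world, the CR2
    store, the LR store and the stack pointer update; the loops below add
    one element per FP, AltiVec and GP register saved. */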
d62294f5
FJ
15487 sz += 32 - info->first_gp_reg_save;
15488 sz += 64 - info->first_fp_reg_save;
15489 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15490 p = rtvec_alloc (sz);
15491 j = 0;
15492 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15493 gen_rtx_REG (SImode,
1de43f85 15494 LR_REGNO));
d62294f5 15495 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15496 gen_rtx_SYMBOL_REF (Pmode,
15497 "*save_world"));
d62294f5 15498 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15499 properly. */
15500 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15501 {
15502 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15503 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15504 GEN_INT (info->fp_save_offset
15505 + sp_offset + 8 * i));
0be76840 15506 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15507
15508 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15509 }
d62294f5 15510 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15511 {
15512 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15513 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15514 GEN_INT (info->altivec_save_offset
15515 + sp_offset + 16 * i));
0be76840 15516 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15517
15518 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15519 }
d62294f5 15520 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15521 {
15522 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15523 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15524 GEN_INT (info->gp_save_offset
15525 + sp_offset + reg_size * i));
0be76840 15526 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15527
15528 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15529 }
15530
15531 {
15532 /* CR register traditionally saved as CR2. */
15533 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15534 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15535 GEN_INT (info->cr_save_offset
15536 + sp_offset));
0be76840 15537 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15538
15539 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15540 }
22fa69da
GK
15541 /* Explain about use of R0. */
15542 if (info->lr_save_p)
15543 {
15544 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15545 GEN_INT (info->lr_save_offset
15546 + sp_offset));
15547 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15548
22fa69da
GK
15549 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15550 }
15551 /* Explain what happens to the stack pointer. */
15552 {
15553 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15554 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15555 }
d62294f5
FJ
15556
15557 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15558 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15559 treg, GEN_INT (-info->total_size));
15560 sp_offset = info->total_size;
d62294f5
FJ
15561 }
15562
9ebbca7d 15563 /* If we use the link register, get it into r0. */
f57fe068 15564 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15565 {
52ff33d0
NF
15566 rtx addr, reg, mem;
15567
f8a57be8 15568 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15569 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15570 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15571
15572 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15573 GEN_INT (info->lr_save_offset + sp_offset));
15574 reg = gen_rtx_REG (Pmode, 0);
15575 mem = gen_rtx_MEM (Pmode, addr);
15576 /* This should not be of rs6000_sr_alias_set, because of
15577 __builtin_return_address. */
15578
15579 insn = emit_move_insn (mem, reg);
15580 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15581 NULL_RTX, NULL_RTX);
f8a57be8 15582 }
9ebbca7d
GK
15583
15584 /* If we need to save CR, put it into r12. */
f57fe068 15585 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15586 {
f8a57be8 15587 rtx set;
f676971a 15588
9ebbca7d 15589 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15590 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15591 RTX_FRAME_RELATED_P (insn) = 1;
15592 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15593 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15594 But that's OK. All we have to do is specify that _one_ condition
15595 code register is saved in this stack slot. The thrower's epilogue
15596 will then restore all the call-saved registers.
15597 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15598 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15599 gen_rtx_REG (SImode, CR2_REGNO));
15600 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15601 set,
15602 REG_NOTES (insn));
9ebbca7d
GK
15603 }
15604
a4f6c312
SS
15605 /* Do any required saving of fpr's. If only one or two to save, do
15606 it ourselves. Otherwise, call function. */
f57fe068 15607 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15608 {
15609 int i;
15610 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15611 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15612 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15613 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15614 info->first_fp_reg_save + i,
15615 info->fp_save_offset + sp_offset + 8 * i,
15616 info->total_size);
9ebbca7d 15617 }
f57fe068 15618 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15619 {
15620 int i;
15621 char rname[30];
520a57c8 15622 const char *alloc_rname;
9ebbca7d
GK
15623 rtvec p;
15624 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15625
15626 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15627 gen_rtx_REG (Pmode,
1de43f85 15628 LR_REGNO));
9ebbca7d
GK
15629 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15630 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15631 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15632 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15633 gen_rtx_SYMBOL_REF (Pmode,
15634 alloc_rname));
15635 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15636 {
15637 rtx addr, reg, mem;
15638 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15639 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15640 GEN_INT (info->fp_save_offset
9ebbca7d 15641 + sp_offset + 8*i));
0be76840 15642 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15643
15644 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15645 }
15646 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15647 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15648 NULL_RTX, NULL_RTX);
15649 }
b6c9286a 15650
9ebbca7d
GK
15651 /* Save GPRs. This is done as a PARALLEL if we are using
15652 the store-multiple instructions. */
f57fe068 15653 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15654 {
308c142a 15655 rtvec p;
9ebbca7d
GK
15656 int i;
15657 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15658 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15659 {
15660 rtx addr, reg, mem;
15661 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15662 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15663 GEN_INT (info->gp_save_offset
15664 + sp_offset
9ebbca7d 15665 + reg_size * i));
0be76840 15666 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15667
15668 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15669 }
15670 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15671 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15672 NULL_RTX, NULL_RTX);
b6c9286a 15673 }
52ff33d0
NF
15674 else if (!WORLD_SAVE_P (info)
15675 && TARGET_SPE_ABI
15676 && info->spe_64bit_regs_used != 0
15677 && info->first_gp_reg_save != 32)
15678 {
15679 int i;
15680 rtx spe_save_area_ptr;
15681 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15682 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15683 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15684
15685 /* Determine whether we can address all of the registers that need
15686 to be saved with an offset from the stack pointer that fits in
15687 the small const field for SPE memory instructions. */
15688 int spe_regs_addressable_via_sp
15689 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15690 + (32 - info->first_gp_reg_save - 1) * reg_size);
15691 int spe_offset;
15692
15693 if (spe_regs_addressable_via_sp)
15694 {
30895f30 15695 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15696 spe_offset = info->spe_gp_save_offset + sp_offset;
15697 }
15698 else
15699 {
15700 /* Make r11 point to the start of the SPE save area. We need
15701 to be careful here if r11 is holding the static chain. If
15702 it is, then temporarily save it in r0. We would use r0 as
15703 our base register here, but using r0 as a base register in
15704 loads and stores means something different from what we
15705 would like. */
15706 if (using_static_chain_p)
15707 {
15708 rtx r0 = gen_rtx_REG (Pmode, 0);
15709
15710 gcc_assert (info->first_gp_reg_save > 11);
15711
15712 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15713 }
15714
15715 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15716 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15717 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15718
15719 spe_offset = 0;
15720 }
15721
15722 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15723 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15724 {
15725 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15726 rtx offset, addr, mem;
15727
15728 /* We're doing all this to ensure that the offset fits into
15729 the immediate offset of 'evstdd'. */
15730 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15731
15732 offset = GEN_INT (reg_size * i + spe_offset);
15733 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15734 mem = gen_rtx_MEM (V2SImode, addr);
15735
15736 insn = emit_move_insn (mem, reg);
15737
15738 rs6000_frame_related (insn, spe_save_area_ptr,
15739 info->spe_gp_save_offset
15740 + sp_offset + reg_size * i,
15741 offset, const0_rtx);
15742 }
15743
15744 /* Move the static chain pointer back. */
15745 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15746 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15747 }
f57fe068 15748 else if (!WORLD_SAVE_P (info))
b6c9286a 15749 {
9ebbca7d
GK
15750 int i;
15751 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15752 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15753 {
15754 rtx addr, reg, mem;
15755 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15756
52ff33d0
NF
15757 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15758 GEN_INT (info->gp_save_offset
15759 + sp_offset
15760 + reg_size * i));
15761 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15762
52ff33d0
NF
15763 insn = emit_move_insn (mem, reg);
15764 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15765 NULL_RTX, NULL_RTX);
15766 }
9ebbca7d
GK
15767 }
15768
83720594
RH
15769 /* ??? There's no need to emit actual instructions here, but it's the
15770 easiest way to get the frame unwind information emitted. */
22fa69da 15771 if (current_function_calls_eh_return)
83720594 15772 {
78e1b90d
DE
15773 unsigned int i, regno;
15774
fc4767bb
JJ
15775 /* In AIX ABI we need to pretend we save r2 here. */
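 /* 5 * reg_size is 20(r1) on 32-bit and 40(r1) on 64-bit: the TOC save
    slot reserved by the AIX ABI, which the unwinder expects to hold r2. */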
15776 if (TARGET_AIX)
15777 {
15778 rtx addr, reg, mem;
15779
15780 reg = gen_rtx_REG (reg_mode, 2);
15781 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15782 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15783 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15784
15785 insn = emit_move_insn (mem, reg);
f676971a 15786 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15787 NULL_RTX, NULL_RTX);
15788 PATTERN (insn) = gen_blockage ();
15789 }
15790
83720594
RH
15791 for (i = 0; ; ++i)
15792 {
83720594
RH
15793 regno = EH_RETURN_DATA_REGNO (i);
15794 if (regno == INVALID_REGNUM)
15795 break;
15796
89e7058f
AH
15797 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15798 info->ehrd_offset + sp_offset
15799 + reg_size * (int) i,
15800 info->total_size);
83720594
RH
15801 }
15802 }
15803
9ebbca7d 15804 /* Save CR if we use any that must be preserved. */
f57fe068 15805 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15806 {
15807 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15808 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15809 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15810 /* See the large comment above about why CR2_REGNO is used. */
15811 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15812
9ebbca7d
GK
15813 /* If r12 was used to hold the original sp, copy cr into r0 now
15814 that it's free. */
15815 if (REGNO (frame_reg_rtx) == 12)
15816 {
f8a57be8
GK
15817 rtx set;
15818
9ebbca7d 15819 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15820 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15821 RTX_FRAME_RELATED_P (insn) = 1;
15822 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15823 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15824 set,
15825 REG_NOTES (insn));
f676971a 15826
9ebbca7d
GK
15827 }
15828 insn = emit_move_insn (mem, cr_save_rtx);
15829
f676971a 15830 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15831 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15832 }
15833
f676971a 15834 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15835 for which it was done previously. */
f57fe068 15836 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15837 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15838 {
bcb2d701 15839 if (info->total_size < 32767)
2b2c2fe5 15840 sp_offset = info->total_size;
bcb2d701
EC
15841 else
15842 frame_reg_rtx = frame_ptr_rtx;
15843 rs6000_emit_allocate_stack (info->total_size,
15844 (frame_reg_rtx != sp_reg_rtx
15845 && ((info->altivec_size != 0)
15846 || (info->vrsave_mask != 0)
15847 )));
15848 if (frame_reg_rtx != sp_reg_rtx)
15849 rs6000_emit_stack_tie ();
2b2c2fe5 15850 }
9ebbca7d
GK
15851
15852 /* Set frame pointer, if needed. */
15853 if (frame_pointer_needed)
15854 {
7d5175e1 15855 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15856 sp_reg_rtx);
15857 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15858 }
9878760c 15859
2b2c2fe5
EC
15860 /* Save AltiVec registers if needed. Save here because the red zone does
15861 not include AltiVec registers. */
15862 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15863 {
15864 int i;
15865
 15866 /* There should be a non-inline version of this, for when we
15867 are saving lots of vector registers. */
15868 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15869 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15870 {
15871 rtx areg, savereg, mem;
15872 int offset;
15873
15874 offset = info->altivec_save_offset + sp_offset
15875 + 16 * (i - info->first_altivec_reg_save);
15876
15877 savereg = gen_rtx_REG (V4SImode, i);
15878
15879 areg = gen_rtx_REG (Pmode, 0);
15880 emit_move_insn (areg, GEN_INT (offset));
15881
15882 /* AltiVec addressing mode is [reg+reg]. */
15883 mem = gen_frame_mem (V4SImode,
15884 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15885
15886 insn = emit_move_insn (mem, savereg);
15887
15888 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15889 areg, GEN_INT (offset));
15890 }
15891 }
15892
15893 /* VRSAVE is a bit vector representing which AltiVec registers
15894 are used. The OS uses this to determine which vector
15895 registers to save on a context switch. We need to save
15896 VRSAVE on the stack frame, add whatever AltiVec registers we
15897 used in this function, and do the corresponding magic in the
15898 epilogue. */
15899
15900 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15901 && info->vrsave_mask != 0)
15902 {
15903 rtx reg, mem, vrsave;
15904 int offset;
15905
15906 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15907 as frame_reg_rtx and r11 as the static chain pointer for
15908 nested functions. */
15909 reg = gen_rtx_REG (SImode, 0);
15910 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15911 if (TARGET_MACHO)
15912 emit_insn (gen_get_vrsave_internal (reg));
15913 else
15914 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15915
15916 if (!WORLD_SAVE_P (info))
15917 {
15918 /* Save VRSAVE. */
15919 offset = info->vrsave_save_offset + sp_offset;
15920 mem = gen_frame_mem (SImode,
15921 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15922 GEN_INT (offset)));
15923 insn = emit_move_insn (mem, reg);
15924 }
15925
15926 /* Include the registers in the mask. */
15927 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15928
15929 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15930 }
15931
1db02437 15932 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15933 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15934 || (DEFAULT_ABI == ABI_V4
15935 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15936 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15937 {
15938 /* If emit_load_toc_table will use the link register, we need to save
15939 it. We use R12 for this purpose because emit_load_toc_table
15940 can use register 0. This allows us to use a plain 'blr' to return
15941 from the procedure more often. */
15942 int save_LR_around_toc_setup = (TARGET_ELF
15943 && DEFAULT_ABI != ABI_AIX
15944 && flag_pic
15945 && ! info->lr_save_p
15946 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15947 if (save_LR_around_toc_setup)
15948 {
1de43f85 15949 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15950
c4ad648e 15951 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15952 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15953
c4ad648e 15954 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15955
c4ad648e 15956 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15957 RTX_FRAME_RELATED_P (insn) = 1;
15958 }
15959 else
15960 rs6000_emit_load_toc_table (TRUE);
15961 }
ee890fe2 15962
fcce224d 15963#if TARGET_MACHO
ee890fe2
SS
15964 if (DEFAULT_ABI == ABI_DARWIN
15965 && flag_pic && current_function_uses_pic_offset_table)
15966 {
1de43f85 15967 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15968 rtx src = machopic_function_base_sym ();
ee890fe2 15969
6d0a8091
DJ
15970 /* Save and restore LR locally around this call (in R0). */
15971 if (!info->lr_save_p)
6fb5fa3c 15972 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15973
6fb5fa3c 15974 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15975
6fb5fa3c
DB
15976 emit_move_insn (gen_rtx_REG (Pmode,
15977 RS6000_PIC_OFFSET_TABLE_REGNUM),
15978 lr);
6d0a8091
DJ
15979
15980 if (!info->lr_save_p)
6fb5fa3c 15981 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15982 }
fcce224d 15983#endif
9ebbca7d
GK
15984}
15985
9ebbca7d 15986/* Write function prologue. */
a4f6c312 15987
08c148a8 15988static void
f676971a 15989rs6000_output_function_prologue (FILE *file,
a2369ed3 15990 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15991{
15992 rs6000_stack_t *info = rs6000_stack_info ();
15993
4697a36c
MM
15994 if (TARGET_DEBUG_STACK)
15995 debug_stack_info (info);
9878760c 15996
a4f6c312
SS
15997 /* Write .extern for any function we will call to save and restore
15998 fp values. */
15999 if (info->first_fp_reg_save < 64
16000 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16001 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16002 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
16003 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
16004 RESTORE_FP_SUFFIX);
9878760c 16005
c764f757
RK
16006 /* Write .extern for AIX common mode routines, if needed. */
16007 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16008 {
f6709c70
JW
16009 fputs ("\t.extern __mulh\n", file);
16010 fputs ("\t.extern __mull\n", file);
16011 fputs ("\t.extern __divss\n", file);
16012 fputs ("\t.extern __divus\n", file);
16013 fputs ("\t.extern __quoss\n", file);
16014 fputs ("\t.extern __quous\n", file);
c764f757
RK
16015 common_mode_defined = 1;
16016 }
9878760c 16017
9ebbca7d 16018 if (! HAVE_prologue)
979721f8 16019 {
9ebbca7d 16020 start_sequence ();
9dda4cc8 16021
a4f6c312
SS
16022 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16023 the "toplevel" insn chain. */
2e040219 16024 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16025 rs6000_emit_prologue ();
2e040219 16026 emit_note (NOTE_INSN_DELETED);
178c3eff 16027
a3c9585f 16028 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16029 {
16030 rtx insn;
16031 unsigned addr = 0;
16032 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16033 {
16034 INSN_ADDRESSES_NEW (insn, addr);
16035 addr += 4;
16036 }
16037 }
9dda4cc8 16038
9ebbca7d 16039 if (TARGET_DEBUG_STACK)
a4f6c312 16040 debug_rtx_list (get_insns (), 100);
c9d691e9 16041 final (get_insns (), file, FALSE);
9ebbca7d 16042 end_sequence ();
979721f8
MM
16043 }
16044
9ebbca7d
GK
16045 rs6000_pic_labelno++;
16046}
f676971a 16047
9ebbca7d 16048/* Emit function epilogue as insns.
9878760c 16049
9ebbca7d
GK
16050 At present, dwarf2out_frame_debug_expr doesn't understand
16051 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16052 anywhere in the epilogue. Most of the insns below would in any case
16053 need special notes to explain where r11 is in relation to the stack. */
9878760c 16054
9ebbca7d 16055void
a2369ed3 16056rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16057{
16058 rs6000_stack_t *info;
16059 int restoring_FPRs_inline;
16060 int using_load_multiple;
d296e02e 16061 int using_mtcr_multiple;
9ebbca7d
GK
16062 int use_backchain_to_restore_sp;
16063 int sp_offset = 0;
16064 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16065 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16066 enum machine_mode reg_mode = Pmode;
327e5343 16067 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16068 int i;
16069
c19de7aa
AH
16070 info = rs6000_stack_info ();
16071
16072 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16073 {
16074 reg_mode = V2SImode;
16075 reg_size = 8;
16076 }
16077
9ebbca7d 16078 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16079 && (!TARGET_SPE_ABI
16080 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16081 && info->first_gp_reg_save < 31
16082 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16083 restoring_FPRs_inline = (sibcall
83720594 16084 || current_function_calls_eh_return
9ebbca7d
GK
16085 || info->first_fp_reg_save == 64
16086 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16087 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16088 || current_function_calls_alloca
16089 || info->total_size > 32767);
d296e02e 16090 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16091 || rs6000_cpu == PROCESSOR_PPC603
16092 || rs6000_cpu == PROCESSOR_PPC750
16093 || optimize_size);
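 /* On the 601, 603 and 750, or when optimizing for size, all live CR
    fields are restored below with a single mtcrf; otherwise each field
    gets its own mtcrf, which tends to be cheaper on later processors. */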
16094
f57fe068 16095 if (WORLD_SAVE_P (info))
d62294f5
FJ
16096 {
16097 int i, j;
16098 char rname[30];
16099 const char *alloc_rname;
16100 rtvec p;
16101
16102 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16103 stack slot (which is not likely to be our caller.)
16104 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16105 rest_world is similar, except any R10 parameter is ignored.
16106 The exception-handling stuff that was here in 2.95 is no
16107 longer necessary. */
d62294f5
FJ
16108
16109 p = rtvec_alloc (9
16110 + 1
f676971a 16111 + 32 - info->first_gp_reg_save
c4ad648e
AM
16112 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16113 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16114
c4ad648e
AM
16115 strcpy (rname, ((current_function_calls_eh_return) ?
16116 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16117 alloc_rname = ggc_strdup (rname);
16118
16119 j = 0;
16120 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16121 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16122 gen_rtx_REG (Pmode,
1de43f85 16123 LR_REGNO));
d62294f5 16124 RTVEC_ELT (p, j++)
c4ad648e 16125 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16126 /* The instruction pattern requires a clobber here;
c4ad648e 16127 it is shared with the restVEC helper. */
d62294f5 16128 RTVEC_ELT (p, j++)
c4ad648e 16129 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16130
16131 {
c4ad648e
AM
16132 /* CR register traditionally saved as CR2. */
16133 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16134 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16135 GEN_INT (info->cr_save_offset));
0be76840 16136 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16137
16138 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16139 }
16140
16141 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16142 {
16143 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16144 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16145 GEN_INT (info->gp_save_offset
16146 + reg_size * i));
0be76840 16147 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16148
16149 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16150 }
d62294f5 16151 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16152 {
16153 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16154 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16155 GEN_INT (info->altivec_save_offset
16156 + 16 * i));
0be76840 16157 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16158
16159 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16160 }
d62294f5 16161 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16162 {
16163 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16164 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16165 GEN_INT (info->fp_save_offset
16166 + 8 * i));
0be76840 16167 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16168
16169 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16170 }
d62294f5 16171 RTVEC_ELT (p, j++)
c4ad648e 16172 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16173 RTVEC_ELT (p, j++)
c4ad648e 16174 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16175 RTVEC_ELT (p, j++)
c4ad648e 16176 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16177 RTVEC_ELT (p, j++)
c4ad648e 16178 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16179 RTVEC_ELT (p, j++)
c4ad648e 16180 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16181 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16182
16183 return;
16184 }
16185
45b194f8
AM
16186 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16187 if (info->push_p)
2b2c2fe5 16188 sp_offset = info->total_size;
f676971a 16189
9aa86737
AH
16190 /* Restore AltiVec registers if needed. */
16191 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16192 {
16193 int i;
16194
16195 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16196 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16197 {
16198 rtx addr, areg, mem;
16199
16200 areg = gen_rtx_REG (Pmode, 0);
16201 emit_move_insn
16202 (areg, GEN_INT (info->altivec_save_offset
16203 + sp_offset
16204 + 16 * (i - info->first_altivec_reg_save)));
16205
16206 /* AltiVec addressing mode is [reg+reg]. */
16207 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16208 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16209
16210 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16211 }
16212 }
16213
2b2c2fe5
EC
16214 /* If we have a frame pointer, a call to alloca, or a large stack
16215 frame, restore the old stack pointer using the backchain. Otherwise,
16216 we know what size to update it with. */
16217 if (use_backchain_to_restore_sp)
16218 {
16219 /* Under V.4, don't reset the stack pointer until after we're done
16220 loading the saved registers. */
16221 if (DEFAULT_ABI == ABI_V4)
16222 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16223
16224 emit_move_insn (frame_reg_rtx,
16225 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16226 sp_offset = 0;
2b2c2fe5 16227 }
45b194f8
AM
16228 else if (info->push_p
16229 && DEFAULT_ABI != ABI_V4
16230 && !current_function_calls_eh_return)
2b2c2fe5 16231 {
45b194f8
AM
16232 emit_insn (TARGET_32BIT
16233 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16234 GEN_INT (info->total_size))
16235 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16236 GEN_INT (info->total_size)));
16237 sp_offset = 0;
2b2c2fe5
EC
16238 }
16239
554c2941
AM
16240 /* Restore VRSAVE if needed. */
16241 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16242 && info->vrsave_mask != 0)
16243 {
16244 rtx addr, mem, reg;
16245
16246 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16247 GEN_INT (info->vrsave_save_offset + sp_offset));
16248 mem = gen_frame_mem (SImode, addr);
16249 reg = gen_rtx_REG (SImode, 12);
16250 emit_move_insn (reg, mem);
16251
16252 emit_insn (generate_set_vrsave (reg, info, 1));
16253 }
16254
9ebbca7d
GK
16255 /* Get the old lr if we saved it. */
16256 if (info->lr_save_p)
b6c9286a 16257 {
a3170dc6
AH
16258 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16259 info->lr_save_offset + sp_offset);
ba4828e0 16260
9ebbca7d 16261 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16262 }
f676971a 16263
9ebbca7d
GK
16264 /* Get the old cr if we saved it. */
16265 if (info->cr_save_p)
16266 {
16267 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16268 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16269 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16270
9ebbca7d
GK
16271 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16272 }
f676971a 16273
9ebbca7d 16274 /* Set LR here to try to overlap restores below. */
4697a36c 16275 if (info->lr_save_p)
1de43f85 16276 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16277 gen_rtx_REG (Pmode, 0));
f676971a 16278
83720594
RH
16279 /* Load exception handler data registers, if needed. */
16280 if (current_function_calls_eh_return)
16281 {
78e1b90d
DE
16282 unsigned int i, regno;
16283
fc4767bb
JJ
16284 if (TARGET_AIX)
16285 {
16286 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16287 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16288 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16289
16290 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16291 }
16292
83720594
RH
16293 for (i = 0; ; ++i)
16294 {
a3170dc6 16295 rtx mem;
83720594
RH
16296
16297 regno = EH_RETURN_DATA_REGNO (i);
16298 if (regno == INVALID_REGNUM)
16299 break;
16300
a3170dc6
AH
16301 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16302 info->ehrd_offset + sp_offset
16303 + reg_size * (int) i);
83720594
RH
16304
16305 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16306 }
16307 }
f676971a 16308
9ebbca7d
GK
16309 /* Restore GPRs. This is done as a PARALLEL if we are using
16310 the load-multiple instructions. */
16311 if (using_load_multiple)
979721f8 16312 {
9ebbca7d
GK
16313 rtvec p;
16314 p = rtvec_alloc (32 - info->first_gp_reg_save);
16315 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16316 {
f676971a
EC
16317 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16318 GEN_INT (info->gp_save_offset
16319 + sp_offset
9ebbca7d 16320 + reg_size * i));
0be76840 16321 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16322
f676971a 16323 RTVEC_ELT (p, i) =
9ebbca7d
GK
16324 gen_rtx_SET (VOIDmode,
16325 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16326 mem);
979721f8 16327 }
9ebbca7d 16328 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16329 }
52ff33d0
NF
16330 else if (TARGET_SPE_ABI
16331 && info->spe_64bit_regs_used != 0
16332 && info->first_gp_reg_save != 32)
16333 {
52ff33d0
NF
16334 /* Determine whether we can address all of the registers that need
16335 to be saved with an offset from the stack pointer that fits in
16336 the small const field for SPE memory instructions. */
16337 int spe_regs_addressable_via_sp
16338 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16339 + (32 - info->first_gp_reg_save - 1) * reg_size);
16340 int spe_offset;
16341
16342 if (spe_regs_addressable_via_sp)
45b194f8 16343 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16344 else
16345 {
45b194f8 16346 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16347 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16348 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16349 There's no need to worry here because the static chain is passed
16350 anew to every function. */
45b194f8
AM
16351 if (frame_reg_rtx == sp_reg_rtx)
16352 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16353 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16354 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16355 /* Keep the invariant that frame_reg_rtx + sp_offset points
16356 at the top of the stack frame. */
16357 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16358
16359 spe_offset = 0;
16360 }
16361
16362 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16363 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16364 {
16365 rtx offset, addr, mem;
16366
16367 /* We're doing all this to ensure that the immediate offset
16368 fits into the immediate field of 'evldd'. */
16369 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16370
16371 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16372 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16373 mem = gen_rtx_MEM (V2SImode, addr);
16374
16375 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16376 mem);
16377 }
16378 }
9ebbca7d
GK
16379 else
16380 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16381 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16382 {
f676971a
EC
16383 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16384 GEN_INT (info->gp_save_offset
16385 + sp_offset
9ebbca7d 16386 + reg_size * i));
0be76840 16387 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16388
f676971a 16389 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16390 info->first_gp_reg_save + i), mem);
9ebbca7d 16391 }
9878760c 16392
9ebbca7d
GK
16393 /* Restore fpr's if we need to do it without calling a function. */
16394 if (restoring_FPRs_inline)
16395 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16396 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16397 && ! call_used_regs[info->first_fp_reg_save+i]))
16398 {
16399 rtx addr, mem;
16400 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16401 GEN_INT (info->fp_save_offset
16402 + sp_offset
a4f6c312 16403 + 8 * i));
0be76840 16404 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16405
f676971a 16406 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16407 info->first_fp_reg_save + i),
16408 mem);
16409 }
8d30c4ee 16410
9ebbca7d
GK
16411 /* If we saved cr, restore it here. Just those that were used. */
16412 if (info->cr_save_p)
979721f8 16413 {
9ebbca7d 16414 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16415 int count = 0;
f676971a 16416
d296e02e 16417 if (using_mtcr_multiple)
979721f8 16418 {
9ebbca7d 16419 for (i = 0; i < 8; i++)
6fb5fa3c 16420 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16421 count++;
37409796 16422 gcc_assert (count);
e35b9579
GK
16423 }
16424
d296e02e 16425 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16426 {
16427 rtvec p;
16428 int ndx;
f676971a 16429
e35b9579 16430 p = rtvec_alloc (count);
9ebbca7d 16431
e35b9579 16432 ndx = 0;
9ebbca7d 16433 for (i = 0; i < 8; i++)
6fb5fa3c 16434 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16435 {
16436 rtvec r = rtvec_alloc (2);
16437 RTVEC_ELT (r, 0) = r12_rtx;
16438 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16439 RTVEC_ELT (p, ndx) =
f676971a 16440 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16441 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16442 ndx++;
9ebbca7d
GK
16443 }
16444 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16445 gcc_assert (ndx == count);
979721f8
MM
16446 }
16447 else
9ebbca7d 16448 for (i = 0; i < 8; i++)
6fb5fa3c 16449 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16450 {
f676971a 16451 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16452 CR0_REGNO+i),
16453 r12_rtx));
979721f8 16454 }
979721f8
MM
16455 }
16456
9ebbca7d 16457 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16458 have been done. */
16459 if (frame_reg_rtx != sp_reg_rtx)
16460 {
16461 /* This blockage is needed so that sched doesn't decide to move
16462 the sp change before the register restores. */
16463 rs6000_emit_stack_tie ();
45b194f8
AM
16464 if (sp_offset != 0)
16465 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16466 GEN_INT (sp_offset)));
52ff33d0
NF
16467 else
16468 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16469 }
16470 else if (sp_offset != 0)
16471 emit_insn (TARGET_32BIT
16472 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16473 GEN_INT (sp_offset))
16474 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16475 GEN_INT (sp_offset)));
b6c9286a 16476
83720594
RH
16477 if (current_function_calls_eh_return)
16478 {
16479 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16480 emit_insn (TARGET_32BIT
83720594
RH
16481 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16482 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16483 }
16484
9ebbca7d
GK
16485 if (!sibcall)
16486 {
16487 rtvec p;
16488 if (! restoring_FPRs_inline)
16489 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16490 else
16491 p = rtvec_alloc (2);
b6c9286a 16492
e35b9579 16493 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16494 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16495 gen_rtx_REG (Pmode,
1de43f85 16496 LR_REGNO));
9ebbca7d
GK
16497
16498 /* If we have to restore more than two FP registers, branch to the
16499 restore function. It will return to our caller. */
16500 if (! restoring_FPRs_inline)
16501 {
16502 int i;
16503 char rname[30];
520a57c8 16504 const char *alloc_rname;
979721f8 16505
f676971a 16506 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16507 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16508 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16509 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16510 gen_rtx_SYMBOL_REF (Pmode,
16511 alloc_rname));
b6c9286a 16512
9ebbca7d
GK
16513 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16514 {
16515 rtx addr, mem;
16516 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16517 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16518 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16519
f676971a 16520 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16521 gen_rtx_SET (VOIDmode,
16522 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16523 mem);
b6c9286a
MM
16524 }
16525 }
f676971a 16526
9ebbca7d 16527 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16528 }
9878760c
RK
16529}
16530
16531/* Write function epilogue. */
16532
08c148a8 16533static void
f676971a 16534rs6000_output_function_epilogue (FILE *file,
a2369ed3 16535 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16536{
9ebbca7d 16537 if (! HAVE_epilogue)
9878760c 16538 {
9ebbca7d
GK
16539 rtx insn = get_last_insn ();
16540 /* If the last insn was a BARRIER, we don't have to write anything except
16541 the trace table. */
16542 if (GET_CODE (insn) == NOTE)
16543 insn = prev_nonnote_insn (insn);
16544 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16545 {
9ebbca7d
GK
16546 /* This is slightly ugly, but at least we don't have two
16547 copies of the epilogue-emitting code. */
16548 start_sequence ();
16549
16550 /* A NOTE_INSN_DELETED is supposed to be at the start
16551 and end of the "toplevel" insn chain. */
2e040219 16552 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16553 rs6000_emit_epilogue (FALSE);
2e040219 16554 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16555
a3c9585f 16556 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16557 {
16558 rtx insn;
16559 unsigned addr = 0;
16560 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16561 {
16562 INSN_ADDRESSES_NEW (insn, addr);
16563 addr += 4;
16564 }
16565 }
16566
9ebbca7d 16567 if (TARGET_DEBUG_STACK)
a4f6c312 16568 debug_rtx_list (get_insns (), 100);
c9d691e9 16569 final (get_insns (), file, FALSE);
9ebbca7d 16570 end_sequence ();
4697a36c 16571 }
9878760c 16572 }
b4ac57ab 16573
efdba735
SH
16574#if TARGET_MACHO
16575 macho_branch_islands ();
0e5da0be
GK
16576 /* Mach-O doesn't support labels at the end of objects, so if
16577 it looks like we might want one, insert a NOP. */
16578 {
16579 rtx insn = get_last_insn ();
16580 while (insn
16581 && NOTE_P (insn)
a38e7aa5 16582 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16583 insn = PREV_INSN (insn);
f676971a
EC
16584 if (insn
16585 && (LABEL_P (insn)
0e5da0be 16586 || (NOTE_P (insn)
a38e7aa5 16587 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16588 fputs ("\tnop\n", file);
16589 }
16590#endif
16591
9b30bae2 16592 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16593 on its format.
16594
16595 We don't output a traceback table if -finhibit-size-directive was
16596 used. The documentation for -finhibit-size-directive reads
16597 ``don't output a @code{.size} assembler directive, or anything
16598 else that would cause trouble if the function is split in the
16599 middle, and the two halves are placed at locations far apart in
16600 memory.'' The traceback table has this property, since it
16601 includes the offset from the start of the function to the
4d30c363
MM
16602 traceback table itself.
16603
 16604 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 16605 different traceback table. */
57ac7be9 16606 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16607 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16608 {
69c75916 16609 const char *fname = NULL;
3ac88239 16610 const char *language_string = lang_hooks.name;
6041bf2f 16611 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16612 int i;
57ac7be9 16613 int optional_tbtab;
8097c268 16614 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16615
16616 if (rs6000_traceback == traceback_full)
16617 optional_tbtab = 1;
16618 else if (rs6000_traceback == traceback_part)
16619 optional_tbtab = 0;
16620 else
16621 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16622
69c75916
AM
16623 if (optional_tbtab)
16624 {
16625 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16626 while (*fname == '.') /* V.4 encodes . in the name */
16627 fname++;
16628
16629 /* Need label immediately before tbtab, so we can compute
16630 its offset from the function start. */
16631 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16632 ASM_OUTPUT_LABEL (file, fname);
16633 }
314fc5a9
ILT
16634
16635 /* The .tbtab pseudo-op can only be used for the first eight
16636 expressions, since it can't handle the possibly variable
16637 length fields that follow. However, if you omit the optional
16638 fields, the assembler outputs zeros for all optional fields
 16639 anyway, giving each variable length field its minimum length
 16640 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16641 pseudo-op at all. */
16642
16643 /* An all-zero word flags the start of the tbtab, for debuggers
16644 that have to find it by searching forward from the entry
16645 point or from the current pc. */
19d2d16f 16646 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16647
16648 /* Tbtab format type. Use format type 0. */
19d2d16f 16649 fputs ("\t.byte 0,", file);
314fc5a9 16650
5fc921c1
DE
16651 /* Language type. Unfortunately, there does not seem to be any
16652 official way to discover the language being compiled, so we
16653 use language_string.
16654 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16655 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16656 a number, so for now use 9. */
5fc921c1 16657 if (! strcmp (language_string, "GNU C"))
314fc5a9 16658 i = 0;
6de9cd9a
DN
16659 else if (! strcmp (language_string, "GNU F77")
16660 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16661 i = 1;
8b83775b 16662 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16663 i = 2;
5fc921c1
DE
16664 else if (! strcmp (language_string, "GNU Ada"))
16665 i = 3;
56438901
AM
16666 else if (! strcmp (language_string, "GNU C++")
16667 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16668 i = 9;
9517ead8
AG
16669 else if (! strcmp (language_string, "GNU Java"))
16670 i = 13;
5fc921c1
DE
16671 else if (! strcmp (language_string, "GNU Objective-C"))
16672 i = 14;
314fc5a9 16673 else
37409796 16674 gcc_unreachable ();
314fc5a9
ILT
16675 fprintf (file, "%d,", i);
16676
16677 /* 8 single bit fields: global linkage (not set for C extern linkage,
16678 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16679 from start of procedure stored in tbtab, internal function, function
16680 has controlled storage, function has no toc, function uses fp,
16681 function logs/aborts fp operations. */
16682 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16683 fprintf (file, "%d,",
16684 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16685
16686 /* 6 bitfields: function is interrupt handler, name present in
16687 proc table, function calls alloca, on condition directives
16688 (controls stack walks, 3 bits), saves condition reg, saves
16689 link reg. */
16690 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16691 set up as a frame pointer, even when there is no alloca call. */
16692 fprintf (file, "%d,",
6041bf2f
DE
16693 ((optional_tbtab << 6)
16694 | ((optional_tbtab & frame_pointer_needed) << 5)
16695 | (info->cr_save_p << 1)
16696 | (info->lr_save_p)));
314fc5a9 16697
6041bf2f 16698 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16699 (6 bits). */
16700 fprintf (file, "%d,",
4697a36c 16701 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16702
16703 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16704 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16705
6041bf2f
DE
16706 if (optional_tbtab)
16707 {
16708 /* Compute the parameter info from the function decl argument
16709 list. */
16710 tree decl;
16711 int next_parm_info_bit = 31;
314fc5a9 16712
6041bf2f
DE
16713 for (decl = DECL_ARGUMENTS (current_function_decl);
16714 decl; decl = TREE_CHAIN (decl))
16715 {
16716 rtx parameter = DECL_INCOMING_RTL (decl);
16717 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16718
6041bf2f
DE
16719 if (GET_CODE (parameter) == REG)
16720 {
ebb109ad 16721 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16722 {
16723 int bits;
16724
16725 float_parms++;
16726
37409796
NS
16727 switch (mode)
16728 {
16729 case SFmode:
16730 bits = 0x2;
16731 break;
16732
16733 case DFmode:
7393f7f8 16734 case DDmode:
37409796 16735 case TFmode:
7393f7f8 16736 case TDmode:
37409796
NS
16737 bits = 0x3;
16738 break;
16739
16740 default:
16741 gcc_unreachable ();
16742 }
6041bf2f
DE
16743
16744 /* If only one bit will fit, don't or in this entry. */
16745 if (next_parm_info_bit > 0)
16746 parm_info |= (bits << (next_parm_info_bit - 1));
16747 next_parm_info_bit -= 2;
16748 }
16749 else
16750 {
16751 fixed_parms += ((GET_MODE_SIZE (mode)
16752 + (UNITS_PER_WORD - 1))
16753 / UNITS_PER_WORD);
16754 next_parm_info_bit -= 1;
16755 }
16756 }
16757 }
16758 }
314fc5a9
ILT
16759
16760 /* Number of fixed point parameters. */
16761 /* This is actually the number of words of fixed point parameters; thus
 16762 an 8 byte struct counts as 2, and the maximum value is 8. */
16763 fprintf (file, "%d,", fixed_parms);
16764
16765 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16766 all on stack. */
16767 /* This is actually the number of fp registers that hold parameters;
16768 and thus the maximum value is 13. */
16769 /* Set parameters on stack bit if parameters are not in their original
16770 registers, regardless of whether they are on the stack? Xlc
16771 seems to set the bit when not optimizing. */
16772 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16773
6041bf2f
DE
16774 if (! optional_tbtab)
16775 return;
16776
314fc5a9
ILT
16777 /* Optional fields follow. Some are variable length. */
16778
16779 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16780 11 double float. */
16781 /* There is an entry for each parameter in a register, in the order that
16782 they occur in the parameter list. Any intervening arguments on the
16783 stack are ignored. If the list overflows a long (max possible length
16784 34 bits) then completely leave off all elements that don't fit. */
16785 /* Only emit this long if there was at least one parameter. */
16786 if (fixed_parms || float_parms)
16787 fprintf (file, "\t.long %d\n", parm_info);
16788
16789 /* Offset from start of code to tb table. */
19d2d16f 16790 fputs ("\t.long ", file);
314fc5a9 16791 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16792 if (TARGET_AIX)
16793 RS6000_OUTPUT_BASENAME (file, fname);
16794 else
16795 assemble_name (file, fname);
16796 putc ('-', file);
16797 rs6000_output_function_entry (file, fname);
19d2d16f 16798 putc ('\n', file);
314fc5a9
ILT
16799
16800 /* Interrupt handler mask. */
16801 /* Omit this long, since we never set the interrupt handler bit
16802 above. */
16803
16804 /* Number of CTL (controlled storage) anchors. */
16805 /* Omit this long, since the has_ctl bit is never set above. */
16806
16807 /* Displacement into stack of each CTL anchor. */
16808 /* Omit this list of longs, because there are no CTL anchors. */
16809
16810 /* Length of function name. */
69c75916
AM
16811 if (*fname == '*')
16812 ++fname;
296b8152 16813 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16814
16815 /* Function name. */
16816 assemble_string (fname, strlen (fname));
16817
16818 /* Register for alloca automatic storage; this is always reg 31.
16819 Only emit this if the alloca bit was set above. */
16820 if (frame_pointer_needed)
19d2d16f 16821 fputs ("\t.byte 31\n", file);
b1765bde
DE
16822
16823 fputs ("\t.align 2\n", file);
9b30bae2 16824 }
9878760c 16825}
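/* Worked example (sketch only, not part of GCC): the parm_info word built
   in the argument loop above is easiest to see on a concrete case.  For a
   hypothetical function taking (double, int, float), with all three
   parameters arriving in registers, the fields pack left to right from
   bit 31 as 11 (double float), 0 (fixed), 10 (single float).  */
static unsigned int
parm_info_example (void)
{
  unsigned int parm_info = 0;
  int next_parm_info_bit = 31;

  parm_info |= 0x3u << (next_parm_info_bit - 1);	/* double -> 11 */
  next_parm_info_bit -= 2;

  next_parm_info_bit -= 1;				/* int -> a single 0 bit */

  parm_info |= 0x2u << (next_parm_info_bit - 1);	/* float -> 10 */
  next_parm_info_bit -= 2;

  return parm_info;					/* 0xd0000000 */
}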
17167fd8 16826\f
a4f6c312
SS
16827/* A C compound statement that outputs the assembler code for a thunk
16828 function, used to implement C++ virtual function calls with
16829 multiple inheritance. The thunk acts as a wrapper around a virtual
16830 function, adjusting the implicit object parameter before handing
16831 control off to the real function.
16832
16833 First, emit code to add the integer DELTA to the location that
16834 contains the incoming first argument. Assume that this argument
16835 contains a pointer, and is the one used to pass the `this' pointer
16836 in C++. This is the incoming argument *before* the function
16837 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16838 values of all other incoming arguments.
17167fd8
MM
16839
16840 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16841 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16842 not touch the return address. Hence returning from FUNCTION will
16843 return to whoever called the current `thunk'.
17167fd8 16844
a4f6c312
SS
16845 The effect must be as if FUNCTION had been called directly with the
16846 adjusted first argument. This macro is responsible for emitting
16847 all of the code for a thunk function; output_function_prologue()
16848 and output_function_epilogue() are not invoked.
17167fd8 16849
a4f6c312
SS
16850 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16851 been extracted from it.) It might possibly be useful on some
16852 targets, but probably not.
17167fd8 16853
a4f6c312
SS
16854 If you do not define this macro, the target-independent code in the
16855 C++ frontend will generate a less efficient heavyweight thunk that
16856 calls FUNCTION instead of jumping to it. The generic approach does
16857 not support varargs. */
17167fd8 16858
3961e8fe 16859static void
f676971a
EC
16860rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16861 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16862 tree function)
17167fd8 16863{
5b71a4e7 16864 rtx this, insn, funexp;
17167fd8 16865
5b71a4e7 16866 reload_completed = 1;
fe3ad572 16867 epilogue_completed = 1;
56a7189a 16868
5b71a4e7 16869 /* Mark the end of the (empty) prologue. */
2e040219 16870 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16871
5b71a4e7
DE
16872 /* Find the "this" pointer. If the function returns a structure,
16873 the structure return pointer is in r3. */
61f71b34 16874 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16875 this = gen_rtx_REG (Pmode, 4);
56a7189a 16876 else
5b71a4e7 16877 this = gen_rtx_REG (Pmode, 3);
17167fd8 16878
5b71a4e7
DE
16879 /* Apply the constant offset, if required. */
16880 if (delta)
16881 {
16882 rtx delta_rtx = GEN_INT (delta);
16883 emit_insn (TARGET_32BIT
16884 ? gen_addsi3 (this, this, delta_rtx)
16885 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16886 }
16887
5b71a4e7
DE
16888 /* Apply the offset from the vtable, if required. */
16889 if (vcall_offset)
17167fd8 16890 {
5b71a4e7
DE
16891 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16892 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16893
5b71a4e7 16894 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16895 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16896 {
16897 emit_insn (TARGET_32BIT
16898 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16899 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16900 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16901 }
16902 else
16903 {
16904 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16905
16906 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16907 }
5b71a4e7
DE
16908 emit_insn (TARGET_32BIT
16909 ? gen_addsi3 (this, this, tmp)
16910 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16911 }
16912
5b71a4e7
DE
16913 /* Generate a tail call to the target function. */
16914 if (!TREE_USED (function))
16915 {
16916 assemble_external (function);
16917 TREE_USED (function) = 1;
16918 }
16919 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16920 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16921
16922#if TARGET_MACHO
ab82a49f 16923 if (MACHOPIC_INDIRECT)
5b71a4e7 16924 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16925#endif
5b71a4e7
DE
16926
16927 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16928 generate sibcall RTL explicitly. */
5b71a4e7
DE
16929 insn = emit_call_insn (
16930 gen_rtx_PARALLEL (VOIDmode,
16931 gen_rtvec (4,
16932 gen_rtx_CALL (VOIDmode,
16933 funexp, const0_rtx),
16934 gen_rtx_USE (VOIDmode, const0_rtx),
16935 gen_rtx_USE (VOIDmode,
16936 gen_rtx_REG (SImode,
1de43f85 16937 LR_REGNO)),
5b71a4e7
DE
16938 gen_rtx_RETURN (VOIDmode))));
16939 SIBLING_CALL_P (insn) = 1;
16940 emit_barrier ();
16941
16942 /* Run just enough of rest_of_compilation to get the insns emitted.
16943 There's not really enough bulk here to make other passes such as
 16944 instruction scheduling worthwhile. Note that use_thunk calls
16945 assemble_start_function and assemble_end_function. */
16946 insn = get_insns ();
55e092c4 16947 insn_locators_alloc ();
5b71a4e7
DE
16948 shorten_branches (insn);
16949 final_start_function (insn, file, 1);
c9d691e9 16950 final (insn, file, 1);
5b71a4e7
DE
16951 final_end_function ();
16952
16953 reload_completed = 0;
fe3ad572 16954 epilogue_completed = 0;
9ebbca7d 16955}
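/* A minimal C model (sketch only, not part of GCC; the names below are
   hypothetical) of what the emitted thunk does at run time: add DELTA to
   the incoming `this' pointer, optionally add a further offset loaded from
   the vtable at VCALL_OFFSET, then hand control to the real function with
   the adjusted pointer.  Pointer-sized vtable entries are assumed to fit
   in a long here.  */
static void *
thunk_model (void *this_ptr, long delta, long vcall_offset,
	     void *(*real_function) (void *))
{
  char *p = (char *) this_ptr + delta;

  if (vcall_offset)
    {
      /* The vtable pointer lives at offset 0 of the adjusted object.  */
      char *vtable = *(char **) p;
      p += *(long *) (vtable + vcall_offset);
    }

  /* The real thunk jumps rather than calls, so the callee returns
     directly to the thunk's caller.  */
  return real_function (p);
}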
9ebbca7d
GK
16956\f
16957/* A quick summary of the various types of 'constant-pool tables'
16958 under PowerPC:
16959
f676971a 16960 Target Flags Name One table per
9ebbca7d
GK
16961 AIX (none) AIX TOC object file
16962 AIX -mfull-toc AIX TOC object file
16963 AIX -mminimal-toc AIX minimal TOC translation unit
16964 SVR4/EABI (none) SVR4 SDATA object file
16965 SVR4/EABI -fpic SVR4 pic object file
16966 SVR4/EABI -fPIC SVR4 PIC translation unit
16967 SVR4/EABI -mrelocatable EABI TOC function
16968 SVR4/EABI -maix AIX TOC object file
f676971a 16969 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16970 AIX minimal TOC translation unit
16971
16972 Name Reg. Set by entries contains:
16973 made by addrs? fp? sum?
16974
16975 AIX TOC 2 crt0 as Y option option
16976 AIX minimal TOC 30 prolog gcc Y Y option
16977 SVR4 SDATA 13 crt0 gcc N Y N
16978 SVR4 pic 30 prolog ld Y not yet N
16979 SVR4 PIC 30 prolog gcc Y option option
16980 EABI TOC 30 prolog gcc Y option option
16981
16982*/
16983
9ebbca7d
GK
16984/* Hash functions for the hash table. */
16985
16986static unsigned
a2369ed3 16987rs6000_hash_constant (rtx k)
9ebbca7d 16988{
46b33600
RH
16989 enum rtx_code code = GET_CODE (k);
16990 enum machine_mode mode = GET_MODE (k);
16991 unsigned result = (code << 3) ^ mode;
16992 const char *format;
16993 int flen, fidx;
f676971a 16994
46b33600
RH
16995 format = GET_RTX_FORMAT (code);
16996 flen = strlen (format);
16997 fidx = 0;
9ebbca7d 16998
46b33600
RH
16999 switch (code)
17000 {
17001 case LABEL_REF:
17002 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17003
17004 case CONST_DOUBLE:
17005 if (mode != VOIDmode)
17006 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17007 flen = 2;
17008 break;
17009
17010 case CODE_LABEL:
17011 fidx = 3;
17012 break;
17013
17014 default:
17015 break;
17016 }
9ebbca7d
GK
17017
17018 for (; fidx < flen; fidx++)
17019 switch (format[fidx])
17020 {
17021 case 's':
17022 {
17023 unsigned i, len;
17024 const char *str = XSTR (k, fidx);
17025 len = strlen (str);
17026 result = result * 613 + len;
17027 for (i = 0; i < len; i++)
17028 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17029 break;
17030 }
9ebbca7d
GK
17031 case 'u':
17032 case 'e':
17033 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17034 break;
17035 case 'i':
17036 case 'n':
17037 result = result * 613 + (unsigned) XINT (k, fidx);
17038 break;
17039 case 'w':
17040 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17041 result = result * 613 + (unsigned) XWINT (k, fidx);
17042 else
17043 {
17044 size_t i;
9390387d 17045 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17046 result = result * 613 + (unsigned) (XWINT (k, fidx)
17047 >> CHAR_BIT * i);
17048 }
17049 break;
09501938
DE
17050 case '0':
17051 break;
9ebbca7d 17052 default:
37409796 17053 gcc_unreachable ();
9ebbca7d 17054 }
46b33600 17055
9ebbca7d
GK
17056 return result;
17057}
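/* Sketch (illustration only): the 's' case above folds a string operand
   into the running hash with the same multiplicative recurrence used for
   the other fields, i.e. multiply by 613 and add the next value.  Pulled
   out as a stand-alone helper just to make the recurrence visible;
   strlen is available via system.h.  */
static unsigned
hash_string_field_example (unsigned result, const char *str)
{
  unsigned i, len = (unsigned) strlen (str);

  result = result * 613 + len;
  for (i = 0; i < len; i++)
    result = result * 613 + (unsigned) str[i];
  return result;
}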
17058
17059static unsigned
a2369ed3 17060toc_hash_function (const void *hash_entry)
9ebbca7d 17061{
f676971a 17062 const struct toc_hash_struct *thc =
a9098fd0
GK
17063 (const struct toc_hash_struct *) hash_entry;
17064 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17065}
17066
17067/* Compare H1 and H2 for equivalence. */
17068
17069static int
a2369ed3 17070toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17071{
17072 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17073 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17074
a9098fd0
GK
17075 if (((const struct toc_hash_struct *) h1)->key_mode
17076 != ((const struct toc_hash_struct *) h2)->key_mode)
17077 return 0;
17078
5692c7bc 17079 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17080}
17081
28e510bd
MM
17082/* These are the names given by the C++ front-end to vtables, and
17083 vtable-like objects. Ideally, this logic should not be here;
17084 instead, there should be some programmatic way of inquiring as
17085 to whether or not an object is a vtable. */
17086
17087#define VTABLE_NAME_P(NAME) \
9390387d 17088 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17089 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17090 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17091 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17092 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
17093
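/* Usage sketch (illustration only): the prefixes tested above are the ones
   the Itanium C++ ABI gives to vtable-related objects, e.g. "_ZTV3Foo" for
   the vtable of a class Foo and "_ZTI3Foo" for its type_info, plus the
   older "_vt." naming.  Note that the macro expands to tests on the
   variable `name' in scope, so a caller must use that spelling.  */
static int
vtable_name_p_example (const char *name)
{
  return VTABLE_NAME_P (name);	/* nonzero for "_ZTV3Foo", zero for "foo" */
}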
17094void
a2369ed3 17095rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17096{
17097 /* Currently C++ toc references to vtables can be emitted before it
17098 is decided whether the vtable is public or private. If this is
17099 the case, then the linker will eventually complain that there is
f676971a 17100 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17101 we emit the TOC reference to reference the symbol and not the
17102 section. */
17103 const char *name = XSTR (x, 0);
54ee9799 17104
f676971a 17105 if (VTABLE_NAME_P (name))
54ee9799
DE
17106 {
17107 RS6000_OUTPUT_BASENAME (file, name);
17108 }
17109 else
17110 assemble_name (file, name);
28e510bd
MM
17111}
17112
a4f6c312
SS
17113/* Output a TOC entry. We derive the entry name from what is being
17114 written. */
9878760c
RK
17115
17116void
a2369ed3 17117output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17118{
17119 char buf[256];
3cce094d 17120 const char *name = buf;
ec940faa 17121 const char *real_name;
9878760c 17122 rtx base = x;
16fdeb48 17123 HOST_WIDE_INT offset = 0;
9878760c 17124
37409796 17125 gcc_assert (!TARGET_NO_TOC);
4697a36c 17126
9ebbca7d
GK
17127 /* When the linker won't eliminate them, don't output duplicate
17128 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17129 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17130 CODE_LABELs. */
17131 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17132 {
17133 struct toc_hash_struct *h;
17134 void * * found;
f676971a 17135
17211ab5 17136 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17137 time because GGC is not initialized at that point. */
17211ab5 17138 if (toc_hash_table == NULL)
f676971a 17139 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17140 toc_hash_eq, NULL);
17141
9ebbca7d
GK
17142 h = ggc_alloc (sizeof (*h));
17143 h->key = x;
a9098fd0 17144 h->key_mode = mode;
9ebbca7d 17145 h->labelno = labelno;
f676971a 17146
9ebbca7d
GK
17147 found = htab_find_slot (toc_hash_table, h, 1);
17148 if (*found == NULL)
17149 *found = h;
f676971a 17150 else /* This is indeed a duplicate.
9ebbca7d
GK
17151 Set this label equal to that label. */
17152 {
17153 fputs ("\t.set ", file);
17154 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17155 fprintf (file, "%d,", labelno);
17156 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17157 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17158 found)->labelno));
17159 return;
17160 }
17161 }
17162
17163 /* If we're going to put a double constant in the TOC, make sure it's
17164 aligned properly when strict alignment is on. */
ff1720ed
RK
17165 if (GET_CODE (x) == CONST_DOUBLE
17166 && STRICT_ALIGNMENT
a9098fd0 17167 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17168 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17169 ASM_OUTPUT_ALIGN (file, 3);
17170 }
17171
4977bab6 17172 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17173
37c37a57
RK
17174 /* Handle FP constants specially. Note that if we have a minimal
17175 TOC, things we put here aren't actually in the TOC, so we can allow
17176 FP constants. */
00b79d54
BE
17177 if (GET_CODE (x) == CONST_DOUBLE &&
17178 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17179 {
17180 REAL_VALUE_TYPE rv;
17181 long k[4];
17182
17183 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17184 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17185 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17186 else
17187 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17188
17189 if (TARGET_64BIT)
17190 {
17191 if (TARGET_MINIMAL_TOC)
17192 fputs (DOUBLE_INT_ASM_OP, file);
17193 else
17194 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17195 k[0] & 0xffffffff, k[1] & 0xffffffff,
17196 k[2] & 0xffffffff, k[3] & 0xffffffff);
17197 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17198 k[0] & 0xffffffff, k[1] & 0xffffffff,
17199 k[2] & 0xffffffff, k[3] & 0xffffffff);
17200 return;
17201 }
17202 else
17203 {
17204 if (TARGET_MINIMAL_TOC)
17205 fputs ("\t.long ", file);
17206 else
17207 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17208 k[0] & 0xffffffff, k[1] & 0xffffffff,
17209 k[2] & 0xffffffff, k[3] & 0xffffffff);
17210 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17211 k[0] & 0xffffffff, k[1] & 0xffffffff,
17212 k[2] & 0xffffffff, k[3] & 0xffffffff);
17213 return;
17214 }
17215 }
00b79d54
BE
17216 else if (GET_CODE (x) == CONST_DOUBLE &&
17217 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17218 {
042259f2
DE
17219 REAL_VALUE_TYPE rv;
17220 long k[2];
0adc764e 17221
042259f2 17222 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17223
17224 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17225 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17226 else
17227 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17228
13ded975
DE
17229 if (TARGET_64BIT)
17230 {
17231 if (TARGET_MINIMAL_TOC)
2bfcf297 17232 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17233 else
2f0552b6
AM
17234 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17235 k[0] & 0xffffffff, k[1] & 0xffffffff);
17236 fprintf (file, "0x%lx%08lx\n",
17237 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17238 return;
17239 }
1875cc88 17240 else
13ded975
DE
17241 {
17242 if (TARGET_MINIMAL_TOC)
2bfcf297 17243 fputs ("\t.long ", file);
13ded975 17244 else
2f0552b6
AM
17245 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17246 k[0] & 0xffffffff, k[1] & 0xffffffff);
17247 fprintf (file, "0x%lx,0x%lx\n",
17248 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17249 return;
17250 }
9878760c 17251 }
00b79d54
BE
17252 else if (GET_CODE (x) == CONST_DOUBLE &&
17253 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17254 {
042259f2
DE
17255 REAL_VALUE_TYPE rv;
17256 long l;
9878760c 17257
042259f2 17258 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17259 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17260 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17261 else
17262 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17263
31bfaa0b
DE
17264 if (TARGET_64BIT)
17265 {
17266 if (TARGET_MINIMAL_TOC)
2bfcf297 17267 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17268 else
2f0552b6
AM
17269 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17270 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17271 return;
17272 }
042259f2 17273 else
31bfaa0b
DE
17274 {
17275 if (TARGET_MINIMAL_TOC)
2bfcf297 17276 fputs ("\t.long ", file);
31bfaa0b 17277 else
2f0552b6
AM
17278 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17279 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17280 return;
17281 }
042259f2 17282 }
f176e826 17283 else if (GET_MODE (x) == VOIDmode
a9098fd0 17284 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17285 {
e2c953b6 17286 unsigned HOST_WIDE_INT low;
042259f2
DE
17287 HOST_WIDE_INT high;
17288
17289 if (GET_CODE (x) == CONST_DOUBLE)
17290 {
17291 low = CONST_DOUBLE_LOW (x);
17292 high = CONST_DOUBLE_HIGH (x);
17293 }
17294 else
17295#if HOST_BITS_PER_WIDE_INT == 32
17296 {
17297 low = INTVAL (x);
0858c623 17298 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17299 }
17300#else
17301 {
c4ad648e
AM
17302 low = INTVAL (x) & 0xffffffff;
17303 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17304 }
17305#endif
9878760c 17306
a9098fd0
GK
17307 /* TOC entries are always Pmode-sized, but since this
 17308 is a big-endian machine, if we're putting smaller
17309 integer constants in the TOC we have to pad them.
17310 (This is still a win over putting the constants in
17311 a separate constant pool, because then we'd have
02a4ec28
FS
17312 to have both a TOC entry _and_ the actual constant.)
17313
17314 For a 32-bit target, CONST_INT values are loaded and shifted
17315 entirely within `low' and can be stored in one TOC entry. */
17316
37409796
NS
17317 /* It would be easy to make this work, but it doesn't now. */
17318 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17319
17320 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17321 {
17322#if HOST_BITS_PER_WIDE_INT == 32
17323 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17324 POINTER_SIZE, &low, &high, 0);
17325#else
17326 low |= high << 32;
17327 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17328 high = (HOST_WIDE_INT) low >> 32;
17329 low &= 0xffffffff;
17330#endif
17331 }
a9098fd0 17332
13ded975
DE
17333 if (TARGET_64BIT)
17334 {
17335 if (TARGET_MINIMAL_TOC)
2bfcf297 17336 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17337 else
2f0552b6
AM
17338 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17339 (long) high & 0xffffffff, (long) low & 0xffffffff);
17340 fprintf (file, "0x%lx%08lx\n",
17341 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17342 return;
17343 }
1875cc88 17344 else
13ded975 17345 {
02a4ec28
FS
17346 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17347 {
17348 if (TARGET_MINIMAL_TOC)
2bfcf297 17349 fputs ("\t.long ", file);
02a4ec28 17350 else
2bfcf297 17351 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17352 (long) high & 0xffffffff, (long) low & 0xffffffff);
17353 fprintf (file, "0x%lx,0x%lx\n",
17354 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17355 }
13ded975 17356 else
02a4ec28
FS
17357 {
17358 if (TARGET_MINIMAL_TOC)
2bfcf297 17359 fputs ("\t.long ", file);
02a4ec28 17360 else
2f0552b6
AM
17361 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17362 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17363 }
13ded975
DE
17364 return;
17365 }
9878760c
RK
17366 }
17367
17368 if (GET_CODE (x) == CONST)
17369 {
37409796 17370 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17371
9878760c
RK
17372 base = XEXP (XEXP (x, 0), 0);
17373 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17374 }
f676971a 17375
37409796
NS
17376 switch (GET_CODE (base))
17377 {
17378 case SYMBOL_REF:
17379 name = XSTR (base, 0);
17380 break;
17381
17382 case LABEL_REF:
17383 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17384 CODE_LABEL_NUMBER (XEXP (base, 0)));
17385 break;
17386
17387 case CODE_LABEL:
17388 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17389 break;
17390
17391 default:
17392 gcc_unreachable ();
17393 }
9878760c 17394
772c5265 17395 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17396 if (TARGET_MINIMAL_TOC)
2bfcf297 17397 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17398 else
17399 {
b6c9286a 17400 fprintf (file, "\t.tc %s", real_name);
9878760c 17401
1875cc88 17402 if (offset < 0)
16fdeb48 17403 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17404 else if (offset)
16fdeb48 17405 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17406
19d2d16f 17407 fputs ("[TC],", file);
1875cc88 17408 }
581bc4de
MM
17409
17410 /* Currently C++ toc references to vtables can be emitted before it
17411 is decided whether the vtable is public or private. If this is
17412 the case, then the linker will eventually complain that there is
17413 a TOC reference to an unknown section. Thus, for vtables only,
17414 we emit the TOC reference to reference the symbol and not the
17415 section. */
28e510bd 17416 if (VTABLE_NAME_P (name))
581bc4de 17417 {
54ee9799 17418 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17419 if (offset < 0)
16fdeb48 17420 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17421 else if (offset > 0)
16fdeb48 17422 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17423 }
17424 else
17425 output_addr_const (file, x);
19d2d16f 17426 putc ('\n', file);
9878760c
RK
17427}
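/* Worked example (sketch only, not part of GCC): for the DFmode constant
   1.0, whose IEEE-754 image is 0x3ff0000000000000, the double branch above
   on a 64-bit target without -mminimal-toc emits a TOC entry of the form
	.tc FD_3ff00000_0[TC],0x3ff0000000000000
   The helper below reproduces only that formatting step; the word values
   are hard-coded instead of coming from REAL_VALUE_TO_TARGET_DOUBLE.  */
static void
output_toc_double_example (FILE *file)
{
  long k[2] = { 0x3ff00000L, 0x0L };	/* high and low words of 1.0 */

  fprintf (file, "\t.tc FD_%lx_%lx[TC],",
	   k[0] & 0xffffffff, k[1] & 0xffffffff);
  fprintf (file, "0x%lx%08lx\n",
	   k[0] & 0xffffffff, k[1] & 0xffffffff);
}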
17428\f
17429/* Output an assembler pseudo-op to write an ASCII string of N characters
17430 starting at P to FILE.
17431
17432 On the RS/6000, we have to do this using the .byte operation and
17433 write out special characters outside the quoted string.
17434 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17435 so we must artificially break them up early. */
9878760c
RK
17436
17437void
a2369ed3 17438output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17439{
17440 char c;
17441 int i, count_string;
d330fd93
KG
17442 const char *for_string = "\t.byte \"";
17443 const char *for_decimal = "\t.byte ";
17444 const char *to_close = NULL;
9878760c
RK
17445
17446 count_string = 0;
17447 for (i = 0; i < n; i++)
17448 {
17449 c = *p++;
17450 if (c >= ' ' && c < 0177)
17451 {
17452 if (for_string)
17453 fputs (for_string, file);
17454 putc (c, file);
17455
17456 /* Write two quotes to get one. */
17457 if (c == '"')
17458 {
17459 putc (c, file);
17460 ++count_string;
17461 }
17462
17463 for_string = NULL;
17464 for_decimal = "\"\n\t.byte ";
17465 to_close = "\"\n";
17466 ++count_string;
17467
17468 if (count_string >= 512)
17469 {
17470 fputs (to_close, file);
17471
17472 for_string = "\t.byte \"";
17473 for_decimal = "\t.byte ";
17474 to_close = NULL;
17475 count_string = 0;
17476 }
17477 }
17478 else
17479 {
17480 if (for_decimal)
17481 fputs (for_decimal, file);
17482 fprintf (file, "%d", c);
17483
17484 for_string = "\n\t.byte \"";
17485 for_decimal = ", ";
17486 to_close = "\n";
17487 count_string = 0;
17488 }
17489 }
17490
17491 /* Now close the string if we have written one. Then end the line. */
17492 if (to_close)
9ebbca7d 17493 fputs (to_close, file);
9878760c
RK
17494}
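/* Usage sketch (illustration only, never called): for the three bytes
   "Hi\n" the routine above writes
	.byte "Hi"
	.byte 10
   i.e. printable runs are grouped inside one quoted .byte directive and
   every other byte is emitted as a decimal value.  */
static void
output_ascii_example (FILE *file)
{
  output_ascii (file, "Hi\n", 3);
}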
17495\f
17496/* Generate a unique section name for FILENAME for a section type
17497 represented by SECTION_DESC. Output goes into BUF.
17498
17499 SECTION_DESC can be any string, as long as it is different for each
17500 possible section type.
17501
17502 We name the section in the same manner as xlc. The name begins with an
17503 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17504 names) with the last period replaced by the string SECTION_DESC. If
17505 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17506 the name. */
9878760c
RK
17507
17508void
f676971a 17509rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17510 const char *section_desc)
9878760c 17511{
9ebbca7d 17512 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17513 char *p;
17514 int len;
9878760c
RK
17515
17516 after_last_slash = filename;
17517 for (q = filename; *q; q++)
11e5fe42
RK
17518 {
17519 if (*q == '/')
17520 after_last_slash = q + 1;
17521 else if (*q == '.')
17522 last_period = q;
17523 }
9878760c 17524
11e5fe42 17525 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17526 *buf = (char *) xmalloc (len);
9878760c
RK
17527
17528 p = *buf;
17529 *p++ = '_';
17530
17531 for (q = after_last_slash; *q; q++)
17532 {
11e5fe42 17533 if (q == last_period)
c4ad648e 17534 {
9878760c
RK
17535 strcpy (p, section_desc);
17536 p += strlen (section_desc);
e3981aab 17537 break;
c4ad648e 17538 }
9878760c 17539
e9a780ec 17540 else if (ISALNUM (*q))
c4ad648e 17541 *p++ = *q;
9878760c
RK
17542 }
17543
11e5fe42 17544 if (last_period == 0)
9878760c
RK
17545 strcpy (p, section_desc);
17546 else
17547 *p = '\0';
17548}
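/* Usage sketch (illustration only): following the loop above, the name
   generated for "src/foo.c" with SECTION_DESC "ro_" is "_fooro_": a
   leading underscore, the basename up to (but not including) its last
   period, then the descriptor; whatever follows the period is dropped.  */
static void
gen_section_name_example (void)
{
  char *buf;

  rs6000_gen_section_name (&buf, "src/foo.c", "ro_");
  /* buf now points to the xmalloc'd string "_fooro_".  */
  free (buf);
}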
e165f3f0 17549\f
a4f6c312 17550/* Emit profile function. */
411707f4 17551
411707f4 17552void
a2369ed3 17553output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17554{
858081ad
AH
17555 /* Non-standard profiling for kernels, which just saves LR then calls
17556 _mcount without worrying about arg saves. The idea is to change
17557 the function prologue as little as possible as it isn't easy to
17558 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17559 if (TARGET_PROFILE_KERNEL)
17560 return;
17561
8480e480
CC
17562 if (DEFAULT_ABI == ABI_AIX)
17563 {
9739c90c
JJ
17564#ifndef NO_PROFILE_COUNTERS
17565# define NO_PROFILE_COUNTERS 0
17566#endif
f676971a 17567 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17568 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17569 else
17570 {
17571 char buf[30];
17572 const char *label_name;
17573 rtx fun;
411707f4 17574
9739c90c
JJ
17575 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17576 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17577 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17578
9739c90c
JJ
17579 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17580 fun, Pmode);
17581 }
8480e480 17582 }
ee890fe2
SS
17583 else if (DEFAULT_ABI == ABI_DARWIN)
17584 {
d5fa86ba 17585 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17586 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17587
17588 /* Be conservative and always set this, at least for now. */
17589 current_function_uses_pic_offset_table = 1;
17590
17591#if TARGET_MACHO
17592 /* For PIC code, set up a stub and collect the caller's address
17593 from r0, which is where the prologue puts it. */
11abc112
MM
17594 if (MACHOPIC_INDIRECT
17595 && current_function_uses_pic_offset_table)
17596 caller_addr_regno = 0;
ee890fe2
SS
17597#endif
17598 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17599 0, VOIDmode, 1,
17600 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17601 }
411707f4
CC
17602}
17603
a4f6c312 17604/* Write function profiler code. */
e165f3f0
RK
17605
17606void
a2369ed3 17607output_function_profiler (FILE *file, int labelno)
e165f3f0 17608{
3daf36a4 17609 char buf[100];
e165f3f0 17610
38c1f2d7 17611 switch (DEFAULT_ABI)
3daf36a4 17612 {
38c1f2d7 17613 default:
37409796 17614 gcc_unreachable ();
38c1f2d7
MM
17615
17616 case ABI_V4:
09eeeacb
AM
17617 if (!TARGET_32BIT)
17618 {
d4ee4d25 17619 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17620 return;
17621 }
ffcfcb5f 17622 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17623 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17624 if (NO_PROFILE_COUNTERS)
17625 {
17626 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17627 reg_names[0], reg_names[1]);
17628 }
17629 else if (TARGET_SECURE_PLT && flag_pic)
17630 {
17631 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17632 reg_names[0], reg_names[1]);
17633 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17634 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17635 reg_names[12], reg_names[12]);
17636 assemble_name (file, buf);
17637 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17638 assemble_name (file, buf);
17639 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17640 }
17641 else if (flag_pic == 1)
38c1f2d7 17642 {
dfdfa60f 17643 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17644 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17645 reg_names[0], reg_names[1]);
17167fd8 17646 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17647 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17648 assemble_name (file, buf);
17167fd8 17649 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17650 }
9ebbca7d 17651 else if (flag_pic > 1)
38c1f2d7 17652 {
71625f3d
AM
17653 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17654 reg_names[0], reg_names[1]);
9ebbca7d 17655 /* Now, we need to get the address of the label. */
71625f3d 17656 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17657 assemble_name (file, buf);
9ebbca7d
GK
17658 fputs ("-.\n1:", file);
17659 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17660 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17661 reg_names[0], reg_names[11]);
17662 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17663 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17664 }
38c1f2d7
MM
17665 else
17666 {
17167fd8 17667 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17668 assemble_name (file, buf);
dfdfa60f 17669 fputs ("@ha\n", file);
71625f3d
AM
17670 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17671 reg_names[0], reg_names[1]);
a260abc9 17672 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17673 assemble_name (file, buf);
17167fd8 17674 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17675 }
17676
50d440bc 17677 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17678 fprintf (file, "\tbl %s%s\n",
17679 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17680 break;
17681
17682 case ABI_AIX:
ee890fe2 17683 case ABI_DARWIN:
ffcfcb5f
AM
17684 if (!TARGET_PROFILE_KERNEL)
17685 {
a3c9585f 17686 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17687 }
17688 else
17689 {
37409796 17690 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17691
17692 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17693 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17694
6de9cd9a 17695 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17696 {
17697 asm_fprintf (file, "\tstd %s,24(%s)\n",
17698 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17699 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17700 asm_fprintf (file, "\tld %s,24(%s)\n",
17701 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17702 }
17703 else
17704 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17705 }
38c1f2d7
MM
17706 break;
17707 }
e165f3f0 17708}
a251ffd0 17709
b54cf83a 17710\f
44cd321e
PS
17711
17712/* The following variable value is the last issued insn. */
17713
17714static rtx last_scheduled_insn;
17715
17716/* The following variable helps to balance issuing of load and
17717 store instructions */
17718
17719static int load_store_pendulum;
17720
b54cf83a
DE
17721/* Power4 load update and store update instructions are cracked into a
17722 load or store and an integer insn which are executed in the same cycle.
17723 Branches have their own dispatch slot which does not count against the
17724 GCC issue rate, but it changes the program flow so there are no other
17725 instructions to issue in this cycle. */
17726
17727static int
f676971a
EC
17728rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17729 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17730 rtx insn, int more)
b54cf83a 17731{
44cd321e 17732 last_scheduled_insn = insn;
b54cf83a
DE
17733 if (GET_CODE (PATTERN (insn)) == USE
17734 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17735 {
17736 cached_can_issue_more = more;
17737 return cached_can_issue_more;
17738 }
17739
17740 if (insn_terminates_group_p (insn, current_group))
17741 {
17742 cached_can_issue_more = 0;
17743 return cached_can_issue_more;
17744 }
b54cf83a 17745
d296e02e
AP
17746 /* If no reservation, but reach here */
17747 if (recog_memoized (insn) < 0)
17748 return more;
17749
ec507f2d 17750 if (rs6000_sched_groups)
b54cf83a 17751 {
cbe26ab8 17752 if (is_microcoded_insn (insn))
44cd321e 17753 cached_can_issue_more = 0;
cbe26ab8 17754 else if (is_cracked_insn (insn))
44cd321e
PS
17755 cached_can_issue_more = more > 2 ? more - 2 : 0;
17756 else
17757 cached_can_issue_more = more - 1;
17758
17759 return cached_can_issue_more;
b54cf83a 17760 }
165b263e 17761
d296e02e
AP
17762 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17763 return 0;
17764
44cd321e
PS
17765 cached_can_issue_more = more - 1;
17766 return cached_can_issue_more;
b54cf83a
DE
17767}
17768
a251ffd0
TG
17769/* Adjust the cost of a scheduling dependency. Return the new cost of
17770 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17771
c237e94a 17772static int
0a4f0294 17773rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17774{
44cd321e 17775 enum attr_type attr_type;
a251ffd0 17776
44cd321e 17777 if (! recog_memoized (insn))
a251ffd0
TG
17778 return 0;
17779
44cd321e 17780 switch (REG_NOTE_KIND (link))
a251ffd0 17781 {
44cd321e
PS
17782 case REG_DEP_TRUE:
17783 {
17784 /* Data dependency; DEP_INSN writes a register that INSN reads
17785 some cycles later. */
17786
17787 /* Separate a load from a narrower, dependent store. */
17788 if (rs6000_sched_groups
17789 && GET_CODE (PATTERN (insn)) == SET
17790 && GET_CODE (PATTERN (dep_insn)) == SET
17791 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17792 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17793 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17794 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17795 return cost + 14;
17796
17797 attr_type = get_attr_type (insn);
17798
17799 switch (attr_type)
17800 {
17801 case TYPE_JMPREG:
17802 /* Tell the first scheduling pass about the latency between
17803 a mtctr and bctr (and mtlr and br/blr). The first
17804 scheduling pass will not know about this latency since
17805 the mtctr instruction, which has the latency associated
17806 to it, will be generated by reload. */
17807 return TARGET_POWER ? 5 : 4;
17808 case TYPE_BRANCH:
17809 /* Leave some extra cycles between a compare and its
17810 dependent branch, to inhibit expensive mispredicts. */
17811 if ((rs6000_cpu_attr == CPU_PPC603
17812 || rs6000_cpu_attr == CPU_PPC604
17813 || rs6000_cpu_attr == CPU_PPC604E
17814 || rs6000_cpu_attr == CPU_PPC620
17815 || rs6000_cpu_attr == CPU_PPC630
17816 || rs6000_cpu_attr == CPU_PPC750
17817 || rs6000_cpu_attr == CPU_PPC7400
17818 || rs6000_cpu_attr == CPU_PPC7450
17819 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17820 || rs6000_cpu_attr == CPU_POWER5
17821 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17822 && recog_memoized (dep_insn)
17823 && (INSN_CODE (dep_insn) >= 0))
982afe02 17824
44cd321e
PS
17825 switch (get_attr_type (dep_insn))
17826 {
17827 case TYPE_CMP:
17828 case TYPE_COMPARE:
17829 case TYPE_DELAYED_COMPARE:
17830 case TYPE_IMUL_COMPARE:
17831 case TYPE_LMUL_COMPARE:
17832 case TYPE_FPCOMPARE:
17833 case TYPE_CR_LOGICAL:
17834 case TYPE_DELAYED_CR:
17835 return cost + 2;
17836 default:
17837 break;
17838 }
17839 break;
17840
17841 case TYPE_STORE:
17842 case TYPE_STORE_U:
17843 case TYPE_STORE_UX:
17844 case TYPE_FPSTORE:
17845 case TYPE_FPSTORE_U:
17846 case TYPE_FPSTORE_UX:
17847 if ((rs6000_cpu == PROCESSOR_POWER6)
17848 && recog_memoized (dep_insn)
17849 && (INSN_CODE (dep_insn) >= 0))
17850 {
17851
17852 if (GET_CODE (PATTERN (insn)) != SET)
17853 /* If this happens, we have to extend this to schedule
17854 optimally. Return default for now. */
17855 return cost;
17856
17857 /* Adjust the cost for the case where the value written
17858 by a fixed point operation is used as the address
17859 gen value on a store. */
17860 switch (get_attr_type (dep_insn))
17861 {
17862 case TYPE_LOAD:
17863 case TYPE_LOAD_U:
17864 case TYPE_LOAD_UX:
17865 case TYPE_CNTLZ:
17866 {
17867 if (! store_data_bypass_p (dep_insn, insn))
17868 return 4;
17869 break;
17870 }
17871 case TYPE_LOAD_EXT:
17872 case TYPE_LOAD_EXT_U:
17873 case TYPE_LOAD_EXT_UX:
17874 case TYPE_VAR_SHIFT_ROTATE:
17875 case TYPE_VAR_DELAYED_COMPARE:
17876 {
17877 if (! store_data_bypass_p (dep_insn, insn))
17878 return 6;
17879 break;
17880 }
17881 case TYPE_INTEGER:
17882 case TYPE_COMPARE:
17883 case TYPE_FAST_COMPARE:
17884 case TYPE_EXTS:
17885 case TYPE_SHIFT:
17886 case TYPE_INSERT_WORD:
17887 case TYPE_INSERT_DWORD:
17888 case TYPE_FPLOAD_U:
17889 case TYPE_FPLOAD_UX:
17890 case TYPE_STORE_U:
17891 case TYPE_STORE_UX:
17892 case TYPE_FPSTORE_U:
17893 case TYPE_FPSTORE_UX:
17894 {
17895 if (! store_data_bypass_p (dep_insn, insn))
17896 return 3;
17897 break;
17898 }
17899 case TYPE_IMUL:
17900 case TYPE_IMUL2:
17901 case TYPE_IMUL3:
17902 case TYPE_LMUL:
17903 case TYPE_IMUL_COMPARE:
17904 case TYPE_LMUL_COMPARE:
17905 {
17906 if (! store_data_bypass_p (dep_insn, insn))
17907 return 17;
17908 break;
17909 }
17910 case TYPE_IDIV:
17911 {
17912 if (! store_data_bypass_p (dep_insn, insn))
17913 return 45;
17914 break;
17915 }
17916 case TYPE_LDIV:
17917 {
17918 if (! store_data_bypass_p (dep_insn, insn))
17919 return 57;
17920 break;
17921 }
17922 default:
17923 break;
17924 }
17925 }
17926 break;
17927
17928 case TYPE_LOAD:
17929 case TYPE_LOAD_U:
17930 case TYPE_LOAD_UX:
17931 case TYPE_LOAD_EXT:
17932 case TYPE_LOAD_EXT_U:
17933 case TYPE_LOAD_EXT_UX:
17934 if ((rs6000_cpu == PROCESSOR_POWER6)
17935 && recog_memoized (dep_insn)
17936 && (INSN_CODE (dep_insn) >= 0))
17937 {
17938
17939 /* Adjust the cost for the case where the value written
17940 by a fixed point instruction is used within the address
17941 gen portion of a subsequent load(u)(x) */
17942 switch (get_attr_type (dep_insn))
17943 {
17944 case TYPE_LOAD:
17945 case TYPE_LOAD_U:
17946 case TYPE_LOAD_UX:
17947 case TYPE_CNTLZ:
17948 {
17949 if (set_to_load_agen (dep_insn, insn))
17950 return 4;
17951 break;
17952 }
17953 case TYPE_LOAD_EXT:
17954 case TYPE_LOAD_EXT_U:
17955 case TYPE_LOAD_EXT_UX:
17956 case TYPE_VAR_SHIFT_ROTATE:
17957 case TYPE_VAR_DELAYED_COMPARE:
17958 {
17959 if (set_to_load_agen (dep_insn, insn))
17960 return 6;
17961 break;
17962 }
17963 case TYPE_INTEGER:
17964 case TYPE_COMPARE:
17965 case TYPE_FAST_COMPARE:
17966 case TYPE_EXTS:
17967 case TYPE_SHIFT:
17968 case TYPE_INSERT_WORD:
17969 case TYPE_INSERT_DWORD:
17970 case TYPE_FPLOAD_U:
17971 case TYPE_FPLOAD_UX:
17972 case TYPE_STORE_U:
17973 case TYPE_STORE_UX:
17974 case TYPE_FPSTORE_U:
17975 case TYPE_FPSTORE_UX:
17976 {
17977 if (set_to_load_agen (dep_insn, insn))
17978 return 3;
17979 break;
17980 }
17981 case TYPE_IMUL:
17982 case TYPE_IMUL2:
17983 case TYPE_IMUL3:
17984 case TYPE_LMUL:
17985 case TYPE_IMUL_COMPARE:
17986 case TYPE_LMUL_COMPARE:
17987 {
17988 if (set_to_load_agen (dep_insn, insn))
17989 return 17;
17990 break;
17991 }
17992 case TYPE_IDIV:
17993 {
17994 if (set_to_load_agen (dep_insn, insn))
17995 return 45;
17996 break;
17997 }
17998 case TYPE_LDIV:
17999 {
18000 if (set_to_load_agen (dep_insn, insn))
18001 return 57;
18002 break;
18003 }
18004 default:
18005 break;
18006 }
18007 }
18008 break;
18009
18010 case TYPE_FPLOAD:
18011 if ((rs6000_cpu == PROCESSOR_POWER6)
18012 && recog_memoized (dep_insn)
18013 && (INSN_CODE (dep_insn) >= 0)
18014 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18015 return 2;
18016
18017 default:
18018 break;
18019 }
c9dbf840 18020
a251ffd0 18021 /* Fall out to return default cost. */
44cd321e
PS
18022 }
18023 break;
18024
18025 case REG_DEP_OUTPUT:
18026 /* Output dependency; DEP_INSN writes a register that INSN writes some
18027 cycles later. */
18028 if ((rs6000_cpu == PROCESSOR_POWER6)
18029 && recog_memoized (dep_insn)
18030 && (INSN_CODE (dep_insn) >= 0))
18031 {
18032 attr_type = get_attr_type (insn);
18033
18034 switch (attr_type)
18035 {
18036 case TYPE_FP:
18037 if (get_attr_type (dep_insn) == TYPE_FP)
18038 return 1;
18039 break;
18040 case TYPE_FPLOAD:
18041 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18042 return 2;
18043 break;
18044 default:
18045 break;
18046 }
18047 }
18048 case REG_DEP_ANTI:
18049 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18050 cycles later. */
18051 return 0;
18052
18053 default:
18054 gcc_unreachable ();
a251ffd0
TG
18055 }
18056
18057 return cost;
18058}
b6c9286a 18059
cbe26ab8 18060/* The function returns true if INSN is microcoded.
839a4992 18061 Return false otherwise. */
cbe26ab8
DN
18062
18063static bool
18064is_microcoded_insn (rtx insn)
18065{
18066 if (!insn || !INSN_P (insn)
18067 || GET_CODE (PATTERN (insn)) == USE
18068 || GET_CODE (PATTERN (insn)) == CLOBBER)
18069 return false;
18070
d296e02e
AP
18071 if (rs6000_cpu_attr == CPU_CELL)
18072 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18073
ec507f2d 18074 if (rs6000_sched_groups)
cbe26ab8
DN
18075 {
18076 enum attr_type type = get_attr_type (insn);
18077 if (type == TYPE_LOAD_EXT_U
18078 || type == TYPE_LOAD_EXT_UX
18079 || type == TYPE_LOAD_UX
18080 || type == TYPE_STORE_UX
18081 || type == TYPE_MFCR)
c4ad648e 18082 return true;
cbe26ab8
DN
18083 }
18084
18085 return false;
18086}
18087
cbe26ab8
DN
18088/* The function returns true if INSN is cracked into 2 instructions
18089 by the processor (and therefore occupies 2 issue slots). */
18090
18091static bool
18092is_cracked_insn (rtx insn)
18093{
18094 if (!insn || !INSN_P (insn)
18095 || GET_CODE (PATTERN (insn)) == USE
18096 || GET_CODE (PATTERN (insn)) == CLOBBER)
18097 return false;
18098
ec507f2d 18099 if (rs6000_sched_groups)
cbe26ab8
DN
18100 {
18101 enum attr_type type = get_attr_type (insn);
18102 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18103 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18104 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18105 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18106 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18107 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18108 || type == TYPE_IDIV || type == TYPE_LDIV
18109 || type == TYPE_INSERT_WORD)
18110 return true;
cbe26ab8
DN
18111 }
18112
18113 return false;
18114}
18115
18116/* The function returns true if INSN can be issued only from
a3c9585f 18117 the branch slot. */
cbe26ab8
DN
18118
18119static bool
18120is_branch_slot_insn (rtx insn)
18121{
18122 if (!insn || !INSN_P (insn)
18123 || GET_CODE (PATTERN (insn)) == USE
18124 || GET_CODE (PATTERN (insn)) == CLOBBER)
18125 return false;
18126
ec507f2d 18127 if (rs6000_sched_groups)
cbe26ab8
DN
18128 {
18129 enum attr_type type = get_attr_type (insn);
18130 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18131 return true;
cbe26ab8
DN
18132 return false;
18133 }
18134
18135 return false;
18136}
79ae11c4 18137
44cd321e
PS
18138/* The function returns true if out_insn sets a value that is
18139 used in the address generation computation of in_insn. */
18140static bool
18141set_to_load_agen (rtx out_insn, rtx in_insn)
18142{
18143 rtx out_set, in_set;
18144
18145 /* For performance reasons, only handle the simple case where
18146 both insns are a single_set. */
18147 out_set = single_set (out_insn);
18148 if (out_set)
18149 {
18150 in_set = single_set (in_insn);
18151 if (in_set)
18152 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18153 }
18154
18155 return false;
18156}
18157
18158/* The function returns true if the target storage location of
18159 insn1 is adjacent to the target storage location of insn2. */
18160
18161
18162static bool
18163adjacent_mem_locations (rtx insn1, rtx insn2)
18164{
18165
e3a0e200
PB
18166 rtx a = get_store_dest (PATTERN (insn1));
18167 rtx b = get_store_dest (PATTERN (insn2));
18168
44cd321e
PS
18169 if ((GET_CODE (XEXP (a, 0)) == REG
18170 || (GET_CODE (XEXP (a, 0)) == PLUS
18171 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18172 && (GET_CODE (XEXP (b, 0)) == REG
18173 || (GET_CODE (XEXP (b, 0)) == PLUS
18174 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18175 {
f98e8938 18176 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18177 rtx reg0, reg1;
44cd321e
PS
18178
18179 if (GET_CODE (XEXP (a, 0)) == PLUS)
18180 {
18181 reg0 = XEXP (XEXP (a, 0), 0);
18182 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18183 }
18184 else
18185 reg0 = XEXP (a, 0);
18186
18187 if (GET_CODE (XEXP (b, 0)) == PLUS)
18188 {
18189 reg1 = XEXP (XEXP (b, 0), 0);
18190 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18191 }
18192 else
18193 reg1 = XEXP (b, 0);
18194
18195 val_diff = val1 - val0;
18196
18197 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18198 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18199 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18200 }
18201
18202 return false;
18203}
18204
a4f6c312 18205/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18206 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18207 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18208 define this macro if you do not need to adjust the scheduling
18209 priorities of insns. */
bef84347 18210
c237e94a 18211static int
a2369ed3 18212rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18213{
a4f6c312
SS
18214 /* On machines (like the 750) which have asymmetric integer units,
18215 where one integer unit can do multiply and divides and the other
18216 can't, reduce the priority of multiply/divide so it is scheduled
18217 before other integer operations. */
bef84347
VM
18218
18219#if 0
2c3c49de 18220 if (! INSN_P (insn))
bef84347
VM
18221 return priority;
18222
18223 if (GET_CODE (PATTERN (insn)) == USE)
18224 return priority;
18225
18226 switch (rs6000_cpu_attr) {
18227 case CPU_PPC750:
18228 switch (get_attr_type (insn))
18229 {
18230 default:
18231 break;
18232
18233 case TYPE_IMUL:
18234 case TYPE_IDIV:
3cb999d8
DE
18235 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18236 priority, priority);
bef84347
VM
18237 if (priority >= 0 && priority < 0x01000000)
18238 priority >>= 3;
18239 break;
18240 }
18241 }
18242#endif
18243
44cd321e 18244 if (insn_must_be_first_in_group (insn)
79ae11c4 18245 && reload_completed
f676971a 18246 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18247 && rs6000_sched_restricted_insns_priority)
18248 {
18249
c4ad648e
AM
18250 /* Prioritize insns that can be dispatched only in the first
18251 dispatch slot. */
79ae11c4 18252 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18253 /* Attach highest priority to insn. This means that in
18254 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18255 precede 'priority' (critical path) considerations. */
f676971a 18256 return current_sched_info->sched_max_insns_priority;
79ae11c4 18257 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18258 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18259 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18260 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18261 return (priority + 1);
18262 }
79ae11c4 18263
44cd321e
PS
18264 if (rs6000_cpu == PROCESSOR_POWER6
18265 && ((load_store_pendulum == -2 && is_load_insn (insn))
18266 || (load_store_pendulum == 2 && is_store_insn (insn))))
18267 /* Attach highest priority to insn if the scheduler has just issued two
18268 stores and this instruction is a load, or two loads and this instruction
18269 is a store. Power6 wants loads and stores scheduled alternately
18270 when possible */
18271 return current_sched_info->sched_max_insns_priority;
18272
bef84347
VM
18273 return priority;
18274}
18275
d296e02e
AP
18276/* Return true if the instruction is nonpipelined on the Cell. */
18277static bool
18278is_nonpipeline_insn (rtx insn)
18279{
18280 enum attr_type type;
18281 if (!insn || !INSN_P (insn)
18282 || GET_CODE (PATTERN (insn)) == USE
18283 || GET_CODE (PATTERN (insn)) == CLOBBER)
18284 return false;
18285
18286 type = get_attr_type (insn);
18287 if (type == TYPE_IMUL
18288 || type == TYPE_IMUL2
18289 || type == TYPE_IMUL3
18290 || type == TYPE_LMUL
18291 || type == TYPE_IDIV
18292 || type == TYPE_LDIV
18293 || type == TYPE_SDIV
18294 || type == TYPE_DDIV
18295 || type == TYPE_SSQRT
18296 || type == TYPE_DSQRT
18297 || type == TYPE_MFCR
18298 || type == TYPE_MFCRF
18299 || type == TYPE_MFJMPR)
18300 {
18301 return true;
18302 }
18303 return false;
18304}
18305
18306
a4f6c312
SS
18307/* Return how many instructions the machine can issue per cycle. */
18308
c237e94a 18309static int
863d938c 18310rs6000_issue_rate (void)
b6c9286a 18311{
3317bab1
DE
18312 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18313 if (!reload_completed)
18314 return 1;
18315
b6c9286a 18316 switch (rs6000_cpu_attr) {
3cb999d8
DE
18317 case CPU_RIOS1: /* ? */
18318 case CPU_RS64A:
18319 case CPU_PPC601: /* ? */
ed947a96 18320 case CPU_PPC7450:
3cb999d8 18321 return 3;
b54cf83a 18322 case CPU_PPC440:
b6c9286a 18323 case CPU_PPC603:
bef84347 18324 case CPU_PPC750:
ed947a96 18325 case CPU_PPC7400:
be12c2b0 18326 case CPU_PPC8540:
d296e02e 18327 case CPU_CELL:
f676971a 18328 return 2;
3cb999d8 18329 case CPU_RIOS2:
b6c9286a 18330 case CPU_PPC604:
19684119 18331 case CPU_PPC604E:
b6c9286a 18332 case CPU_PPC620:
3cb999d8 18333 case CPU_PPC630:
b6c9286a 18334 return 4;
cbe26ab8 18335 case CPU_POWER4:
ec507f2d 18336 case CPU_POWER5:
44cd321e 18337 case CPU_POWER6:
cbe26ab8 18338 return 5;
b6c9286a
MM
18339 default:
18340 return 1;
18341 }
18342}
18343
be12c2b0
VM
18344/* Return how many instructions to look ahead for better insn
18345 scheduling. */
18346
18347static int
863d938c 18348rs6000_use_sched_lookahead (void)
be12c2b0
VM
18349{
18350 if (rs6000_cpu_attr == CPU_PPC8540)
18351 return 4;
d296e02e
AP
18352 if (rs6000_cpu_attr == CPU_CELL)
18353 return (reload_completed ? 8 : 0);
be12c2b0
VM
18354 return 0;
18355}
18356
d296e02e
AP
18357/* We are choosing an insn from the ready queue. Return nonzero if INSN can be chosen. */
18358static int
18359rs6000_use_sched_lookahead_guard (rtx insn)
18360{
18361 if (rs6000_cpu_attr != CPU_CELL)
18362 return 1;
18363
18364 if (insn == NULL_RTX || !INSN_P (insn))
18365 abort ();
982afe02 18366
d296e02e
AP
18367 if (!reload_completed
18368 || is_nonpipeline_insn (insn)
18369 || is_microcoded_insn (insn))
18370 return 0;
18371
18372 return 1;
18373}
18374
569fa502
DN
18375/* Determine if PAT refers to memory. */
18376
18377static bool
18378is_mem_ref (rtx pat)
18379{
18380 const char * fmt;
18381 int i, j;
18382 bool ret = false;
18383
1de59bbd
DE
18384 /* stack_tie does not produce any real memory traffic. */
18385 if (GET_CODE (pat) == UNSPEC
18386 && XINT (pat, 1) == UNSPEC_TIE)
18387 return false;
18388
569fa502
DN
18389 if (GET_CODE (pat) == MEM)
18390 return true;
18391
18392 /* Recursively process the pattern. */
18393 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18394
18395 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18396 {
18397 if (fmt[i] == 'e')
18398 ret |= is_mem_ref (XEXP (pat, i));
18399 else if (fmt[i] == 'E')
18400 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18401 ret |= is_mem_ref (XVECEXP (pat, i, j));
18402 }
18403
18404 return ret;
18405}
18406
18407/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18408
569fa502
DN
18409static bool
18410is_load_insn1 (rtx pat)
18411{
18412 if (!pat || pat == NULL_RTX)
18413 return false;
18414
18415 if (GET_CODE (pat) == SET)
18416 return is_mem_ref (SET_SRC (pat));
18417
18418 if (GET_CODE (pat) == PARALLEL)
18419 {
18420 int i;
18421
18422 for (i = 0; i < XVECLEN (pat, 0); i++)
18423 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18424 return true;
18425 }
18426
18427 return false;
18428}
18429
18430/* Determine if INSN loads from memory. */
18431
18432static bool
18433is_load_insn (rtx insn)
18434{
18435 if (!insn || !INSN_P (insn))
18436 return false;
18437
18438 if (GET_CODE (insn) == CALL_INSN)
18439 return false;
18440
18441 return is_load_insn1 (PATTERN (insn));
18442}
18443
18444/* Determine if PAT is a PATTERN of a store insn. */
18445
18446static bool
18447is_store_insn1 (rtx pat)
18448{
18449 if (!pat || pat == NULL_RTX)
18450 return false;
18451
18452 if (GET_CODE (pat) == SET)
18453 return is_mem_ref (SET_DEST (pat));
18454
18455 if (GET_CODE (pat) == PARALLEL)
18456 {
18457 int i;
18458
18459 for (i = 0; i < XVECLEN (pat, 0); i++)
18460 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18461 return true;
18462 }
18463
18464 return false;
18465}
18466
18467/* Determine if INSN stores to memory. */
18468
18469static bool
18470is_store_insn (rtx insn)
18471{
18472 if (!insn || !INSN_P (insn))
18473 return false;
18474
18475 return is_store_insn1 (PATTERN (insn));
18476}
18477
e3a0e200
PB
18478/* Return the dest of a store insn. */
18479
18480static rtx
18481get_store_dest (rtx pat)
18482{
18483 gcc_assert (is_store_insn1 (pat));
18484
18485 if (GET_CODE (pat) == SET)
18486 return SET_DEST (pat);
18487 else if (GET_CODE (pat) == PARALLEL)
18488 {
18489 int i;
18490
18491 for (i = 0; i < XVECLEN (pat, 0); i++)
18492 {
18493 rtx inner_pat = XVECEXP (pat, 0, i);
18494 if (GET_CODE (inner_pat) == SET
18495 && is_mem_ref (SET_DEST (inner_pat)))
18496 return inner_pat;
18497 }
18498 }
18499 /* We shouldn't get here, because we should have either a simple
18500 store insn or a store with update, both of which are covered above. */
18501 gcc_unreachable ();
18502}
18503
569fa502
DN
18504/* Returns whether the dependence between INSN and NEXT is considered
18505 costly by the given target. */
18506
18507static bool
b198261f 18508rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18509{
b198261f
MK
18510 rtx insn;
18511 rtx next;
18512
aabcd309 18513 /* If the flag is not enabled, no dependence is considered costly;
f676971a 18514 allow all dependent insns in the same group.
569fa502
DN
18515 This is the most aggressive option. */
18516 if (rs6000_sched_costly_dep == no_dep_costly)
18517 return false;
18518
f676971a 18519 /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
18520 do not allow dependent instructions in the same group.
18521 This is the most conservative option. */
18522 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18523 return true;
569fa502 18524
b198261f
MK
18525 insn = DEP_PRO (dep);
18526 next = DEP_CON (dep);
18527
f676971a
EC
18528 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18529 && is_load_insn (next)
569fa502
DN
18530 && is_store_insn (insn))
18531 /* Prevent load after store in the same group. */
18532 return true;
18533
18534 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18535 && is_load_insn (next)
569fa502 18536 && is_store_insn (insn)
e2f6ff94 18537 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18538 /* Prevent load after store in the same group if it is a true
18539 dependence. */
569fa502 18540 return true;
f676971a
EC
18541
18542 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18543 and will not be scheduled in the same group. */
18544 if (rs6000_sched_costly_dep <= max_dep_latency
18545 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18546 return true;
18547
18548 return false;
18549}
18550
f676971a 18551/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18552 skipping any "non-active" insns - insns that will not actually occupy
18553 an issue slot. Return NULL_RTX if such an insn is not found. */
18554
18555static rtx
18556get_next_active_insn (rtx insn, rtx tail)
18557{
f489aff8 18558 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18559 return NULL_RTX;
18560
f489aff8 18561 while (1)
cbe26ab8 18562 {
f489aff8
AM
18563 insn = NEXT_INSN (insn);
18564 if (insn == NULL_RTX || insn == tail)
18565 return NULL_RTX;
cbe26ab8 18566
f489aff8
AM
18567 if (CALL_P (insn)
18568 || JUMP_P (insn)
18569 || (NONJUMP_INSN_P (insn)
18570 && GET_CODE (PATTERN (insn)) != USE
18571 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18572 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18573 break;
18574 }
18575 return insn;
cbe26ab8
DN
18576}
18577
44cd321e
PS
18578/* We are about to begin issuing insns for this clock cycle. */
18579
18580static int
18581rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18582 rtx *ready ATTRIBUTE_UNUSED,
18583 int *pn_ready ATTRIBUTE_UNUSED,
18584 int clock_var ATTRIBUTE_UNUSED)
18585{
d296e02e
AP
18586 int n_ready = *pn_ready;
18587
44cd321e
PS
18588 if (sched_verbose)
18589 fprintf (dump, "// rs6000_sched_reorder :\n");
18590
d296e02e
AP
18591 /* Reorder the ready list if the second-to-last ready insn
18592 is a nonpipeline insn. */
18593 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18594 {
18595 if (is_nonpipeline_insn (ready[n_ready - 1])
18596 && (recog_memoized (ready[n_ready - 2]) > 0))
18597 /* Simply swap first two insns. */
18598 {
18599 rtx tmp = ready[n_ready - 1];
18600 ready[n_ready - 1] = ready[n_ready - 2];
18601 ready[n_ready - 2] = tmp;
18602 }
18603 }
18604
44cd321e
PS
18605 if (rs6000_cpu == PROCESSOR_POWER6)
18606 load_store_pendulum = 0;
18607
18608 return rs6000_issue_rate ();
18609}
18610
18611/* Like rs6000_sched_reorder, but called after issuing each insn. */
18612
18613static int
18614rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18615 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18616{
18617 if (sched_verbose)
18618 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18619
18620 /* For Power6, we need to handle some special cases to try and keep the
18621 store queue from overflowing and triggering expensive flushes.
18622
18623 This code monitors how load and store instructions are being issued
18624 and skews the ready list one way or the other to increase the likelihood
18625 that a desired instruction is issued at the proper time.
18626
18627 A couple of things are done. First, we maintain a "load_store_pendulum"
18628 to track the current state of load/store issue.
18629
18630 - If the pendulum is at zero, then no loads or stores have been
18631 issued in the current cycle so we do nothing.
18632
18633 - If the pendulum is 1, then a single load has been issued in this
18634 cycle and we attempt to locate another load in the ready list to
18635 issue with it.
18636
2f8e468b 18637 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18638 issued in this cycle, so we increase the priority of the first load
18639 in the ready list to increase its likelihood of being chosen first
18640 in the next cycle.
18641
18642 - If the pendulum is -1, then a single store has been issued in this
18643 cycle and we attempt to locate another store in the ready list to
18644 issue with it, preferring a store to an adjacent memory location to
18645 facilitate store pairing in the store queue.
18646
18647 - If the pendulum is 2, then two loads have already been
18648 issued in this cycle, so we increase the priority of the first store
18649 in the ready list to increase its likelihood of being chosen first
18650 in the next cycle.
18651
18652 - If the pendulum < -2 or > 2, then do nothing.
18653
18654 Note: This code covers the most common scenarios. There exist
18655 non-load/store instructions which make use of the LSU and which
18656 would need to be accounted for to strictly model the behavior
18657 of the machine. Those instructions are currently unaccounted
18658 for to help minimize compile time overhead of this code.
18659 */
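      /* For example, if two stores issue in the same cycle the pendulum
         moves 0 -> -1 -> -2; the -2 case below then raises the priority of
         the first load on the ready list so that a load is more likely to
         be issued first in the following cycle.  */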
18660 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18661 {
18662 int pos;
18663 int i;
18664 rtx tmp;
18665
18666 if (is_store_insn (last_scheduled_insn))
18667 /* Issuing a store, swing the load_store_pendulum to the left */
18668 load_store_pendulum--;
18669 else if (is_load_insn (last_scheduled_insn))
18670 /* Issuing a load, swing the load_store_pendulum to the right */
18671 load_store_pendulum++;
18672 else
18673 return cached_can_issue_more;
18674
18675 /* If the pendulum is balanced, or there is only one instruction on
18676 the ready list, then all is well, so return. */
18677 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18678 return cached_can_issue_more;
18679
18680 if (load_store_pendulum == 1)
18681 {
18682 /* A load has been issued in this cycle. Scan the ready list
18683 for another load to issue with it */
18684 pos = *pn_ready-1;
18685
18686 while (pos >= 0)
18687 {
18688 if (is_load_insn (ready[pos]))
18689 {
18690 /* Found a load. Move it to the head of the ready list,
18691 and adjust its priority so that it is more likely to
18692 stay there. */
18693 tmp = ready[pos];
18694 for (i=pos; i<*pn_ready-1; i++)
18695 ready[i] = ready[i + 1];
18696 ready[*pn_ready-1] = tmp;
18697 if (INSN_PRIORITY_KNOWN (tmp))
18698 INSN_PRIORITY (tmp)++;
18699 break;
18700 }
18701 pos--;
18702 }
18703 }
18704 else if (load_store_pendulum == -2)
18705 {
18706 /* Two stores have been issued in this cycle. Increase the
18707 priority of the first load in the ready list to favor it for
18708 issuing in the next cycle. */
18709 pos = *pn_ready-1;
18710
18711 while (pos >= 0)
18712 {
18713 if (is_load_insn (ready[pos])
18714 && INSN_PRIORITY_KNOWN (ready[pos]))
18715 {
18716 INSN_PRIORITY (ready[pos])++;
18717
18718 /* Adjust the pendulum to account for the fact that a load
18719 was found and increased in priority. This is to prevent
18720 increasing the priority of multiple loads */
18721 load_store_pendulum--;
18722
18723 break;
18724 }
18725 pos--;
18726 }
18727 }
18728 else if (load_store_pendulum == -1)
18729 {
18730 /* A store has been issued in this cycle. Scan the ready list for
18731 another store to issue with it, preferring a store to an adjacent
18732 memory location */
18733 int first_store_pos = -1;
18734
18735 pos = *pn_ready-1;
18736
18737 while (pos >= 0)
18738 {
18739 if (is_store_insn (ready[pos]))
18740 {
18741 /* Maintain the index of the first store found on the
18742 list */
18743 if (first_store_pos == -1)
18744 first_store_pos = pos;
18745
18746 if (is_store_insn (last_scheduled_insn)
18747 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18748 {
18749 /* Found an adjacent store. Move it to the head of the
18750 ready list, and adjust its priority so that it is
18751 more likely to stay there. */
18752 tmp = ready[pos];
18753 for (i=pos; i<*pn_ready-1; i++)
18754 ready[i] = ready[i + 1];
18755 ready[*pn_ready-1] = tmp;
18756 if (INSN_PRIORITY_KNOWN (tmp))
18757 INSN_PRIORITY (tmp)++;
18758 first_store_pos = -1;
18759
18760 break;
18761 }
18762 }
18763 pos--;
18764 }
18765
18766 if (first_store_pos >= 0)
18767 {
18768 /* An adjacent store wasn't found, but a non-adjacent store was,
18769 so move the non-adjacent store to the front of the ready
18770 list, and adjust its priority so that it is more likely to
18771 stay there. */
18772 tmp = ready[first_store_pos];
18773 for (i=first_store_pos; i<*pn_ready-1; i++)
18774 ready[i] = ready[i + 1];
18775 ready[*pn_ready-1] = tmp;
18776 if (INSN_PRIORITY_KNOWN (tmp))
18777 INSN_PRIORITY (tmp)++;
18778 }
18779 }
18780 else if (load_store_pendulum == 2)
18781 {
18782 /* Two loads have been issued in this cycle. Increase the priority
18783 of the first store in the ready list to favor it for issuing in
18784 the next cycle. */
18785 pos = *pn_ready-1;
18786
18787 while (pos >= 0)
18788 {
18789 if (is_store_insn (ready[pos])
18790 && INSN_PRIORITY_KNOWN (ready[pos]))
18791 {
18792 INSN_PRIORITY (ready[pos])++;
18793
18794 /* Adjust the pendulum to account for the fact that a store
18795 was found and increased in priority. This is to prevent
18796 increasing the priority of multiple stores */
18797 load_store_pendulum++;
18798
18799 break;
18800 }
18801 pos--;
18802 }
18803 }
18804 }
18805
18806 return cached_can_issue_more;
18807}
18808
839a4992 18809/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18810 of group WHICH_GROUP.
18811
18812 If WHICH_GROUP == current_group, this function will return true if INSN
18813 causes the termination of the current group (i.e., the dispatch group to
18814 which INSN belongs). This means that INSN will be the last insn in the
18815 group it belongs to.
18816
18817 If WHICH_GROUP == previous_group, this function will return true if INSN
18818 causes the termination of the previous group (i.e., the dispatch group that
18819 precedes the group to which INSN belongs). This means that INSN will be
18820 the first insn in the group it belongs to. */
18821
18822static bool
18823insn_terminates_group_p (rtx insn, enum group_termination which_group)
18824{
44cd321e 18825 bool first, last;
cbe26ab8
DN
18826
18827 if (! insn)
18828 return false;
569fa502 18829
44cd321e
PS
18830 first = insn_must_be_first_in_group (insn);
18831 last = insn_must_be_last_in_group (insn);
cbe26ab8 18832
44cd321e 18833 if (first && last)
cbe26ab8
DN
18834 return true;
18835
18836 if (which_group == current_group)
44cd321e 18837 return last;
cbe26ab8 18838 else if (which_group == previous_group)
44cd321e
PS
18839 return first;
18840
18841 return false;
18842}
18843
18844
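/* Return true if INSN must be the first insn in the dispatch group it
   belongs to, i.e. it is required to start a new dispatch group.  */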
18845static bool
18846insn_must_be_first_in_group (rtx insn)
18847{
18848 enum attr_type type;
18849
18850 if (!insn
18851 || insn == NULL_RTX
18852 || GET_CODE (insn) == NOTE
18853 || GET_CODE (PATTERN (insn)) == USE
18854 || GET_CODE (PATTERN (insn)) == CLOBBER)
18855 return false;
18856
18857 switch (rs6000_cpu)
cbe26ab8 18858 {
44cd321e
PS
18859 case PROCESSOR_POWER5:
18860 if (is_cracked_insn (insn))
18861 return true;
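      /* Fall through: the checks below for POWER4 also apply to POWER5.  */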
18862 case PROCESSOR_POWER4:
18863 if (is_microcoded_insn (insn))
18864 return true;
18865
18866 if (!rs6000_sched_groups)
18867 return false;
18868
18869 type = get_attr_type (insn);
18870
18871 switch (type)
18872 {
18873 case TYPE_MFCR:
18874 case TYPE_MFCRF:
18875 case TYPE_MTCR:
18876 case TYPE_DELAYED_CR:
18877 case TYPE_CR_LOGICAL:
18878 case TYPE_MTJMPR:
18879 case TYPE_MFJMPR:
18880 case TYPE_IDIV:
18881 case TYPE_LDIV:
18882 case TYPE_LOAD_L:
18883 case TYPE_STORE_C:
18884 case TYPE_ISYNC:
18885 case TYPE_SYNC:
18886 return true;
18887 default:
18888 break;
18889 }
18890 break;
18891 case PROCESSOR_POWER6:
18892 type = get_attr_type (insn);
18893
18894 switch (type)
18895 {
18896 case TYPE_INSERT_DWORD:
18897 case TYPE_EXTS:
18898 case TYPE_CNTLZ:
18899 case TYPE_SHIFT:
18900 case TYPE_VAR_SHIFT_ROTATE:
18901 case TYPE_TRAP:
18902 case TYPE_IMUL:
18903 case TYPE_IMUL2:
18904 case TYPE_IMUL3:
18905 case TYPE_LMUL:
18906 case TYPE_IDIV:
18907 case TYPE_INSERT_WORD:
18908 case TYPE_DELAYED_COMPARE:
18909 case TYPE_IMUL_COMPARE:
18910 case TYPE_LMUL_COMPARE:
18911 case TYPE_FPCOMPARE:
18912 case TYPE_MFCR:
18913 case TYPE_MTCR:
18914 case TYPE_MFJMPR:
18915 case TYPE_MTJMPR:
18916 case TYPE_ISYNC:
18917 case TYPE_SYNC:
18918 case TYPE_LOAD_L:
18919 case TYPE_STORE_C:
18920 case TYPE_LOAD_U:
18921 case TYPE_LOAD_UX:
18922 case TYPE_LOAD_EXT_UX:
18923 case TYPE_STORE_U:
18924 case TYPE_STORE_UX:
18925 case TYPE_FPLOAD_U:
18926 case TYPE_FPLOAD_UX:
18927 case TYPE_FPSTORE_U:
18928 case TYPE_FPSTORE_UX:
18929 return true;
18930 default:
18931 break;
18932 }
18933 break;
18934 default:
18935 break;
18936 }
18937
18938 return false;
18939}
18940
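/* Return true if INSN must be the last insn in the dispatch group it
   belongs to, i.e. the dispatch group is terminated after it.  */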
18941static bool
18942insn_must_be_last_in_group (rtx insn)
18943{
18944 enum attr_type type;
18945
18946 if (!insn
18947 || insn == NULL_RTX
18948 || GET_CODE (insn) == NOTE
18949 || GET_CODE (PATTERN (insn)) == USE
18950 || GET_CODE (PATTERN (insn)) == CLOBBER)
18951 return false;
18952
18953 switch (rs6000_cpu) {
18954 case PROCESSOR_POWER4:
18955 case PROCESSOR_POWER5:
18956 if (is_microcoded_insn (insn))
18957 return true;
18958
18959 if (is_branch_slot_insn (insn))
18960 return true;
18961
18962 break;
18963 case PROCESSOR_POWER6:
18964 type = get_attr_type (insn);
18965
18966 switch (type)
18967 {
18968 case TYPE_EXTS:
18969 case TYPE_CNTLZ:
18970 case TYPE_SHIFT:
18971 case TYPE_VAR_SHIFT_ROTATE:
18972 case TYPE_TRAP:
18973 case TYPE_IMUL:
18974 case TYPE_IMUL2:
18975 case TYPE_IMUL3:
18976 case TYPE_LMUL:
18977 case TYPE_IDIV:
18978 case TYPE_DELAYED_COMPARE:
18979 case TYPE_IMUL_COMPARE:
18980 case TYPE_LMUL_COMPARE:
18981 case TYPE_FPCOMPARE:
18982 case TYPE_MFCR:
18983 case TYPE_MTCR:
18984 case TYPE_MFJMPR:
18985 case TYPE_MTJMPR:
18986 case TYPE_ISYNC:
18987 case TYPE_SYNC:
18988 case TYPE_LOAD_L:
18989 case TYPE_STORE_C:
18990 return true;
18991 default:
18992 break;
cbe26ab8 18993 }
44cd321e
PS
18994 break;
18995 default:
18996 break;
18997 }
cbe26ab8
DN
18998
18999 return false;
19000}
19001
839a4992 19002/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19003 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19004
19005static bool
19006is_costly_group (rtx *group_insns, rtx next_insn)
19007{
19008 int i;
cbe26ab8
DN
19009 int issue_rate = rs6000_issue_rate ();
19010
19011 for (i = 0; i < issue_rate; i++)
19012 {
e2f6ff94
MK
19013 sd_iterator_def sd_it;
19014 dep_t dep;
cbe26ab8 19015 rtx insn = group_insns[i];
b198261f 19016
cbe26ab8 19017 if (!insn)
c4ad648e 19018 continue;
b198261f 19019
e2f6ff94 19020 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19021 {
b198261f
MK
19022 rtx next = DEP_CON (dep);
19023
19024 if (next == next_insn
19025 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19026 return true;
c4ad648e 19027 }
cbe26ab8
DN
19028 }
19029
19030 return false;
19031}
19032
f676971a 19033/* Utility function used by redefine_groups.
cbe26ab8
DN
19034 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19035 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19036 to keep it "far" (in a separate group) from GROUP_INSNS, following
19037 one of the following schemes, depending on the value of the flag
19038 -minsert-sched-nops = X:
19039 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19040 in order to force NEXT_INSN into a separate group.
f676971a
EC
19041 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19042 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19043 insertion (has a group just ended, how many vacant issue slots remain in the
19044 last group, and how many dispatch groups were encountered so far). */
19045
f676971a 19046static int
c4ad648e
AM
19047force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19048 rtx next_insn, bool *group_end, int can_issue_more,
19049 int *group_count)
cbe26ab8
DN
19050{
19051 rtx nop;
19052 bool force;
19053 int issue_rate = rs6000_issue_rate ();
19054 bool end = *group_end;
19055 int i;
19056
19057 if (next_insn == NULL_RTX)
19058 return can_issue_more;
19059
19060 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19061 return can_issue_more;
19062
19063 force = is_costly_group (group_insns, next_insn);
19064 if (!force)
19065 return can_issue_more;
19066
19067 if (sched_verbose > 6)
19068 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19069 *group_count ,can_issue_more);
cbe26ab8
DN
19070
19071 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19072 {
19073 if (*group_end)
c4ad648e 19074 can_issue_more = 0;
cbe26ab8
DN
19075
19076 /* Since only a branch can be issued in the last issue_slot, it is
19077 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19078 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19079 in this case the last nop will start a new group and the branch
19080 will be forced to the new group. */
cbe26ab8 19081 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19082 can_issue_more--;
cbe26ab8
DN
19083
19084 while (can_issue_more > 0)
c4ad648e 19085 {
9390387d 19086 nop = gen_nop ();
c4ad648e
AM
19087 emit_insn_before (nop, next_insn);
19088 can_issue_more--;
19089 }
cbe26ab8
DN
19090
19091 *group_end = true;
19092 return 0;
f676971a 19093 }
cbe26ab8
DN
19094
19095 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19096 {
19097 int n_nops = rs6000_sched_insert_nops;
19098
f676971a 19099 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19100 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19101 if (can_issue_more == 0)
c4ad648e 19102 can_issue_more = issue_rate;
cbe26ab8
DN
19103 can_issue_more--;
19104 if (can_issue_more == 0)
c4ad648e
AM
19105 {
19106 can_issue_more = issue_rate - 1;
19107 (*group_count)++;
19108 end = true;
19109 for (i = 0; i < issue_rate; i++)
19110 {
19111 group_insns[i] = 0;
19112 }
19113 }
cbe26ab8
DN
19114
19115 while (n_nops > 0)
c4ad648e
AM
19116 {
19117 nop = gen_nop ();
19118 emit_insn_before (nop, next_insn);
19119 if (can_issue_more == issue_rate - 1) /* new group begins */
19120 end = false;
19121 can_issue_more--;
19122 if (can_issue_more == 0)
19123 {
19124 can_issue_more = issue_rate - 1;
19125 (*group_count)++;
19126 end = true;
19127 for (i = 0; i < issue_rate; i++)
19128 {
19129 group_insns[i] = 0;
19130 }
19131 }
19132 n_nops--;
19133 }
cbe26ab8
DN
19134
19135 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19136 can_issue_more++;
cbe26ab8 19137
c4ad648e
AM
19138 /* Is next_insn going to start a new group? */
19139 *group_end
19140 = (end
cbe26ab8
DN
19141 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19142 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19143 || (can_issue_more < issue_rate &&
c4ad648e 19144 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19145 if (*group_end && end)
c4ad648e 19146 (*group_count)--;
cbe26ab8
DN
19147
19148 if (sched_verbose > 6)
c4ad648e
AM
19149 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19150 *group_count, can_issue_more);
f676971a
EC
19151 return can_issue_more;
19152 }
cbe26ab8
DN
19153
19154 return can_issue_more;
19155}
19156
19157/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19158 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19159 form in practice. It tries to achieve this synchronization by forcing the
19160 estimated processor grouping on the compiler (as opposed to the function
19161 'pad_groups' which tries to force the scheduler's grouping on the processor).
19162
19163 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19164 examines the (estimated) dispatch groups that will be formed by the processor
19165 dispatcher. It marks these group boundaries to reflect the estimated
19166 processor grouping, overriding the grouping that the scheduler had marked.
19167 Depending on the value of the flag '-minsert-sched-nops' this function can
19168 force certain insns into separate groups or force a certain distance between
19169 them by inserting nops, for example, if there exists a "costly dependence"
19170 between the insns.
19171
19172 The function estimates the group boundaries that the processor will form as
0fa2e4df 19173 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19174 each insn. A subsequent insn will start a new group if one of the following
19175 4 cases applies:
19176 - no more vacant issue slots remain in the current dispatch group.
19177 - only the last issue slot, which is the branch slot, is vacant, but the next
19178 insn is not a branch.
19179 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19180 which means that a cracked insn (which occupies two issue slots) can't be
19181 issued in this group.
f676971a 19182 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19183 start a new group. */
19184
19185static int
19186redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19187{
19188 rtx insn, next_insn;
19189 int issue_rate;
19190 int can_issue_more;
19191 int slot, i;
19192 bool group_end;
19193 int group_count = 0;
19194 rtx *group_insns;
19195
19196 /* Initialize. */
19197 issue_rate = rs6000_issue_rate ();
19198 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19199 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19200 {
19201 group_insns[i] = 0;
19202 }
19203 can_issue_more = issue_rate;
19204 slot = 0;
19205 insn = get_next_active_insn (prev_head_insn, tail);
19206 group_end = false;
19207
19208 while (insn != NULL_RTX)
19209 {
19210 slot = (issue_rate - can_issue_more);
19211 group_insns[slot] = insn;
19212 can_issue_more =
c4ad648e 19213 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19214 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19215 can_issue_more = 0;
cbe26ab8
DN
19216
19217 next_insn = get_next_active_insn (insn, tail);
19218 if (next_insn == NULL_RTX)
c4ad648e 19219 return group_count + 1;
cbe26ab8 19220
c4ad648e
AM
19221 /* Is next_insn going to start a new group? */
19222 group_end
19223 = (can_issue_more == 0
19224 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19225 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19226 || (can_issue_more < issue_rate &&
19227 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19228
f676971a 19229 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19230 next_insn, &group_end, can_issue_more,
19231 &group_count);
cbe26ab8
DN
19232
19233 if (group_end)
c4ad648e
AM
19234 {
19235 group_count++;
19236 can_issue_more = 0;
19237 for (i = 0; i < issue_rate; i++)
19238 {
19239 group_insns[i] = 0;
19240 }
19241 }
cbe26ab8
DN
19242
19243 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19244 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19245 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19246 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19247
19248 insn = next_insn;
19249 if (can_issue_more == 0)
c4ad648e
AM
19250 can_issue_more = issue_rate;
19251 } /* while */
cbe26ab8
DN
19252
19253 return group_count;
19254}
19255
19256/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19257 dispatch group boundaries that the scheduler had marked. Pad with nops
19258 any dispatch groups which have vacant issue slots, in order to force the
19259 scheduler's grouping on the processor dispatcher. The function
19260 returns the number of dispatch groups found. */
19261
19262static int
19263pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19264{
19265 rtx insn, next_insn;
19266 rtx nop;
19267 int issue_rate;
19268 int can_issue_more;
19269 int group_end;
19270 int group_count = 0;
19271
19272 /* Initialize issue_rate. */
19273 issue_rate = rs6000_issue_rate ();
19274 can_issue_more = issue_rate;
19275
19276 insn = get_next_active_insn (prev_head_insn, tail);
19277 next_insn = get_next_active_insn (insn, tail);
19278
19279 while (insn != NULL_RTX)
19280 {
19281 can_issue_more =
19282 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19283
19284 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19285
19286 if (next_insn == NULL_RTX)
c4ad648e 19287 break;
cbe26ab8
DN
19288
19289 if (group_end)
c4ad648e
AM
19290 {
19291 /* If the scheduler had marked group termination at this location
19292 (between insn and next_insn), and neither insn nor next_insn will
19293 force group termination, pad the group with nops to force group
19294 termination. */
19295 if (can_issue_more
19296 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19297 && !insn_terminates_group_p (insn, current_group)
19298 && !insn_terminates_group_p (next_insn, previous_group))
19299 {
9390387d 19300 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19301 can_issue_more--;
19302
19303 while (can_issue_more)
19304 {
19305 nop = gen_nop ();
19306 emit_insn_before (nop, next_insn);
19307 can_issue_more--;
19308 }
19309 }
19310
19311 can_issue_more = issue_rate;
19312 group_count++;
19313 }
cbe26ab8
DN
19314
19315 insn = next_insn;
19316 next_insn = get_next_active_insn (insn, tail);
19317 }
19318
19319 return group_count;
19320}
19321
44cd321e
PS
19322/* We're beginning a new block. Initialize data structures as necessary. */
19323
19324static void
19325rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19326 int sched_verbose ATTRIBUTE_UNUSED,
19327 int max_ready ATTRIBUTE_UNUSED)
982afe02 19328{
44cd321e
PS
19329 last_scheduled_insn = NULL_RTX;
19330 load_store_pendulum = 0;
19331}
19332
cbe26ab8
DN
19333/* The following function is called at the end of scheduling BB.
19334 After reload, it inserts nops to enforce insn group bundling. */
19335
19336static void
38f391a5 19337rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19338{
19339 int n_groups;
19340
19341 if (sched_verbose)
19342 fprintf (dump, "=== Finishing schedule.\n");
19343
ec507f2d 19344 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19345 {
19346 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19347 return;
cbe26ab8
DN
19348
19349 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19350 n_groups = pad_groups (dump, sched_verbose,
19351 current_sched_info->prev_head,
19352 current_sched_info->next_tail);
cbe26ab8 19353 else
c4ad648e
AM
19354 n_groups = redefine_groups (dump, sched_verbose,
19355 current_sched_info->prev_head,
19356 current_sched_info->next_tail);
cbe26ab8
DN
19357
19358 if (sched_verbose >= 6)
19359 {
19360 fprintf (dump, "ngroups = %d\n", n_groups);
19361 print_rtl (dump, current_sched_info->prev_head);
19362 fprintf (dump, "Done finish_sched\n");
19363 }
19364 }
19365}
b6c9286a 19366\f
b6c9286a
MM
19367/* Length in units of the trampoline for entering a nested function. */
19368
19369int
863d938c 19370rs6000_trampoline_size (void)
b6c9286a
MM
19371{
19372 int ret = 0;
19373
19374 switch (DEFAULT_ABI)
19375 {
19376 default:
37409796 19377 gcc_unreachable ();
b6c9286a
MM
19378
19379 case ABI_AIX:
8f802bfb 19380 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19381 break;
19382
4dabc42d 19383 case ABI_DARWIN:
b6c9286a 19384 case ABI_V4:
03a7e1a5 19385 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19386 break;
b6c9286a
MM
19387 }
19388
19389 return ret;
19390}
19391
19392/* Emit RTL insns to initialize the variable parts of a trampoline.
19393 FNADDR is an RTX for the address of the function's pure code.
19394 CXT is an RTX for the static chain value for the function. */
19395
19396void
a2369ed3 19397rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19398{
8bd04c56 19399 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19400 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19401
19402 switch (DEFAULT_ABI)
19403 {
19404 default:
37409796 19405 gcc_unreachable ();
b6c9286a 19406
8bd04c56 19407/* Macros to shorten the code expansions below. */
9613eaff 19408#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19409#define MEM_PLUS(addr,offset) \
9613eaff 19410 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19411
b6c9286a
MM
19412 /* Under AIX, just build the 3-word function descriptor. */
19413 case ABI_AIX:
8bd04c56 19414 {
9613eaff
SH
19415 rtx fn_reg = gen_reg_rtx (Pmode);
19416 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19417 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19418 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19419 emit_move_insn (MEM_DEREF (addr), fn_reg);
19420 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19421 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19422 }
b6c9286a
MM
19423 break;
19424
4dabc42d
TC
19425 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19426 case ABI_DARWIN:
b6c9286a 19427 case ABI_V4:
9613eaff 19428 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19429 FALSE, VOIDmode, 4,
9613eaff 19430 addr, Pmode,
eaf1bcf1 19431 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19432 fnaddr, Pmode,
19433 ctx_reg, Pmode);
b6c9286a 19434 break;
b6c9286a
MM
19435 }
19436
19437 return;
19438}
7509c759
MM
19439
19440\f
91d231cb 19441/* Table of valid machine attributes. */
a4f6c312 19442
91d231cb 19443const struct attribute_spec rs6000_attribute_table[] =
7509c759 19444{
91d231cb 19445 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19446 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19447 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19448 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19449 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19450 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19451#ifdef SUBTARGET_ATTRIBUTE_TABLE
19452 SUBTARGET_ATTRIBUTE_TABLE,
19453#endif
a5c76ee6 19454 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19455};
7509c759 19456
8bb418a3
ZL
19457/* Handle the "altivec" attribute. The attribute may have
19458 arguments as follows:
f676971a 19459
8bb418a3
ZL
19460 __attribute__((altivec(vector__)))
19461 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19462 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19463
19464 and may appear more than once (e.g., 'vector bool char') in a
19465 given declaration. */
19466
19467static tree
f90ac3f0
UP
19468rs6000_handle_altivec_attribute (tree *node,
19469 tree name ATTRIBUTE_UNUSED,
19470 tree args,
8bb418a3
ZL
19471 int flags ATTRIBUTE_UNUSED,
19472 bool *no_add_attrs)
19473{
19474 tree type = *node, result = NULL_TREE;
19475 enum machine_mode mode;
19476 int unsigned_p;
19477 char altivec_type
19478 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19479 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19480 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19481 : '?');
8bb418a3
ZL
19482
19483 while (POINTER_TYPE_P (type)
19484 || TREE_CODE (type) == FUNCTION_TYPE
19485 || TREE_CODE (type) == METHOD_TYPE
19486 || TREE_CODE (type) == ARRAY_TYPE)
19487 type = TREE_TYPE (type);
19488
19489 mode = TYPE_MODE (type);
19490
f90ac3f0
UP
19491 /* Check for invalid AltiVec type qualifiers. */
19492 if (type == long_unsigned_type_node || type == long_integer_type_node)
19493 {
19494 if (TARGET_64BIT)
19495 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19496 else if (rs6000_warn_altivec_long)
d4ee4d25 19497 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19498 }
19499 else if (type == long_long_unsigned_type_node
19500 || type == long_long_integer_type_node)
19501 error ("use of %<long long%> in AltiVec types is invalid");
19502 else if (type == double_type_node)
19503 error ("use of %<double%> in AltiVec types is invalid");
19504 else if (type == long_double_type_node)
19505 error ("use of %<long double%> in AltiVec types is invalid");
19506 else if (type == boolean_type_node)
19507 error ("use of boolean types in AltiVec types is invalid");
19508 else if (TREE_CODE (type) == COMPLEX_TYPE)
19509 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19510 else if (DECIMAL_FLOAT_MODE_P (mode))
19511 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19512
19513 switch (altivec_type)
19514 {
19515 case 'v':
8df83eae 19516 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19517 switch (mode)
19518 {
c4ad648e
AM
19519 case SImode:
19520 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19521 break;
19522 case HImode:
19523 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19524 break;
19525 case QImode:
19526 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19527 break;
19528 case SFmode: result = V4SF_type_node; break;
19529 /* If the user says 'vector int bool', we may be handed the 'bool'
19530 attribute _before_ the 'vector' attribute, and so select the
19531 proper type in the 'b' case below. */
19532 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19533 result = type;
19534 default: break;
8bb418a3
ZL
19535 }
19536 break;
19537 case 'b':
19538 switch (mode)
19539 {
c4ad648e
AM
19540 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19541 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19542 case QImode: case V16QImode: result = bool_V16QI_type_node;
19543 default: break;
8bb418a3
ZL
19544 }
19545 break;
19546 case 'p':
19547 switch (mode)
19548 {
c4ad648e
AM
19549 case V8HImode: result = pixel_V8HI_type_node;
19550 default: break;
8bb418a3
ZL
19551 }
19552 default: break;
19553 }
19554
7958a2a6
FJ
19555 if (result && result != type && TYPE_READONLY (type))
19556 result = build_qualified_type (result, TYPE_QUAL_CONST);
19557
8bb418a3
ZL
19558 *no_add_attrs = true; /* No need to hang on to the attribute. */
19559
f90ac3f0 19560 if (result)
8bb418a3
ZL
19561 *node = reconstruct_complex_type (*node, result);
19562
19563 return NULL_TREE;
19564}
19565
f18eca82
ZL
19566/* AltiVec defines four built-in scalar types that serve as vector
19567 elements; we must teach the compiler how to mangle them. */
19568
19569static const char *
3101faab 19570rs6000_mangle_type (const_tree type)
f18eca82 19571{
608063c3
JB
19572 type = TYPE_MAIN_VARIANT (type);
19573
19574 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19575 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19576 return NULL;
19577
f18eca82
ZL
19578 if (type == bool_char_type_node) return "U6__boolc";
19579 if (type == bool_short_type_node) return "U6__bools";
19580 if (type == pixel_type_node) return "u7__pixel";
19581 if (type == bool_int_type_node) return "U6__booli";
19582
337bde91
DE
19583 /* Mangle IBM extended float long double as `g' (__float128) on
19584 powerpc*-linux where long-double-64 previously was the default. */
19585 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19586 && TARGET_ELF
19587 && TARGET_LONG_DOUBLE_128
19588 && !TARGET_IEEEQUAD)
19589 return "g";
19590
f18eca82
ZL
19591 /* For all other types, use normal C++ mangling. */
19592 return NULL;
19593}
19594
a5c76ee6
ZW
19595/* Handle a "longcall" or "shortcall" attribute; arguments as in
19596 struct attribute_spec.handler. */
a4f6c312 19597
91d231cb 19598static tree
f676971a
EC
19599rs6000_handle_longcall_attribute (tree *node, tree name,
19600 tree args ATTRIBUTE_UNUSED,
19601 int flags ATTRIBUTE_UNUSED,
a2369ed3 19602 bool *no_add_attrs)
91d231cb
JM
19603{
19604 if (TREE_CODE (*node) != FUNCTION_TYPE
19605 && TREE_CODE (*node) != FIELD_DECL
19606 && TREE_CODE (*node) != TYPE_DECL)
19607 {
5c498b10 19608 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19609 IDENTIFIER_POINTER (name));
19610 *no_add_attrs = true;
19611 }
6a4cee5f 19612
91d231cb 19613 return NULL_TREE;
7509c759
MM
19614}
19615
a5c76ee6
ZW
19616/* Set longcall attributes on all functions declared when
19617 rs6000_default_long_calls is true. */
19618static void
a2369ed3 19619rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19620{
19621 if (rs6000_default_long_calls
19622 && (TREE_CODE (type) == FUNCTION_TYPE
19623 || TREE_CODE (type) == METHOD_TYPE))
19624 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19625 NULL_TREE,
19626 TYPE_ATTRIBUTES (type));
16d6f994
EC
19627
19628#if TARGET_MACHO
19629 darwin_set_default_type_attributes (type);
19630#endif
a5c76ee6
ZW
19631}
19632
3cb999d8
DE
19633/* Return a reference suitable for calling a function with the
19634 longcall attribute. */
a4f6c312 19635
9390387d 19636rtx
a2369ed3 19637rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19638{
d330fd93 19639 const char *call_name;
6a4cee5f
MM
19640 tree node;
19641
19642 if (GET_CODE (call_ref) != SYMBOL_REF)
19643 return call_ref;
19644
19645 /* System V adds '.' to the internal name, so skip them. */
19646 call_name = XSTR (call_ref, 0);
19647 if (*call_name == '.')
19648 {
19649 while (*call_name == '.')
19650 call_name++;
19651
19652 node = get_identifier (call_name);
39403d82 19653 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19654 }
19655
19656 return force_reg (Pmode, call_ref);
19657}
7509c759 19658\f
77ccdfed
EC
19659#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19660#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19661#endif
19662
19663/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19664 struct attribute_spec.handler. */
19665static tree
19666rs6000_handle_struct_attribute (tree *node, tree name,
19667 tree args ATTRIBUTE_UNUSED,
19668 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19669{
19670 tree *type = NULL;
19671 if (DECL_P (*node))
19672 {
19673 if (TREE_CODE (*node) == TYPE_DECL)
19674 type = &TREE_TYPE (*node);
19675 }
19676 else
19677 type = node;
19678
19679 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19680 || TREE_CODE (*type) == UNION_TYPE)))
19681 {
19682 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19683 *no_add_attrs = true;
19684 }
19685
19686 else if ((is_attribute_p ("ms_struct", name)
19687 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19688 || ((is_attribute_p ("gcc_struct", name)
19689 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19690 {
19691 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19692 IDENTIFIER_POINTER (name));
19693 *no_add_attrs = true;
19694 }
19695
19696 return NULL_TREE;
19697}
19698
19699static bool
3101faab 19700rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19701{
19702 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19703 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19704 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19705}
19706\f
b64a1b53
RH
19707#ifdef USING_ELFOS_H
19708
d6b5193b 19709/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19710
d6b5193b
RS
19711static void
19712rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19713{
19714 if (DEFAULT_ABI == ABI_AIX
19715 && TARGET_MINIMAL_TOC
19716 && !TARGET_RELOCATABLE)
19717 {
19718 if (!toc_initialized)
19719 {
19720 toc_initialized = 1;
19721 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19722 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19723 fprintf (asm_out_file, "\t.tc ");
19724 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19725 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19726 fprintf (asm_out_file, "\n");
19727
19728 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19729 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19730 fprintf (asm_out_file, " = .+32768\n");
19731 }
19732 else
19733 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19734 }
19735 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19736 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19737 else
19738 {
19739 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19740 if (!toc_initialized)
19741 {
19742 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19743 fprintf (asm_out_file, " = .+32768\n");
19744 toc_initialized = 1;
19745 }
19746 }
19747}
19748
19749/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19750
b64a1b53 19751static void
d6b5193b
RS
19752rs6000_elf_asm_init_sections (void)
19753{
19754 toc_section
19755 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19756
19757 sdata2_section
19758 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19759 SDATA2_SECTION_ASM_OP);
19760}
19761
19762/* Implement TARGET_SELECT_RTX_SECTION. */
19763
19764static section *
f676971a 19765rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19766 unsigned HOST_WIDE_INT align)
7509c759 19767{
a9098fd0 19768 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19769 return toc_section;
7509c759 19770 else
d6b5193b 19771 return default_elf_select_rtx_section (mode, x, align);
7509c759 19772}
d9407988 19773\f
d1908feb
JJ
19774/* For a SYMBOL_REF, set generic flags and then perform some
19775 target-specific processing.
19776
d1908feb
JJ
19777 When the AIX ABI is requested on a non-AIX system, replace the
19778 function name with the real name (with a leading .) rather than the
19779 function descriptor name. This saves a lot of overriding code to
19780 read the prefixes. */
d9407988 19781
fb49053f 19782static void
a2369ed3 19783rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19784{
d1908feb 19785 default_encode_section_info (decl, rtl, first);
b2003250 19786
d1908feb
JJ
19787 if (first
19788 && TREE_CODE (decl) == FUNCTION_DECL
19789 && !TARGET_AIX
19790 && DEFAULT_ABI == ABI_AIX)
d9407988 19791 {
c6a2438a 19792 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19793 size_t len = strlen (XSTR (sym_ref, 0));
19794 char *str = alloca (len + 2);
19795 str[0] = '.';
19796 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19797 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19798 }
d9407988
MM
19799}
19800
21d9bb3f
PB
19801static inline bool
19802compare_section_name (const char *section, const char *template)
19803{
19804 int len;
19805
19806 len = strlen (template);
19807 return (strncmp (section, template, len) == 0
19808 && (section[len] == 0 || section[len] == '.'));
19809}
19810
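/* Return true if DECL should be placed into one of the small data
   sections.  */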
c1b7d95a 19811bool
3101faab 19812rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19813{
19814 if (rs6000_sdata == SDATA_NONE)
19815 return false;
19816
7482ad25
AF
19817 /* We want to merge strings, so we never consider them small data. */
19818 if (TREE_CODE (decl) == STRING_CST)
19819 return false;
19820
19821 /* Functions are never in the small data area. */
19822 if (TREE_CODE (decl) == FUNCTION_DECL)
19823 return false;
19824
0e5dbd9b
DE
19825 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19826 {
19827 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
19828 if (compare_section_name (section, ".sdata")
19829 || compare_section_name (section, ".sdata2")
19830 || compare_section_name (section, ".gnu.linkonce.s")
19831 || compare_section_name (section, ".sbss")
19832 || compare_section_name (section, ".sbss2")
19833 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
19834 || strcmp (section, ".PPC.EMB.sdata0") == 0
19835 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19836 return true;
19837 }
19838 else
19839 {
19840 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19841
19842 if (size > 0
307b599c 19843 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19844 /* If it's not public, and we're not going to reference it there,
19845 there's no need to put it in the small data section. */
0e5dbd9b
DE
19846 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19847 return true;
19848 }
19849
19850 return false;
19851}
19852
b91da81f 19853#endif /* USING_ELFOS_H */
aacd3885
RS
19854\f
19855/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19856
aacd3885 19857static bool
3101faab 19858rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
19859{
19860 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19861}
a6c2a102 19862\f
000034eb 19863/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19864 ADDR can be effectively incremented by incrementing REG.
19865
19866 r0 is special and we must not select it as an address
19867 register by this routine since our caller will try to
19868 increment the returned register via an "la" instruction. */
000034eb 19869
9390387d 19870rtx
a2369ed3 19871find_addr_reg (rtx addr)
000034eb
DE
19872{
19873 while (GET_CODE (addr) == PLUS)
19874 {
02441cd6
JL
19875 if (GET_CODE (XEXP (addr, 0)) == REG
19876 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19877 addr = XEXP (addr, 0);
02441cd6
JL
19878 else if (GET_CODE (XEXP (addr, 1)) == REG
19879 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19880 addr = XEXP (addr, 1);
19881 else if (CONSTANT_P (XEXP (addr, 0)))
19882 addr = XEXP (addr, 1);
19883 else if (CONSTANT_P (XEXP (addr, 1)))
19884 addr = XEXP (addr, 0);
19885 else
37409796 19886 gcc_unreachable ();
000034eb 19887 }
37409796
NS
19888 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19889 return addr;
000034eb
DE
19890}
19891
a6c2a102 19892void
a2369ed3 19893rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19894{
19895 fatal_insn ("bad address", op);
19896}
c8023011 19897
ee890fe2
SS
19898#if TARGET_MACHO
19899
efdba735 19900static tree branch_island_list = 0;
ee890fe2 19901
efdba735
SH
19902/* Remember to generate a branch island for far calls to the given
19903 function. */
ee890fe2 19904
f676971a 19905static void
c4ad648e
AM
19906add_compiler_branch_island (tree label_name, tree function_name,
19907 int line_number)
ee890fe2 19908{
efdba735 19909 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19910 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19911 TREE_CHAIN (branch_island) = branch_island_list;
19912 branch_island_list = branch_island;
ee890fe2
SS
19913}
19914
efdba735
SH
19915#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19916#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19917#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19918 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19919
efdba735
SH
19920/* Generate far-jump branch islands for everything on the
19921 branch_island_list. Invoked immediately after the last instruction
19922 of the epilogue has been emitted; the branch-islands must be
19923 appended to, and contiguous with, the function body. Mach-O stubs
19924 are generated in machopic_output_stub(). */
ee890fe2 19925
efdba735
SH
19926static void
19927macho_branch_islands (void)
19928{
19929 char tmp_buf[512];
19930 tree branch_island;
19931
19932 for (branch_island = branch_island_list;
19933 branch_island;
19934 branch_island = TREE_CHAIN (branch_island))
19935 {
19936 const char *label =
19937 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19938 const char *name =
11abc112 19939 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19940 char name_buf[512];
19941 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19942 if (name[0] == '*' || name[0] == '&')
19943 strcpy (name_buf, name+1);
19944 else
19945 {
19946 name_buf[0] = '_';
19947 strcpy (name_buf+1, name);
19948 }
19949 strcpy (tmp_buf, "\n");
19950 strcat (tmp_buf, label);
ee890fe2 19951#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19952 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19953 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19954#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19955 if (flag_pic)
19956 {
19957 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19958 strcat (tmp_buf, label);
19959 strcat (tmp_buf, "_pic\n");
19960 strcat (tmp_buf, label);
19961 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19962
efdba735
SH
19963 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19964 strcat (tmp_buf, name_buf);
19965 strcat (tmp_buf, " - ");
19966 strcat (tmp_buf, label);
19967 strcat (tmp_buf, "_pic)\n");
f676971a 19968
efdba735 19969 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19970
efdba735
SH
19971 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19972 strcat (tmp_buf, name_buf);
19973 strcat (tmp_buf, " - ");
19974 strcat (tmp_buf, label);
19975 strcat (tmp_buf, "_pic)\n");
f676971a 19976
efdba735
SH
19977 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19978 }
19979 else
19980 {
19981 strcat (tmp_buf, ":\nlis r12,hi16(");
19982 strcat (tmp_buf, name_buf);
19983 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19984 strcat (tmp_buf, name_buf);
19985 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19986 }
19987 output_asm_insn (tmp_buf, 0);
ee890fe2 19988#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19989 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19990 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19991#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19992 }
ee890fe2 19993
efdba735 19994 branch_island_list = 0;
ee890fe2
SS
19995}
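
/* For reference only (not part of the GCC sources): with a hypothetical
   island label "L42" and callee "_foo", the flag_pic branch above builds
   a template equivalent to the following string before handing it to
   output_asm_insn.  */
static const char example_pic_branch_island[] =
  "\n"
  "L42:\n"
  "\tmflr r0\n"
  "\tbcl 20,31,L42_pic\n"
  "L42_pic:\n"
  "\tmflr r11\n"
  "\taddis r11,r11,ha16(_foo - L42_pic)\n"
  "\tmtlr r0\n"
  "\taddi r12,r11,lo16(_foo - L42_pic)\n"
  "\tmtctr r12\n"
  "\tbctr\n";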
19996
19997/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
19998 already there or not. */
19999
efdba735 20000static int
a2369ed3 20001no_previous_def (tree function_name)
ee890fe2 20002{
efdba735
SH
20003 tree branch_island;
20004 for (branch_island = branch_island_list;
20005 branch_island;
20006 branch_island = TREE_CHAIN (branch_island))
20007 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20008 return 0;
20009 return 1;
20010}
20011
20012/* GET_PREV_LABEL gets the label name from the previous definition of
20013 the function. */
20014
efdba735 20015static tree
a2369ed3 20016get_prev_label (tree function_name)
ee890fe2 20017{
efdba735
SH
20018 tree branch_island;
20019 for (branch_island = branch_island_list;
20020 branch_island;
20021 branch_island = TREE_CHAIN (branch_island))
20022 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20023 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20024 return 0;
20025}
20026
75b1b789
MS
20027#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20028#define DARWIN_LINKER_GENERATES_ISLANDS 0
20029#endif
20030
20031/* KEXTs still need branch islands. */
20032#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20033 || flag_mkernel || flag_apple_kext)
20034
ee890fe2 20035/* INSN is either a function call or a millicode call. It may have an
f676971a 20036 unconditional jump in its delay slot.
ee890fe2
SS
20037
20038 CALL_DEST is the routine we are calling. */
20039
20040char *
c4ad648e
AM
20041output_call (rtx insn, rtx *operands, int dest_operand_number,
20042 int cookie_operand_number)
ee890fe2
SS
20043{
20044 static char buf[256];
75b1b789
MS
20045 if (DARWIN_GENERATE_ISLANDS
20046 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20047 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20048 {
20049 tree labelname;
efdba735 20050 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20051
ee890fe2
SS
20052 if (no_previous_def (funname))
20053 {
ee890fe2
SS
20054 rtx label_rtx = gen_label_rtx ();
20055 char *label_buf, temp_buf[256];
20056 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20057 CODE_LABEL_NUMBER (label_rtx));
20058 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20059 labelname = get_identifier (label_buf);
a38e7aa5 20060 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20061 }
20062 else
20063 labelname = get_prev_label (funname);
20064
efdba735
SH
20065 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20066 instruction will reach 'foo', otherwise link as 'bl L42'".
20067 "L42" should be a 'branch island', that will do a far jump to
20068 'foo'. Branch islands are generated in
20069 macho_branch_islands(). */
ee890fe2 20070 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20071 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20072 }
20073 else
efdba735
SH
20074 sprintf (buf, "bl %%z%d", dest_operand_number);
20075 return buf;
ee890fe2
SS
20076}
20077
ee890fe2
SS
20078/* Generate PIC and indirect symbol stubs. */
20079
20080void
a2369ed3 20081machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20082{
20083 unsigned int length;
a4f6c312
SS
20084 char *symbol_name, *lazy_ptr_name;
20085 char *local_label_0;
ee890fe2
SS
20086 static int label = 0;
20087
df56a27f 20088 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20089 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20090
ee890fe2 20091
ee890fe2
SS
20092 length = strlen (symb);
20093 symbol_name = alloca (length + 32);
20094 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20095
20096 lazy_ptr_name = alloca (length + 32);
20097 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20098
ee890fe2 20099 if (flag_pic == 2)
56c779bc 20100 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20101 else
56c779bc 20102 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20103
20104 if (flag_pic == 2)
20105 {
d974312d
DJ
20106 fprintf (file, "\t.align 5\n");
20107
20108 fprintf (file, "%s:\n", stub);
20109 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20110
876455fa 20111 label++;
89da1f32 20112 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20113 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20114
ee890fe2
SS
20115 fprintf (file, "\tmflr r0\n");
20116 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20117 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20118 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20119 lazy_ptr_name, local_label_0);
20120 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20121 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20122 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20123 lazy_ptr_name, local_label_0);
20124 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20125 fprintf (file, "\tbctr\n");
20126 }
20127 else
d974312d
DJ
20128 {
20129 fprintf (file, "\t.align 4\n");
20130
20131 fprintf (file, "%s:\n", stub);
20132 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20133
20134 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20135 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20136 (TARGET_64BIT ? "ldu" : "lwzu"),
20137 lazy_ptr_name);
d974312d
DJ
20138 fprintf (file, "\tmtctr r12\n");
20139 fprintf (file, "\tbctr\n");
20140 }
f676971a 20141
56c779bc 20142 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20143 fprintf (file, "%s:\n", lazy_ptr_name);
20144 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20145 fprintf (file, "%sdyld_stub_binding_helper\n",
20146 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20147}
20148
20149/* Legitimize PIC addresses. If the address is already
20150 position-independent, we return ORIG. Newly generated
20151 position-independent addresses go into a reg. This is REG if
20152 nonzero, otherwise we allocate register(s) as necessary. */
20153
4fbbe694 20154#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20155
20156rtx
f676971a 20157rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20158 rtx reg)
ee890fe2
SS
20159{
20160 rtx base, offset;
20161
20162 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20163 reg = gen_reg_rtx (Pmode);
20164
20165 if (GET_CODE (orig) == CONST)
20166 {
37409796
NS
20167 rtx reg_temp;
20168
ee890fe2
SS
20169 if (GET_CODE (XEXP (orig, 0)) == PLUS
20170 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20171 return orig;
20172
37409796 20173 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20174
37409796
NS
20175 /* Use a different reg for the intermediate value, as
20176 it will be marked UNCHANGING. */
b3a13419 20177 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20178 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20179 Pmode, reg_temp);
20180 offset =
20181 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20182 Pmode, reg);
bb8df8a6 20183
ee890fe2
SS
20184 if (GET_CODE (offset) == CONST_INT)
20185 {
20186 if (SMALL_INT (offset))
ed8908e7 20187 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20188 else if (! reload_in_progress && ! reload_completed)
20189 offset = force_reg (Pmode, offset);
20190 else
c859cda6
DJ
20191 {
20192 rtx mem = force_const_mem (Pmode, orig);
20193 return machopic_legitimize_pic_address (mem, Pmode, reg);
20194 }
ee890fe2 20195 }
f1c25d3b 20196 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20197 }
20198
20199 /* Fall back on generic machopic code. */
20200 return machopic_legitimize_pic_address (orig, mode, reg);
20201}
20202
c4e18b1c
GK
20203/* Output a .machine directive for the Darwin assembler, and call
20204 the generic start_file routine. */
20205
20206static void
20207rs6000_darwin_file_start (void)
20208{
94ff898d 20209 static const struct
c4e18b1c
GK
20210 {
20211 const char *arg;
20212 const char *name;
20213 int if_set;
20214 } mapping[] = {
55dbfb48 20215 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20216 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20217 { "power4", "ppc970", 0 },
20218 { "G5", "ppc970", 0 },
20219 { "7450", "ppc7450", 0 },
20220 { "7400", "ppc7400", MASK_ALTIVEC },
20221 { "G4", "ppc7400", 0 },
20222 { "750", "ppc750", 0 },
20223 { "740", "ppc750", 0 },
20224 { "G3", "ppc750", 0 },
20225 { "604e", "ppc604e", 0 },
20226 { "604", "ppc604", 0 },
20227 { "603e", "ppc603", 0 },
20228 { "603", "ppc603", 0 },
20229 { "601", "ppc601", 0 },
20230 { NULL, "ppc", 0 } };
20231 const char *cpu_id = "";
20232 size_t i;
94ff898d 20233
9390387d 20234 rs6000_file_start ();
192d0f89 20235 darwin_file_start ();
c4e18b1c
GK
20236
20237 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20238 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20239 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20240 && rs6000_select[i].string[0] != '\0')
20241 cpu_id = rs6000_select[i].string;
20242
20243 /* Look through the mapping array. Pick the first name that either
20244 matches the argument, has a bit set in IF_SET that is also set
20245 in the target flags, or has a NULL name. */
20246
20247 i = 0;
20248 while (mapping[i].arg != NULL
20249 && strcmp (mapping[i].arg, cpu_id) != 0
20250 && (mapping[i].if_set & target_flags) == 0)
20251 i++;
20252
20253 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20254}
20255
ee890fe2 20256#endif /* TARGET_MACHO */
7c262518
RH
20257
20258#if TARGET_ELF
9b580a0b
RH
20259static int
20260rs6000_elf_reloc_rw_mask (void)
7c262518 20261{
9b580a0b
RH
20262 if (flag_pic)
20263 return 3;
20264 else if (DEFAULT_ABI == ABI_AIX)
20265 return 2;
20266 else
20267 return 0;
7c262518 20268}
d9f6800d
RH
20269
20270/* Record an element in the table of global constructors. SYMBOL is
20271 a SYMBOL_REF of the function to be called; PRIORITY is a number
20272 between 0 and MAX_INIT_PRIORITY.
20273
20274 This differs from default_named_section_asm_out_constructor in
20275 that we have special handling for -mrelocatable. */
20276
20277static void
a2369ed3 20278rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20279{
20280 const char *section = ".ctors";
20281 char buf[16];
20282
20283 if (priority != DEFAULT_INIT_PRIORITY)
20284 {
20285 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20286 /* Invert the numbering so the linker puts us in the proper
20287 order; constructors are run from right to left, and the
20288 linker sorts in increasing order. */
20289 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20290 section = buf;
20291 }
20292
d6b5193b 20293 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20294 assemble_align (POINTER_SIZE);
d9f6800d
RH
20295
20296 if (TARGET_RELOCATABLE)
20297 {
20298 fputs ("\t.long (", asm_out_file);
20299 output_addr_const (asm_out_file, symbol);
20300 fputs (")@fixup\n", asm_out_file);
20301 }
20302 else
c8af3574 20303 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20304}
20305
20306static void
a2369ed3 20307rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20308{
20309 const char *section = ".dtors";
20310 char buf[16];
20311
20312 if (priority != DEFAULT_INIT_PRIORITY)
20313 {
20314 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20315 /* Invert the numbering so the linker puts us in the proper
20316 order; constructors are run from right to left, and the
20317 linker sorts in increasing order. */
20318 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20319 section = buf;
20320 }
20321
d6b5193b 20322 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20323 assemble_align (POINTER_SIZE);
d9f6800d
RH
20324
20325 if (TARGET_RELOCATABLE)
20326 {
20327 fputs ("\t.long (", asm_out_file);
20328 output_addr_const (asm_out_file, symbol);
20329 fputs (")@fixup\n", asm_out_file);
20330 }
20331 else
c8af3574 20332 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20333}
9739c90c
JJ
20334
20335void
a2369ed3 20336rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20337{
20338 if (TARGET_64BIT)
20339 {
20340 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20341 ASM_OUTPUT_LABEL (file, name);
20342 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20343 rs6000_output_function_entry (file, name);
20344 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20345 if (DOT_SYMBOLS)
9739c90c 20346 {
85b776df 20347 fputs ("\t.size\t", file);
9739c90c 20348 assemble_name (file, name);
85b776df
AM
20349 fputs (",24\n\t.type\t.", file);
20350 assemble_name (file, name);
20351 fputs (",@function\n", file);
20352 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20353 {
20354 fputs ("\t.globl\t.", file);
20355 assemble_name (file, name);
20356 putc ('\n', file);
20357 }
9739c90c 20358 }
85b776df
AM
20359 else
20360 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20361 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20362 rs6000_output_function_entry (file, name);
20363 fputs (":\n", file);
9739c90c
JJ
20364 return;
20365 }
20366
20367 if (TARGET_RELOCATABLE
7f970b70 20368 && !TARGET_SECURE_PLT
9739c90c 20369 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20370 && uses_TOC ())
9739c90c
JJ
20371 {
20372 char buf[256];
20373
20374 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20375
20376 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20377 fprintf (file, "\t.long ");
20378 assemble_name (file, buf);
20379 putc ('-', file);
20380 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20381 assemble_name (file, buf);
20382 putc ('\n', file);
20383 }
20384
20385 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20386 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20387
20388 if (DEFAULT_ABI == ABI_AIX)
20389 {
20390 const char *desc_name, *orig_name;
20391
20392 orig_name = (*targetm.strip_name_encoding) (name);
20393 desc_name = orig_name;
20394 while (*desc_name == '.')
20395 desc_name++;
20396
20397 if (TREE_PUBLIC (decl))
20398 fprintf (file, "\t.globl %s\n", desc_name);
20399
20400 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20401 fprintf (file, "%s:\n", desc_name);
20402 fprintf (file, "\t.long %s\n", orig_name);
20403 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20404 if (DEFAULT_ABI == ABI_AIX)
20405 fputs ("\t.long 0\n", file);
20406 fprintf (file, "\t.previous\n");
20407 }
20408 ASM_OUTPUT_LABEL (file, name);
20409}
1334b570
AM
20410
20411static void
20412rs6000_elf_end_indicate_exec_stack (void)
20413{
20414 if (TARGET_32BIT)
20415 file_end_indicate_exec_stack ();
20416}
7c262518
RH
20417#endif
20418
cbaaba19 20419#if TARGET_XCOFF
0d5817b2
DE
20420static void
20421rs6000_xcoff_asm_output_anchor (rtx symbol)
20422{
20423 char buffer[100];
20424
20425 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20426 SYMBOL_REF_BLOCK_OFFSET (symbol));
20427 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20428}
20429
7c262518 20430static void
a2369ed3 20431rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20432{
20433 fputs (GLOBAL_ASM_OP, stream);
20434 RS6000_OUTPUT_BASENAME (stream, name);
20435 putc ('\n', stream);
20436}
20437
d6b5193b
RS
20438/* A get_unnamed_section callback, used for read-only sections. DIRECTIVE
20439 points to the section string variable. */
20440
20441static void
20442rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20443{
890f9edf
OH
20444 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20445 *(const char *const *) directive,
20446 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20447}
20448
20449/* Likewise for read-write sections. */
20450
20451static void
20452rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20453{
890f9edf
OH
20454 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20455 *(const char *const *) directive,
20456 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20457}
20458
20459/* A get_unnamed_section callback, used for switching to toc_section. */
20460
20461static void
20462rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20463{
20464 if (TARGET_MINIMAL_TOC)
20465 {
20466 /* toc_section is always selected at least once from
20467 rs6000_xcoff_file_start, so this is guaranteed to
20468 always be defined once and only once in each file. */
20469 if (!toc_initialized)
20470 {
20471 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20472 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20473 toc_initialized = 1;
20474 }
20475 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20476 (TARGET_32BIT ? "" : ",3"));
20477 }
20478 else
20479 fputs ("\t.toc\n", asm_out_file);
20480}
20481
20482/* Implement TARGET_ASM_INIT_SECTIONS. */
20483
20484static void
20485rs6000_xcoff_asm_init_sections (void)
20486{
20487 read_only_data_section
20488 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20489 &xcoff_read_only_section_name);
20490
20491 private_data_section
20492 = get_unnamed_section (SECTION_WRITE,
20493 rs6000_xcoff_output_readwrite_section_asm_op,
20494 &xcoff_private_data_section_name);
20495
20496 read_only_private_data_section
20497 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20498 &xcoff_private_data_section_name);
20499
20500 toc_section
20501 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20502
20503 readonly_data_section = read_only_data_section;
20504 exception_section = data_section;
20505}
20506
9b580a0b
RH
20507static int
20508rs6000_xcoff_reloc_rw_mask (void)
20509{
20510 return 3;
20511}
20512
b275d088 20513static void
c18a5b6c
MM
20514rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20515 tree decl ATTRIBUTE_UNUSED)
7c262518 20516{
0e5dbd9b
DE
20517 int smclass;
20518 static const char * const suffix[3] = { "PR", "RO", "RW" };
20519
20520 if (flags & SECTION_CODE)
20521 smclass = 0;
20522 else if (flags & SECTION_WRITE)
20523 smclass = 2;
20524 else
20525 smclass = 1;
20526
5b5198f7 20527 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20528 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20529 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20530}
ae46c4e0 20531
d6b5193b 20532static section *
f676971a 20533rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20534 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20535{
9b580a0b 20536 if (decl_readonly_section (decl, reloc))
ae46c4e0 20537 {
0e5dbd9b 20538 if (TREE_PUBLIC (decl))
d6b5193b 20539 return read_only_data_section;
ae46c4e0 20540 else
d6b5193b 20541 return read_only_private_data_section;
ae46c4e0
RH
20542 }
20543 else
20544 {
0e5dbd9b 20545 if (TREE_PUBLIC (decl))
d6b5193b 20546 return data_section;
ae46c4e0 20547 else
d6b5193b 20548 return private_data_section;
ae46c4e0
RH
20549 }
20550}
20551
20552static void
a2369ed3 20553rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20554{
20555 const char *name;
ae46c4e0 20556
5b5198f7
DE
20557 /* Use select_section for private and uninitialized data. */
20558 if (!TREE_PUBLIC (decl)
20559 || DECL_COMMON (decl)
0e5dbd9b
DE
20560 || DECL_INITIAL (decl) == NULL_TREE
20561 || DECL_INITIAL (decl) == error_mark_node
20562 || (flag_zero_initialized_in_bss
20563 && initializer_zerop (DECL_INITIAL (decl))))
20564 return;
20565
20566 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20567 name = (*targetm.strip_name_encoding) (name);
20568 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20569}
b64a1b53 20570
fb49053f
RH
20571/* Select section for constant in constant pool.
20572
20573 On RS/6000, all constants are in the private read-only data area.
20574 However, if this is being placed in the TOC it must be output as a
20575 toc entry. */
20576
d6b5193b 20577static section *
f676971a 20578rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20579 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20580{
20581 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20582 return toc_section;
b64a1b53 20583 else
d6b5193b 20584 return read_only_private_data_section;
b64a1b53 20585}
772c5265
RH
20586
20587/* Remove any trailing [DS] or the like from the symbol name. */
20588
20589static const char *
a2369ed3 20590rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20591{
20592 size_t len;
20593 if (*name == '*')
20594 name++;
20595 len = strlen (name);
20596 if (name[len - 1] == ']')
20597 return ggc_alloc_string (name, len - 4);
20598 else
20599 return name;
20600}
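
/* Illustrative examples (not part of the GCC sources) of the rule above:
   a leading '*' is skipped, and a trailing four-character mapping-class
   suffix such as "[DS]" or "[RW]" is dropped.  The names are made up.  */
static void
strip_name_encoding_examples (void)
{
  gcc_assert (strcmp (rs6000_xcoff_strip_name_encoding ("foo[DS]"), "foo") == 0);
  gcc_assert (strcmp (rs6000_xcoff_strip_name_encoding ("*bar[RW]"), "bar") == 0);
  gcc_assert (strcmp (rs6000_xcoff_strip_name_encoding ("baz"), "baz") == 0);
}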
20601
5add3202
DE
20602/* Section attributes. AIX is always PIC. */
20603
20604static unsigned int
a2369ed3 20605rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20606{
5b5198f7 20607 unsigned int align;
9b580a0b 20608 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20609
20610 /* Align to at least UNIT size. */
20611 if (flags & SECTION_CODE)
20612 align = MIN_UNITS_PER_WORD;
20613 else
20614 /* Increase alignment of large objects if not already stricter. */
20615 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20616 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20617 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20618
20619 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20620}
a5fe455b 20621
1bc7c5b6
ZW
20622/* Output at beginning of assembler file.
20623
20624 Initialize the section names for the RS/6000 at this point.
20625
20626 Specify filename, including full path, to assembler.
20627
20628 We want to go into the TOC section so at least one .toc will be emitted.
20629 Also, in order to output proper .bs/.es pairs, we need at least one static
20630 [RW] section emitted.
20631
20632 Finally, declare mcount when profiling to make the assembler happy. */
20633
20634static void
863d938c 20635rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20636{
20637 rs6000_gen_section_name (&xcoff_bss_section_name,
20638 main_input_filename, ".bss_");
20639 rs6000_gen_section_name (&xcoff_private_data_section_name,
20640 main_input_filename, ".rw_");
20641 rs6000_gen_section_name (&xcoff_read_only_section_name,
20642 main_input_filename, ".ro_");
20643
20644 fputs ("\t.file\t", asm_out_file);
20645 output_quoted_string (asm_out_file, main_input_filename);
20646 fputc ('\n', asm_out_file);
1bc7c5b6 20647 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20648 switch_to_section (private_data_section);
20649 switch_to_section (text_section);
1bc7c5b6
ZW
20650 if (profile_flag)
20651 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20652 rs6000_file_start ();
20653}
20654
a5fe455b
ZW
20655/* Output at end of assembler file.
20656 On the RS/6000, referencing data should automatically pull in text. */
20657
20658static void
863d938c 20659rs6000_xcoff_file_end (void)
a5fe455b 20660{
d6b5193b 20661 switch_to_section (text_section);
a5fe455b 20662 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20663 switch_to_section (data_section);
a5fe455b
ZW
20664 fputs (TARGET_32BIT
20665 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20666 asm_out_file);
20667}
f1384257 20668#endif /* TARGET_XCOFF */
0e5dbd9b 20669
3c50106f
RH
20670/* Compute a (partial) cost for rtx X. Return true if the complete
20671 cost has been computed, and false if subexpressions should be
20672 scanned. In either case, *TOTAL contains the cost result. */
20673
20674static bool
1494c534 20675rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20676{
f0517163
RS
20677 enum machine_mode mode = GET_MODE (x);
20678
3c50106f
RH
20679 switch (code)
20680 {
30a555d9 20681 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20682 case CONST_INT:
066cd967
DE
20683 if (((outer_code == SET
20684 || outer_code == PLUS
20685 || outer_code == MINUS)
279bb624
DE
20686 && (satisfies_constraint_I (x)
20687 || satisfies_constraint_L (x)))
066cd967 20688 || (outer_code == AND
279bb624
DE
20689 && (satisfies_constraint_K (x)
20690 || (mode == SImode
20691 ? satisfies_constraint_L (x)
20692 : satisfies_constraint_J (x))
1990cd79
AM
20693 || mask_operand (x, mode)
20694 || (mode == DImode
20695 && mask64_operand (x, DImode))))
22e54023 20696 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20697 && (satisfies_constraint_K (x)
20698 || (mode == SImode
20699 ? satisfies_constraint_L (x)
20700 : satisfies_constraint_J (x))))
066cd967
DE
20701 || outer_code == ASHIFT
20702 || outer_code == ASHIFTRT
20703 || outer_code == LSHIFTRT
20704 || outer_code == ROTATE
20705 || outer_code == ROTATERT
d5861a7a 20706 || outer_code == ZERO_EXTRACT
066cd967 20707 || (outer_code == MULT
279bb624 20708 && satisfies_constraint_I (x))
22e54023
DE
20709 || ((outer_code == DIV || outer_code == UDIV
20710 || outer_code == MOD || outer_code == UMOD)
20711 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20712 || (outer_code == COMPARE
279bb624
DE
20713 && (satisfies_constraint_I (x)
20714 || satisfies_constraint_K (x)))
22e54023 20715 || (outer_code == EQ
279bb624
DE
20716 && (satisfies_constraint_I (x)
20717 || satisfies_constraint_K (x)
20718 || (mode == SImode
20719 ? satisfies_constraint_L (x)
20720 : satisfies_constraint_J (x))))
22e54023 20721 || (outer_code == GTU
279bb624 20722 && satisfies_constraint_I (x))
22e54023 20723 || (outer_code == LTU
279bb624 20724 && satisfies_constraint_P (x)))
066cd967
DE
20725 {
20726 *total = 0;
20727 return true;
20728 }
20729 else if ((outer_code == PLUS
4ae234b0 20730 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20731 || (outer_code == MINUS
4ae234b0 20732 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20733 || ((outer_code == SET
20734 || outer_code == IOR
20735 || outer_code == XOR)
20736 && (INTVAL (x)
20737 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20738 {
20739 *total = COSTS_N_INSNS (1);
20740 return true;
20741 }
20742 /* FALLTHRU */
20743
20744 case CONST_DOUBLE:
f6fe3a22 20745 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20746 {
f6fe3a22
DE
20747 if ((outer_code == IOR || outer_code == XOR)
20748 && CONST_DOUBLE_HIGH (x) == 0
20749 && (CONST_DOUBLE_LOW (x)
20750 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20751 {
20752 *total = 0;
20753 return true;
20754 }
20755 else if ((outer_code == AND && and64_2_operand (x, DImode))
20756 || ((outer_code == SET
20757 || outer_code == IOR
20758 || outer_code == XOR)
20759 && CONST_DOUBLE_HIGH (x) == 0))
20760 {
20761 *total = COSTS_N_INSNS (1);
20762 return true;
20763 }
066cd967
DE
20764 }
20765 /* FALLTHRU */
20766
3c50106f 20767 case CONST:
066cd967 20768 case HIGH:
3c50106f 20769 case SYMBOL_REF:
066cd967
DE
20770 case MEM:
20771 /* When optimizing for size, MEM should be slightly more expensive
20772 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 20773 L1 cache latency is about two instructions. */
066cd967 20774 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20775 return true;
20776
30a555d9
DE
20777 case LABEL_REF:
20778 *total = 0;
20779 return true;
20780
3c50106f 20781 case PLUS:
f0517163 20782 if (mode == DFmode)
066cd967
DE
20783 {
20784 if (GET_CODE (XEXP (x, 0)) == MULT)
20785 {
20786 /* FNMA accounted in outer NEG. */
20787 if (outer_code == NEG)
20788 *total = rs6000_cost->dmul - rs6000_cost->fp;
20789 else
20790 *total = rs6000_cost->dmul;
20791 }
20792 else
20793 *total = rs6000_cost->fp;
20794 }
f0517163 20795 else if (mode == SFmode)
066cd967
DE
20796 {
20797 /* FNMA accounted in outer NEG. */
20798 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20799 *total = 0;
20800 else
20801 *total = rs6000_cost->fp;
20802 }
f0517163 20803 else
066cd967
DE
20804 *total = COSTS_N_INSNS (1);
20805 return false;
3c50106f 20806
52190329 20807 case MINUS:
f0517163 20808 if (mode == DFmode)
066cd967 20809 {
762c919f
JM
20810 if (GET_CODE (XEXP (x, 0)) == MULT
20811 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20812 {
20813 /* FNMA accounted in outer NEG. */
20814 if (outer_code == NEG)
762c919f 20815 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20816 else
20817 *total = rs6000_cost->dmul;
20818 }
20819 else
20820 *total = rs6000_cost->fp;
20821 }
f0517163 20822 else if (mode == SFmode)
066cd967
DE
20823 {
20824 /* FNMA accounted in outer NEG. */
20825 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20826 *total = 0;
20827 else
20828 *total = rs6000_cost->fp;
20829 }
f0517163 20830 else
c4ad648e 20831 *total = COSTS_N_INSNS (1);
066cd967 20832 return false;
3c50106f
RH
20833
20834 case MULT:
c9dbf840 20835 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20836 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20837 {
8b897cfa
RS
20838 if (INTVAL (XEXP (x, 1)) >= -256
20839 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20840 *total = rs6000_cost->mulsi_const9;
8b897cfa 20841 else
06a67bdd 20842 *total = rs6000_cost->mulsi_const;
3c50106f 20843 }
066cd967
DE
20844 /* FMA accounted in outer PLUS/MINUS. */
20845 else if ((mode == DFmode || mode == SFmode)
20846 && (outer_code == PLUS || outer_code == MINUS))
20847 *total = 0;
f0517163 20848 else if (mode == DFmode)
06a67bdd 20849 *total = rs6000_cost->dmul;
f0517163 20850 else if (mode == SFmode)
06a67bdd 20851 *total = rs6000_cost->fp;
f0517163 20852 else if (mode == DImode)
06a67bdd 20853 *total = rs6000_cost->muldi;
8b897cfa 20854 else
06a67bdd 20855 *total = rs6000_cost->mulsi;
066cd967 20856 return false;
3c50106f
RH
20857
20858 case DIV:
20859 case MOD:
f0517163
RS
20860 if (FLOAT_MODE_P (mode))
20861 {
06a67bdd
RS
20862 *total = mode == DFmode ? rs6000_cost->ddiv
20863 : rs6000_cost->sdiv;
066cd967 20864 return false;
f0517163 20865 }
5efb1046 20866 /* FALLTHRU */
3c50106f
RH
20867
20868 case UDIV:
20869 case UMOD:
627b6fe2
DJ
20870 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20871 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20872 {
20873 if (code == DIV || code == MOD)
20874 /* Shift, addze */
20875 *total = COSTS_N_INSNS (2);
20876 else
20877 /* Shift */
20878 *total = COSTS_N_INSNS (1);
20879 }
c4ad648e 20880 else
627b6fe2
DJ
20881 {
20882 if (GET_MODE (XEXP (x, 1)) == DImode)
20883 *total = rs6000_cost->divdi;
20884 else
20885 *total = rs6000_cost->divsi;
20886 }
20887 /* Add in shift and subtract for MOD. */
20888 if (code == MOD || code == UMOD)
20889 *total += COSTS_N_INSNS (2);
066cd967 20890 return false;
3c50106f 20891
32f56aad 20892 case CTZ:
3c50106f
RH
20893 case FFS:
20894 *total = COSTS_N_INSNS (4);
066cd967 20895 return false;
3c50106f 20896
32f56aad
DE
20897 case POPCOUNT:
20898 *total = COSTS_N_INSNS (6);
20899 return false;
20900
06a67bdd 20901 case NOT:
066cd967
DE
20902 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20903 {
20904 *total = 0;
20905 return false;
20906 }
20907 /* FALLTHRU */
20908
20909 case AND:
32f56aad 20910 case CLZ:
066cd967
DE
20911 case IOR:
20912 case XOR:
d5861a7a
DE
20913 case ZERO_EXTRACT:
20914 *total = COSTS_N_INSNS (1);
20915 return false;
20916
066cd967
DE
20917 case ASHIFT:
20918 case ASHIFTRT:
20919 case LSHIFTRT:
20920 case ROTATE:
20921 case ROTATERT:
d5861a7a 20922 /* Handle mul_highpart. */
066cd967
DE
20923 if (outer_code == TRUNCATE
20924 && GET_CODE (XEXP (x, 0)) == MULT)
20925 {
20926 if (mode == DImode)
20927 *total = rs6000_cost->muldi;
20928 else
20929 *total = rs6000_cost->mulsi;
20930 return true;
20931 }
d5861a7a
DE
20932 else if (outer_code == AND)
20933 *total = 0;
20934 else
20935 *total = COSTS_N_INSNS (1);
20936 return false;
20937
20938 case SIGN_EXTEND:
20939 case ZERO_EXTEND:
20940 if (GET_CODE (XEXP (x, 0)) == MEM)
20941 *total = 0;
20942 else
20943 *total = COSTS_N_INSNS (1);
066cd967 20944 return false;
06a67bdd 20945
066cd967
DE
20946 case COMPARE:
20947 case NEG:
20948 case ABS:
20949 if (!FLOAT_MODE_P (mode))
20950 {
20951 *total = COSTS_N_INSNS (1);
20952 return false;
20953 }
20954 /* FALLTHRU */
20955
20956 case FLOAT:
20957 case UNSIGNED_FLOAT:
20958 case FIX:
20959 case UNSIGNED_FIX:
06a67bdd
RS
20960 case FLOAT_TRUNCATE:
20961 *total = rs6000_cost->fp;
066cd967 20962 return false;
06a67bdd 20963
a2af5043
DJ
20964 case FLOAT_EXTEND:
20965 if (mode == DFmode)
20966 *total = 0;
20967 else
20968 *total = rs6000_cost->fp;
20969 return false;
20970
06a67bdd
RS
20971 case UNSPEC:
20972 switch (XINT (x, 1))
20973 {
20974 case UNSPEC_FRSP:
20975 *total = rs6000_cost->fp;
20976 return true;
20977
20978 default:
20979 break;
20980 }
20981 break;
20982
20983 case CALL:
20984 case IF_THEN_ELSE:
20985 if (optimize_size)
20986 {
20987 *total = COSTS_N_INSNS (1);
20988 return true;
20989 }
066cd967
DE
20990 else if (FLOAT_MODE_P (mode)
20991 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20992 {
20993 *total = rs6000_cost->fp;
20994 return false;
20995 }
06a67bdd
RS
20996 break;
20997
c0600ecd
DE
20998 case EQ:
20999 case GTU:
21000 case LTU:
22e54023
DE
21001 /* Carry bit requires mode == Pmode.
21002 NEG or PLUS already counted so only add one. */
21003 if (mode == Pmode
21004 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21005 {
22e54023
DE
21006 *total = COSTS_N_INSNS (1);
21007 return true;
21008 }
21009 if (outer_code == SET)
21010 {
21011 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21012 {
22e54023 21013 *total = COSTS_N_INSNS (2);
c0600ecd 21014 return true;
c0600ecd 21015 }
22e54023
DE
21016 else if (mode == Pmode)
21017 {
21018 *total = COSTS_N_INSNS (3);
21019 return false;
21020 }
21021 }
21022 /* FALLTHRU */
21023
21024 case GT:
21025 case LT:
21026 case UNORDERED:
21027 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21028 {
21029 *total = COSTS_N_INSNS (2);
21030 return true;
c0600ecd 21031 }
22e54023
DE
21032 /* CC COMPARE. */
21033 if (outer_code == COMPARE)
21034 {
21035 *total = 0;
21036 return true;
21037 }
21038 break;
c0600ecd 21039
3c50106f 21040 default:
06a67bdd 21041 break;
3c50106f 21042 }
06a67bdd
RS
21043
21044 return false;
3c50106f
RH
21045}
21046
34bb030a
DE
21047/* A C expression returning the cost of moving data from a register of class
21048 CLASS1 to one of CLASS2. */
21049
21050int
f676971a 21051rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21052 enum reg_class from, enum reg_class to)
34bb030a
DE
21053{
21054 /* Moves from/to GENERAL_REGS. */
21055 if (reg_classes_intersect_p (to, GENERAL_REGS)
21056 || reg_classes_intersect_p (from, GENERAL_REGS))
21057 {
21058 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21059 from = to;
21060
21061 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21062 return (rs6000_memory_move_cost (mode, from, 0)
21063 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21064
c4ad648e
AM
21065 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21066 shift. */
34bb030a
DE
21067 else if (from == CR_REGS)
21068 return 4;
21069
21070 else
c4ad648e 21071 /* A move will cost one instruction per GPR moved. */
c8b622ff 21072 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21073 }
21074
c4ad648e 21075 /* Moving between two similar registers is just one instruction. */
34bb030a 21076 else if (reg_classes_intersect_p (to, from))
7393f7f8 21077 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21078
c4ad648e 21079 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21080 else
f676971a 21081 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21082 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21083}
21084
21085/* A C expression returning the cost of moving data of mode MODE between a
21086 register and memory. */
21087
21088int
f676971a 21089rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21090 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21091{
21092 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21093 return 4 * hard_regno_nregs[0][mode];
34bb030a 21094 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21095 return 4 * hard_regno_nregs[32][mode];
34bb030a 21096 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21097 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21098 else
21099 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21100}
21101
9c78b944
DE
21102/* Return the decl of a target-specific builtin that implements the
21103 reciprocal of the given function, or NULL_TREE if not available. */
21104
21105static tree
21106rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21107 bool sqrt ATTRIBUTE_UNUSED)
21108{
21109 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21110 && flag_finite_math_only && !flag_trapping_math
21111 && flag_unsafe_math_optimizations))
21112 return NULL_TREE;
21113
21114 if (md_fn)
21115 return NULL_TREE;
21116 else
21117 switch (fn)
21118 {
21119 case BUILT_IN_SQRTF:
21120 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21121
21122 default:
21123 return NULL_TREE;
21124 }
21125}
21126
ef765ea9
DE
21127/* Newton-Raphson approximation of single-precision floating point divide n/d.
21128 Assumes no trapping math and finite arguments. */
21129
21130void
9c78b944 21131rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21132{
21133 rtx x0, e0, e1, y1, u0, v0, one;
21134
21135 x0 = gen_reg_rtx (SFmode);
21136 e0 = gen_reg_rtx (SFmode);
21137 e1 = gen_reg_rtx (SFmode);
21138 y1 = gen_reg_rtx (SFmode);
21139 u0 = gen_reg_rtx (SFmode);
21140 v0 = gen_reg_rtx (SFmode);
21141 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21142
21143 /* x0 = 1./d estimate */
21144 emit_insn (gen_rtx_SET (VOIDmode, x0,
21145 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21146 UNSPEC_FRES)));
21147 /* e0 = 1. - d * x0 */
21148 emit_insn (gen_rtx_SET (VOIDmode, e0,
21149 gen_rtx_MINUS (SFmode, one,
21150 gen_rtx_MULT (SFmode, d, x0))));
21151 /* e1 = e0 + e0 * e0 */
21152 emit_insn (gen_rtx_SET (VOIDmode, e1,
21153 gen_rtx_PLUS (SFmode,
21154 gen_rtx_MULT (SFmode, e0, e0), e0)));
21155 /* y1 = x0 + e1 * x0 */
21156 emit_insn (gen_rtx_SET (VOIDmode, y1,
21157 gen_rtx_PLUS (SFmode,
21158 gen_rtx_MULT (SFmode, e1, x0), x0)));
21159 /* u0 = n * y1 */
21160 emit_insn (gen_rtx_SET (VOIDmode, u0,
21161 gen_rtx_MULT (SFmode, n, y1)));
21162 /* v0 = n - d * u0 */
21163 emit_insn (gen_rtx_SET (VOIDmode, v0,
21164 gen_rtx_MINUS (SFmode, n,
21165 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21166 /* dst = u0 + v0 * y1 */
21167 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21168 gen_rtx_PLUS (SFmode,
21169 gen_rtx_MULT (SFmode, v0, y1), u0)));
21170}
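
/* Illustrative sketch (not part of the GCC sources): a scalar C model of
   the sequence emitted above, with the fres estimate replaced by an exact
   reciprocal for clarity.  Each statement corresponds to one emitted
   floating-point operation; the fused forms map onto fmadds/fnmsubs.  */
static float
swdivsf_model (float n, float d)
{
  float x0 = 1.0f / d;       /* x0 = fres (d), an estimate of 1/d    */
  float e0 = 1.0f - d * x0;  /* e0 = 1 - d*x0                        */
  float e1 = e0 + e0 * e0;   /* e1 = e0 + e0*e0                      */
  float y1 = x0 + e1 * x0;   /* y1 = x0 + e1*x0, the refined 1/d     */
  float u0 = n * y1;         /* u0 = n*y1, first quotient estimate   */
  float v0 = n - d * u0;     /* v0 = n - d*u0, the residual          */
  return u0 + v0 * y1;       /* dst = u0 + v0*y1                     */
}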
21171
21172/* Newton-Raphson approximation of double-precision floating point divide n/d.
21173 Assumes no trapping math and finite arguments. */
21174
21175void
9c78b944 21176rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21177{
21178 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21179
21180 x0 = gen_reg_rtx (DFmode);
21181 e0 = gen_reg_rtx (DFmode);
21182 e1 = gen_reg_rtx (DFmode);
21183 e2 = gen_reg_rtx (DFmode);
21184 y1 = gen_reg_rtx (DFmode);
21185 y2 = gen_reg_rtx (DFmode);
21186 y3 = gen_reg_rtx (DFmode);
21187 u0 = gen_reg_rtx (DFmode);
21188 v0 = gen_reg_rtx (DFmode);
21189 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21190
21191 /* x0 = 1./d estimate */
21192 emit_insn (gen_rtx_SET (VOIDmode, x0,
21193 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21194 UNSPEC_FRES)));
21195 /* e0 = 1. - d * x0 */
21196 emit_insn (gen_rtx_SET (VOIDmode, e0,
21197 gen_rtx_MINUS (DFmode, one,
21198 gen_rtx_MULT (DFmode, d, x0))));
21199 /* y1 = x0 + e0 * x0 */
21200 emit_insn (gen_rtx_SET (VOIDmode, y1,
21201 gen_rtx_PLUS (DFmode,
21202 gen_rtx_MULT (DFmode, e0, x0), x0)));
21203 /* e1 = e0 * e0 */
21204 emit_insn (gen_rtx_SET (VOIDmode, e1,
21205 gen_rtx_MULT (DFmode, e0, e0)));
21206 /* y2 = y1 + e1 * y1 */
21207 emit_insn (gen_rtx_SET (VOIDmode, y2,
21208 gen_rtx_PLUS (DFmode,
21209 gen_rtx_MULT (DFmode, e1, y1), y1)));
21210 /* e2 = e1 * e1 */
21211 emit_insn (gen_rtx_SET (VOIDmode, e2,
21212 gen_rtx_MULT (DFmode, e1, e1)));
21213 /* y3 = y2 + e2 * y2 */
21214 emit_insn (gen_rtx_SET (VOIDmode, y3,
21215 gen_rtx_PLUS (DFmode,
21216 gen_rtx_MULT (DFmode, e2, y2), y2)));
21217 /* u0 = n * y3 */
21218 emit_insn (gen_rtx_SET (VOIDmode, u0,
21219 gen_rtx_MULT (DFmode, n, y3)));
21220 /* v0 = n - d * u0 */
21221 emit_insn (gen_rtx_SET (VOIDmode, v0,
21222 gen_rtx_MINUS (DFmode, n,
21223 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21224 /* dst = u0 + v0 * y3 */
21225 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21226 gen_rtx_PLUS (DFmode,
21227 gen_rtx_MULT (DFmode, v0, y3), u0)));
21228}
21229
565ef4ba 21230
9c78b944
DE
21231/* Newton-Raphson approximation of single-precision floating point rsqrt.
21232 Assumes no trapping math and finite arguments. */
21233
21234void
21235rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21236{
21237 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21238 half, one, halfthree, c1, cond, label;
21239
21240 x0 = gen_reg_rtx (SFmode);
21241 x1 = gen_reg_rtx (SFmode);
21242 x2 = gen_reg_rtx (SFmode);
21243 y1 = gen_reg_rtx (SFmode);
21244 u0 = gen_reg_rtx (SFmode);
21245 u1 = gen_reg_rtx (SFmode);
21246 u2 = gen_reg_rtx (SFmode);
21247 v0 = gen_reg_rtx (SFmode);
21248 v1 = gen_reg_rtx (SFmode);
21249 v2 = gen_reg_rtx (SFmode);
21250 t0 = gen_reg_rtx (SFmode);
21251 halfthree = gen_reg_rtx (SFmode);
21252 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21253 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21254
21255 /* Check for 0.0, 1.0, NaN and Inf by testing whether src * src == src. */
21256 emit_insn (gen_rtx_SET (VOIDmode, t0,
21257 gen_rtx_MULT (SFmode, src, src)));
21258
21259 emit_insn (gen_rtx_SET (VOIDmode, cond,
21260 gen_rtx_COMPARE (CCFPmode, t0, src)));
21261 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21262 emit_unlikely_jump (c1, label);
21263
21264 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21265 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21266
21267 /* halfthree = 1.5 = 1.0 + 0.5 */
21268 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21269 gen_rtx_PLUS (SFmode, one, half)));
21270
21271 /* x0 = rsqrt estimate */
21272 emit_insn (gen_rtx_SET (VOIDmode, x0,
21273 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21274 UNSPEC_RSQRT)));
21275
21276 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21277 emit_insn (gen_rtx_SET (VOIDmode, y1,
21278 gen_rtx_MINUS (SFmode,
21279 gen_rtx_MULT (SFmode, src, halfthree),
21280 src)));
21281
21282 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21283 emit_insn (gen_rtx_SET (VOIDmode, u0,
21284 gen_rtx_MULT (SFmode, x0, x0)));
21285 emit_insn (gen_rtx_SET (VOIDmode, v0,
21286 gen_rtx_MINUS (SFmode,
21287 halfthree,
21288 gen_rtx_MULT (SFmode, y1, u0))));
21289 emit_insn (gen_rtx_SET (VOIDmode, x1,
21290 gen_rtx_MULT (SFmode, x0, v0)));
21291
21292 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21293 emit_insn (gen_rtx_SET (VOIDmode, u1,
21294 gen_rtx_MULT (SFmode, x1, x1)));
21295 emit_insn (gen_rtx_SET (VOIDmode, v1,
21296 gen_rtx_MINUS (SFmode,
21297 halfthree,
21298 gen_rtx_MULT (SFmode, y1, u1))));
21299 emit_insn (gen_rtx_SET (VOIDmode, x2,
21300 gen_rtx_MULT (SFmode, x1, v1)));
21301
21302 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21303 emit_insn (gen_rtx_SET (VOIDmode, u2,
21304 gen_rtx_MULT (SFmode, x2, x2)));
21305 emit_insn (gen_rtx_SET (VOIDmode, v2,
21306 gen_rtx_MINUS (SFmode,
21307 halfthree,
21308 gen_rtx_MULT (SFmode, y1, u2))));
21309 emit_insn (gen_rtx_SET (VOIDmode, dst,
21310 gen_rtx_MULT (SFmode, x2, v2)));
21311
21312 emit_label (XEXP (label, 0));
21313}
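
/* Illustrative sketch (not part of the GCC sources): a scalar C model of
   the refinement emitted above.  ESTIMATE stands for the frsqrte result;
   each of the three steps computes x <- x * (1.5 - (0.5*src) * x*x),
   where 0.5*src is formed as 1.5*src - src to avoid loading another
   constant.  */
static float
swrsqrtsf_model (float src, float estimate)
{
  float y1 = 1.5f * src - src;        /* y1 = 0.5 * src                 */
  float x = estimate;                 /* x0 = frsqrte (src)             */
  int i;

  for (i = 0; i < 3; i++)
    x = x * (1.5f - y1 * (x * x));    /* x1, x2, then the final value   */
  return x;
}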
21314
565ef4ba
RS
21315/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21316 target, and SRC is the argument operand. */
21317
21318void
21319rs6000_emit_popcount (rtx dst, rtx src)
21320{
21321 enum machine_mode mode = GET_MODE (dst);
21322 rtx tmp1, tmp2;
21323
21324 tmp1 = gen_reg_rtx (mode);
21325
21326 if (mode == SImode)
21327 {
21328 emit_insn (gen_popcntbsi2 (tmp1, src));
21329 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21330 NULL_RTX, 0);
21331 tmp2 = force_reg (SImode, tmp2);
21332 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21333 }
21334 else
21335 {
21336 emit_insn (gen_popcntbdi2 (tmp1, src));
21337 tmp2 = expand_mult (DImode, tmp1,
21338 GEN_INT ((HOST_WIDE_INT)
21339 0x01010101 << 32 | 0x01010101),
21340 NULL_RTX, 0);
21341 tmp2 = force_reg (DImode, tmp2);
21342 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21343 }
21344}
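
/* Illustrative sketch (not part of the GCC sources): a C model of the
   SImode sequence above.  The loop stands in for the popcntb instruction,
   which leaves the population count of each source byte in the
   corresponding result byte; multiplying by 0x01010101 sums the four byte
   counts into the most significant byte, and the shift by 24 extracts the
   total.  */
static unsigned int
popcount_si_model (unsigned int src)
{
  unsigned int per_byte = 0;
  unsigned int i;

  for (i = 0; i < 4; i++)
    {
      unsigned int byte = (src >> (8 * i)) & 0xff;
      unsigned int count = 0;
      while (byte)
        {
          count += byte & 1;
          byte >>= 1;
        }
      per_byte |= count << (8 * i);
    }

  return (per_byte * 0x01010101u) >> 24;
}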
21345
21346
21347/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21348 target, and SRC is the argument operand. */
21349
21350void
21351rs6000_emit_parity (rtx dst, rtx src)
21352{
21353 enum machine_mode mode = GET_MODE (dst);
21354 rtx tmp;
21355
21356 tmp = gen_reg_rtx (mode);
21357 if (mode == SImode)
21358 {
21359 /* Is mult+shift >= shift+xor+shift+xor? */
21360 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21361 {
21362 rtx tmp1, tmp2, tmp3, tmp4;
21363
21364 tmp1 = gen_reg_rtx (SImode);
21365 emit_insn (gen_popcntbsi2 (tmp1, src));
21366
21367 tmp2 = gen_reg_rtx (SImode);
21368 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21369 tmp3 = gen_reg_rtx (SImode);
21370 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21371
21372 tmp4 = gen_reg_rtx (SImode);
21373 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21374 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21375 }
21376 else
21377 rs6000_emit_popcount (tmp, src);
21378 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21379 }
21380 else
21381 {
21382 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21383 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21384 {
21385 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21386
21387 tmp1 = gen_reg_rtx (DImode);
21388 emit_insn (gen_popcntbdi2 (tmp1, src));
21389
21390 tmp2 = gen_reg_rtx (DImode);
21391 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21392 tmp3 = gen_reg_rtx (DImode);
21393 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21394
21395 tmp4 = gen_reg_rtx (DImode);
21396 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21397 tmp5 = gen_reg_rtx (DImode);
21398 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21399
21400 tmp6 = gen_reg_rtx (DImode);
21401 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21402 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21403 }
21404 else
21405 rs6000_emit_popcount (tmp, src);
21406 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21407 }
21408}
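
/* Illustrative sketch (not part of the GCC sources): a C model of the
   SImode shift/xor path above.  PER_BYTE_COUNTS stands for the popcntb
   result; folding the upper half onto the lower half twice leaves the
   parity of the whole word in bit 0.  */
static unsigned int
parity_si_model (unsigned int per_byte_counts)
{
  unsigned int t = per_byte_counts ^ (per_byte_counts >> 16);  /* fold halfwords */
  t ^= t >> 8;                                                 /* fold bytes     */
  return t & 1;                                                /* parity bit     */
}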
21409
ded9bf77
AH
21410/* Return an RTX representing where to find the function value of a
21411 function returning MODE. */
21412static rtx
21413rs6000_complex_function_value (enum machine_mode mode)
21414{
21415 unsigned int regno;
21416 rtx r1, r2;
21417 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21418 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21419
18f63bfa
AH
21420 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21421 regno = FP_ARG_RETURN;
354ed18f
AH
21422 else
21423 {
18f63bfa 21424 regno = GP_ARG_RETURN;
ded9bf77 21425
18f63bfa
AH
21426 /* 32-bit is OK since it'll go in r3/r4. */
21427 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21428 return gen_rtx_REG (mode, regno);
21429 }
21430
18f63bfa
AH
21431 if (inner_bytes >= 8)
21432 return gen_rtx_REG (mode, regno);
21433
ded9bf77
AH
21434 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21435 const0_rtx);
21436 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21437 GEN_INT (inner_bytes));
ded9bf77
AH
21438 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21439}
21440
a6ebc39a
AH
21441/* Define how to find the value returned by a function.
21442 VALTYPE is the data type of the value (as a tree).
21443 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21444 otherwise, FUNC is 0.
21445
21446 On the SPE, both FPs and vectors are returned in r3.
21447
21448 On RS/6000 an integer value is in r3 and a floating-point value is in
21449 fp1, unless -msoft-float. */
21450
21451rtx
586de218 21452rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21453{
21454 enum machine_mode mode;
2a8fa26c 21455 unsigned int regno;
a6ebc39a 21456
594a51fe
SS
21457 /* Special handling for structs in darwin64. */
21458 if (rs6000_darwin64_abi
21459 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21460 && TREE_CODE (valtype) == RECORD_TYPE
21461 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21462 {
21463 CUMULATIVE_ARGS valcum;
21464 rtx valret;
21465
0b5383eb 21466 valcum.words = 0;
594a51fe
SS
21467 valcum.fregno = FP_ARG_MIN_REG;
21468 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21469 /* Do a trial code generation as if this were going to be passed as
21470 an argument; if any part goes in memory, we return NULL. */
21471 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21472 if (valret)
21473 return valret;
21474 /* Otherwise fall through to standard ABI rules. */
21475 }
21476
0e67400a
FJ
21477 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21478 {
21479 /* A long long return value needs to be split in the 32-bit -mpowerpc64 ABI. */
21480 return gen_rtx_PARALLEL (DImode,
21481 gen_rtvec (2,
21482 gen_rtx_EXPR_LIST (VOIDmode,
21483 gen_rtx_REG (SImode, GP_ARG_RETURN),
21484 const0_rtx),
21485 gen_rtx_EXPR_LIST (VOIDmode,
21486 gen_rtx_REG (SImode,
21487 GP_ARG_RETURN + 1),
21488 GEN_INT (4))));
21489 }
0f086e42
FJ
21490 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21491 {
21492 return gen_rtx_PARALLEL (DCmode,
21493 gen_rtvec (4,
21494 gen_rtx_EXPR_LIST (VOIDmode,
21495 gen_rtx_REG (SImode, GP_ARG_RETURN),
21496 const0_rtx),
21497 gen_rtx_EXPR_LIST (VOIDmode,
21498 gen_rtx_REG (SImode,
21499 GP_ARG_RETURN + 1),
21500 GEN_INT (4)),
21501 gen_rtx_EXPR_LIST (VOIDmode,
21502 gen_rtx_REG (SImode,
21503 GP_ARG_RETURN + 2),
21504 GEN_INT (8)),
21505 gen_rtx_EXPR_LIST (VOIDmode,
21506 gen_rtx_REG (SImode,
21507 GP_ARG_RETURN + 3),
21508 GEN_INT (12))));
21509 }
602ea4d3 21510
7348aa7f
FXC
21511 mode = TYPE_MODE (valtype);
21512 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21513 || POINTER_TYPE_P (valtype))
b78d48dd 21514 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21515
00b79d54 21516 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21517 {
21518 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21519 {
21520 switch (mode)
21521 {
21522 default:
21523 gcc_unreachable ();
21524 case SDmode:
21525 regno = GP_ARG_RETURN;
21526 break;
21527 case DDmode:
21528 regno = FP_ARG_RETURN;
21529 break;
21530 case TDmode:
21531 /* Use f2:f3 specified by the ABI. */
21532 regno = FP_ARG_RETURN + 1;
21533 break;
21534 }
21535 }
21536 else
21537 regno = GP_ARG_RETURN;
21538 }
00b79d54 21539 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21540 regno = FP_ARG_RETURN;
ded9bf77 21541 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21542 && targetm.calls.split_complex_arg)
ded9bf77 21543 return rs6000_complex_function_value (mode);
44688022 21544 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21545 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21546 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21547 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21548 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21549 && (mode == DFmode || mode == DDmode || mode == DCmode
21550 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21551 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21552 else
21553 regno = GP_ARG_RETURN;
21554
21555 return gen_rtx_REG (mode, regno);
21556}
21557
ded9bf77
AH
21558/* Define how to find the value returned by a library function
21559 assuming the value has mode MODE. */
21560rtx
21561rs6000_libcall_value (enum machine_mode mode)
21562{
21563 unsigned int regno;
21564
2e6c9641
FJ
21565 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21566 {
21567 /* A long long return value needs to be split in the 32-bit -mpowerpc64 ABI. */
21568 return gen_rtx_PARALLEL (DImode,
21569 gen_rtvec (2,
21570 gen_rtx_EXPR_LIST (VOIDmode,
21571 gen_rtx_REG (SImode, GP_ARG_RETURN),
21572 const0_rtx),
21573 gen_rtx_EXPR_LIST (VOIDmode,
21574 gen_rtx_REG (SImode,
21575 GP_ARG_RETURN + 1),
21576 GEN_INT (4))));
21577 }
21578
00b79d54 21579 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21580 {
21581 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21582 {
21583 switch (mode)
21584 {
21585 default:
21586 gcc_unreachable ();
21587 case SDmode:
21588 regno = GP_ARG_RETURN;
21589 break;
21590 case DDmode:
21591 regno = FP_ARG_RETURN;
21592 break;
21593 case TDmode:
21594 /* Use f2:f3 specified by the ABI. */
21595 regno = FP_ARG_RETURN + 1;
21596 break;
21597 }
21598 }
21599 else
21600 regno = GP_ARG_RETURN;
21601 }
00b79d54 21602 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21603 && TARGET_HARD_FLOAT && TARGET_FPRS)
21604 regno = FP_ARG_RETURN;
44688022
AM
21605 else if (ALTIVEC_VECTOR_MODE (mode)
21606 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21607 regno = ALTIVEC_ARG_RETURN;
42ba5130 21608 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21609 return rs6000_complex_function_value (mode);
18f63bfa 21610 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21611 && (mode == DFmode || mode == DDmode || mode == DCmode
21612 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21613 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21614 else
21615 regno = GP_ARG_RETURN;
21616
21617 return gen_rtx_REG (mode, regno);
21618}

/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
        offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
             ? info->fixed_size + info->vars_size + info->parm_size
             : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}
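
/* As a worked example (the numbers are purely illustrative): if
   rs6000_stack_info reports total_size == 80 for a function that really
   allocates a frame (info->push_p), then eliminating ARG_POINTER_REGNUM
   to STACK_POINTER_REGNUM or to HARD_FRAME_POINTER_REGNUM both yield 80,
   while eliminating HARD_FRAME_POINTER_REGNUM to STACK_POINTER_REGNUM
   yields 0.  For a frameless function (!info->push_p) that last case
   becomes -total_size instead.  */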

/* Return true if TYPE is a SPE or AltiVec opaque type.  */

static bool
rs6000_is_opaque_type (const_tree type)
{
  return (type == opaque_V2SI_type_node
          || type == opaque_V2SF_type_node
          || type == opaque_p_V2SI_type_node
          || type == opaque_V4SI_type_node);
}

static rtx
rs6000_dwarf_register_span (rtx reg)
{
  unsigned regno;

  if (TARGET_SPE
      && (SPE_VECTOR_MODE (GET_MODE (reg))
          || (TARGET_E500_DOUBLE
              && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
                      BYTES_BIG_ENDIAN
                      ? gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno + 1200),
                                   gen_rtx_REG (SImode, regno))
                      : gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno),
                                   gen_rtx_REG (SImode, regno + 1200)));
}
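
/* For instance, on a big-endian SPE target a vector value living in r5 is
   described to the unwinder as the pair (reg:SI 1205) followed by
   (reg:SI 5): the fictitious register number 1205 (regno + 1200) stands
   for the upper 32 bits of the 64-bit GPR and is recognized, and passed
   through unchanged, by rs6000_dbx_register_number below.  */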

/* Fill in sizes for SPE register high parts in table used by unwinder.  */

static void
rs6000_init_dwarf_reg_sizes_extra (tree address)
{
  if (TARGET_SPE)
    {
      int i;
      enum machine_mode mode = TYPE_MODE (char_type_node);
      rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
      rtx mem = gen_rtx_MEM (BLKmode, addr);
      rtx value = gen_int_mode (4, mode);

      for (i = 1201; i < 1232; i++)
        {
          int column = DWARF_REG_TO_UNWIND_COLUMN (i);
          HOST_WIDE_INT offset
            = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);

          emit_move_insn (adjust_address (mem, mode, offset), value);
        }
    }
}

/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LR_REGNO)
    return 108;
  if (regno == CTR_REGNO)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}
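
/* A few sample mappings implied by the function above: the link register
   becomes DWARF column 108, CTR becomes 109, CR0 becomes 86 with the
   remaining condition registers following consecutively, and the first
   AltiVec register maps to 1124.  GPRs and FPRs (regno <= 63) keep their
   internal numbers.  */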

/* Target hook eh_return_filter_mode.  */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}

/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
    return true;

  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;

  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;

  else
    return false;
}

/* Target hook for invalid_arg_for_unprototyped_fn.  */
static const char *
invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
{
  return (!rs6000_darwin64_abi
          && typelist == 0
          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
          && (funcdecl == NULL_TREE
              || (TREE_CODE (funcdecl) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
         ? N_("AltiVec argument passed to unprototyped function")
         : NULL;
}

/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
   setup by using __stack_chk_fail_local hidden function instead of
   calling __stack_chk_fail directly.  Otherwise it is better to call
   __stack_chk_fail directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
         ? default_hidden_stack_protect_fail ()
         : default_external_stack_protect_fail ();
}
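
/* In practice this means that, for example, 32-bit SVR4 (ABI_V4) code
   built with -fpic and -msecure-plt reports a stack-smashing failure by
   calling the hidden local symbol __stack_chk_fail_local, while every
   other configuration calls __stack_chk_fail in the normal way.  */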

#include "gt-rs6000.h"