/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

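/* Illustrative sketch only (guarded out; the real consumers are
   rs6000_stack_info and the prologue/epilogue emitters later in this
   file): code that uses the frame description above first obtains a
   filled-in rs6000_stack_t and then reads its fields.  The helper name
   below is made up.  */
#if 0
static HOST_WIDE_INT
example_frame_size_to_allocate (void)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  /* Only allocate stack space when the frame actually needs it.  */
  return info->push_p ? info->total_size : 0;
}
#endif
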
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

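/* A sketch, not the actual definition: the per-function structure above
   is typically allocated lazily through init_machine_status, roughly as
   below (the real allocator is rs6000_init_machine_status, defined
   further down in this file; the exact allocation call here is an
   assumption).  */
#if 0
static struct machine_function *
example_init_machine_status (void)
{
  /* Garbage-collected, zero-initialized per-function storage.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
#endif
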
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,           tune    arch */
  { (const char *)0,    "--with-cpu=",  1,      1 },
  { (const char *)0,    "-mcpu=",       1,      1 },
  { (const char *)0,    "-mtune=",      1,      0 },
};

static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi=spe/nospe was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
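/* Shape of an entry (illustrative only; the real bdesc_* tables built
   from this structure appear much later in this file, and the names
   used here are only examples):

     { MASK_ALTIVEC, CODE_FOR_altivec_vaddubm,
       "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM }

   i.e. a target-flag mask, the insn code implementing the builtin,
   its user-visible name, and its rs6000_builtins enumerator.  */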
8b897cfa
RS
278\f
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;      /* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;      /* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

303/* Instruction size costs on 32bit processors. */
304static const
305struct processor_costs size32_cost = {
06a67bdd
RS
306 COSTS_N_INSNS (1), /* mulsi */
307 COSTS_N_INSNS (1), /* mulsi_const */
308 COSTS_N_INSNS (1), /* mulsi_const9 */
309 COSTS_N_INSNS (1), /* muldi */
310 COSTS_N_INSNS (1), /* divsi */
311 COSTS_N_INSNS (1), /* divdi */
312 COSTS_N_INSNS (1), /* fp */
313 COSTS_N_INSNS (1), /* dmul */
314 COSTS_N_INSNS (1), /* sdiv */
315 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
316 32,
317 0,
318 0,
5f732aba 319 0,
8b897cfa
RS
320};
321
322/* Instruction size costs on 64bit processors. */
323static const
324struct processor_costs size64_cost = {
06a67bdd
RS
325 COSTS_N_INSNS (1), /* mulsi */
326 COSTS_N_INSNS (1), /* mulsi_const */
327 COSTS_N_INSNS (1), /* mulsi_const9 */
328 COSTS_N_INSNS (1), /* muldi */
329 COSTS_N_INSNS (1), /* divsi */
330 COSTS_N_INSNS (1), /* divdi */
331 COSTS_N_INSNS (1), /* fp */
332 COSTS_N_INSNS (1), /* dmul */
333 COSTS_N_INSNS (1), /* sdiv */
334 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
335 128,
336 0,
337 0,
5f732aba 338 0,
8b897cfa
RS
339};
340
341/* Instruction costs on RIOS1 processors. */
342static const
343struct processor_costs rios1_cost = {
06a67bdd
RS
344 COSTS_N_INSNS (5), /* mulsi */
345 COSTS_N_INSNS (4), /* mulsi_const */
346 COSTS_N_INSNS (3), /* mulsi_const9 */
347 COSTS_N_INSNS (5), /* muldi */
348 COSTS_N_INSNS (19), /* divsi */
349 COSTS_N_INSNS (19), /* divdi */
350 COSTS_N_INSNS (2), /* fp */
351 COSTS_N_INSNS (2), /* dmul */
352 COSTS_N_INSNS (19), /* sdiv */
353 COSTS_N_INSNS (19), /* ddiv */
5f732aba
DE
354 128,
355 64, /* l1 cache */
356 512, /* l2 cache */
0b11da67 357 0, /* streams */
8b897cfa
RS
358};
359
360/* Instruction costs on RIOS2 processors. */
361static const
362struct processor_costs rios2_cost = {
06a67bdd
RS
363 COSTS_N_INSNS (2), /* mulsi */
364 COSTS_N_INSNS (2), /* mulsi_const */
365 COSTS_N_INSNS (2), /* mulsi_const9 */
366 COSTS_N_INSNS (2), /* muldi */
367 COSTS_N_INSNS (13), /* divsi */
368 COSTS_N_INSNS (13), /* divdi */
369 COSTS_N_INSNS (2), /* fp */
370 COSTS_N_INSNS (2), /* dmul */
371 COSTS_N_INSNS (17), /* sdiv */
372 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
373 256,
374 256, /* l1 cache */
375 1024, /* l2 cache */
0b11da67 376 0, /* streams */
8b897cfa
RS
377};
378
379/* Instruction costs on RS64A processors. */
380static const
381struct processor_costs rs64a_cost = {
06a67bdd
RS
382 COSTS_N_INSNS (20), /* mulsi */
383 COSTS_N_INSNS (12), /* mulsi_const */
384 COSTS_N_INSNS (8), /* mulsi_const9 */
385 COSTS_N_INSNS (34), /* muldi */
386 COSTS_N_INSNS (65), /* divsi */
387 COSTS_N_INSNS (67), /* divdi */
388 COSTS_N_INSNS (4), /* fp */
389 COSTS_N_INSNS (4), /* dmul */
390 COSTS_N_INSNS (31), /* sdiv */
391 COSTS_N_INSNS (31), /* ddiv */
0b11da67 392 128,
5f732aba
DE
393 128, /* l1 cache */
394 2048, /* l2 cache */
0b11da67 395 1, /* streams */
8b897cfa
RS
396};
397
398/* Instruction costs on MPCCORE processors. */
399static const
400struct processor_costs mpccore_cost = {
06a67bdd
RS
401 COSTS_N_INSNS (2), /* mulsi */
402 COSTS_N_INSNS (2), /* mulsi_const */
403 COSTS_N_INSNS (2), /* mulsi_const9 */
404 COSTS_N_INSNS (2), /* muldi */
405 COSTS_N_INSNS (6), /* divsi */
406 COSTS_N_INSNS (6), /* divdi */
407 COSTS_N_INSNS (4), /* fp */
408 COSTS_N_INSNS (5), /* dmul */
409 COSTS_N_INSNS (10), /* sdiv */
410 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
411 32,
412 4, /* l1 cache */
413 16, /* l2 cache */
0b11da67 414 1, /* streams */
8b897cfa
RS
415};
416
417/* Instruction costs on PPC403 processors. */
418static const
419struct processor_costs ppc403_cost = {
06a67bdd
RS
420 COSTS_N_INSNS (4), /* mulsi */
421 COSTS_N_INSNS (4), /* mulsi_const */
422 COSTS_N_INSNS (4), /* mulsi_const9 */
423 COSTS_N_INSNS (4), /* muldi */
424 COSTS_N_INSNS (33), /* divsi */
425 COSTS_N_INSNS (33), /* divdi */
426 COSTS_N_INSNS (11), /* fp */
427 COSTS_N_INSNS (11), /* dmul */
428 COSTS_N_INSNS (11), /* sdiv */
429 COSTS_N_INSNS (11), /* ddiv */
0b11da67 430 32,
5f732aba
DE
431 4, /* l1 cache */
432 16, /* l2 cache */
0b11da67 433 1, /* streams */
8b897cfa
RS
434};
435
436/* Instruction costs on PPC405 processors. */
437static const
438struct processor_costs ppc405_cost = {
06a67bdd
RS
439 COSTS_N_INSNS (5), /* mulsi */
440 COSTS_N_INSNS (4), /* mulsi_const */
441 COSTS_N_INSNS (3), /* mulsi_const9 */
442 COSTS_N_INSNS (5), /* muldi */
443 COSTS_N_INSNS (35), /* divsi */
444 COSTS_N_INSNS (35), /* divdi */
445 COSTS_N_INSNS (11), /* fp */
446 COSTS_N_INSNS (11), /* dmul */
447 COSTS_N_INSNS (11), /* sdiv */
448 COSTS_N_INSNS (11), /* ddiv */
0b11da67 449 32,
5f732aba
DE
450 16, /* l1 cache */
451 128, /* l2 cache */
0b11da67 452 1, /* streams */
8b897cfa
RS
453};
454
455/* Instruction costs on PPC440 processors. */
456static const
457struct processor_costs ppc440_cost = {
06a67bdd
RS
458 COSTS_N_INSNS (3), /* mulsi */
459 COSTS_N_INSNS (2), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (3), /* muldi */
462 COSTS_N_INSNS (34), /* divsi */
463 COSTS_N_INSNS (34), /* divdi */
464 COSTS_N_INSNS (5), /* fp */
465 COSTS_N_INSNS (5), /* dmul */
466 COSTS_N_INSNS (19), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
0b11da67 468 32,
5f732aba
DE
469 32, /* l1 cache */
470 256, /* l2 cache */
0b11da67 471 1, /* streams */
8b897cfa
RS
472};
473
474/* Instruction costs on PPC601 processors. */
475static const
476struct processor_costs ppc601_cost = {
06a67bdd
RS
477 COSTS_N_INSNS (5), /* mulsi */
478 COSTS_N_INSNS (5), /* mulsi_const */
479 COSTS_N_INSNS (5), /* mulsi_const9 */
480 COSTS_N_INSNS (5), /* muldi */
481 COSTS_N_INSNS (36), /* divsi */
482 COSTS_N_INSNS (36), /* divdi */
483 COSTS_N_INSNS (4), /* fp */
484 COSTS_N_INSNS (5), /* dmul */
485 COSTS_N_INSNS (17), /* sdiv */
486 COSTS_N_INSNS (31), /* ddiv */
0b11da67 487 32,
5f732aba
DE
488 32, /* l1 cache */
489 256, /* l2 cache */
0b11da67 490 1, /* streams */
8b897cfa
RS
491};
492
493/* Instruction costs on PPC603 processors. */
494static const
495struct processor_costs ppc603_cost = {
06a67bdd
RS
496 COSTS_N_INSNS (5), /* mulsi */
497 COSTS_N_INSNS (3), /* mulsi_const */
498 COSTS_N_INSNS (2), /* mulsi_const9 */
499 COSTS_N_INSNS (5), /* muldi */
500 COSTS_N_INSNS (37), /* divsi */
501 COSTS_N_INSNS (37), /* divdi */
502 COSTS_N_INSNS (3), /* fp */
503 COSTS_N_INSNS (4), /* dmul */
504 COSTS_N_INSNS (18), /* sdiv */
505 COSTS_N_INSNS (33), /* ddiv */
0b11da67 506 32,
5f732aba
DE
507 8, /* l1 cache */
508 64, /* l2 cache */
0b11da67 509 1, /* streams */
8b897cfa
RS
510};
511
512/* Instruction costs on PPC604 processors. */
513static const
514struct processor_costs ppc604_cost = {
06a67bdd
RS
515 COSTS_N_INSNS (4), /* mulsi */
516 COSTS_N_INSNS (4), /* mulsi_const */
517 COSTS_N_INSNS (4), /* mulsi_const9 */
518 COSTS_N_INSNS (4), /* muldi */
519 COSTS_N_INSNS (20), /* divsi */
520 COSTS_N_INSNS (20), /* divdi */
521 COSTS_N_INSNS (3), /* fp */
522 COSTS_N_INSNS (3), /* dmul */
523 COSTS_N_INSNS (18), /* sdiv */
524 COSTS_N_INSNS (32), /* ddiv */
0b11da67 525 32,
5f732aba
DE
526 16, /* l1 cache */
527 512, /* l2 cache */
0b11da67 528 1, /* streams */
8b897cfa
RS
529};
530
531/* Instruction costs on PPC604e processors. */
532static const
533struct processor_costs ppc604e_cost = {
06a67bdd
RS
534 COSTS_N_INSNS (2), /* mulsi */
535 COSTS_N_INSNS (2), /* mulsi_const */
536 COSTS_N_INSNS (2), /* mulsi_const9 */
537 COSTS_N_INSNS (2), /* muldi */
538 COSTS_N_INSNS (20), /* divsi */
539 COSTS_N_INSNS (20), /* divdi */
540 COSTS_N_INSNS (3), /* fp */
541 COSTS_N_INSNS (3), /* dmul */
542 COSTS_N_INSNS (18), /* sdiv */
543 COSTS_N_INSNS (32), /* ddiv */
0b11da67 544 32,
5f732aba
DE
545 32, /* l1 cache */
546 1024, /* l2 cache */
0b11da67 547 1, /* streams */
8b897cfa
RS
548};
549
f0517163 550/* Instruction costs on PPC620 processors. */
8b897cfa
RS
551static const
552struct processor_costs ppc620_cost = {
06a67bdd
RS
553 COSTS_N_INSNS (5), /* mulsi */
554 COSTS_N_INSNS (4), /* mulsi_const */
555 COSTS_N_INSNS (3), /* mulsi_const9 */
556 COSTS_N_INSNS (7), /* muldi */
557 COSTS_N_INSNS (21), /* divsi */
558 COSTS_N_INSNS (37), /* divdi */
559 COSTS_N_INSNS (3), /* fp */
560 COSTS_N_INSNS (3), /* dmul */
561 COSTS_N_INSNS (18), /* sdiv */
562 COSTS_N_INSNS (32), /* ddiv */
0b11da67 563 128,
5f732aba
DE
564 32, /* l1 cache */
565 1024, /* l2 cache */
0b11da67 566 1, /* streams */
f0517163
RS
567};
568
569/* Instruction costs on PPC630 processors. */
570static const
571struct processor_costs ppc630_cost = {
06a67bdd
RS
572 COSTS_N_INSNS (5), /* mulsi */
573 COSTS_N_INSNS (4), /* mulsi_const */
574 COSTS_N_INSNS (3), /* mulsi_const9 */
575 COSTS_N_INSNS (7), /* muldi */
576 COSTS_N_INSNS (21), /* divsi */
577 COSTS_N_INSNS (37), /* divdi */
578 COSTS_N_INSNS (3), /* fp */
579 COSTS_N_INSNS (3), /* dmul */
580 COSTS_N_INSNS (17), /* sdiv */
581 COSTS_N_INSNS (21), /* ddiv */
0b11da67 582 128,
5f732aba
DE
583 64, /* l1 cache */
584 1024, /* l2 cache */
0b11da67 585 1, /* streams */
8b897cfa
RS
586};
587
d296e02e
AP
588/* Instruction costs on Cell processor. */
589/* COSTS_N_INSNS (1) ~ one add. */
590static const
591struct processor_costs ppccell_cost = {
592 COSTS_N_INSNS (9/2)+2, /* mulsi */
593 COSTS_N_INSNS (6/2), /* mulsi_const */
594 COSTS_N_INSNS (6/2), /* mulsi_const9 */
595 COSTS_N_INSNS (15/2)+2, /* muldi */
596 COSTS_N_INSNS (38/2), /* divsi */
597 COSTS_N_INSNS (70/2), /* divdi */
598 COSTS_N_INSNS (10/2), /* fp */
599 COSTS_N_INSNS (10/2), /* dmul */
600 COSTS_N_INSNS (74/2), /* sdiv */
601 COSTS_N_INSNS (74/2), /* ddiv */
0b11da67 602 128,
5f732aba
DE
603 32, /* l1 cache */
604 512, /* l2 cache */
605 6, /* streams */
d296e02e
AP
606};
607
8b897cfa
RS
608/* Instruction costs on PPC750 and PPC7400 processors. */
609static const
610struct processor_costs ppc750_cost = {
06a67bdd
RS
611 COSTS_N_INSNS (5), /* mulsi */
612 COSTS_N_INSNS (3), /* mulsi_const */
613 COSTS_N_INSNS (2), /* mulsi_const9 */
614 COSTS_N_INSNS (5), /* muldi */
615 COSTS_N_INSNS (17), /* divsi */
616 COSTS_N_INSNS (17), /* divdi */
617 COSTS_N_INSNS (3), /* fp */
618 COSTS_N_INSNS (3), /* dmul */
619 COSTS_N_INSNS (17), /* sdiv */
620 COSTS_N_INSNS (31), /* ddiv */
0b11da67 621 32,
5f732aba
DE
622 32, /* l1 cache */
623 512, /* l2 cache */
0b11da67 624 1, /* streams */
8b897cfa
RS
625};
626
627/* Instruction costs on PPC7450 processors. */
628static const
629struct processor_costs ppc7450_cost = {
06a67bdd
RS
630 COSTS_N_INSNS (4), /* mulsi */
631 COSTS_N_INSNS (3), /* mulsi_const */
632 COSTS_N_INSNS (3), /* mulsi_const9 */
633 COSTS_N_INSNS (4), /* muldi */
634 COSTS_N_INSNS (23), /* divsi */
635 COSTS_N_INSNS (23), /* divdi */
636 COSTS_N_INSNS (5), /* fp */
637 COSTS_N_INSNS (5), /* dmul */
638 COSTS_N_INSNS (21), /* sdiv */
639 COSTS_N_INSNS (35), /* ddiv */
0b11da67 640 32,
5f732aba
DE
641 32, /* l1 cache */
642 1024, /* l2 cache */
0b11da67 643 1, /* streams */
8b897cfa 644};
a3170dc6 645
8b897cfa
RS
646/* Instruction costs on PPC8540 processors. */
647static const
648struct processor_costs ppc8540_cost = {
06a67bdd
RS
649 COSTS_N_INSNS (4), /* mulsi */
650 COSTS_N_INSNS (4), /* mulsi_const */
651 COSTS_N_INSNS (4), /* mulsi_const9 */
652 COSTS_N_INSNS (4), /* muldi */
653 COSTS_N_INSNS (19), /* divsi */
654 COSTS_N_INSNS (19), /* divdi */
655 COSTS_N_INSNS (4), /* fp */
656 COSTS_N_INSNS (4), /* dmul */
657 COSTS_N_INSNS (29), /* sdiv */
658 COSTS_N_INSNS (29), /* ddiv */
0b11da67 659 32,
5f732aba
DE
660 32, /* l1 cache */
661 256, /* l2 cache */
0b11da67 662 1, /* prefetch streams */
8b897cfa
RS
663};
664
665/* Instruction costs on POWER4 and POWER5 processors. */
666static const
667struct processor_costs power4_cost = {
06a67bdd
RS
668 COSTS_N_INSNS (3), /* mulsi */
669 COSTS_N_INSNS (2), /* mulsi_const */
670 COSTS_N_INSNS (2), /* mulsi_const9 */
671 COSTS_N_INSNS (4), /* muldi */
672 COSTS_N_INSNS (18), /* divsi */
673 COSTS_N_INSNS (34), /* divdi */
674 COSTS_N_INSNS (3), /* fp */
675 COSTS_N_INSNS (3), /* dmul */
676 COSTS_N_INSNS (17), /* sdiv */
677 COSTS_N_INSNS (17), /* ddiv */
0b11da67 678 128,
5f732aba
DE
679 32, /* l1 cache */
680 1024, /* l2 cache */
0b11da67 681 8, /* prefetch streams */
8b897cfa
RS
682};
683
44cd321e
PS
684/* Instruction costs on POWER6 processors. */
685static const
686struct processor_costs power6_cost = {
687 COSTS_N_INSNS (8), /* mulsi */
688 COSTS_N_INSNS (8), /* mulsi_const */
689 COSTS_N_INSNS (8), /* mulsi_const9 */
690 COSTS_N_INSNS (8), /* muldi */
691 COSTS_N_INSNS (22), /* divsi */
692 COSTS_N_INSNS (28), /* divdi */
693 COSTS_N_INSNS (3), /* fp */
694 COSTS_N_INSNS (3), /* dmul */
695 COSTS_N_INSNS (13), /* sdiv */
696 COSTS_N_INSNS (16), /* ddiv */
0b11da67 697 128,
5f732aba
DE
698 64, /* l1 cache */
699 2048, /* l2 cache */
0b11da67 700 16, /* prefetch streams */
44cd321e
PS
701};
702
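/* Illustrative sketch only (guarded out; the helper name is made up):
   rs6000_override_options points rs6000_cost at one of the tables
   above, and the rtx-cost machinery (see rs6000_rtx_costs below) then
   reads the per-operation fields, roughly like this.  */
#if 0
static int
example_multiply_cost (enum machine_mode mode)
{
  /* DImode multiplies are costed separately from SImode ones.  */
  return mode == DImode ? rs6000_cost->muldi : rs6000_cost->mulsi;
}
#endif
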
8b897cfa 703\f
a2369ed3 704static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 705static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 706static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
707static void rs6000_emit_stack_tie (void);
708static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
709static rtx spe_synthesize_frame_save (rtx);
710static bool spe_func_has_64bit_regs_p (void);
b20a9cca 711static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 712 int, HOST_WIDE_INT);
a2369ed3
DJ
713static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
714static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
715static unsigned rs6000_hash_constant (rtx);
716static unsigned toc_hash_function (const void *);
717static int toc_hash_eq (const void *, const void *);
718static int constant_pool_expr_1 (rtx, int *, int *);
719static bool constant_pool_expr_p (rtx);
d04b6e6e 720static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
721static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
722static struct machine_function * rs6000_init_machine_status (void);
723static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 724static bool no_global_regs_above (int);
5add3202 725#ifdef HAVE_GAS_HIDDEN
a2369ed3 726static void rs6000_assemble_visibility (tree, int);
5add3202 727#endif
a2369ed3
DJ
728static int rs6000_ra_ever_killed (void);
729static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 730static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 731static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 732static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 733static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 734static const char *rs6000_mangle_type (const_tree);
b86fe7b4 735extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 736static void rs6000_set_default_type_attributes (tree);
52ff33d0 737static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
738static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
739static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
740static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
741 tree);
a2369ed3 742static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 743static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 744static void rs6000_file_start (void);
7c262518 745#if TARGET_ELF
9b580a0b 746static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
747static void rs6000_elf_asm_out_constructor (rtx, int);
748static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 749static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 750static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
751static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
752 unsigned HOST_WIDE_INT);
a56d7372 753static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 754 ATTRIBUTE_UNUSED;
7c262518 755#endif
3101faab 756static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
cbaaba19 757#if TARGET_XCOFF
0d5817b2 758static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 759static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 760static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 761static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 762static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 763static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 764 unsigned HOST_WIDE_INT);
d6b5193b
RS
765static void rs6000_xcoff_unique_section (tree, int);
766static section *rs6000_xcoff_select_rtx_section
767 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
768static const char * rs6000_xcoff_strip_name_encoding (const char *);
769static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
770static void rs6000_xcoff_file_start (void);
771static void rs6000_xcoff_file_end (void);
f1384257 772#endif
a2369ed3
DJ
773static int rs6000_variable_issue (FILE *, int, rtx, int);
774static bool rs6000_rtx_costs (rtx, int, int, int *);
775static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 776static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 777static bool is_microcoded_insn (rtx);
d296e02e 778static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
779static bool is_cracked_insn (rtx);
780static bool is_branch_slot_insn (rtx);
44cd321e 781static bool is_load_insn (rtx);
e3a0e200 782static rtx get_store_dest (rtx pat);
44cd321e
PS
783static bool is_store_insn (rtx);
784static bool set_to_load_agen (rtx,rtx);
982afe02 785static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
786static int rs6000_adjust_priority (rtx, int);
787static int rs6000_issue_rate (void);
b198261f 788static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
789static rtx get_next_active_insn (rtx, rtx);
790static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
791static bool insn_must_be_first_in_group (rtx);
792static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
793static bool is_costly_group (rtx *, rtx);
794static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
795static int redefine_groups (FILE *, int, rtx, rtx);
796static int pad_groups (FILE *, int, rtx, rtx);
797static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
798static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
799static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 800static int rs6000_use_sched_lookahead (void);
d296e02e 801static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 802static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 803static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
804static tree rs6000_builtin_mul_widen_even (tree);
805static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 806static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 807
58646b77 808static void def_builtin (int, const char *, tree, int);
3101faab 809static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
810static void rs6000_init_builtins (void);
811static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
812static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
813static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
814static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
815static void altivec_init_builtins (void);
816static void rs6000_common_init_builtins (void);
c15c90bb 817static void rs6000_init_libfuncs (void);
a2369ed3 818
96038623
DE
819static void paired_init_builtins (void);
820static rtx paired_expand_builtin (tree, rtx, bool *);
821static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
822static rtx paired_expand_stv_builtin (enum insn_code, tree);
823static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
824
b20a9cca
AM
825static void enable_mask_for_builtins (struct builtin_description *, int,
826 enum rs6000_builtins,
827 enum rs6000_builtins);
7c62e993 828static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
829static void spe_init_builtins (void);
830static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 831static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
832static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
833static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
834static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
835static rs6000_stack_t *rs6000_stack_info (void);
836static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
837
838static rtx altivec_expand_builtin (tree, rtx, bool *);
839static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
840static rtx altivec_expand_st_builtin (tree, rtx, bool *);
841static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
842static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 843static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 844 const char *, tree, rtx);
b4a62fa0 845static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 846static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
847static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
848static rtx altivec_expand_vec_set_builtin (tree);
849static rtx altivec_expand_vec_ext_builtin (tree, rtx);
850static int get_element_number (tree, tree);
78f5898b 851static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 852static void rs6000_parse_tls_size_option (void);
5da702b1 853static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
854static int first_altivec_reg_to_save (void);
855static unsigned int compute_vrsave_mask (void);
9390387d 856static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
857static void is_altivec_return_reg (rtx, void *);
858static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
859int easy_vector_constant (rtx, enum machine_mode);
3101faab 860static bool rs6000_is_opaque_type (const_tree);
a2369ed3 861static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 862static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 863static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 864static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
865static rtx rs6000_tls_get_addr (void);
866static rtx rs6000_got_sym (void);
9390387d 867static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
868static const char *rs6000_get_some_local_dynamic_name (void);
869static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 870static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 871static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 872 enum machine_mode, tree);
0b5383eb
DJ
873static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
874 HOST_WIDE_INT);
875static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
876 tree, HOST_WIDE_INT);
877static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
878 HOST_WIDE_INT,
879 rtx[], int *);
880static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
881 const_tree, HOST_WIDE_INT,
882 rtx[], int *);
883static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 884static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 885static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
886static void setup_incoming_varargs (CUMULATIVE_ARGS *,
887 enum machine_mode, tree,
888 int *, int);
8cd5a4e0 889static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 890 const_tree, bool);
78a52f11
RH
891static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
892 tree, bool);
3101faab 893static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
894#if TARGET_MACHO
895static void macho_branch_islands (void);
efdba735
SH
896static int no_previous_def (tree function_name);
897static tree get_prev_label (tree function_name);
c4e18b1c 898static void rs6000_darwin_file_start (void);
efdba735
SH
899#endif
900
c35d187f 901static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 902static void rs6000_va_start (tree, rtx);
23a60a04 903static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 904static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 905static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 906static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 907static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 908 enum machine_mode);
94ff898d 909static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
910 enum machine_mode);
911static int get_vsel_insn (enum machine_mode);
912static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 913static tree rs6000_stack_protect_fail (void);
21213b4c
DP
914
915const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
916static enum machine_mode rs6000_eh_return_filter_mode (void);
917
17211ab5
GK
918/* Hash table stuff for keeping track of TOC entries. */
919
920struct toc_hash_struct GTY(())
921{
922 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
923 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
924 rtx key;
925 enum machine_mode key_mode;
926 int labelno;
927};
928
929static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
930\f
931/* Default register names. */
932char rs6000_reg_names[][8] =
933{
802a0058
MM
934 "0", "1", "2", "3", "4", "5", "6", "7",
935 "8", "9", "10", "11", "12", "13", "14", "15",
936 "16", "17", "18", "19", "20", "21", "22", "23",
937 "24", "25", "26", "27", "28", "29", "30", "31",
938 "0", "1", "2", "3", "4", "5", "6", "7",
939 "8", "9", "10", "11", "12", "13", "14", "15",
940 "16", "17", "18", "19", "20", "21", "22", "23",
941 "24", "25", "26", "27", "28", "29", "30", "31",
942 "mq", "lr", "ctr","ap",
943 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
944 "xer",
945 /* AltiVec registers. */
0cd5e3a1
AH
946 "0", "1", "2", "3", "4", "5", "6", "7",
947 "8", "9", "10", "11", "12", "13", "14", "15",
948 "16", "17", "18", "19", "20", "21", "22", "23",
949 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
950 "vrsave", "vscr",
951 /* SPE registers. */
7d5175e1
JJ
952 "spe_acc", "spefscr",
953 /* Soft frame pointer. */
954 "sfp"
c81bebd7
MM
955};
956
957#ifdef TARGET_REGNAMES
8b60264b 958static const char alt_reg_names[][8] =
c81bebd7 959{
802a0058
MM
960 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
961 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
962 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
963 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
964 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
965 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
966 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
967 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
968 "mq", "lr", "ctr", "ap",
969 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 970 "xer",
59a4c851 971 /* AltiVec registers. */
0ac081f6 972 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
973 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
974 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
975 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
976 "vrsave", "vscr",
977 /* SPE registers. */
7d5175e1
JJ
978 "spe_acc", "spefscr",
979 /* Soft frame pointer. */
980 "sfp"
c81bebd7
MM
981};
982#endif
9878760c 983\f
daf11973
MM
984#ifndef MASK_STRICT_ALIGN
985#define MASK_STRICT_ALIGN 0
986#endif
ffcfcb5f
AM
987#ifndef TARGET_PROFILE_KERNEL
988#define TARGET_PROFILE_KERNEL 0
989#endif
3961e8fe
RH
990
991/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
992#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
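/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 20) is
   0x80000000 >> 20 == 0x00000800, i.e. the VRSAVE bit for %v20.  */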
672a6f42
NB
993\f
994/* Initialize the GCC target structure. */
91d231cb
JM
995#undef TARGET_ATTRIBUTE_TABLE
996#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
997#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
998#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 999
301d03af
RS
1000#undef TARGET_ASM_ALIGNED_DI_OP
1001#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1002
1003/* Default unaligned ops are only provided for ELF. Find the ops needed
1004 for non-ELF systems. */
1005#ifndef OBJECT_FORMAT_ELF
cbaaba19 1006#if TARGET_XCOFF
ae6c1efd 1007/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1008 64-bit targets. */
1009#undef TARGET_ASM_UNALIGNED_HI_OP
1010#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1011#undef TARGET_ASM_UNALIGNED_SI_OP
1012#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1013#undef TARGET_ASM_UNALIGNED_DI_OP
1014#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1015#else
1016/* For Darwin. */
1017#undef TARGET_ASM_UNALIGNED_HI_OP
1018#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1019#undef TARGET_ASM_UNALIGNED_SI_OP
1020#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1021#undef TARGET_ASM_UNALIGNED_DI_OP
1022#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1023#undef TARGET_ASM_ALIGNED_DI_OP
1024#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1025#endif
1026#endif
1027
1028/* This hook deals with fixups for relocatable code and DI-mode objects
1029 in 64-bit code. */
1030#undef TARGET_ASM_INTEGER
1031#define TARGET_ASM_INTEGER rs6000_assemble_integer
1032
93638d7a
AM
1033#ifdef HAVE_GAS_HIDDEN
1034#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1035#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1036#endif
1037
c4501e62
JJ
1038#undef TARGET_HAVE_TLS
1039#define TARGET_HAVE_TLS HAVE_AS_TLS
1040
1041#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1042#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1043
08c148a8
NB
1044#undef TARGET_ASM_FUNCTION_PROLOGUE
1045#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1046#undef TARGET_ASM_FUNCTION_EPILOGUE
1047#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1048
b54cf83a
DE
1049#undef TARGET_SCHED_VARIABLE_ISSUE
1050#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1051
c237e94a
ZW
1052#undef TARGET_SCHED_ISSUE_RATE
1053#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1054#undef TARGET_SCHED_ADJUST_COST
1055#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1056#undef TARGET_SCHED_ADJUST_PRIORITY
1057#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1058#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1059#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1060#undef TARGET_SCHED_INIT
1061#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1062#undef TARGET_SCHED_FINISH
1063#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1064#undef TARGET_SCHED_REORDER
1065#define TARGET_SCHED_REORDER rs6000_sched_reorder
1066#undef TARGET_SCHED_REORDER2
1067#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1068
be12c2b0
VM
1069#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1070#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1071
d296e02e
AP
1072#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1073#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1074
7ccf35ed
DN
1075#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1076#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1077#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1078#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1079#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1080#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1081#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1082#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1083
5b900a4c
DN
1084#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1085#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1086
0ac081f6
AH
1087#undef TARGET_INIT_BUILTINS
1088#define TARGET_INIT_BUILTINS rs6000_init_builtins
1089
1090#undef TARGET_EXPAND_BUILTIN
1091#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1092
608063c3
JB
1093#undef TARGET_MANGLE_TYPE
1094#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1095
c15c90bb
ZW
1096#undef TARGET_INIT_LIBFUNCS
1097#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1098
f1384257 1099#if TARGET_MACHO
0e5dbd9b 1100#undef TARGET_BINDS_LOCAL_P
31920d83 1101#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1102#endif
0e5dbd9b 1103
77ccdfed
EC
1104#undef TARGET_MS_BITFIELD_LAYOUT_P
1105#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1106
3961e8fe
RH
1107#undef TARGET_ASM_OUTPUT_MI_THUNK
1108#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1109
3961e8fe 1110#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1111#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1112
4977bab6
ZW
1113#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1114#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1115
2e3f0db6
DJ
1116#undef TARGET_INVALID_WITHIN_DOLOOP
1117#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1118
3c50106f
RH
1119#undef TARGET_RTX_COSTS
1120#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1121#undef TARGET_ADDRESS_COST
1122#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1123
c8e4f0e9 1124#undef TARGET_VECTOR_OPAQUE_P
58646b77 1125#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1126
96714395
AH
1127#undef TARGET_DWARF_REGISTER_SPAN
1128#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1129
37ea0b7e
JM
1130#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1131#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1132
c6e8c921
GK
1133/* On rs6000, function arguments are promoted, as are function return
1134 values. */
1135#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1136#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1137#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1138#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1139
c6e8c921
GK
1140#undef TARGET_RETURN_IN_MEMORY
1141#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1142
1143#undef TARGET_SETUP_INCOMING_VARARGS
1144#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1145
1146/* Always strict argument naming on rs6000. */
1147#undef TARGET_STRICT_ARGUMENT_NAMING
1148#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1149#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1150#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1151#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1152#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1153#undef TARGET_MUST_PASS_IN_STACK
1154#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1155#undef TARGET_PASS_BY_REFERENCE
1156#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1157#undef TARGET_ARG_PARTIAL_BYTES
1158#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1159
c35d187f
RH
1160#undef TARGET_BUILD_BUILTIN_VA_LIST
1161#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1162
d7bd8aeb
JJ
1163#undef TARGET_EXPAND_BUILTIN_VA_START
1164#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1165
cd3ce9b4
JM
1166#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1167#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1168
93f90be6
FJ
1169#undef TARGET_EH_RETURN_FILTER_MODE
1170#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1171
00b79d54
BE
1172#undef TARGET_SCALAR_MODE_SUPPORTED_P
1173#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1174
f676971a
EC
1175#undef TARGET_VECTOR_MODE_SUPPORTED_P
1176#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1177
4d3e6fae
FJ
1178#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1179#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1180
78f5898b
AH
1181#undef TARGET_HANDLE_OPTION
1182#define TARGET_HANDLE_OPTION rs6000_handle_option
1183
1184#undef TARGET_DEFAULT_TARGET_FLAGS
1185#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1186 (TARGET_DEFAULT)
78f5898b 1187
3aebbe5f
JJ
1188#undef TARGET_STACK_PROTECT_FAIL
1189#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1190
445cf5eb
JM
1191/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1192 The PowerPC architecture requires only weak consistency among
1193 processors--that is, memory accesses between processors need not be
1194 sequentially consistent and memory accesses among processors can occur
1195 in any order. The ability to order memory accesses weakly provides
1196 opportunities for more efficient use of the system bus. Unless a
1197 dependency exists, the 604e allows read operations to precede store
1198 operations. */
1199#undef TARGET_RELAXED_ORDERING
1200#define TARGET_RELAXED_ORDERING true
1201
fdbe66f2
EB
1202#ifdef HAVE_AS_TLS
1203#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1204#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1205#endif
1206
aacd3885
RS
1207/* Use a 32-bit anchor range. This leads to sequences like:
1208
1209 addis tmp,anchor,high
1210 add dest,tmp,low
1211
1212 where tmp itself acts as an anchor, and can be shared between
1213 accesses to the same 64k page. */
1214#undef TARGET_MIN_ANCHOR_OFFSET
1215#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1216#undef TARGET_MAX_ANCHOR_OFFSET
1217#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1218#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1219#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1220
9c78b944
DE
1221#undef TARGET_BUILTIN_RECIPROCAL
1222#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1223
f6897b10 1224struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1225\f

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
        && (mode != TDmode || (regno % 2) == 0)
        && mode != SDmode
        && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
           && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general registers and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

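/* Illustrative sketch only (guarded out): the table filled in above
   makes register/mode queries a cheap array lookup, along the lines of
   the check below.  The target's HARD_REGNO_MODE_OK macro is assumed
   to expand to essentially this lookup.  */
#if 0
static bool
example_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Indexed [mode][regno], matching the initialization loop above.  */
  return rs6000_hard_regno_mode_ok_p[mode][regno];
}
#endif
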
#if TARGET_MACHO
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */

static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
        {
          if (flag_pic)
            warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
          flag_pic = 0;
        }
      else if (flag_pic == 1)
        {
          flag_pic = 2;
        }
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targeting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    {
      target_flags |= MASK_ALTIVEC;
    }
}
#endif

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
5248c961
RK
1370 static struct ptt
1371 {
8b60264b
KG
1372 const char *const name; /* Canonical processor name. */
1373 const enum processor_type processor; /* Processor type enum value. */
1374 const int target_enable; /* Target flags to enable. */
8b60264b 1375 } const processor_target_table[]
66188a7e 1376 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1377 {"403", PROCESSOR_PPC403,
66188a7e 1378 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1379 {"405", PROCESSOR_PPC405,
716019c0
JM
1380 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1381 {"405fp", PROCESSOR_PPC405,
1382 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1383 {"440", PROCESSOR_PPC440,
716019c0
JM
1384 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1385 {"440fp", PROCESSOR_PPC440,
1386 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1387 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1388 {"601", PROCESSOR_PPC601,
66188a7e
GK
1389 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1390 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1391 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1392 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1393 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1394 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1395 {"620", PROCESSOR_PPC620,
1396 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1397 {"630", PROCESSOR_PPC630,
1398 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1399 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1400 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1401 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1402 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1403 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1404 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1405 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1406 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1407 /* 8548 has a dummy entry for now. */
a45bce6e 1408 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1409 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1410 {"970", PROCESSOR_POWER4,
66188a7e 1411 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1412 {"cell", PROCESSOR_CELL,
1413 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1414 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1415 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1416 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1417 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1418 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1419 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1420 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1421 {"power2", PROCESSOR_POWER,
1422 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1423 {"power3", PROCESSOR_PPC630,
1424 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1425 {"power4", PROCESSOR_POWER4,
fc091c8e 1426 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1427 {"power5", PROCESSOR_POWER5,
432218ba
DE
1428 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1429 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1430 {"power5+", PROCESSOR_POWER5,
1431 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1432 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1433 {"power6", PROCESSOR_POWER6,
e118597e 1434 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1435 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1436 {"power6x", PROCESSOR_POWER6,
1437 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1438 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1439 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1440 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1441 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1442 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1443 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1444 {"rios2", PROCESSOR_RIOS2,
1445 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1446 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1447 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1448 {"rs64", PROCESSOR_RS64A,
1449 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1450 };
5248c961 1451
ca7558fc 1452 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1453
66188a7e
GK
1454 /* Some OSs don't support saving the high part of 64-bit registers on
1455 context switch. Other OSs don't support saving Altivec registers.
1456 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1457 settings; if the user wants either, the user must explicitly specify
1458 them and we won't interfere with the user's specification. */
1459
1460 enum {
1461 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1462 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1463 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1464 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1465 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1466 };
0d1fbc8c
AH
1467
1468 rs6000_init_hard_regno_mode_ok ();
1469
c4ad648e 1470 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1471#ifdef OS_MISSING_POWERPC64
1472 if (OS_MISSING_POWERPC64)
1473 set_masks &= ~MASK_POWERPC64;
1474#endif
1475#ifdef OS_MISSING_ALTIVEC
1476 if (OS_MISSING_ALTIVEC)
1477 set_masks &= ~MASK_ALTIVEC;
1478#endif
1479
768875a8
AM
 1480	  /* Don't let the processor default override flags that were set explicitly.  */
1481 set_masks &= ~target_flags_explicit;
957211c3 1482
a4f6c312 1483 /* Identify the processor type. */
8e3f41e7 1484 rs6000_select[0].string = default_cpu;
3cb999d8 1485 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1486
b6a1cbae 1487 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1488 {
8e3f41e7
MM
1489 ptr = &rs6000_select[i];
1490 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1491 {
8e3f41e7
MM
1492 for (j = 0; j < ptt_size; j++)
1493 if (! strcmp (ptr->string, processor_target_table[j].name))
1494 {
1495 if (ptr->set_tune_p)
1496 rs6000_cpu = processor_target_table[j].processor;
1497
1498 if (ptr->set_arch_p)
1499 {
66188a7e
GK
1500 target_flags &= ~set_masks;
1501 target_flags |= (processor_target_table[j].target_enable
1502 & set_masks);
8e3f41e7
MM
1503 }
1504 break;
1505 }
1506
4406229e 1507 if (j == ptt_size)
8e3f41e7 1508 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1509 }
1510 }
8a61d227 1511
993f19a8 1512 if (TARGET_E500)
a3170dc6
AH
1513 rs6000_isel = 1;
1514
dff9f1b6
DE
1515 /* If we are optimizing big endian systems for space, use the load/store
1516 multiple and string instructions. */
ef792183 1517 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1518 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1519
a4f6c312
SS
 1520	  /* Don't allow -mmultiple or -mstring on little endian systems
 1521	     unless the cpu is a 750, because the hardware doesn't support the
 1522	     instructions used in little endian mode and instead causes an
 1523	     alignment trap.  The 750 does not cause an alignment trap (except
 1524	     when the target is unaligned).  */
bef84347 1525
b21fb038 1526 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1527 {
1528 if (TARGET_MULTIPLE)
1529 {
1530 target_flags &= ~MASK_MULTIPLE;
b21fb038 1531 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1532 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1533 }
1534
1535 if (TARGET_STRING)
1536 {
1537 target_flags &= ~MASK_STRING;
b21fb038 1538 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1539 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1540 }
1541 }
3933e0e1 1542
38c1f2d7
MM
1543 /* Set debug flags */
1544 if (rs6000_debug_name)
1545 {
bfc79d3b 1546 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1547 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1548 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1549 rs6000_debug_stack = 1;
bfc79d3b 1550 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1551 rs6000_debug_arg = 1;
1552 else
c725bd79 1553 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1554 }
1555
57ac7be9
AM
1556 if (rs6000_traceback_name)
1557 {
1558 if (! strncmp (rs6000_traceback_name, "full", 4))
1559 rs6000_traceback = traceback_full;
1560 else if (! strncmp (rs6000_traceback_name, "part", 4))
1561 rs6000_traceback = traceback_part;
1562 else if (! strncmp (rs6000_traceback_name, "no", 2))
1563 rs6000_traceback = traceback_none;
1564 else
9e637a26 1565 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1566 rs6000_traceback_name);
1567 }
1568
78f5898b
AH
1569 if (!rs6000_explicit_options.long_double)
1570 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1571
602ea4d3 1572#ifndef POWERPC_LINUX
d3603e8c 1573 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1574 rs6000_ieeequad = 1;
1575#endif
1576
0db747be
DE
1577 /* Enable Altivec ABI for AIX -maltivec. */
1578 if (TARGET_XCOFF && TARGET_ALTIVEC)
1579 rs6000_altivec_abi = 1;
1580
1581 /* Set Altivec ABI as default for PowerPC64 Linux. */
6d0ef01e
HP
1582 if (TARGET_ELF && TARGET_64BIT)
1583 {
1584 rs6000_altivec_abi = 1;
78f5898b 1585 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1586 }
1587
594a51fe
SS
1588 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1589 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1590 {
1591 rs6000_darwin64_abi = 1;
9c7956fd 1592#if TARGET_MACHO
6ac49599 1593 darwin_one_byte_bool = 1;
9c7956fd 1594#endif
d9168963
SS
1595 /* Default to natural alignment, for better performance. */
1596 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1597 }
1598
194c524a
DE
1599 /* Place FP constants in the constant pool instead of TOC
 1600	     if section anchors are enabled.  */
1601 if (flag_section_anchors)
1602 TARGET_NO_FP_IN_TOC = 1;
1603
c4501e62
JJ
1604 /* Handle -mtls-size option. */
1605 rs6000_parse_tls_size_option ();
1606
a7ae18e2
AH
1607#ifdef SUBTARGET_OVERRIDE_OPTIONS
1608 SUBTARGET_OVERRIDE_OPTIONS;
1609#endif
1610#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1611 SUBSUBTARGET_OVERRIDE_OPTIONS;
1612#endif
4d4cbc0e
AH
1613#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1614 SUB3TARGET_OVERRIDE_OPTIONS;
1615#endif
a7ae18e2 1616
5da702b1
AH
1617 if (TARGET_E500)
1618 {
1619 /* The e500 does not have string instructions, and we set
1620 MASK_STRING above when optimizing for size. */
1621 if ((target_flags & MASK_STRING) != 0)
1622 target_flags = target_flags & ~MASK_STRING;
1623 }
1624 else if (rs6000_select[1].string != NULL)
1625 {
1626 /* For the powerpc-eabispe configuration, we set all these by
1627 default, so let's unset them if we manually set another
1628 CPU that is not the E500. */
78f5898b 1629 if (!rs6000_explicit_options.abi)
5da702b1 1630 rs6000_spe_abi = 0;
78f5898b 1631 if (!rs6000_explicit_options.spe)
5da702b1 1632 rs6000_spe = 0;
78f5898b 1633 if (!rs6000_explicit_options.float_gprs)
5da702b1 1634 rs6000_float_gprs = 0;
78f5898b 1635 if (!rs6000_explicit_options.isel)
5da702b1
AH
1636 rs6000_isel = 0;
1637 }
b5044283 1638
eca0d5e8
JM
1639 /* Detect invalid option combinations with E500. */
1640 CHECK_E500_OPTIONS;
1641
ec507f2d 1642 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1643 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1644 && rs6000_cpu != PROCESSOR_POWER6
1645 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1646 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1647 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1648 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1649 || rs6000_cpu == PROCESSOR_POWER5
1650 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1651
ec507f2d
DE
1652 rs6000_sched_restricted_insns_priority
1653 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1654
569fa502 1655 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1656 rs6000_sched_costly_dep
1657 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1658
569fa502
DN
1659 if (rs6000_sched_costly_dep_str)
1660 {
f676971a 1661 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1662 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1663 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1664 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1665 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1666 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1667 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1668 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1669 else
c4ad648e 1670 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1671 }
1672
1673 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1674 rs6000_sched_insert_nops
1675 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1676
cbe26ab8
DN
1677 if (rs6000_sched_insert_nops_str)
1678 {
1679 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1680 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1681 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1682 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1683 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1684 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1685 else
c4ad648e 1686 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1687 }
1688
c81bebd7 1689#ifdef TARGET_REGNAMES
a4f6c312
SS
1690 /* If the user desires alternate register names, copy in the
1691 alternate names now. */
c81bebd7 1692 if (TARGET_REGNAMES)
4e135bdd 1693 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1694#endif
1695
df01da37 1696 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1697 If -maix-struct-return or -msvr4-struct-return was explicitly
1698 used, don't override with the ABI default. */
df01da37
DE
1699 if (!rs6000_explicit_options.aix_struct_ret)
1700 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1701
602ea4d3 1702 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1703 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1704
f676971a 1705 if (TARGET_TOC)
9ebbca7d 1706 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1707
301d03af
RS
1708 /* We can only guarantee the availability of DI pseudo-ops when
1709 assembling for 64-bit targets. */
ae6c1efd 1710 if (!TARGET_64BIT)
301d03af
RS
1711 {
1712 targetm.asm_out.aligned_op.di = NULL;
1713 targetm.asm_out.unaligned_op.di = NULL;
1714 }
1715
1494c534
DE
1716 /* Set branch target alignment, if not optimizing for size. */
1717 if (!optimize_size)
1718 {
d296e02e
AP
 1719	      /* Cell wants to be aligned to 8 bytes for dual issue.  */
1720 if (rs6000_cpu == PROCESSOR_CELL)
1721 {
1722 if (align_functions <= 0)
1723 align_functions = 8;
1724 if (align_jumps <= 0)
1725 align_jumps = 8;
1726 if (align_loops <= 0)
1727 align_loops = 8;
1728 }
44cd321e 1729 if (rs6000_align_branch_targets)
1494c534
DE
1730 {
1731 if (align_functions <= 0)
1732 align_functions = 16;
1733 if (align_jumps <= 0)
1734 align_jumps = 16;
1735 if (align_loops <= 0)
1736 align_loops = 16;
1737 }
1738 if (align_jumps_max_skip <= 0)
1739 align_jumps_max_skip = 15;
1740 if (align_loops_max_skip <= 0)
1741 align_loops_max_skip = 15;
1742 }
2792d578 1743
71f123ca
FS
1744 /* Arrange to save and restore machine status around nested functions. */
1745 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1746
1747 /* We should always be splitting complex arguments, but we can't break
1748 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1749 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1750 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1751
1752 /* Initialize rs6000_cost with the appropriate target costs. */
1753 if (optimize_size)
1754 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1755 else
1756 switch (rs6000_cpu)
1757 {
1758 case PROCESSOR_RIOS1:
1759 rs6000_cost = &rios1_cost;
1760 break;
1761
1762 case PROCESSOR_RIOS2:
1763 rs6000_cost = &rios2_cost;
1764 break;
1765
1766 case PROCESSOR_RS64A:
1767 rs6000_cost = &rs64a_cost;
1768 break;
1769
1770 case PROCESSOR_MPCCORE:
1771 rs6000_cost = &mpccore_cost;
1772 break;
1773
1774 case PROCESSOR_PPC403:
1775 rs6000_cost = &ppc403_cost;
1776 break;
1777
1778 case PROCESSOR_PPC405:
1779 rs6000_cost = &ppc405_cost;
1780 break;
1781
1782 case PROCESSOR_PPC440:
1783 rs6000_cost = &ppc440_cost;
1784 break;
1785
1786 case PROCESSOR_PPC601:
1787 rs6000_cost = &ppc601_cost;
1788 break;
1789
1790 case PROCESSOR_PPC603:
1791 rs6000_cost = &ppc603_cost;
1792 break;
1793
1794 case PROCESSOR_PPC604:
1795 rs6000_cost = &ppc604_cost;
1796 break;
1797
1798 case PROCESSOR_PPC604e:
1799 rs6000_cost = &ppc604e_cost;
1800 break;
1801
1802 case PROCESSOR_PPC620:
8b897cfa
RS
1803 rs6000_cost = &ppc620_cost;
1804 break;
1805
f0517163
RS
1806 case PROCESSOR_PPC630:
1807 rs6000_cost = &ppc630_cost;
1808 break;
1809
982afe02 1810 case PROCESSOR_CELL:
d296e02e
AP
1811 rs6000_cost = &ppccell_cost;
1812 break;
1813
8b897cfa
RS
1814 case PROCESSOR_PPC750:
1815 case PROCESSOR_PPC7400:
1816 rs6000_cost = &ppc750_cost;
1817 break;
1818
1819 case PROCESSOR_PPC7450:
1820 rs6000_cost = &ppc7450_cost;
1821 break;
1822
1823 case PROCESSOR_PPC8540:
1824 rs6000_cost = &ppc8540_cost;
1825 break;
1826
1827 case PROCESSOR_POWER4:
1828 case PROCESSOR_POWER5:
1829 rs6000_cost = &power4_cost;
1830 break;
1831
44cd321e
PS
1832 case PROCESSOR_POWER6:
1833 rs6000_cost = &power6_cost;
1834 break;
1835
8b897cfa 1836 default:
37409796 1837 gcc_unreachable ();
8b897cfa 1838 }
0b11da67
DE
1839
1840 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1841 set_param_value ("simultaneous-prefetches",
1842 rs6000_cost->simultaneous_prefetches);
1843 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1844 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1845 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1846 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1847 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1848 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1849
1850 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1851 can be optimized to ap = __builtin_next_arg (0). */
1852 if (DEFAULT_ABI != ABI_V4)
1853 targetm.expand_builtin_va_start = NULL;
5248c961 1854}
5accd822 1855
7ccf35ed
DN
1856/* Implement targetm.vectorize.builtin_mask_for_load. */
1857static tree
1858rs6000_builtin_mask_for_load (void)
1859{
1860 if (TARGET_ALTIVEC)
1861 return altivec_builtin_mask_for_load;
1862 else
1863 return 0;
1864}
1865
f57d17f1
TM
1866/* Implement targetm.vectorize.builtin_conversion. */
1867static tree
1868rs6000_builtin_conversion (enum tree_code code, tree type)
1869{
1870 if (!TARGET_ALTIVEC)
1871 return NULL_TREE;
982afe02 1872
f57d17f1
TM
1873 switch (code)
1874 {
1875 case FLOAT_EXPR:
1876 switch (TYPE_MODE (type))
1877 {
1878 case V4SImode:
982afe02 1879 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1880 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1881 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1882 default:
1883 return NULL_TREE;
1884 }
1885 default:
1886 return NULL_TREE;
1887 }
1888}
1889
89d67cca
DN
1890/* Implement targetm.vectorize.builtin_mul_widen_even. */
1891static tree
1892rs6000_builtin_mul_widen_even (tree type)
1893{
1894 if (!TARGET_ALTIVEC)
1895 return NULL_TREE;
1896
1897 switch (TYPE_MODE (type))
1898 {
1899 case V8HImode:
982afe02 1900 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1901 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1902 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1903
1904 case V16QImode:
1905 return TYPE_UNSIGNED (type) ?
1906 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1907 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1908 default:
1909 return NULL_TREE;
1910 }
1911}
1912
1913/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1914static tree
1915rs6000_builtin_mul_widen_odd (tree type)
1916{
1917 if (!TARGET_ALTIVEC)
1918 return NULL_TREE;
1919
1920 switch (TYPE_MODE (type))
1921 {
1922 case V8HImode:
1923 return TYPE_UNSIGNED (type) ?
1924 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1925 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1926
1927 case V16QImode:
1928 return TYPE_UNSIGNED (type) ?
1929 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1930 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1931 default:
1932 return NULL_TREE;
1933 }
1934}
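/* A rough illustration of the three vectorizer hooks above: for a widening
   multiply of two V8HImode vectors the vectorizer asks for the "even" and
   "odd" builtins and gets vmulesh/vmulosh (or vmuleuh/vmulouh for unsigned
   halfword elements), while a FLOAT_EXPR involving a V4SImode integer type
   maps to vcfsx (signed) or vcfux (unsigned).  Without AltiVec every hook
   returns NULL_TREE and the corresponding vector pattern is not used.  */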
1935
5b900a4c
DN
1936
 1937/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1938   after applying some number of peeled iterations.  This routine does not
 1939   determine how many iterations are required to reach the desired alignment.  */
1940
1941static bool
3101faab 1942rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1943{
1944 if (is_packed)
1945 return false;
1946
1947 if (TARGET_32BIT)
1948 {
1949 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1950 return true;
1951
1952 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1953 return true;
1954
1955 return false;
1956 }
1957 else
1958 {
1959 if (TARGET_MACHO)
1960 return false;
1961
 1962      /* Assume that all other types are naturally aligned.  CHECKME!  */
1963 return true;
1964 }
1965}
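/* Illustrative reading of the hook above: on a 32-bit target using either
   natural or power alignment the vectorizer may peel loop iterations until
   the 16-byte boundary is reached (the hook returns true), while a packed
   data reference can never become 16-byte aligned, and 64-bit Darwin is
   conservatively rejected as well.  */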
1966
5da702b1
AH
1967/* Handle generic options of the form -mfoo=yes/no.
1968 NAME is the option name.
1969 VALUE is the option value.
 1970   FLAG is a pointer to the flag in which to store 1 or 0, depending on
 1971   whether the option value is 'yes' or 'no' respectively.  */
993f19a8 1972static void
5da702b1 1973rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1974{
5da702b1 1975 if (value == 0)
993f19a8 1976 return;
5da702b1
AH
1977 else if (!strcmp (value, "yes"))
1978 *flag = 1;
1979 else if (!strcmp (value, "no"))
1980 *flag = 0;
08b57fb3 1981 else
5da702b1 1982 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1983}
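/* Illustrative use (the option name and flag here are hypothetical): a
   caller handling -mfoo=yes/no would do

     rs6000_parse_yes_no_option ("foo", arg, &rs6000_foo);

   which stores 1 in rs6000_foo for "yes", 0 for "no", and reports
   "unknown -mfoo= option" otherwise, mirroring the real callers for
   -misel=, -mspe= and -mvrsave= further down in this file.  */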
1984
c4501e62
JJ
1985/* Validate and record the size specified with the -mtls-size option. */
1986
1987static void
863d938c 1988rs6000_parse_tls_size_option (void)
c4501e62
JJ
1989{
1990 if (rs6000_tls_size_string == 0)
1991 return;
1992 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1993 rs6000_tls_size = 16;
1994 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1995 rs6000_tls_size = 32;
1996 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1997 rs6000_tls_size = 64;
1998 else
9e637a26 1999 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2000}
2001
5accd822 2002void
a2369ed3 2003optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2004{
2e3f0db6
DJ
2005 if (DEFAULT_ABI == ABI_DARWIN)
2006 /* The Darwin libraries never set errno, so we might as well
2007 avoid calling them when that's the only reason we would. */
2008 flag_errno_math = 0;
59d6560b
DE
2009
2010 /* Double growth factor to counter reduced min jump length. */
2011 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2012
2013 /* Enable section anchors by default.
2014 Skip section anchors for Objective C and Objective C++
2015 until front-ends fixed. */
23f99493 2016 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2017 flag_section_anchors = 1;
5accd822 2018}
78f5898b
AH
2019
2020/* Implement TARGET_HANDLE_OPTION. */
2021
2022static bool
2023rs6000_handle_option (size_t code, const char *arg, int value)
2024{
2025 switch (code)
2026 {
2027 case OPT_mno_power:
2028 target_flags &= ~(MASK_POWER | MASK_POWER2
2029 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2030 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2031 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2032 break;
2033 case OPT_mno_powerpc:
2034 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2035 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2036 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2037 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2038 break;
2039 case OPT_mfull_toc:
d2894ab5
DE
2040 target_flags &= ~MASK_MINIMAL_TOC;
2041 TARGET_NO_FP_IN_TOC = 0;
2042 TARGET_NO_SUM_IN_TOC = 0;
2043 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2044#ifdef TARGET_USES_SYSV4_OPT
 2045      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
 2046	 just the same as -mminimal-toc.  */
2047 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2048 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2049#endif
2050 break;
2051
2052#ifdef TARGET_USES_SYSV4_OPT
2053 case OPT_mtoc:
2054 /* Make -mtoc behave like -mminimal-toc. */
2055 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2056 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2057 break;
2058#endif
2059
2060#ifdef TARGET_USES_AIX64_OPT
2061 case OPT_maix64:
2062#else
2063 case OPT_m64:
2064#endif
2c9c9afd
AM
2065 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2066 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2067 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2068 break;
2069
2070#ifdef TARGET_USES_AIX64_OPT
2071 case OPT_maix32:
2072#else
2073 case OPT_m32:
2074#endif
2075 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2076 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2077 break;
2078
2079 case OPT_minsert_sched_nops_:
2080 rs6000_sched_insert_nops_str = arg;
2081 break;
2082
2083 case OPT_mminimal_toc:
2084 if (value == 1)
2085 {
d2894ab5
DE
2086 TARGET_NO_FP_IN_TOC = 0;
2087 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2088 }
2089 break;
2090
2091 case OPT_mpower:
2092 if (value == 1)
c2dba4ab
AH
2093 {
2094 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2095 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2096 }
78f5898b
AH
2097 break;
2098
2099 case OPT_mpower2:
2100 if (value == 1)
c2dba4ab
AH
2101 {
2102 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2103 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2104 }
78f5898b
AH
2105 break;
2106
2107 case OPT_mpowerpc_gpopt:
2108 case OPT_mpowerpc_gfxopt:
2109 if (value == 1)
c2dba4ab
AH
2110 {
2111 target_flags |= MASK_POWERPC;
2112 target_flags_explicit |= MASK_POWERPC;
2113 }
78f5898b
AH
2114 break;
2115
df01da37
DE
2116 case OPT_maix_struct_return:
2117 case OPT_msvr4_struct_return:
2118 rs6000_explicit_options.aix_struct_ret = true;
2119 break;
2120
78f5898b
AH
2121 case OPT_mvrsave_:
2122 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2123 break;
78f5898b
AH
2124
2125 case OPT_misel_:
2126 rs6000_explicit_options.isel = true;
2127 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2128 break;
2129
2130 case OPT_mspe_:
2131 rs6000_explicit_options.spe = true;
2132 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2133 break;
2134
2135 case OPT_mdebug_:
2136 rs6000_debug_name = arg;
2137 break;
2138
2139#ifdef TARGET_USES_SYSV4_OPT
2140 case OPT_mcall_:
2141 rs6000_abi_name = arg;
2142 break;
2143
2144 case OPT_msdata_:
2145 rs6000_sdata_name = arg;
2146 break;
2147
2148 case OPT_mtls_size_:
2149 rs6000_tls_size_string = arg;
2150 break;
2151
2152 case OPT_mrelocatable:
2153 if (value == 1)
c2dba4ab 2154 {
e0bf274f
AM
2155 target_flags |= MASK_MINIMAL_TOC;
2156 target_flags_explicit |= MASK_MINIMAL_TOC;
2157 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2158 }
78f5898b
AH
2159 break;
2160
2161 case OPT_mrelocatable_lib:
2162 if (value == 1)
c2dba4ab 2163 {
e0bf274f
AM
2164 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2165 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2166 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2167 }
78f5898b 2168 else
c2dba4ab
AH
2169 {
2170 target_flags &= ~MASK_RELOCATABLE;
2171 target_flags_explicit |= MASK_RELOCATABLE;
2172 }
78f5898b
AH
2173 break;
2174#endif
2175
2176 case OPT_mabi_:
78f5898b
AH
2177 if (!strcmp (arg, "altivec"))
2178 {
d3603e8c 2179 rs6000_explicit_options.abi = true;
78f5898b
AH
2180 rs6000_altivec_abi = 1;
2181 rs6000_spe_abi = 0;
2182 }
2183 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2184 {
2185 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2186 the default for rs6000_spe_abi to be chosen later. */
2187 rs6000_altivec_abi = 0;
2188 }
78f5898b
AH
2189 else if (! strcmp (arg, "spe"))
2190 {
d3603e8c 2191 rs6000_explicit_options.abi = true;
78f5898b
AH
2192 rs6000_spe_abi = 1;
2193 rs6000_altivec_abi = 0;
2194 if (!TARGET_SPE_ABI)
2195 error ("not configured for ABI: '%s'", arg);
2196 }
2197 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2198 {
2199 rs6000_explicit_options.abi = true;
2200 rs6000_spe_abi = 0;
2201 }
78f5898b
AH
2202
 2203      /* These are here for testing during development only; please do
 2204	 not document them in the manual.  */
2205 else if (! strcmp (arg, "d64"))
2206 {
2207 rs6000_darwin64_abi = 1;
2208 warning (0, "Using darwin64 ABI");
2209 }
2210 else if (! strcmp (arg, "d32"))
2211 {
2212 rs6000_darwin64_abi = 0;
2213 warning (0, "Using old darwin ABI");
2214 }
2215
602ea4d3
JJ
2216 else if (! strcmp (arg, "ibmlongdouble"))
2217 {
d3603e8c 2218 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2219 rs6000_ieeequad = 0;
2220 warning (0, "Using IBM extended precision long double");
2221 }
2222 else if (! strcmp (arg, "ieeelongdouble"))
2223 {
d3603e8c 2224 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2225 rs6000_ieeequad = 1;
2226 warning (0, "Using IEEE extended precision long double");
2227 }
2228
78f5898b
AH
2229 else
2230 {
2231 error ("unknown ABI specified: '%s'", arg);
2232 return false;
2233 }
2234 break;
2235
2236 case OPT_mcpu_:
2237 rs6000_select[1].string = arg;
2238 break;
2239
2240 case OPT_mtune_:
2241 rs6000_select[2].string = arg;
2242 break;
2243
2244 case OPT_mtraceback_:
2245 rs6000_traceback_name = arg;
2246 break;
2247
2248 case OPT_mfloat_gprs_:
2249 rs6000_explicit_options.float_gprs = true;
2250 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2251 rs6000_float_gprs = 1;
2252 else if (! strcmp (arg, "double"))
2253 rs6000_float_gprs = 2;
2254 else if (! strcmp (arg, "no"))
2255 rs6000_float_gprs = 0;
2256 else
2257 {
2258 error ("invalid option for -mfloat-gprs: '%s'", arg);
2259 return false;
2260 }
2261 break;
2262
2263 case OPT_mlong_double_:
2264 rs6000_explicit_options.long_double = true;
2265 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2266 if (value != 64 && value != 128)
2267 {
2268 error ("Unknown switch -mlong-double-%s", arg);
2269 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2270 return false;
2271 }
2272 else
2273 rs6000_long_double_type_size = value;
2274 break;
2275
2276 case OPT_msched_costly_dep_:
2277 rs6000_sched_costly_dep_str = arg;
2278 break;
2279
2280 case OPT_malign_:
2281 rs6000_explicit_options.alignment = true;
2282 if (! strcmp (arg, "power"))
2283 {
2284 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2285 some C library functions, so warn about it. The flag may be
2286 useful for performance studies from time to time though, so
2287 don't disable it entirely. */
2288 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2289 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2290 " it is incompatible with the installed C and C++ libraries");
2291 rs6000_alignment_flags = MASK_ALIGN_POWER;
2292 }
2293 else if (! strcmp (arg, "natural"))
2294 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2295 else
2296 {
2297 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2298 return false;
2299 }
2300 break;
2301 }
2302 return true;
2303}
3cfa4909
MM
2304\f
2305/* Do anything needed at the start of the asm file. */
2306
1bc7c5b6 2307static void
863d938c 2308rs6000_file_start (void)
3cfa4909 2309{
c4d38ccb 2310 size_t i;
3cfa4909 2311 char buffer[80];
d330fd93 2312 const char *start = buffer;
3cfa4909 2313 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2314 const char *default_cpu = TARGET_CPU_DEFAULT;
2315 FILE *file = asm_out_file;
2316
2317 default_file_start ();
2318
2319#ifdef TARGET_BI_ARCH
2320 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2321 default_cpu = 0;
2322#endif
3cfa4909
MM
2323
2324 if (flag_verbose_asm)
2325 {
2326 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2327 rs6000_select[0].string = default_cpu;
2328
b6a1cbae 2329 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2330 {
2331 ptr = &rs6000_select[i];
2332 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2333 {
2334 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2335 start = "";
2336 }
2337 }
2338
9c6b4ed9 2339 if (PPC405_ERRATUM77)
b0bfee6e 2340 {
9c6b4ed9 2341 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2342 start = "";
2343 }
b0bfee6e 2344
b91da81f 2345#ifdef USING_ELFOS_H
3cfa4909
MM
2346 switch (rs6000_sdata)
2347 {
2348 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2349 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2350 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2351 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2352 }
2353
2354 if (rs6000_sdata && g_switch_value)
2355 {
307b599c
MK
2356 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2357 g_switch_value);
3cfa4909
MM
2358 start = "";
2359 }
2360#endif
2361
2362 if (*start == '\0')
949ea356 2363 putc ('\n', file);
3cfa4909 2364 }
b723e82f 2365
e51917ae
JM
2366#ifdef HAVE_AS_GNU_ATTRIBUTE
2367 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2368 {
2369 fprintf (file, "\t.gnu_attribute 4, %d\n",
2370 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2371 fprintf (file, "\t.gnu_attribute 8, %d\n",
2372 (TARGET_ALTIVEC_ABI ? 2
2373 : TARGET_SPE_ABI ? 3
2374 : 1));
2375 }
e51917ae
JM
2376#endif
2377
b723e82f
JJ
2378 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2379 {
d6b5193b
RS
2380 switch_to_section (toc_section);
2381 switch_to_section (text_section);
b723e82f 2382 }
3cfa4909 2383}
c4e18b1c 2384
5248c961 2385\f
a0ab749a 2386/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2387
2388int
863d938c 2389direct_return (void)
9878760c 2390{
4697a36c
MM
2391 if (reload_completed)
2392 {
2393 rs6000_stack_t *info = rs6000_stack_info ();
2394
2395 if (info->first_gp_reg_save == 32
2396 && info->first_fp_reg_save == 64
00b960c7 2397 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2398 && ! info->lr_save_p
2399 && ! info->cr_save_p
00b960c7 2400 && info->vrsave_mask == 0
c81fc13e 2401 && ! info->push_p)
4697a36c
MM
2402 return 1;
2403 }
2404
2405 return 0;
9878760c
RK
2406}
2407
4e74d8ec
MM
2408/* Return the number of instructions it takes to form a constant in an
2409 integer register. */
2410
48d72335 2411int
a2369ed3 2412num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2413{
2414 /* signed constant loadable with {cal|addi} */
547b216d 2415 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2416 return 1;
2417
4e74d8ec 2418 /* constant loadable with {cau|addis} */
547b216d
DE
2419 else if ((value & 0xffff) == 0
2420 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2421 return 1;
2422
5f59ecb7 2423#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2424 else if (TARGET_POWERPC64)
4e74d8ec 2425 {
a65c591c
DE
2426 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2427 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2428
a65c591c 2429 if (high == 0 || high == -1)
4e74d8ec
MM
2430 return 2;
2431
a65c591c 2432 high >>= 1;
4e74d8ec 2433
a65c591c 2434 if (low == 0)
4e74d8ec 2435 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2436 else
2437 return (num_insns_constant_wide (high)
e396202a 2438 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2439 }
2440#endif
2441
2442 else
2443 return 2;
2444}
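/* Worked examples for num_insns_constant_wide (illustrative):
     0x00007fff  ->  1  (li/addi: fits in a signed 16-bit immediate)
     0x12340000  ->  1  (lis/addis: low 16 bits are zero)
     0x12345678  ->  2  (lis 0x1234 followed by ori 0x5678)
   On a 64-bit target a wider value is split into its high and low halves,
   each counted as above, plus one more instruction to combine them when
   both halves are needed.  */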
2445
2446int
a2369ed3 2447num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2448{
37409796 2449 HOST_WIDE_INT low, high;
bb8df8a6 2450
37409796 2451 switch (GET_CODE (op))
0d30d435 2452 {
37409796 2453 case CONST_INT:
0d30d435 2454#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2455 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2456 && mask64_operand (op, mode))
c4ad648e 2457 return 2;
0d30d435
DE
2458 else
2459#endif
2460 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2461
37409796
NS
2462 case CONST_DOUBLE:
2463 if (mode == SFmode)
2464 {
2465 long l;
2466 REAL_VALUE_TYPE rv;
bb8df8a6 2467
37409796
NS
2468 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2469 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2470 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2471 }
a260abc9 2472
37409796
NS
2473 if (mode == VOIDmode || mode == DImode)
2474 {
2475 high = CONST_DOUBLE_HIGH (op);
2476 low = CONST_DOUBLE_LOW (op);
2477 }
2478 else
2479 {
2480 long l[2];
2481 REAL_VALUE_TYPE rv;
bb8df8a6 2482
37409796 2483 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2484 if (DECIMAL_FLOAT_MODE_P (mode))
2485 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2486 else
2487 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2488 high = l[WORDS_BIG_ENDIAN == 0];
2489 low = l[WORDS_BIG_ENDIAN != 0];
2490 }
47ad8c61 2491
37409796
NS
2492 if (TARGET_32BIT)
2493 return (num_insns_constant_wide (low)
2494 + num_insns_constant_wide (high));
2495 else
2496 {
2497 if ((high == 0 && low >= 0)
2498 || (high == -1 && low < 0))
2499 return num_insns_constant_wide (low);
bb8df8a6 2500
1990cd79 2501 else if (mask64_operand (op, mode))
37409796 2502 return 2;
bb8df8a6 2503
37409796
NS
2504 else if (low == 0)
2505 return num_insns_constant_wide (high) + 1;
bb8df8a6 2506
37409796
NS
2507 else
2508 return (num_insns_constant_wide (high)
2509 + num_insns_constant_wide (low) + 1);
2510 }
bb8df8a6 2511
37409796
NS
2512 default:
2513 gcc_unreachable ();
4e74d8ec 2514 }
4e74d8ec
MM
2515}
2516
0972012c
RS
2517/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2518 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2519 corresponding element of the vector, but for V4SFmode and V2SFmode,
2520 the corresponding "float" is interpreted as an SImode integer. */
2521
2522static HOST_WIDE_INT
2523const_vector_elt_as_int (rtx op, unsigned int elt)
2524{
2525 rtx tmp = CONST_VECTOR_ELT (op, elt);
2526 if (GET_MODE (op) == V4SFmode
2527 || GET_MODE (op) == V2SFmode)
2528 tmp = gen_lowpart (SImode, tmp);
2529 return INTVAL (tmp);
2530}
452a7d36 2531
77ccdfed 2532/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2533 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2534 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2535 all items are set to the same value and contain COPIES replicas of the
2536 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2537 operand and the others are set to the value of the operand's msb. */
2538
2539static bool
2540vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2541{
66180ff3
PB
2542 enum machine_mode mode = GET_MODE (op);
2543 enum machine_mode inner = GET_MODE_INNER (mode);
2544
2545 unsigned i;
2546 unsigned nunits = GET_MODE_NUNITS (mode);
2547 unsigned bitsize = GET_MODE_BITSIZE (inner);
2548 unsigned mask = GET_MODE_MASK (inner);
2549
0972012c 2550 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2551 HOST_WIDE_INT splat_val = val;
2552 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2553
2554 /* Construct the value to be splatted, if possible. If not, return 0. */
2555 for (i = 2; i <= copies; i *= 2)
452a7d36 2556 {
66180ff3
PB
2557 HOST_WIDE_INT small_val;
2558 bitsize /= 2;
2559 small_val = splat_val >> bitsize;
2560 mask >>= bitsize;
2561 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2562 return false;
2563 splat_val = small_val;
2564 }
c4ad648e 2565
66180ff3
PB
2566 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2567 if (EASY_VECTOR_15 (splat_val))
2568 ;
2569
2570 /* Also check if we can splat, and then add the result to itself. Do so if
 2571     the value is positive, or if the splat instruction is using OP's mode;
2572 for splat_val < 0, the splat and the add should use the same mode. */
2573 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2574 && (splat_val >= 0 || (step == 1 && copies == 1)))
2575 ;
2576
2577 else
2578 return false;
2579
2580 /* Check if VAL is present in every STEP-th element, and the
2581 other elements are filled with its most significant bit. */
2582 for (i = 0; i < nunits - 1; ++i)
2583 {
2584 HOST_WIDE_INT desired_val;
2585 if (((i + 1) & (step - 1)) == 0)
2586 desired_val = val;
2587 else
2588 desired_val = msb_val;
2589
0972012c 2590 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2591 return false;
452a7d36 2592 }
66180ff3
PB
2593
2594 return true;
452a7d36
HP
2595}
2596
69ef87e2 2597
77ccdfed 2598/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2599 with a vspltisb, vspltish or vspltisw. */
2600
2601bool
2602easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2603{
66180ff3 2604 unsigned step, copies;
d744e06e 2605
66180ff3
PB
2606 if (mode == VOIDmode)
2607 mode = GET_MODE (op);
2608 else if (mode != GET_MODE (op))
2609 return false;
d744e06e 2610
66180ff3
PB
2611 /* Start with a vspltisw. */
2612 step = GET_MODE_NUNITS (mode) / 4;
2613 copies = 1;
2614
2615 if (vspltis_constant (op, step, copies))
2616 return true;
2617
2618 /* Then try with a vspltish. */
2619 if (step == 1)
2620 copies <<= 1;
2621 else
2622 step >>= 1;
2623
2624 if (vspltis_constant (op, step, copies))
2625 return true;
2626
2627 /* And finally a vspltisb. */
2628 if (step == 1)
2629 copies <<= 1;
2630 else
2631 step >>= 1;
2632
2633 if (vspltis_constant (op, step, copies))
2634 return true;
2635
2636 return false;
d744e06e
AH
2637}
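/* Illustrative cases for easy_altivec_constant: a V4SImode vector of four
   5s is a single "vspltisw 5"; a V8HImode vector of eight -3s is
   "vspltish -3"; a vector of all-ones bytes can be produced by any of the
   three forms, since splatting -1 gives the same bit pattern at every
   element width.  Values outside the signed 5-bit splat range (and not
   reachable via the splat-and-add check above) are rejected and must be
   loaded from the constant pool instead.  */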
2638
66180ff3
PB
2639/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2640 result is OP. Abort if it is not possible. */
d744e06e 2641
f676971a 2642rtx
66180ff3 2643gen_easy_altivec_constant (rtx op)
452a7d36 2644{
66180ff3
PB
2645 enum machine_mode mode = GET_MODE (op);
2646 int nunits = GET_MODE_NUNITS (mode);
2647 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2648 unsigned step = nunits / 4;
2649 unsigned copies = 1;
2650
2651 /* Start with a vspltisw. */
2652 if (vspltis_constant (op, step, copies))
2653 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2654
2655 /* Then try with a vspltish. */
2656 if (step == 1)
2657 copies <<= 1;
2658 else
2659 step >>= 1;
2660
2661 if (vspltis_constant (op, step, copies))
2662 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2663
2664 /* And finally a vspltisb. */
2665 if (step == 1)
2666 copies <<= 1;
2667 else
2668 step >>= 1;
2669
2670 if (vspltis_constant (op, step, copies))
2671 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2672
2673 gcc_unreachable ();
d744e06e
AH
2674}
2675
2676const char *
a2369ed3 2677output_vec_const_move (rtx *operands)
d744e06e
AH
2678{
2679 int cst, cst2;
2680 enum machine_mode mode;
2681 rtx dest, vec;
2682
2683 dest = operands[0];
2684 vec = operands[1];
d744e06e 2685 mode = GET_MODE (dest);
69ef87e2 2686
d744e06e
AH
2687 if (TARGET_ALTIVEC)
2688 {
66180ff3 2689 rtx splat_vec;
d744e06e
AH
2690 if (zero_constant (vec, mode))
2691 return "vxor %0,%0,%0";
37409796 2692
66180ff3
PB
2693 splat_vec = gen_easy_altivec_constant (vec);
2694 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2695 operands[1] = XEXP (splat_vec, 0);
2696 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2697 return "#";
bb8df8a6 2698
66180ff3 2699 switch (GET_MODE (splat_vec))
98ef3137 2700 {
37409796 2701 case V4SImode:
66180ff3 2702 return "vspltisw %0,%1";
c4ad648e 2703
37409796 2704 case V8HImode:
66180ff3 2705 return "vspltish %0,%1";
c4ad648e 2706
37409796 2707 case V16QImode:
66180ff3 2708 return "vspltisb %0,%1";
bb8df8a6 2709
37409796
NS
2710 default:
2711 gcc_unreachable ();
98ef3137 2712 }
69ef87e2
AH
2713 }
2714
37409796 2715 gcc_assert (TARGET_SPE);
bb8df8a6 2716
37409796
NS
2717 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2718 pattern of V1DI, V4HI, and V2SF.
2719
2720 FIXME: We should probably return # and add post reload
2721 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2722 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2723 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2724 operands[1] = CONST_VECTOR_ELT (vec, 0);
2725 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2726 if (cst == cst2)
2727 return "li %0,%1\n\tevmergelo %0,%0,%0";
2728 else
2729 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2730}
2731
f5027409
RE
2732/* Initialize TARGET of vector PAIRED to VALS. */
2733
2734void
2735paired_expand_vector_init (rtx target, rtx vals)
2736{
2737 enum machine_mode mode = GET_MODE (target);
2738 int n_elts = GET_MODE_NUNITS (mode);
2739 int n_var = 0;
2740 rtx x, new, tmp, constant_op, op1, op2;
2741 int i;
2742
2743 for (i = 0; i < n_elts; ++i)
2744 {
2745 x = XVECEXP (vals, 0, i);
2746 if (!CONSTANT_P (x))
2747 ++n_var;
2748 }
2749 if (n_var == 0)
2750 {
2751 /* Load from constant pool. */
2752 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2753 return;
2754 }
2755
2756 if (n_var == 2)
2757 {
2758 /* The vector is initialized only with non-constants. */
2759 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2760 XVECEXP (vals, 0, 1));
2761
2762 emit_move_insn (target, new);
2763 return;
2764 }
2765
2766 /* One field is non-constant and the other one is a constant. Load the
2767 constant from the constant pool and use ps_merge instruction to
2768 construct the whole vector. */
2769 op1 = XVECEXP (vals, 0, 0);
2770 op2 = XVECEXP (vals, 0, 1);
2771
2772 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2773
2774 tmp = gen_reg_rtx (GET_MODE (constant_op));
2775 emit_move_insn (tmp, constant_op);
2776
2777 if (CONSTANT_P (op1))
2778 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2779 else
2780 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2781
2782 emit_move_insn (target, new);
2783}
2784
e2e95f45
RE
2785void
2786paired_expand_vector_move (rtx operands[])
2787{
2788 rtx op0 = operands[0], op1 = operands[1];
2789
2790 emit_move_insn (op0, op1);
2791}
2792
 2793/* Emit vector compare for code RCODE.  DEST is the destination, OP0 and
 2794   OP1 are the two VEC_COND_EXPR operands, and CC_OP0 and CC_OP1 are the
 2795   two operands of the comparison RCODE.  This is a recursive
 2796   function.  */
2797
2798static void
2799paired_emit_vector_compare (enum rtx_code rcode,
2800 rtx dest, rtx op0, rtx op1,
2801 rtx cc_op0, rtx cc_op1)
2802{
2803 rtx tmp = gen_reg_rtx (V2SFmode);
2804 rtx tmp1, max, min, equal_zero;
2805
2806 gcc_assert (TARGET_PAIRED_FLOAT);
2807 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2808
2809 switch (rcode)
2810 {
2811 case LT:
2812 case LTU:
2813 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2814 return;
2815 case GE:
2816 case GEU:
2817 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2818 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2819 return;
2820 case LE:
2821 case LEU:
2822 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2823 return;
2824 case GT:
2825 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2826 return;
2827 case EQ:
2828 tmp1 = gen_reg_rtx (V2SFmode);
2829 max = gen_reg_rtx (V2SFmode);
2830 min = gen_reg_rtx (V2SFmode);
2831 equal_zero = gen_reg_rtx (V2SFmode);
2832
2833 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2834 emit_insn (gen_selv2sf4
2835 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2836 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2837 emit_insn (gen_selv2sf4
2838 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2839 emit_insn (gen_subv2sf3 (tmp1, min, max));
2840 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2841 return;
2842 case NE:
2843 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2844 return;
2845 case UNLE:
2846 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2847 return;
2848 case UNLT:
2849 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2850 return;
2851 case UNGE:
2852 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2853 return;
2854 case UNGT:
2855 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2856 return;
2857 default:
2858 gcc_unreachable ();
2859 }
2860
2861 return;
2862}
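/* Example of the recursion above: an LT comparison is not emitted directly;
   the select arms are swapped and the routine recurses as GE, so
   "a < b ? x : y" becomes "a >= b ? y : x", which maps onto a paired-single
   subtract feeding a select.  EQ is the costliest case here, built from
   several subtract/select pairs so that the final select mask is zero only
   when both orderings agree.  */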
2863
2864/* Emit vector conditional expression.
2865 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2866 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2867
2868int
2869paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2870 rtx cond, rtx cc_op0, rtx cc_op1)
2871{
2872 enum rtx_code rcode = GET_CODE (cond);
2873
2874 if (!TARGET_PAIRED_FLOAT)
2875 return 0;
2876
2877 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2878
2879 return 1;
2880}
2881
7a4eca66
DE
2882/* Initialize vector TARGET to VALS. */
2883
2884void
2885rs6000_expand_vector_init (rtx target, rtx vals)
2886{
2887 enum machine_mode mode = GET_MODE (target);
2888 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2889 int n_elts = GET_MODE_NUNITS (mode);
2890 int n_var = 0, one_var = -1;
2891 bool all_same = true, all_const_zero = true;
2892 rtx x, mem;
2893 int i;
2894
2895 for (i = 0; i < n_elts; ++i)
2896 {
2897 x = XVECEXP (vals, 0, i);
2898 if (!CONSTANT_P (x))
2899 ++n_var, one_var = i;
2900 else if (x != CONST0_RTX (inner_mode))
2901 all_const_zero = false;
2902
2903 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2904 all_same = false;
2905 }
2906
2907 if (n_var == 0)
2908 {
2909 if (mode != V4SFmode && all_const_zero)
2910 {
2911 /* Zero register. */
2912 emit_insn (gen_rtx_SET (VOIDmode, target,
2913 gen_rtx_XOR (mode, target, target)));
2914 return;
2915 }
66180ff3 2916 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2917 {
2918 /* Splat immediate. */
66180ff3 2919 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2920 return;
2921 }
2922 else if (all_same)
2923 ; /* Splat vector element. */
2924 else
2925 {
2926 /* Load from constant pool. */
2927 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2928 return;
2929 }
2930 }
2931
2932 /* Store value to stack temp. Load vector element. Splat. */
2933 if (all_same)
2934 {
2935 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2936 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2937 XVECEXP (vals, 0, 0));
2938 x = gen_rtx_UNSPEC (VOIDmode,
2939 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2940 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2941 gen_rtvec (2,
2942 gen_rtx_SET (VOIDmode,
2943 target, mem),
2944 x)));
2945 x = gen_rtx_VEC_SELECT (inner_mode, target,
2946 gen_rtx_PARALLEL (VOIDmode,
2947 gen_rtvec (1, const0_rtx)));
2948 emit_insn (gen_rtx_SET (VOIDmode, target,
2949 gen_rtx_VEC_DUPLICATE (mode, x)));
2950 return;
2951 }
2952
2953 /* One field is non-constant. Load constant then overwrite
2954 varying field. */
2955 if (n_var == 1)
2956 {
2957 rtx copy = copy_rtx (vals);
2958
57b51d4d 2959 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2960 varying element. */
2961 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2962 rs6000_expand_vector_init (target, copy);
2963
2964 /* Insert variable. */
2965 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2966 return;
2967 }
2968
2969 /* Construct the vector in memory one field at a time
2970 and load the whole vector. */
2971 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2972 for (i = 0; i < n_elts; i++)
2973 emit_move_insn (adjust_address_nv (mem, inner_mode,
2974 i * GET_MODE_SIZE (inner_mode)),
2975 XVECEXP (vals, 0, i));
2976 emit_move_insn (target, mem);
2977}
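/* Illustrative summary of the strategies above (element values made up):
     { 0, 0, 0, 0 }   -> clear the register with a single vxor/xor.
     { 5, 5, 5, 5 }   -> splat the immediate with vspltisw.
     { x, x, x, x }   -> store x to the stack, load one element, then
			 splat it across the vector.
     { x, 1, 2, 3 }   -> load the constant { 1, 1, 2, 3 } (the varying
			 slot borrows its neighbor's value), then insert
			 x with rs6000_expand_vector_set.
     { x, y, 1, 2 }   -> build the vector in a stack temporary one
			 element at a time and load it whole.  */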
2978
2979/* Set field ELT of TARGET to VAL. */
2980
2981void
2982rs6000_expand_vector_set (rtx target, rtx val, int elt)
2983{
2984 enum machine_mode mode = GET_MODE (target);
2985 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2986 rtx reg = gen_reg_rtx (mode);
2987 rtx mask, mem, x;
2988 int width = GET_MODE_SIZE (inner_mode);
2989 int i;
2990
2991 /* Load single variable value. */
2992 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2993 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2994 x = gen_rtx_UNSPEC (VOIDmode,
2995 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2996 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2997 gen_rtvec (2,
2998 gen_rtx_SET (VOIDmode,
2999 reg, mem),
3000 x)));
3001
3002 /* Linear sequence. */
3003 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3004 for (i = 0; i < 16; ++i)
3005 XVECEXP (mask, 0, i) = GEN_INT (i);
3006
3007 /* Set permute mask to insert element into target. */
3008 for (i = 0; i < width; ++i)
3009 XVECEXP (mask, 0, elt*width + i)
3010 = GEN_INT (i + 0x10);
3011 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3012 x = gen_rtx_UNSPEC (mode,
3013 gen_rtvec (3, target, reg,
3014 force_reg (V16QImode, x)),
3015 UNSPEC_VPERM);
3016 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3017}
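/* Example of the permute trick above: to insert element 1 of a V4SImode
   vector, the new value is first loaded into REG, and the byte permute
   mask starts as the identity 0,1,...,15 with bytes 4-7 replaced by
   0x10-0x13, so the vperm copies those four bytes from REG and leaves the
   rest of TARGET untouched.  */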
3018
3019/* Extract field ELT from VEC into TARGET. */
3020
3021void
3022rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3023{
3024 enum machine_mode mode = GET_MODE (vec);
3025 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3026 rtx mem, x;
3027
3028 /* Allocate mode-sized buffer. */
3029 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3030
3031 /* Add offset to field within buffer matching vector element. */
3032 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3033
3034 /* Store single field into mode-sized buffer. */
3035 x = gen_rtx_UNSPEC (VOIDmode,
3036 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3037 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3038 gen_rtvec (2,
3039 gen_rtx_SET (VOIDmode,
3040 mem, vec),
3041 x)));
3042 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3043}
3044
0ba1b2ff
AM
3045/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3046 implement ANDing by the mask IN. */
3047void
a2369ed3 3048build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3049{
3050#if HOST_BITS_PER_WIDE_INT >= 64
3051 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3052 int shift;
3053
37409796 3054 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3055
3056 c = INTVAL (in);
3057 if (c & 1)
3058 {
3059 /* Assume c initially something like 0x00fff000000fffff. The idea
3060 is to rotate the word so that the middle ^^^^^^ group of zeros
3061 is at the MS end and can be cleared with an rldicl mask. We then
3062 rotate back and clear off the MS ^^ group of zeros with a
3063 second rldicl. */
3064 c = ~c; /* c == 0xff000ffffff00000 */
3065 lsb = c & -c; /* lsb == 0x0000000000100000 */
3066 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3067 c = ~c; /* c == 0x00fff000000fffff */
3068 c &= -lsb; /* c == 0x00fff00000000000 */
3069 lsb = c & -c; /* lsb == 0x0000100000000000 */
3070 c = ~c; /* c == 0xff000fffffffffff */
3071 c &= -lsb; /* c == 0xff00000000000000 */
3072 shift = 0;
3073 while ((lsb >>= 1) != 0)
3074 shift++; /* shift == 44 on exit from loop */
3075 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3076 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3077 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3078 }
3079 else
0ba1b2ff
AM
3080 {
3081 /* Assume c initially something like 0xff000f0000000000. The idea
3082 is to rotate the word so that the ^^^ middle group of zeros
3083 is at the LS end and can be cleared with an rldicr mask. We then
3084 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3085 a second rldicr. */
3086 lsb = c & -c; /* lsb == 0x0000010000000000 */
3087 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3088 c = ~c; /* c == 0x00fff0ffffffffff */
3089 c &= -lsb; /* c == 0x00fff00000000000 */
3090 lsb = c & -c; /* lsb == 0x0000100000000000 */
3091 c = ~c; /* c == 0xff000fffffffffff */
3092 c &= -lsb; /* c == 0xff00000000000000 */
3093 shift = 0;
3094 while ((lsb >>= 1) != 0)
3095 shift++; /* shift == 44 on exit from loop */
3096 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3097 m1 >>= shift; /* m1 == 0x0000000000000fff */
3098 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3099 }
3100
3101 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3102 masks will be all 1's. We are guaranteed more than one transition. */
3103 out[0] = GEN_INT (64 - shift);
3104 out[1] = GEN_INT (m1);
3105 out[2] = GEN_INT (shift);
3106 out[3] = GEN_INT (m2);
3107#else
045572c7
GK
3108 (void)in;
3109 (void)out;
37409796 3110 gcc_unreachable ();
0ba1b2ff 3111#endif
a260abc9
DE
3112}
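/* Putting numbers on the example in the comments above: for
   IN = 0x00fff000000fffff the routine produces OUT = { 20, m1, 44, m2 }
   with m1 = 0x000000ffffffffff and m2 = 0x00ffffffffffffff, i.e. rotate
   left by 20 and mask away the middle run of zeros, then rotate left by 44
   (back to the original alignment, since 20 + 44 = 64) and mask away the
   high byte.  A pair of rotate-and-mask (rldicl) insns built from these
   operands implements the original AND.  */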
3113
54b695e7 3114/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3115
3116bool
54b695e7
AH
3117invalid_e500_subreg (rtx op, enum machine_mode mode)
3118{
61c76239
JM
3119 if (TARGET_E500_DOUBLE)
3120 {
17caeff2
JM
3121 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3122 subreg:TI and reg:TF. */
61c76239 3123 if (GET_CODE (op) == SUBREG
17caeff2 3124 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3125 && REG_P (SUBREG_REG (op))
17caeff2 3126 && (GET_MODE (SUBREG_REG (op)) == DFmode
4d4447b5
PB
3127 || GET_MODE (SUBREG_REG (op)) == TFmode
3128 || GET_MODE (SUBREG_REG (op)) == DDmode
3129 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3130 return true;
3131
17caeff2
JM
3132 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3133 reg:TI. */
61c76239 3134 if (GET_CODE (op) == SUBREG
4d4447b5
PB
3135 && (mode == DFmode || mode == TFmode
3136 || mode == DDmode || mode == TDmode)
61c76239 3137 && REG_P (SUBREG_REG (op))
17caeff2
JM
3138 && (GET_MODE (SUBREG_REG (op)) == DImode
3139 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3140 return true;
3141 }
54b695e7 3142
61c76239
JM
3143 if (TARGET_SPE
3144 && GET_CODE (op) == SUBREG
3145 && mode == SImode
54b695e7 3146 && REG_P (SUBREG_REG (op))
14502dad 3147 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3148 return true;
3149
3150 return false;
3151}
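/* For example, on an E500_DOUBLE target a DFmode value occupies one
   64-bit GPR, but only the low 32 bits of that GPR are visible to the
   ordinary scalar instructions, so RTL such as

	(subreg:SI (reg:DF 123) 0)

   (a hypothetical pseudo, shown for illustration) cannot be treated as
   a plain 32-bit register access and is rejected above.  */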
3152
58182de3 3153/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3154 field is an FP double while the FP fields remain word aligned. */
3155
19d66194 3156unsigned int
fa5b0972
AM
3157rs6000_special_round_type_align (tree type, unsigned int computed,
3158 unsigned int specified)
95727fb8 3159{
fa5b0972 3160 unsigned int align = MAX (computed, specified);
95727fb8 3161 tree field = TYPE_FIELDS (type);
95727fb8 3162
bb8df8a6 3163 /* Skip all non-field decls.  */
85962ac8 3164 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3165 field = TREE_CHAIN (field);
3166
fa5b0972
AM
3167 if (field != NULL && field != type)
3168 {
3169 type = TREE_TYPE (field);
3170 while (TREE_CODE (type) == ARRAY_TYPE)
3171 type = TREE_TYPE (type);
3172
3173 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3174 align = MAX (align, 64);
3175 }
95727fb8 3176
fa5b0972 3177 return align;
95727fb8
AP
3178}
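/* Illustrative sketch of the AIX rule above, using hypothetical structs:

	struct s1 { double d; int i; };   record alignment raised to a doubleword
	struct s2 { int i; double d; };   stays word aligned; d sits at offset 4

   Only a leading FP double raises the record alignment; doubles in
   later fields remain word aligned.  */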
3179
58182de3
GK
3180/* Darwin increases record alignment to the natural alignment of
3181 the first field. */
3182
3183unsigned int
3184darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3185 unsigned int specified)
3186{
3187 unsigned int align = MAX (computed, specified);
3188
3189 if (TYPE_PACKED (type))
3190 return align;
3191
3192 /* Find the first field, looking down into aggregates. */
3193 do {
3194 tree field = TYPE_FIELDS (type);
3195 /* Skip all non-field decls.  */
3196 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3197 field = TREE_CHAIN (field);
3198 if (! field)
3199 break;
3200 type = TREE_TYPE (field);
3201 while (TREE_CODE (type) == ARRAY_TYPE)
3202 type = TREE_TYPE (type);
3203 } while (AGGREGATE_TYPE_P (type));
3204
3205 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3206 align = MAX (align, TYPE_ALIGN (type));
3207
3208 return align;
3209}
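/* Illustrative sketch of the Darwin rule above, using a hypothetical
   struct: in

	struct outer { struct { double d; } inner; int i; };

   the first named field (found by descending into the nested aggregate
   and skipping array wrappers) has type double, so the record alignment
   is raised to that type's natural alignment.  */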
3210
a4f6c312 3211/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3212
3213int
f676971a 3214small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3215 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3216{
38c1f2d7 3217#if TARGET_ELF
5f59ecb7 3218 rtx sym_ref;
7509c759 3219
d9407988 3220 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3221 return 0;
a54d04b7 3222
f607bc57 3223 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3224 return 0;
3225
88228c4b
MM
3226 if (GET_CODE (op) == SYMBOL_REF)
3227 sym_ref = op;
3228
3229 else if (GET_CODE (op) != CONST
3230 || GET_CODE (XEXP (op, 0)) != PLUS
3231 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3232 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3233 return 0;
3234
88228c4b 3235 else
dbf55e53
MM
3236 {
3237 rtx sum = XEXP (op, 0);
3238 HOST_WIDE_INT summand;
3239
3240 /* We have to be careful here, because it is the referenced address
c4ad648e 3241 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3242 summand = INTVAL (XEXP (sum, 1));
307b599c 3243 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3244 return 0;
dbf55e53
MM
3245
3246 sym_ref = XEXP (sum, 0);
3247 }
88228c4b 3248
20bfcd69 3249 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3250#else
3251 return 0;
3252#endif
7509c759 3253}
46c07df8 3254
3a1f863f 3255/* Return true if either operand is a general purpose register. */
46c07df8 3256
3a1f863f
DE
3257bool
3258gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3259{
3a1f863f
DE
3260 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3261 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3262}
3263
9ebbca7d 3264\f
4d588c14
RH
3265/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3266
f676971a
EC
3267static int
3268constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3269{
9390387d 3270 switch (GET_CODE (op))
9ebbca7d
GK
3271 {
3272 case SYMBOL_REF:
c4501e62
JJ
3273 if (RS6000_SYMBOL_REF_TLS_P (op))
3274 return 0;
3275 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3276 {
3277 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3278 {
3279 *have_sym = 1;
3280 return 1;
3281 }
3282 else
3283 return 0;
3284 }
3285 else if (! strcmp (XSTR (op, 0), toc_label_name))
3286 {
3287 *have_toc = 1;
3288 return 1;
3289 }
3290 else
3291 return 0;
9ebbca7d
GK
3292 case PLUS:
3293 case MINUS:
c1f11548
DE
3294 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3295 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3296 case CONST:
a4f6c312 3297 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3298 case CONST_INT:
a4f6c312 3299 return 1;
9ebbca7d 3300 default:
a4f6c312 3301 return 0;
9ebbca7d
GK
3302 }
3303}
3304
4d588c14 3305static bool
a2369ed3 3306constant_pool_expr_p (rtx op)
9ebbca7d
GK
3307{
3308 int have_sym = 0;
3309 int have_toc = 0;
3310 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3311}
3312
48d72335 3313bool
a2369ed3 3314toc_relative_expr_p (rtx op)
9ebbca7d 3315{
4d588c14
RH
3316 int have_sym = 0;
3317 int have_toc = 0;
3318 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3319}
3320
4d588c14 3321bool
a2369ed3 3322legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3323{
3324 return (TARGET_TOC
3325 && GET_CODE (x) == PLUS
3326 && GET_CODE (XEXP (x, 0)) == REG
3327 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3328 && constant_pool_expr_p (XEXP (x, 1)));
3329}
3330
d04b6e6e
EB
3331static bool
3332legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3333{
3334 return (DEFAULT_ABI == ABI_V4
3335 && !flag_pic && !TARGET_TOC
3336 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3337 && small_data_operand (x, mode));
3338}
3339
60cdabab
DE
3340/* SPE offset addressing is limited to 5-bits worth of double words. */
3341#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
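/* That is, the only byte offsets accepted are the multiples of 8 from
   0 through 248 (0xf8): five bits worth of doubleword indices.  For
   example, SPE_CONST_OFFSET_OK (16) is true, while 20 (not a multiple
   of 8) and 256 (out of range) are both rejected.  */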
3342
76d2b81d
DJ
3343bool
3344rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3345{
3346 unsigned HOST_WIDE_INT offset, extra;
3347
3348 if (GET_CODE (x) != PLUS)
3349 return false;
3350 if (GET_CODE (XEXP (x, 0)) != REG)
3351 return false;
3352 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3353 return false;
60cdabab
DE
3354 if (legitimate_constant_pool_address_p (x))
3355 return true;
4d588c14
RH
3356 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3357 return false;
3358
3359 offset = INTVAL (XEXP (x, 1));
3360 extra = 0;
3361 switch (mode)
3362 {
3363 case V16QImode:
3364 case V8HImode:
3365 case V4SFmode:
3366 case V4SImode:
7a4eca66
DE
3367 /* AltiVec vector modes. Only reg+reg addressing is valid and
3368 constant offset zero should not occur due to canonicalization.
3369 Allow any offset when not strict before reload. */
3370 return !strict;
4d588c14
RH
3371
3372 case V4HImode:
3373 case V2SImode:
3374 case V1DImode:
3375 case V2SFmode:
d42a3bae
RE
3376 /* Paired vector modes. Only reg+reg addressing is valid and
3377 constant offset zero should not occur due to canonicalization.
3378 Allow any offset when not strict before reload. */
3379 if (TARGET_PAIRED_FLOAT)
3380 return !strict;
4d588c14
RH
3381 /* SPE vector modes. */
3382 return SPE_CONST_OFFSET_OK (offset);
3383
3384 case DFmode:
7393f7f8 3385 case DDmode:
4d4cbc0e
AH
3386 if (TARGET_E500_DOUBLE)
3387 return SPE_CONST_OFFSET_OK (offset);
3388
4d588c14 3389 case DImode:
54b695e7
AH
3390 /* On e500v2, we may have:
3391
3392 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3393
3394 Which gets addressed with evldd instructions. */
3395 if (TARGET_E500_DOUBLE)
3396 return SPE_CONST_OFFSET_OK (offset);
3397
7393f7f8 3398 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3399 extra = 4;
3400 else if (offset & 3)
3401 return false;
3402 break;
3403
3404 case TFmode:
4d4447b5 3405 case TDmode:
17caeff2
JM
3406 if (TARGET_E500_DOUBLE)
3407 return (SPE_CONST_OFFSET_OK (offset)
3408 && SPE_CONST_OFFSET_OK (offset + 8));
3409
4d588c14 3410 case TImode:
7393f7f8 3411 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3412 extra = 12;
3413 else if (offset & 3)
3414 return false;
3415 else
3416 extra = 8;
3417 break;
3418
3419 default:
3420 break;
3421 }
3422
b1917422
AM
3423 offset += 0x8000;
3424 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3425}
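/* Worked example of the EXTRA logic above: for a DFmode access on a
   32-bit target, two word-sized accesses are used, so both OFFSET and
   OFFSET+4 must fit in a signed 16-bit displacement.  An address such
   as (plus (reg) (const_int 32764)) is therefore rejected, because
   32764 + 4 = 32768 no longer fits, while (const_int 32760) is fine.  */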
3426
6fb5fa3c 3427bool
a2369ed3 3428legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3429{
3430 rtx op0, op1;
3431
3432 if (GET_CODE (x) != PLUS)
3433 return false;
850e8d3d 3434
4d588c14
RH
3435 op0 = XEXP (x, 0);
3436 op1 = XEXP (x, 1);
3437
bf00cc0f 3438 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3439 replaced with proper base and index regs. */
3440 if (!strict
3441 && reload_in_progress
3442 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3443 && REG_P (op1))
3444 return true;
3445
3446 return (REG_P (op0) && REG_P (op1)
3447 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3448 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3449 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3450 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3451}
3452
48d72335 3453inline bool
a2369ed3 3454legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3455{
3456 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3457}
3458
48d72335 3459bool
4c81e946
FJ
3460macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3461{
c4ad648e 3462 if (!TARGET_MACHO || !flag_pic
9390387d 3463 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3464 return false;
3465 x = XEXP (x, 0);
4c81e946
FJ
3466
3467 if (GET_CODE (x) != LO_SUM)
3468 return false;
3469 if (GET_CODE (XEXP (x, 0)) != REG)
3470 return false;
3471 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3472 return false;
3473 x = XEXP (x, 1);
3474
3475 return CONSTANT_P (x);
3476}
3477
4d588c14 3478static bool
a2369ed3 3479legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3480{
3481 if (GET_CODE (x) != LO_SUM)
3482 return false;
3483 if (GET_CODE (XEXP (x, 0)) != REG)
3484 return false;
3485 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3486 return false;
54b695e7 3487 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3488 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3489 || mode == DDmode || mode == TDmode
17caeff2 3490 || mode == DImode))
f82f556d 3491 return false;
4d588c14
RH
3492 x = XEXP (x, 1);
3493
8622e235 3494 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3495 {
a29077da 3496 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3497 return false;
3498 if (TARGET_TOC)
3499 return false;
3500 if (GET_MODE_NUNITS (mode) != 1)
3501 return false;
5e5f01b9 3502 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3503 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3504 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3505 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3506 return false;
3507
3508 return CONSTANT_P (x);
3509 }
3510
3511 return false;
3512}
3513
3514
9ebbca7d
GK
3515/* Try machine-dependent ways of modifying an illegitimate address
3516 to be legitimate. If we find one, return the new, valid address.
3517 This is used from only one place: `memory_address' in explow.c.
3518
a4f6c312
SS
3519 OLDX is the address as it was before break_out_memory_refs was
3520 called. In some cases it is useful to look at this to decide what
3521 needs to be done.
9ebbca7d 3522
a4f6c312 3523 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3524
a4f6c312
SS
3525 It is always safe for this function to do nothing. It exists to
3526 recognize opportunities to optimize the output.
9ebbca7d
GK
3527
3528 On RS/6000, first check for the sum of a register with a constant
3529 integer that is out of range. If so, generate code to add the
3530 constant with the low-order 16 bits masked to the register and force
3531 this result into another register (this can be done with `cau').
3532 Then generate an address of REG+(CONST&0xffff), allowing for the
3533 possibility of bit 16 being a one.
3534
3535 Then check for the sum of a register and something not constant, try to
3536 load the other things into a register and return the sum. */
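/* Worked example of the first transformation described above, assuming
   a hypothetical address (plus (reg) (const_int 0x1fff0)): the low part
   becomes ((0xfff0 ^ 0x8000) - 0x8000) = -16 and the high part 0x20000,
   so we emit roughly

	addis rT,rB,2		; rT = rB + 0x20000
	... -16(rT) ...		; the mem keeps the small remainder

   which needs a single add rather than materializing the whole constant.  */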
4d588c14 3537
9ebbca7d 3538rtx
a2369ed3
DJ
3539rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3540 enum machine_mode mode)
0ac081f6 3541{
c4501e62
JJ
3542 if (GET_CODE (x) == SYMBOL_REF)
3543 {
3544 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3545 if (model != 0)
3546 return rs6000_legitimize_tls_address (x, model);
3547 }
3548
f676971a 3549 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3550 && GET_CODE (XEXP (x, 0)) == REG
3551 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3552 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3553 && !(SPE_VECTOR_MODE (mode)
3554 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3555 || mode == DImode))))
f676971a 3556 {
9ebbca7d
GK
3557 HOST_WIDE_INT high_int, low_int;
3558 rtx sum;
a65c591c
DE
3559 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3560 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3561 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3562 GEN_INT (high_int)), 0);
3563 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3564 }
f676971a 3565 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3566 && GET_CODE (XEXP (x, 0)) == REG
3567 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3568 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3569 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3570 || TARGET_POWERPC64
7393f7f8
BE
3571 || (((mode != DImode && mode != DFmode && mode != DDmode)
3572 || TARGET_E500_DOUBLE)
3573 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3574 && (TARGET_POWERPC64 || mode != DImode)
3575 && mode != TImode)
3576 {
3577 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3578 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3579 }
0ac081f6
AH
3580 else if (ALTIVEC_VECTOR_MODE (mode))
3581 {
3582 rtx reg;
3583
3584 /* Make sure both operands are registers. */
3585 if (GET_CODE (x) == PLUS)
9f85ed45 3586 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3587 force_reg (Pmode, XEXP (x, 1)));
3588
3589 reg = force_reg (Pmode, x);
3590 return reg;
3591 }
4d4cbc0e 3592 else if (SPE_VECTOR_MODE (mode)
17caeff2 3593 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3594 || mode == DDmode || mode == TDmode
54b695e7 3595 || mode == DImode)))
a3170dc6 3596 {
54b695e7
AH
3597 if (mode == DImode)
3598 return NULL_RTX;
a3170dc6
AH
3599 /* We accept [reg + reg] and [reg + OFFSET]. */
3600
3601 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3602 {
3603 rtx op1 = XEXP (x, 0);
3604 rtx op2 = XEXP (x, 1);
a3170dc6 3605
c4ad648e 3606 op1 = force_reg (Pmode, op1);
a3170dc6 3607
c4ad648e
AM
3608 if (GET_CODE (op2) != REG
3609 && (GET_CODE (op2) != CONST_INT
3610 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3611 op2 = force_reg (Pmode, op2);
a3170dc6 3612
c4ad648e
AM
3613 return gen_rtx_PLUS (Pmode, op1, op2);
3614 }
a3170dc6
AH
3615
3616 return force_reg (Pmode, x);
3617 }
f1384257
AM
3618 else if (TARGET_ELF
3619 && TARGET_32BIT
3620 && TARGET_NO_TOC
3621 && ! flag_pic
9ebbca7d 3622 && GET_CODE (x) != CONST_INT
f676971a 3623 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3624 && CONSTANT_P (x)
6ac7bf2c
GK
3625 && GET_MODE_NUNITS (mode) == 1
3626 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3627 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3628 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3629 {
3630 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3631 emit_insn (gen_elf_high (reg, x));
3632 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3633 }
ee890fe2
SS
3634 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3635 && ! flag_pic
ab82a49f
AP
3636#if TARGET_MACHO
3637 && ! MACHO_DYNAMIC_NO_PIC_P
3638#endif
ee890fe2 3639 && GET_CODE (x) != CONST_INT
f676971a 3640 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3641 && CONSTANT_P (x)
4d4447b5
PB
3642 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3643 || (mode != DFmode && mode != DDmode))
f676971a 3644 && mode != DImode
ee890fe2
SS
3645 && mode != TImode)
3646 {
3647 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3648 emit_insn (gen_macho_high (reg, x));
3649 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3650 }
f676971a 3651 else if (TARGET_TOC
4d588c14 3652 && constant_pool_expr_p (x)
a9098fd0 3653 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3654 {
3655 return create_TOC_reference (x);
3656 }
3657 else
3658 return NULL_RTX;
3659}
258bfae2 3660
fdbe66f2 3661/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3662 We need to emit DTP-relative relocations. */
3663
fdbe66f2 3664static void
c973d557
JJ
3665rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3666{
3667 switch (size)
3668 {
3669 case 4:
3670 fputs ("\t.long\t", file);
3671 break;
3672 case 8:
3673 fputs (DOUBLE_INT_ASM_OP, file);
3674 break;
3675 default:
37409796 3676 gcc_unreachable ();
c973d557
JJ
3677 }
3678 output_addr_const (file, x);
3679 fputs ("@dtprel+0x8000", file);
3680}
3681
c4501e62
JJ
3682/* Construct the SYMBOL_REF for the tls_get_addr function. */
3683
3684static GTY(()) rtx rs6000_tls_symbol;
3685static rtx
863d938c 3686rs6000_tls_get_addr (void)
c4501e62
JJ
3687{
3688 if (!rs6000_tls_symbol)
3689 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3690
3691 return rs6000_tls_symbol;
3692}
3693
3694/* Construct the SYMBOL_REF for TLS GOT references. */
3695
3696static GTY(()) rtx rs6000_got_symbol;
3697static rtx
863d938c 3698rs6000_got_sym (void)
c4501e62
JJ
3699{
3700 if (!rs6000_got_symbol)
3701 {
3702 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3703 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3704 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3705 }
c4501e62
JJ
3706
3707 return rs6000_got_symbol;
3708}
3709
3710/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3711 this (thread-local) address. */
3712
3713static rtx
a2369ed3 3714rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3715{
3716 rtx dest, insn;
3717
3718 dest = gen_reg_rtx (Pmode);
3719 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3720 {
3721 rtx tlsreg;
3722
3723 if (TARGET_64BIT)
3724 {
3725 tlsreg = gen_rtx_REG (Pmode, 13);
3726 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3727 }
3728 else
3729 {
3730 tlsreg = gen_rtx_REG (Pmode, 2);
3731 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3732 }
3733 emit_insn (insn);
3734 }
3735 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3736 {
3737 rtx tlsreg, tmp;
3738
3739 tmp = gen_reg_rtx (Pmode);
3740 if (TARGET_64BIT)
3741 {
3742 tlsreg = gen_rtx_REG (Pmode, 13);
3743 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3744 }
3745 else
3746 {
3747 tlsreg = gen_rtx_REG (Pmode, 2);
3748 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3749 }
3750 emit_insn (insn);
3751 if (TARGET_64BIT)
3752 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3753 else
3754 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3755 emit_insn (insn);
3756 }
3757 else
3758 {
3759 rtx r3, got, tga, tmp1, tmp2, eqv;
3760
4fed8f8f
AM
3761 /* We currently use relocations like @got@tlsgd for tls, which
3762 means the linker will handle allocation of tls entries, placing
3763 them in the .got section. So use a pointer to the .got section,
3764 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3765 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3766 if (TARGET_64BIT)
972f427b 3767 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3768 else
3769 {
3770 if (flag_pic == 1)
3771 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3772 else
3773 {
3774 rtx gsym = rs6000_got_sym ();
3775 got = gen_reg_rtx (Pmode);
3776 if (flag_pic == 0)
3777 rs6000_emit_move (got, gsym, Pmode);
3778 else
3779 {
e65a3857 3780 rtx tmp3, mem;
c4501e62
JJ
3781 rtx first, last;
3782
c4501e62
JJ
3783 tmp1 = gen_reg_rtx (Pmode);
3784 tmp2 = gen_reg_rtx (Pmode);
3785 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3786 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3787
e65a3857
DE
3788 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3789 emit_move_insn (tmp1,
1de43f85 3790 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3791 emit_move_insn (tmp2, mem);
3792 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3793 last = emit_move_insn (got, tmp3);
bd94cb6e 3794 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3795 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3796 }
3797 }
3798 }
3799
3800 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3801 {
3802 r3 = gen_rtx_REG (Pmode, 3);
3803 if (TARGET_64BIT)
3804 insn = gen_tls_gd_64 (r3, got, addr);
3805 else
3806 insn = gen_tls_gd_32 (r3, got, addr);
3807 start_sequence ();
3808 emit_insn (insn);
3809 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3810 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3811 insn = emit_call_insn (insn);
3812 CONST_OR_PURE_CALL_P (insn) = 1;
3813 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3814 insn = get_insns ();
3815 end_sequence ();
3816 emit_libcall_block (insn, dest, r3, addr);
3817 }
3818 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3819 {
3820 r3 = gen_rtx_REG (Pmode, 3);
3821 if (TARGET_64BIT)
3822 insn = gen_tls_ld_64 (r3, got);
3823 else
3824 insn = gen_tls_ld_32 (r3, got);
3825 start_sequence ();
3826 emit_insn (insn);
3827 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3828 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3829 insn = emit_call_insn (insn);
3830 CONST_OR_PURE_CALL_P (insn) = 1;
3831 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3832 insn = get_insns ();
3833 end_sequence ();
3834 tmp1 = gen_reg_rtx (Pmode);
3835 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3836 UNSPEC_TLSLD);
3837 emit_libcall_block (insn, tmp1, r3, eqv);
3838 if (rs6000_tls_size == 16)
3839 {
3840 if (TARGET_64BIT)
3841 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3842 else
3843 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3844 }
3845 else if (rs6000_tls_size == 32)
3846 {
3847 tmp2 = gen_reg_rtx (Pmode);
3848 if (TARGET_64BIT)
3849 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3850 else
3851 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3852 emit_insn (insn);
3853 if (TARGET_64BIT)
3854 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3855 else
3856 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3857 }
3858 else
3859 {
3860 tmp2 = gen_reg_rtx (Pmode);
3861 if (TARGET_64BIT)
3862 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3863 else
3864 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3865 emit_insn (insn);
3866 insn = gen_rtx_SET (Pmode, dest,
3867 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3868 }
3869 emit_insn (insn);
3870 }
3871 else
3872 {
a7b376ee 3873 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3874 tmp2 = gen_reg_rtx (Pmode);
3875 if (TARGET_64BIT)
3876 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3877 else
3878 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3879 emit_insn (insn);
3880 if (TARGET_64BIT)
3881 insn = gen_tls_tls_64 (dest, tmp2, addr);
3882 else
3883 insn = gen_tls_tls_32 (dest, tmp2, addr);
3884 emit_insn (insn);
3885 }
3886 }
3887
3888 return dest;
3889}
3890
c4501e62
JJ
3891/* Return 1 if X contains a thread-local symbol. */
3892
3893bool
a2369ed3 3894rs6000_tls_referenced_p (rtx x)
c4501e62 3895{
cd413cab
AP
3896 if (! TARGET_HAVE_TLS)
3897 return false;
3898
c4501e62
JJ
3899 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3900}
3901
3902/* Return 1 if *X is a thread-local symbol. This is the same as
3903 rs6000_tls_symbol_ref except for the type of the unused argument. */
3904
9390387d 3905static int
a2369ed3 3906rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3907{
3908 return RS6000_SYMBOL_REF_TLS_P (*x);
3909}
3910
24ea750e
DJ
3911/* The convention appears to be to define this wherever it is used.
3912 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3913 is now used here. */
3914#ifndef REG_MODE_OK_FOR_BASE_P
3915#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3916#endif
3917
3918/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3919 replace the input X, or the original X if no replacement is called for.
3920 The output parameter *WIN is 1 if the calling macro should goto WIN,
3921 0 if it should not.
3922
3923 For RS/6000, we wish to handle large displacements off a base
3924 register by splitting the addend across an addis and the mem insn.
3925 This cuts number of extra insns needed from 3 to 1.
3926
3927 On Darwin, we use this to generate code for floating point constants.
3928 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3929 The Darwin code is inside #if TARGET_MACHO because only then is
3930 machopic_function_base_name() defined. */
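/* Worked example of the displacement splitting done below, for a
   hypothetical address (plus (reg) (const_int 0x12348000)): the low
   part becomes ((0x8000 ^ 0x8000) - 0x8000) = -0x8000 and the high
   part 0x12350000, so the high part is pushed into a base-register
   reload while -0x8000 stays in the mem, cutting the fix-up to one
   extra insn.  If high + low cannot reproduce the original value
   (a 32-bit overflow), the transformation is skipped.  */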
3931rtx
f676971a 3932rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3933 int opnum, int type,
3934 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3935{
f676971a 3936 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3937 if (GET_CODE (x) == PLUS
3938 && GET_CODE (XEXP (x, 0)) == PLUS
3939 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3940 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3941 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3942 {
3943 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3944 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3945 opnum, (enum reload_type)type);
24ea750e
DJ
3946 *win = 1;
3947 return x;
3948 }
3deb2758 3949
24ea750e
DJ
3950#if TARGET_MACHO
3951 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3952 && GET_CODE (x) == LO_SUM
3953 && GET_CODE (XEXP (x, 0)) == PLUS
3954 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3955 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3956 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3957 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3958 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3959 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3960 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3961 {
3962 /* Result of a previous invocation of this function on a Darwin
6f317ef3 3963 floating-point constant. */
24ea750e 3964 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3965 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3966 opnum, (enum reload_type)type);
24ea750e
DJ
3967 *win = 1;
3968 return x;
3969 }
3970#endif
4937d02d
DE
3971
3972 /* Force ld/std non-word aligned offset into base register by wrapping
3973 in offset 0. */
3974 if (GET_CODE (x) == PLUS
3975 && GET_CODE (XEXP (x, 0)) == REG
3976 && REGNO (XEXP (x, 0)) < 32
3977 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3978 && GET_CODE (XEXP (x, 1)) == CONST_INT
3979 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3980 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3981 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3982 && TARGET_POWERPC64)
3983 {
3984 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3985 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3986 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3987 opnum, (enum reload_type) type);
3988 *win = 1;
3989 return x;
3990 }
3991
24ea750e
DJ
3992 if (GET_CODE (x) == PLUS
3993 && GET_CODE (XEXP (x, 0)) == REG
3994 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3995 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3996 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3997 && !SPE_VECTOR_MODE (mode)
17caeff2 3998 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3999 || mode == DDmode || mode == TDmode
54b695e7 4000 || mode == DImode))
78c875e8 4001 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4002 {
4003 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4004 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4005 HOST_WIDE_INT high
c4ad648e 4006 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4007
4008 /* Check for 32-bit overflow. */
4009 if (high + low != val)
c4ad648e 4010 {
24ea750e
DJ
4011 *win = 0;
4012 return x;
4013 }
4014
4015 /* Reload the high part into a base reg; leave the low part
c4ad648e 4016 in the mem directly. */
24ea750e
DJ
4017
4018 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4019 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4020 GEN_INT (high)),
4021 GEN_INT (low));
24ea750e
DJ
4022
4023 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4024 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4025 opnum, (enum reload_type)type);
24ea750e
DJ
4026 *win = 1;
4027 return x;
4028 }
4937d02d 4029
24ea750e 4030 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4031 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4032 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4033#if TARGET_MACHO
4034 && DEFAULT_ABI == ABI_DARWIN
a29077da 4035 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4036#else
4037 && DEFAULT_ABI == ABI_V4
4038 && !flag_pic
4039#endif
7393f7f8 4040 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4041 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4042 without fprs. */
0d8c1c97 4043 && mode != TFmode
7393f7f8 4044 && mode != TDmode
7b5d92b2 4045 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4046 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4047 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4048 {
8308679f 4049#if TARGET_MACHO
a29077da
GK
4050 if (flag_pic)
4051 {
4052 rtx offset = gen_rtx_CONST (Pmode,
4053 gen_rtx_MINUS (Pmode, x,
11abc112 4054 machopic_function_base_sym ()));
a29077da
GK
4055 x = gen_rtx_LO_SUM (GET_MODE (x),
4056 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4057 gen_rtx_HIGH (Pmode, offset)), offset);
4058 }
4059 else
8308679f 4060#endif
a29077da 4061 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4062 gen_rtx_HIGH (Pmode, x), x);
a29077da 4063
24ea750e 4064 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4065 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4066 opnum, (enum reload_type)type);
24ea750e
DJ
4067 *win = 1;
4068 return x;
4069 }
4937d02d 4070
dec1f3aa
DE
4071 /* Reload an offset address wrapped by an AND that represents the
4072 masking of the lower bits. Strip the outer AND and let reload
4073 convert the offset address into an indirect address. */
4074 if (TARGET_ALTIVEC
4075 && ALTIVEC_VECTOR_MODE (mode)
4076 && GET_CODE (x) == AND
4077 && GET_CODE (XEXP (x, 0)) == PLUS
4078 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4079 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4080 && GET_CODE (XEXP (x, 1)) == CONST_INT
4081 && INTVAL (XEXP (x, 1)) == -16)
4082 {
4083 x = XEXP (x, 0);
4084 *win = 1;
4085 return x;
4086 }
4087
24ea750e 4088 if (TARGET_TOC
4d588c14 4089 && constant_pool_expr_p (x)
c1f11548 4090 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4091 {
194c524a 4092 x = create_TOC_reference (x);
24ea750e
DJ
4093 *win = 1;
4094 return x;
4095 }
4096 *win = 0;
4097 return x;
f676971a 4098}
24ea750e 4099
258bfae2
FS
4100/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4101 that is a valid memory address for an instruction.
4102 The MODE argument is the machine mode for the MEM expression
4103 that wants to use this address.
4104
4105 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4106 refers to a constant pool entry of an address (or the sum of it
4107 plus a constant), a short (16-bit signed) constant plus a register,
4108 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4109 auto-increment. For DFmode, DDmode and DImode with a constant plus
4110 register, we must ensure that both words are addressable or PowerPC64
4111 with offset word aligned.
258bfae2 4112
4d4447b5 4113 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4114 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4115 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4116 during assembly output. */
4117int
a2369ed3 4118rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4119{
850e8d3d
DN
4120 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4121 if (TARGET_ALTIVEC
4122 && ALTIVEC_VECTOR_MODE (mode)
4123 && GET_CODE (x) == AND
4124 && GET_CODE (XEXP (x, 1)) == CONST_INT
4125 && INTVAL (XEXP (x, 1)) == -16)
4126 x = XEXP (x, 0);
4127
c4501e62
JJ
4128 if (RS6000_SYMBOL_REF_TLS_P (x))
4129 return 0;
4d588c14 4130 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4131 return 1;
4132 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4133 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4134 && !SPE_VECTOR_MODE (mode)
429ec7dc 4135 && mode != TFmode
7393f7f8 4136 && mode != TDmode
54b695e7 4137 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4138 && !(TARGET_E500_DOUBLE
4139 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4140 && TARGET_UPDATE
4d588c14 4141 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4142 return 1;
d04b6e6e 4143 if (legitimate_small_data_p (mode, x))
258bfae2 4144 return 1;
4d588c14 4145 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4146 return 1;
4147 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4148 if (! reg_ok_strict
4149 && GET_CODE (x) == PLUS
4150 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4151 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4152 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4153 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4154 return 1;
76d2b81d 4155 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4156 return 1;
4157 if (mode != TImode
76d2b81d 4158 && mode != TFmode
7393f7f8 4159 && mode != TDmode
a3170dc6
AH
4160 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4161 || TARGET_POWERPC64
4d4447b5 4162 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
258bfae2 4163 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4164 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4165 return 1;
6fb5fa3c
DB
4166 if (GET_CODE (x) == PRE_MODIFY
4167 && mode != TImode
4168 && mode != TFmode
4169 && mode != TDmode
4170 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4171 || TARGET_POWERPC64
4d4447b5 4172 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4173 && (TARGET_POWERPC64 || mode != DImode)
4174 && !ALTIVEC_VECTOR_MODE (mode)
4175 && !SPE_VECTOR_MODE (mode)
4176 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4177 && !(TARGET_E500_DOUBLE
4178 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4179 && TARGET_UPDATE
4180 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4181 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4182 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4183 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4184 return 1;
4d588c14 4185 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4186 return 1;
4187 return 0;
4188}
4d588c14
RH
4189
4190/* Go to LABEL if ADDR (a legitimate address expression)
4191 has an effect that depends on the machine mode it is used for.
4192
4193 On the RS/6000 this is true of all integral offsets (since AltiVec
4194 modes don't allow them) and of any pre-increment or decrement.
4195
4196 ??? Except that due to conceptual problems in offsettable_address_p
4197 we can't really report the problems of integral offsets. So leave
f676971a 4198 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4199 sub-words of a TFmode operand, which is what we had before. */
4200
4201bool
a2369ed3 4202rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4203{
4204 switch (GET_CODE (addr))
4205 {
4206 case PLUS:
4207 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4208 {
4209 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4210 return val + 12 + 0x8000 >= 0x10000;
4211 }
4212 break;
4213
4214 case LO_SUM:
4215 return true;
4216
6fb5fa3c
DB
4217 case PRE_INC:
4218 case PRE_DEC:
4219 case PRE_MODIFY:
4220 return TARGET_UPDATE;
4d588c14
RH
4221
4222 default:
4223 break;
4224 }
4225
4226 return false;
4227}
d8ecbcdb 4228
d04b6e6e
EB
4229/* More elaborate version of recog's offsettable_memref_p predicate
4230 that works around the ??? note of rs6000_mode_dependent_address.
4231 In particular it accepts
4232
4233 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4234
4235 in 32-bit mode, which the recog predicate rejects. */
4236
4237bool
4238rs6000_offsettable_memref_p (rtx op)
4239{
4240 if (!MEM_P (op))
4241 return false;
4242
4243 /* First mimic offsettable_memref_p. */
4244 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4245 return true;
4246
4247 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4248 the latter predicate knows nothing about the mode of the memory
4249 reference and, therefore, assumes that it is the largest supported
4250 mode (TFmode). As a consequence, legitimate offsettable memory
4251 references are rejected. rs6000_legitimate_offset_address_p contains
4252 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4253 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4254}
4255
d8ecbcdb
AH
4256/* Return number of consecutive hard regs needed starting at reg REGNO
4257 to hold something of mode MODE.
4258 This is ordinarily the length in words of a value of mode MODE
4259 but can be less for certain modes in special long registers.
4260
4261 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4262 scalar instructions. The upper 32 bits are only available to the
4263 SIMD instructions.
4264
4265 POWER and PowerPC GPRs hold 32 bits worth;
4266 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4267
4268int
4269rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4270{
4271 if (FP_REGNO_P (regno))
4272 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4273
4274 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4275 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4276
4277 if (ALTIVEC_REGNO_P (regno))
4278 return
4279 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4280
8521c414
JM
4281 /* The value returned for SCmode in the E500 double case is 2 for
4282 ABI compatibility; storing an SCmode value in a single register
4283 would require function_arg and rs6000_spe_function_arg to handle
4284 SCmode so as to pass the value correctly in a pair of
4285 registers. */
4286 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4287 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4288
d8ecbcdb
AH
4289 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4290}
2aa4498c
AH
4291
4292/* Change register usage conditional on target flags. */
4293void
4294rs6000_conditional_register_usage (void)
4295{
4296 int i;
4297
4298 /* Set MQ register fixed (already call_used) if not POWER
4299 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4300 be allocated. */
4301 if (! TARGET_POWER)
4302 fixed_regs[64] = 1;
4303
7c9ac5c0 4304 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4305 if (TARGET_64BIT)
4306 fixed_regs[13] = call_used_regs[13]
4307 = call_really_used_regs[13] = 1;
4308
4309 /* Conditionally disable FPRs. */
4310 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4311 for (i = 32; i < 64; i++)
4312 fixed_regs[i] = call_used_regs[i]
c4ad648e 4313 = call_really_used_regs[i] = 1;
2aa4498c 4314
7c9ac5c0
PH
4315 /* The TOC register is not killed across calls in a way that is
4316 visible to the compiler. */
4317 if (DEFAULT_ABI == ABI_AIX)
4318 call_really_used_regs[2] = 0;
4319
2aa4498c
AH
4320 if (DEFAULT_ABI == ABI_V4
4321 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4322 && flag_pic == 2)
4323 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4324
4325 if (DEFAULT_ABI == ABI_V4
4326 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4327 && flag_pic == 1)
4328 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4329 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4330 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4331
4332 if (DEFAULT_ABI == ABI_DARWIN
4333 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4334 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4335 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4336 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4337
b4db40bf
JJ
4338 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4339 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4340 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4341
2aa4498c
AH
4342 if (TARGET_SPE)
4343 {
4344 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4345 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4346 registers in prologues and epilogues. We no longer use r14
4347 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4348 pool for link-compatibility with older versions of GCC. Once
4349 "old" code has died out, we can return r14 to the allocation
4350 pool. */
4351 fixed_regs[14]
4352 = call_used_regs[14]
4353 = call_really_used_regs[14] = 1;
2aa4498c
AH
4354 }
4355
0db747be 4356 if (!TARGET_ALTIVEC)
2aa4498c
AH
4357 {
4358 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4359 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4360 call_really_used_regs[VRSAVE_REGNO] = 1;
4361 }
4362
0db747be
DE
4363 if (TARGET_ALTIVEC)
4364 global_regs[VSCR_REGNO] = 1;
4365
2aa4498c 4366 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4367 {
4368 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4369 call_used_regs[i] = call_really_used_regs[i] = 1;
4370
4371 /* AIX reserves VR20:31 in non-extended ABI mode. */
4372 if (TARGET_XCOFF)
4373 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4374 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4375 }
2aa4498c 4376}
fb4d4348 4377\f
a4f6c312
SS
4378/* Try to output insns to set TARGET equal to the constant C if it can
4379 be done in less than N insns. Do all computations in MODE.
4380 Returns the place where the output has been placed if it can be
4381 done and the insns have been emitted. If it would take more than N
4382 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4383
4384rtx
f676971a 4385rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4386 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4387{
af8cb5c5 4388 rtx result, insn, set;
2bfcf297
DB
4389 HOST_WIDE_INT c0, c1;
4390
37409796 4391 switch (mode)
2bfcf297 4392 {
37409796
NS
4393 case QImode:
4394 case HImode:
2bfcf297 4395 if (dest == NULL)
c4ad648e 4396 dest = gen_reg_rtx (mode);
2bfcf297
DB
4397 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4398 return dest;
bb8df8a6 4399
37409796 4400 case SImode:
b3a13419 4401 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4402
d448860e 4403 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4404 GEN_INT (INTVAL (source)
4405 & (~ (HOST_WIDE_INT) 0xffff))));
4406 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4407 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4408 GEN_INT (INTVAL (source) & 0xffff))));
4409 result = dest;
37409796
NS
4410 break;
4411
4412 case DImode:
4413 switch (GET_CODE (source))
af8cb5c5 4414 {
37409796 4415 case CONST_INT:
af8cb5c5
DE
4416 c0 = INTVAL (source);
4417 c1 = -(c0 < 0);
37409796 4418 break;
bb8df8a6 4419
37409796 4420 case CONST_DOUBLE:
2bfcf297 4421#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4422 c0 = CONST_DOUBLE_LOW (source);
4423 c1 = -(c0 < 0);
2bfcf297 4424#else
af8cb5c5
DE
4425 c0 = CONST_DOUBLE_LOW (source);
4426 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4427#endif
37409796
NS
4428 break;
4429
4430 default:
4431 gcc_unreachable ();
af8cb5c5 4432 }
af8cb5c5
DE
4433
4434 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4435 break;
4436
4437 default:
4438 gcc_unreachable ();
2bfcf297 4439 }
2bfcf297 4440
af8cb5c5
DE
4441 insn = get_last_insn ();
4442 set = single_set (insn);
4443 if (! CONSTANT_P (SET_SRC (set)))
4444 set_unique_reg_note (insn, REG_EQUAL, source);
4445
4446 return result;
2bfcf297
DB
4447}
4448
4449/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4450 fall back to a straightforward decomposition. We do this to avoid
4451 exponential run times encountered when looking for longer sequences
4452 with rs6000_emit_set_const. */
4453static rtx
a2369ed3 4454rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4455{
4456 if (!TARGET_POWERPC64)
4457 {
4458 rtx operand1, operand2;
4459
4460 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4461 DImode);
d448860e 4462 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4463 DImode);
4464 emit_move_insn (operand1, GEN_INT (c1));
4465 emit_move_insn (operand2, GEN_INT (c2));
4466 }
4467 else
4468 {
bc06712d 4469 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4470
bc06712d 4471 ud1 = c1 & 0xffff;
f921c9c9 4472 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4473#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4474 c2 = c1 >> 32;
2bfcf297 4475#endif
bc06712d 4476 ud3 = c2 & 0xffff;
f921c9c9 4477 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4478
f676971a 4479 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4480 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4481 {
bc06712d 4482 if (ud1 & 0x8000)
b78d48dd 4483 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4484 else
4485 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4486 }
2bfcf297 4487
f676971a 4488 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4489 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4490 {
bc06712d 4491 if (ud2 & 0x8000)
f676971a 4492 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4493 - 0x80000000));
252b88f7 4494 else
bc06712d
TR
4495 emit_move_insn (dest, GEN_INT (ud2 << 16));
4496 if (ud1 != 0)
d448860e
JH
4497 emit_move_insn (copy_rtx (dest),
4498 gen_rtx_IOR (DImode, copy_rtx (dest),
4499 GEN_INT (ud1)));
252b88f7 4500 }
f676971a 4501 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4502 || (ud4 == 0 && ! (ud3 & 0x8000)))
4503 {
4504 if (ud3 & 0x8000)
f676971a 4505 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4506 - 0x80000000));
4507 else
4508 emit_move_insn (dest, GEN_INT (ud3 << 16));
4509
4510 if (ud2 != 0)
d448860e
JH
4511 emit_move_insn (copy_rtx (dest),
4512 gen_rtx_IOR (DImode, copy_rtx (dest),
4513 GEN_INT (ud2)));
4514 emit_move_insn (copy_rtx (dest),
4515 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4516 GEN_INT (16)));
bc06712d 4517 if (ud1 != 0)
d448860e
JH
4518 emit_move_insn (copy_rtx (dest),
4519 gen_rtx_IOR (DImode, copy_rtx (dest),
4520 GEN_INT (ud1)));
bc06712d 4521 }
f676971a 4522 else
bc06712d
TR
4523 {
4524 if (ud4 & 0x8000)
f676971a 4525 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4526 - 0x80000000));
4527 else
4528 emit_move_insn (dest, GEN_INT (ud4 << 16));
4529
4530 if (ud3 != 0)
d448860e
JH
4531 emit_move_insn (copy_rtx (dest),
4532 gen_rtx_IOR (DImode, copy_rtx (dest),
4533 GEN_INT (ud3)));
2bfcf297 4534
d448860e
JH
4535 emit_move_insn (copy_rtx (dest),
4536 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4537 GEN_INT (32)));
bc06712d 4538 if (ud2 != 0)
d448860e
JH
4539 emit_move_insn (copy_rtx (dest),
4540 gen_rtx_IOR (DImode, copy_rtx (dest),
4541 GEN_INT (ud2 << 16)));
bc06712d 4542 if (ud1 != 0)
d448860e
JH
4543 emit_move_insn (copy_rtx (dest),
4544 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4545 }
4546 }
2bfcf297
DB
4547 return dest;
4548}
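/* Worked example of the generic branch above, for the hypothetical
   constant 0x123456789abcdef0 on a 64-bit target: ud4..ud1 are 0x1234,
   0x5678, 0x9abc and 0xdef0, giving the usual five-insn sequence,
   roughly

	lis   rD,0x1234
	ori   rD,rD,0x5678
	sldi  rD,rD,32
	oris  rD,rD,0x9abc
	ori   rD,rD,0xdef0

   The earlier branches shorten this when the upper halfwords are all
   zeros or all ones.  */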
4549
76d2b81d 4550/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4551 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4552
4553static void
4554rs6000_eliminate_indexed_memrefs (rtx operands[2])
4555{
4556 if (GET_CODE (operands[0]) == MEM
4557 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4558 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4559 && ! reload_in_progress)
4560 operands[0]
4561 = replace_equiv_address (operands[0],
4562 copy_addr_to_reg (XEXP (operands[0], 0)));
4563
4564 if (GET_CODE (operands[1]) == MEM
4565 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4566 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4567 && ! reload_in_progress)
4568 operands[1]
4569 = replace_equiv_address (operands[1],
4570 copy_addr_to_reg (XEXP (operands[1], 0)));
4571}
4572
fb4d4348
GK
4573/* Emit a move from SOURCE to DEST in mode MODE. */
4574void
a2369ed3 4575rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4576{
4577 rtx operands[2];
4578 operands[0] = dest;
4579 operands[1] = source;
f676971a 4580
fb4d4348
GK
4581 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4582 if (GET_CODE (operands[1]) == CONST_DOUBLE
4583 && ! FLOAT_MODE_P (mode)
4584 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4585 {
4586 /* FIXME. This should never happen. */
4587 /* Since it seems that it does, do the safe thing and convert
4588 to a CONST_INT. */
2496c7bd 4589 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4590 }
37409796
NS
4591 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4592 || FLOAT_MODE_P (mode)
4593 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4594 || CONST_DOUBLE_LOW (operands[1]) < 0)
4595 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4596 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4597
c9e8cb32
DD
4598 /* Check if GCC is setting up a block move that will end up using FP
4599 registers as temporaries. We must make sure this is acceptable. */
4600 if (GET_CODE (operands[0]) == MEM
4601 && GET_CODE (operands[1]) == MEM
4602 && mode == DImode
41543739
GK
4603 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4604 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4605 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4606 ? 32 : MEM_ALIGN (operands[0])))
4607 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4608 ? 32
41543739
GK
4609 : MEM_ALIGN (operands[1]))))
4610 && ! MEM_VOLATILE_P (operands [0])
4611 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4612 {
41543739
GK
4613 emit_move_insn (adjust_address (operands[0], SImode, 0),
4614 adjust_address (operands[1], SImode, 0));
d448860e
JH
4615 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4616 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4617 return;
4618 }
630d42a0 4619
b3a13419 4620 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4621 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4622 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4623
a3170dc6
AH
4624 if (mode == SFmode && ! TARGET_POWERPC
4625 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4626 && GET_CODE (operands[0]) == MEM)
fb4d4348 4627 {
ffc14f31
GK
4628 int regnum;
4629
4630 if (reload_in_progress || reload_completed)
4631 regnum = true_regnum (operands[1]);
4632 else if (GET_CODE (operands[1]) == REG)
4633 regnum = REGNO (operands[1]);
4634 else
4635 regnum = -1;
f676971a 4636
fb4d4348
GK
4637 /* If operands[1] is a register, on POWER it may have
4638 double-precision data in it, so truncate it to single
4639 precision. */
4640 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4641 {
4642 rtx newreg;
b3a13419 4643 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4644 : gen_reg_rtx (mode));
fb4d4348
GK
4645 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4646 operands[1] = newreg;
4647 }
4648 }
4649
c4501e62
JJ
4650 /* Recognize the case where operands[1] is a reference to thread-local
4651 data and load its address to a register. */
84f52ebd 4652 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4653 {
84f52ebd
RH
4654 enum tls_model model;
4655 rtx tmp = operands[1];
4656 rtx addend = NULL;
4657
4658 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4659 {
4660 addend = XEXP (XEXP (tmp, 0), 1);
4661 tmp = XEXP (XEXP (tmp, 0), 0);
4662 }
4663
4664 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4665 model = SYMBOL_REF_TLS_MODEL (tmp);
4666 gcc_assert (model != 0);
4667
4668 tmp = rs6000_legitimize_tls_address (tmp, model);
4669 if (addend)
4670 {
4671 tmp = gen_rtx_PLUS (mode, tmp, addend);
4672 tmp = force_operand (tmp, operands[0]);
4673 }
4674 operands[1] = tmp;
c4501e62
JJ
4675 }
4676
8f4e6caf
RH
4677 /* Handle the case where reload calls us with an invalid address. */
4678 if (reload_in_progress && mode == Pmode
69ef87e2 4679 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4680 || ! nonimmediate_operand (operands[0], mode)))
4681 goto emit_set;
4682
a9baceb1
GK
4683 /* 128-bit constant floating-point values on Darwin should really be
4684 loaded as two parts. */
8521c414 4685 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4686 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4687 {
4688 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4689 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4690 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4691 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4692 simplify_gen_subreg (imode, operands[1], mode, 0),
4693 imode);
4694 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4695 GET_MODE_SIZE (imode)),
4696 simplify_gen_subreg (imode, operands[1], mode,
4697 GET_MODE_SIZE (imode)),
4698 imode);
a9baceb1
GK
4699 return;
4700 }
4701
fb4d4348
GK
4702 /* FIXME: In the long term, this switch statement should go away
4703 and be replaced by a sequence of tests based on things like
4704 mode == Pmode. */
4705 switch (mode)
4706 {
4707 case HImode:
4708 case QImode:
4709 if (CONSTANT_P (operands[1])
4710 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4711 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4712 break;
4713
06f4e019 4714 case TFmode:
7393f7f8 4715 case TDmode:
76d2b81d
DJ
4716 rs6000_eliminate_indexed_memrefs (operands);
4717 /* fall through */
4718
fb4d4348 4719 case DFmode:
7393f7f8 4720 case DDmode:
fb4d4348 4721 case SFmode:
f676971a 4722 if (CONSTANT_P (operands[1])
fb4d4348 4723 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4724 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4725 break;
f676971a 4726
0ac081f6
AH
4727 case V16QImode:
4728 case V8HImode:
4729 case V4SFmode:
4730 case V4SImode:
a3170dc6
AH
4731 case V4HImode:
4732 case V2SFmode:
4733 case V2SImode:
00a892b8 4734 case V1DImode:
69ef87e2 4735 if (CONSTANT_P (operands[1])
d744e06e 4736 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4737 operands[1] = force_const_mem (mode, operands[1]);
4738 break;
f676971a 4739
fb4d4348 4740 case SImode:
a9098fd0 4741 case DImode:
fb4d4348
GK
4742 /* Use default pattern for address of ELF small data */
4743 if (TARGET_ELF
a9098fd0 4744 && mode == Pmode
f607bc57 4745 && DEFAULT_ABI == ABI_V4
f676971a 4746 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4747 || GET_CODE (operands[1]) == CONST)
4748 && small_data_operand (operands[1], mode))
fb4d4348
GK
4749 {
4750 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4751 return;
4752 }
4753
f607bc57 4754 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4755 && mode == Pmode && mode == SImode
4756 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4757 {
4758 emit_insn (gen_movsi_got (operands[0], operands[1]));
4759 return;
4760 }
4761
ee890fe2 4762 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4763 && TARGET_NO_TOC
4764 && ! flag_pic
a9098fd0 4765 && mode == Pmode
fb4d4348
GK
4766 && CONSTANT_P (operands[1])
4767 && GET_CODE (operands[1]) != HIGH
4768 && GET_CODE (operands[1]) != CONST_INT)
4769 {
b3a13419
ILT
4770 rtx target = (!can_create_pseudo_p ()
4771 ? operands[0]
4772 : gen_reg_rtx (mode));
fb4d4348
GK
4773
4774 /* If this is a function address on -mcall-aixdesc,
4775 convert it to the address of the descriptor. */
4776 if (DEFAULT_ABI == ABI_AIX
4777 && GET_CODE (operands[1]) == SYMBOL_REF
4778 && XSTR (operands[1], 0)[0] == '.')
4779 {
4780 const char *name = XSTR (operands[1], 0);
4781 rtx new_ref;
4782 while (*name == '.')
4783 name++;
4784 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4785 CONSTANT_POOL_ADDRESS_P (new_ref)
4786 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4787 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4788 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4789 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4790 operands[1] = new_ref;
4791 }
7509c759 4792
ee890fe2
SS
4793 if (DEFAULT_ABI == ABI_DARWIN)
4794 {
ab82a49f
AP
4795#if TARGET_MACHO
4796 if (MACHO_DYNAMIC_NO_PIC_P)
4797 {
4798 /* Take care of any required data indirection. */
4799 operands[1] = rs6000_machopic_legitimize_pic_address (
4800 operands[1], mode, operands[0]);
4801 if (operands[0] != operands[1])
4802 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4803 operands[0], operands[1]));
ab82a49f
AP
4804 return;
4805 }
4806#endif
b8a55285
AP
4807 emit_insn (gen_macho_high (target, operands[1]));
4808 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4809 return;
4810 }
4811
fb4d4348
GK
4812 emit_insn (gen_elf_high (target, operands[1]));
4813 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4814 return;
4815 }
4816
4817 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4818 and we have put it in the TOC, we just need to make a TOC-relative
4819 reference to it. */
4820 if (TARGET_TOC
4821 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4822 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4823 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4824 get_pool_mode (operands[1])))
fb4d4348 4825 {
a9098fd0 4826 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4827 }
a9098fd0
GK
4828 else if (mode == Pmode
4829 && CONSTANT_P (operands[1])
38886f37
AO
4830 && ((GET_CODE (operands[1]) != CONST_INT
4831 && ! easy_fp_constant (operands[1], mode))
4832 || (GET_CODE (operands[1]) == CONST_INT
4833 && num_insns_constant (operands[1], mode) > 2)
4834 || (GET_CODE (operands[0]) == REG
4835 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4836 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4837 && ! legitimate_constant_pool_address_p (operands[1])
4838 && ! toc_relative_expr_p (operands[1]))
4839 {
4840 /* Emit a USE operation so that the constant isn't deleted if
4841 expensive optimizations are turned on because nobody
4842 references it. This should only be done for operands that
4843 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4844 This should not be done for operands that contain LABEL_REFs.
4845 For now, we just handle the obvious case. */
4846 if (GET_CODE (operands[1]) != LABEL_REF)
4847 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4848
c859cda6 4849#if TARGET_MACHO
ee890fe2 4850 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4851 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4852 {
ee890fe2
SS
4853 operands[1] =
4854 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4855 operands[0]);
4856 if (operands[0] != operands[1])
4857 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4858 return;
4859 }
c859cda6 4860#endif
ee890fe2 4861
4862 /* If we are to limit the number of things we put in the TOC and
4863 this is a symbol plus a constant we can add in one insn,
4864 just put the symbol in the TOC and add the constant. Don't do
4865 this if reload is in progress. */
4866 if (GET_CODE (operands[1]) == CONST
4867 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4868 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4869 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4870 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4871 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4872 && ! side_effects_p (operands[0]))
4873 {
a4f6c312
SS
4874 rtx sym =
4875 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4876 rtx other = XEXP (XEXP (operands[1], 0), 1);
4877
a9098fd0
GK
4878 sym = force_reg (mode, sym);
4879 if (mode == SImode)
4880 emit_insn (gen_addsi3 (operands[0], sym, other));
4881 else
4882 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4883 return;
4884 }
4885
a9098fd0 4886 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4887
f676971a 4888 if (TARGET_TOC
4d588c14 4889 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4890 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4891 get_pool_constant (XEXP (operands[1], 0)),
4892 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4893 {
ba4828e0 4894 operands[1]
542a8afa 4895 = gen_const_mem (mode,
c4ad648e 4896 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4897 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4898 }
fb4d4348
GK
4899 }
4900 break;
a9098fd0 4901
fb4d4348 4902 case TImode:
76d2b81d
DJ
4903 rs6000_eliminate_indexed_memrefs (operands);
4904
27dc0551
DE
4905 if (TARGET_POWER)
4906 {
4907 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4908 gen_rtvec (2,
4909 gen_rtx_SET (VOIDmode,
4910 operands[0], operands[1]),
4911 gen_rtx_CLOBBER (VOIDmode,
4912 gen_rtx_SCRATCH (SImode)))));
4913 return;
4914 }
fb4d4348
GK
4915 break;
4916
4917 default:
37409796 4918 gcc_unreachable ();
fb4d4348
GK
4919 }
4920
4921 /* Above, we may have called force_const_mem which may have returned
4922 an invalid address. If we can, fix this up; otherwise, reload will
4923 have to deal with it. */
8f4e6caf
RH
4924 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4925 operands[1] = validize_mem (operands[1]);
a9098fd0 4926
8f4e6caf 4927 emit_set:
fb4d4348
GK
4928 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4929}
4697a36c 4930\f
4931/* Nonzero if we can use a floating-point register to pass this arg. */
4932#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4933 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4934 && (MODE) != SDmode \
2858f73a
GK
4935 && (CUM)->fregno <= FP_ARG_MAX_REG \
4936 && TARGET_HARD_FLOAT && TARGET_FPRS)
4937
4938/* Nonzero if we can use an AltiVec register to pass this arg. */
4939#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4940 (ALTIVEC_VECTOR_MODE (MODE) \
4941 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4942 && TARGET_ALTIVEC_ABI \
83953138 4943 && (NAMED))
2858f73a 4944
4945/* Return a nonzero value to say to return the function value in
4946 memory, just as large structures are always returned. TYPE will be
4947 the data type of the value, and FNTYPE will be the type of the
4948 function doing the returning, or @code{NULL} for libcalls.
4949
4950 The AIX ABI for the RS/6000 specifies that all structures are
4951 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4952 specifies that structures <= 8 bytes are returned in r3/r4, but a
4953 draft put them in memory, and GCC used to implement the draft
 4954 instead of the final standard. Therefore, aix_struct_return
 4955 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4956 compatibility can change DRAFT_V4_STRUCT_RET to override the
4957 default, and -m switches get the final word. See
4958 rs6000_override_options for more details.
4959
4960 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4961 long double support is enabled. These values are returned in memory.
4962
4963 int_size_in_bytes returns -1 for variable size objects, which go in
4964 memory always. The cast to unsigned makes -1 > 8. */
4965
4966static bool
586de218 4967rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4968{
4969 /* In the darwin64 abi, try to use registers for larger structs
4970 if possible. */
0b5383eb 4971 if (rs6000_darwin64_abi
594a51fe 4972 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4973 && int_size_in_bytes (type) > 0)
4974 {
4975 CUMULATIVE_ARGS valcum;
4976 rtx valret;
4977
4978 valcum.words = 0;
4979 valcum.fregno = FP_ARG_MIN_REG;
4980 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4981 /* Do a trial code generation as if this were going to be passed
4982 as an argument; if any part goes in memory, we return NULL. */
4983 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4984 if (valret)
4985 return false;
4986 /* Otherwise fall through to more conventional ABI rules. */
4987 }
594a51fe 4988
c6e8c921 4989 if (AGGREGATE_TYPE_P (type)
df01da37 4990 && (aix_struct_return
c6e8c921
GK
4991 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4992 return true;
b693336b 4993
4994 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4995 modes only exist for GCC vector types if -maltivec. */
4996 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4997 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4998 return false;
4999
5000 /* Return synthetic vectors in memory. */
5001 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5002 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5003 {
5004 static bool warned_for_return_big_vectors = false;
5005 if (!warned_for_return_big_vectors)
5006 {
d4ee4d25 5007 warning (0, "GCC vector returned by reference: "
b693336b
PB
5008 "non-standard ABI extension with no compatibility guarantee");
5009 warned_for_return_big_vectors = true;
5010 }
5011 return true;
5012 }
5013
602ea4d3 5014 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5015 return true;
ad630bef 5016
5017 return false;
5018}
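
/* Illustrative sketch of how the rules documented above classify a few
   hypothetical return types; the struct names are assumptions made for
   the example only.  */
#if 0
struct pair { int a, b; };   /* 8 bytes: returned in memory when
                                aix_struct_return is set (AIX/Darwin),
                                in r3/r4 under the final SVR4 rules.  */
struct quad { int a[4]; };   /* 16 bytes > 8: returned in memory under
                                every ABI handled here.  */
/* A variable-size object has int_size_in_bytes == -1; the cast to
   unsigned HOST_WIDE_INT makes -1 compare greater than 8, so it is
   returned in memory as well.  */
#endif
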
5019
 5020/* Initialize a variable CUM of type CUMULATIVE_ARGS
 5021 for a call to a function whose data type is FNTYPE.
 5022 For a library call, FNTYPE is 0.
 5023
 5024 For incoming args we set the number of arguments in the prototype large
 5025 so we never return a PARALLEL. */
4697a36c
MM
5026
5027void
f676971a 5028init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5029 rtx libname ATTRIBUTE_UNUSED, int incoming,
5030 int libcall, int n_named_args)
4697a36c
MM
5031{
5032 static CUMULATIVE_ARGS zero_cumulative;
5033
5034 *cum = zero_cumulative;
5035 cum->words = 0;
5036 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5037 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5038 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5039 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5040 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5041 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5042 cum->stdarg = fntype
5043 && (TYPE_ARG_TYPES (fntype) != 0
5044 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5045 != void_type_node));
4697a36c 5046
0f6937fe
AM
5047 cum->nargs_prototype = 0;
5048 if (incoming || cum->prototype)
5049 cum->nargs_prototype = n_named_args;
4697a36c 5050
a5c76ee6 5051 /* Check for a longcall attribute. */
3eb4e360
AM
5052 if ((!fntype && rs6000_default_long_calls)
5053 || (fntype
5054 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5055 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5056 cum->call_cookie |= CALL_LONG;
6a4cee5f 5057
4697a36c
MM
5058 if (TARGET_DEBUG_ARG)
5059 {
5060 fprintf (stderr, "\ninit_cumulative_args:");
5061 if (fntype)
5062 {
5063 tree ret_type = TREE_TYPE (fntype);
5064 fprintf (stderr, " ret code = %s,",
5065 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5066 }
5067
6a4cee5f
MM
5068 if (cum->call_cookie & CALL_LONG)
5069 fprintf (stderr, " longcall,");
5070
4697a36c
MM
5071 fprintf (stderr, " proto = %d, nargs = %d\n",
5072 cum->prototype, cum->nargs_prototype);
5073 }
f676971a 5074
c4ad648e
AM
5075 if (fntype
5076 && !TARGET_ALTIVEC
5077 && TARGET_ALTIVEC_ABI
5078 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5079 {
c85ce869 5080 error ("cannot return value in vector register because"
c4ad648e 5081 " altivec instructions are disabled, use -maltivec"
c85ce869 5082 " to enable them");
c4ad648e 5083 }
4697a36c
MM
5084}
5085\f
5086/* Return true if TYPE must be passed on the stack and not in registers. */
5087
5088static bool
586de218 5089rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5090{
5091 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5092 return must_pass_in_stack_var_size (mode, type);
5093 else
5094 return must_pass_in_stack_var_size_or_pad (mode, type);
5095}
5096
5097/* If defined, a C expression which determines whether, and in which
5098 direction, to pad out an argument with extra space. The value
5099 should be of type `enum direction': either `upward' to pad above
5100 the argument, `downward' to pad below, or `none' to inhibit
5101 padding.
5102
5103 For the AIX ABI structs are always stored left shifted in their
5104 argument slot. */
5105
9ebbca7d 5106enum direction
586de218 5107function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5108{
6e985040
AM
5109#ifndef AGGREGATE_PADDING_FIXED
5110#define AGGREGATE_PADDING_FIXED 0
5111#endif
5112#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5113#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5114#endif
5115
5116 if (!AGGREGATE_PADDING_FIXED)
5117 {
5118 /* GCC used to pass structures of the same size as integer types as
5119 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
 5120 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
 5121 passed padded downward, except that -mstrict-align further
5122 muddied the water in that multi-component structures of 2 and 4
5123 bytes in size were passed padded upward.
5124
5125 The following arranges for best compatibility with previous
5126 versions of gcc, but removes the -mstrict-align dependency. */
5127 if (BYTES_BIG_ENDIAN)
5128 {
5129 HOST_WIDE_INT size = 0;
5130
5131 if (mode == BLKmode)
5132 {
5133 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5134 size = int_size_in_bytes (type);
5135 }
5136 else
5137 size = GET_MODE_SIZE (mode);
5138
5139 if (size == 1 || size == 2 || size == 4)
5140 return downward;
5141 }
5142 return upward;
5143 }
5144
5145 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5146 {
5147 if (type != 0 && AGGREGATE_TYPE_P (type))
5148 return upward;
5149 }
c229cba9 5150
5151 /* Fall back to the default. */
5152 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
5153}
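
/* Illustrative sketch of the rule above, assuming AGGREGATE_PADDING_FIXED
   is 0 and a big-endian target; the struct names are hypothetical.  */
#if 0
struct two   { char a, b; };     /* size 2: padded downward.            */
struct three { char a, b, c; };  /* size 3: falls through to upward,
                                    i.e. left-shifted in its argument
                                    slot, as the AIX ABI comment above
                                    describes.                          */
#endif
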
5154
 5155/* If defined, a C expression that gives the alignment boundary, in bits,
 5156 of an argument with the specified mode and type. If it is not defined,
 5157 PARM_BOUNDARY is used for all arguments.
 5158
 5159 V.4 wants long longs and doubles to be double word aligned. Just
 5160 testing the mode size is a boneheaded way to do this as it means
 5161 that other types such as complex int are also double word aligned.
 5162 However, we're stuck with this because changing the ABI might break
 5163 existing library interfaces.
 5164
 5165 Doubleword align SPE vectors.
 5166 Quadword align Altivec vectors.
 5167 Quadword align large synthetic vector types. */
 5168
 5169int
 5170function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5171{
84e9ad15
AM
5172 if (DEFAULT_ABI == ABI_V4
5173 && (GET_MODE_SIZE (mode) == 8
5174 || (TARGET_HARD_FLOAT
5175 && TARGET_FPRS
7393f7f8 5176 && (mode == TFmode || mode == TDmode))))
4ed78545 5177 return 64;
ad630bef
DE
5178 else if (SPE_VECTOR_MODE (mode)
5179 || (type && TREE_CODE (type) == VECTOR_TYPE
5180 && int_size_in_bytes (type) >= 8
5181 && int_size_in_bytes (type) < 16))
e1f83b4d 5182 return 64;
ad630bef
DE
5183 else if (ALTIVEC_VECTOR_MODE (mode)
5184 || (type && TREE_CODE (type) == VECTOR_TYPE
5185 && int_size_in_bytes (type) >= 16))
0ac081f6 5186 return 128;
0b5383eb
DJ
5187 else if (rs6000_darwin64_abi && mode == BLKmode
5188 && type && TYPE_ALIGN (type) > 64)
5189 return 128;
 5190 else
 5191 return PARM_BOUNDARY;
 5192}
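
/* A worked illustration of the boundaries computed above, assuming the
   corresponding target conditions hold (hypothetical cases):
     long long / double under the V.4 ABI           -> 64-bit boundary
     an 8-byte SPE or synthetic vector type         -> 64-bit boundary
     a 16-byte AltiVec vector such as V4SImode      -> 128-bit boundary
     anything else                                  -> PARM_BOUNDARY  */
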
c53bdcf5 5193
5194/* For a function parm of MODE and TYPE, return the starting word in
5195 the parameter area. NWORDS of the parameter area are already used. */
5196
5197static unsigned int
5198rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5199{
5200 unsigned int align;
5201 unsigned int parm_offset;
5202
5203 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5204 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5205 return nwords + (-(parm_offset + nwords) & align);
5206}
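
/* A worked example of the computation above, assuming a 32-bit V.4
   target where PARM_BOUNDARY is 32 bits: for a doubleword-aligned
   argument, function_arg_boundary returns 64, so ALIGN is 64/32 - 1 = 1
   and PARM_OFFSET is 2.  With NWORDS == 1 already used, the result is
   1 + (-(2 + 1) & 1) = 1 + 1 = 2, i.e. one word is skipped so that the
   argument starts on an even word of the parameter area; with
   NWORDS == 0 the result is simply 0.  */
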
5207
5208/* Compute the size (in words) of a function argument. */
5209
5210static unsigned long
5211rs6000_arg_size (enum machine_mode mode, tree type)
5212{
5213 unsigned long size;
5214
5215 if (mode != BLKmode)
5216 size = GET_MODE_SIZE (mode);
5217 else
5218 size = int_size_in_bytes (type);
5219
5220 if (TARGET_32BIT)
5221 return (size + 3) >> 2;
5222 else
5223 return (size + 7) >> 3;
5224}
b6c9286a 5225\f
 5226/* Use this to flush pending int fields. */
5227
5228static void
0b5383eb
DJ
5229rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5230 HOST_WIDE_INT bitpos)
594a51fe 5231{
0b5383eb
DJ
5232 unsigned int startbit, endbit;
5233 int intregs, intoffset;
5234 enum machine_mode mode;
594a51fe 5235
0b5383eb
DJ
5236 if (cum->intoffset == -1)
5237 return;
594a51fe 5238
0b5383eb
DJ
5239 intoffset = cum->intoffset;
5240 cum->intoffset = -1;
5241
5242 if (intoffset % BITS_PER_WORD != 0)
5243 {
5244 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5245 MODE_INT, 0);
5246 if (mode == BLKmode)
594a51fe 5247 {
0b5383eb
DJ
5248 /* We couldn't find an appropriate mode, which happens,
5249 e.g., in packed structs when there are 3 bytes to load.
5250 Back intoffset back to the beginning of the word in this
5251 case. */
5252 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5253 }
594a51fe 5254 }
0b5383eb
DJ
5255
5256 startbit = intoffset & -BITS_PER_WORD;
5257 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5258 intregs = (endbit - startbit) / BITS_PER_WORD;
5259 cum->words += intregs;
5260}
5261
5262/* The darwin64 ABI calls for us to recurse down through structs,
5263 looking for elements passed in registers. Unfortunately, we have
5264 to track int register count here also because of misalignments
5265 in powerpc alignment mode. */
5266
5267static void
5268rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5269 tree type,
5270 HOST_WIDE_INT startbitpos)
5271{
5272 tree f;
5273
5274 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5275 if (TREE_CODE (f) == FIELD_DECL)
5276 {
5277 HOST_WIDE_INT bitpos = startbitpos;
5278 tree ftype = TREE_TYPE (f);
70fb00df
AP
5279 enum machine_mode mode;
5280 if (ftype == error_mark_node)
5281 continue;
5282 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5283
5284 if (DECL_SIZE (f) != 0
5285 && host_integerp (bit_position (f), 1))
5286 bitpos += int_bit_position (f);
5287
5288 /* ??? FIXME: else assume zero offset. */
5289
5290 if (TREE_CODE (ftype) == RECORD_TYPE)
5291 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5292 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5293 {
5294 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5295 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5296 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5297 }
5298 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5299 {
5300 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5301 cum->vregno++;
5302 cum->words += 2;
5303 }
5304 else if (cum->intoffset == -1)
5305 cum->intoffset = bitpos;
5306 }
5307}
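
/* Illustrative walk-through of the recursion above for a hypothetical
   struct { int a; double b; int c; } passed under the darwin64 ABI:
   'a' leaves cum->intoffset pointing at its bit position; when 'b' is
   reached the pending int bits are flushed into GPR words and the
   double itself bumps both cum->fregno and cum->words; 'c' then opens
   a new pending int region, flushed by the caller at the end of the
   record.  This is why the sizeof(type)/8 shortcut mentioned later in
   function_arg_advance cannot be used.  */
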
5308
 5309/* Update the data in CUM to advance over an argument
 5310 of mode MODE and data type TYPE.
 5311 (TYPE is null for libcalls where that information may not be available.)
5312
5313 Note that for args passed by reference, function_arg will be called
5314 with MODE and TYPE set to that of the pointer to the arg, not the arg
5315 itself. */
4697a36c
MM
5316
5317void
f676971a 5318function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5319 tree type, int named, int depth)
4697a36c 5320{
0b5383eb
DJ
5321 int size;
5322
5323 /* Only tick off an argument if we're not recursing. */
5324 if (depth == 0)
5325 cum->nargs_prototype--;
4697a36c 5326
ad630bef
DE
5327 if (TARGET_ALTIVEC_ABI
5328 && (ALTIVEC_VECTOR_MODE (mode)
5329 || (type && TREE_CODE (type) == VECTOR_TYPE
5330 && int_size_in_bytes (type) == 16)))
0ac081f6 5331 {
4ed78545
AM
5332 bool stack = false;
5333
2858f73a 5334 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5335 {
6d0ef01e
HP
5336 cum->vregno++;
5337 if (!TARGET_ALTIVEC)
c85ce869 5338 error ("cannot pass argument in vector register because"
6d0ef01e 5339 " altivec instructions are disabled, use -maltivec"
c85ce869 5340 " to enable them");
4ed78545
AM
5341
5342 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5343 even if it is going to be passed in a vector register.
5344 Darwin does the same for variable-argument functions. */
5345 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5346 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5347 stack = true;
6d0ef01e 5348 }
4ed78545
AM
5349 else
5350 stack = true;
5351
5352 if (stack)
c4ad648e 5353 {
a594a19c 5354 int align;
f676971a 5355
5356 /* Vector parameters must be 16-byte aligned. This places
5357 them at 2 mod 4 in terms of words in 32-bit mode, since
5358 the parameter save area starts at offset 24 from the
5359 stack. In 64-bit mode, they just have to start on an
5360 even word, since the parameter save area is 16-byte
5361 aligned. Space for GPRs is reserved even if the argument
5362 will be passed in memory. */
5363 if (TARGET_32BIT)
4ed78545 5364 align = (2 - cum->words) & 3;
2858f73a
GK
5365 else
5366 align = cum->words & 1;
c53bdcf5 5367 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5368
a594a19c
GK
5369 if (TARGET_DEBUG_ARG)
5370 {
f676971a 5371 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5372 cum->words, align);
5373 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5374 cum->nargs_prototype, cum->prototype,
2858f73a 5375 GET_MODE_NAME (mode));
a594a19c
GK
5376 }
5377 }
0ac081f6 5378 }
a4b0320c 5379 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5380 && !cum->stdarg
5381 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5382 cum->sysv_gregno++;
594a51fe
SS
5383
5384 else if (rs6000_darwin64_abi
5385 && mode == BLKmode
0b5383eb
DJ
5386 && TREE_CODE (type) == RECORD_TYPE
5387 && (size = int_size_in_bytes (type)) > 0)
5388 {
5389 /* Variable sized types have size == -1 and are
5390 treated as if consisting entirely of ints.
5391 Pad to 16 byte boundary if needed. */
5392 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5393 && (cum->words % 2) != 0)
5394 cum->words++;
5395 /* For varargs, we can just go up by the size of the struct. */
5396 if (!named)
5397 cum->words += (size + 7) / 8;
5398 else
5399 {
5400 /* It is tempting to say int register count just goes up by
5401 sizeof(type)/8, but this is wrong in a case such as
5402 { int; double; int; } [powerpc alignment]. We have to
5403 grovel through the fields for these too. */
5404 cum->intoffset = 0;
5405 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5406 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5407 size * BITS_PER_UNIT);
5408 }
5409 }
f607bc57 5410 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5411 {
a3170dc6 5412 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5413 && (mode == SFmode || mode == DFmode
7393f7f8 5414 || mode == DDmode || mode == TDmode
602ea4d3 5415 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5416 {
5417 /* _Decimal128 must use an even/odd register pair. This assumes
5418 that the register number is odd when fregno is odd. */
5419 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5420 cum->fregno++;
5421
5422 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5423 <= FP_ARG_V4_MAX_REG)
602ea4d3 5424 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5425 else
5426 {
602ea4d3 5427 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5428 if (mode == DFmode || mode == TFmode
5429 || mode == DDmode || mode == TDmode)
c4ad648e 5430 cum->words += cum->words & 1;
c53bdcf5 5431 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5432 }
4697a36c 5433 }
4cc833b7
RH
5434 else
5435 {
b2d04ecf 5436 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5437 int gregno = cum->sysv_gregno;
5438
5439 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5440 (r7,r8) or (r9,r10). As does any other 2 word item such
5441 as complex int due to a historical mistake. */
5442 if (n_words == 2)
5443 gregno += (1 - gregno) & 1;
4cc833b7 5444
4ed78545 5445 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5446 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5447 {
5448 /* Long long and SPE vectors are aligned on the stack.
5449 So are other 2 word items such as complex int due to
5450 a historical mistake. */
4cc833b7
RH
5451 if (n_words == 2)
5452 cum->words += cum->words & 1;
5453 cum->words += n_words;
5454 }
4697a36c 5455
 5456 /* Note: gregno continues to accumulate even after we have started
 5457 spilling to the stack; this is how expand_builtin_saveregs can
 5458 tell that spilling has begun. */
5459 cum->sysv_gregno = gregno + n_words;
5460 }
4697a36c 5461
4cc833b7
RH
5462 if (TARGET_DEBUG_ARG)
5463 {
5464 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5465 cum->words, cum->fregno);
5466 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5467 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5468 fprintf (stderr, "mode = %4s, named = %d\n",
5469 GET_MODE_NAME (mode), named);
5470 }
4697a36c
MM
5471 }
5472 else
4cc833b7 5473 {
b2d04ecf 5474 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5475 int start_words = cum->words;
5476 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5477
294bd182 5478 cum->words = align_words + n_words;
4697a36c 5479
ebb109ad 5480 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5481 && mode != SDmode
a3170dc6 5482 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5483 {
5484 /* _Decimal128 must be passed in an even/odd float register pair.
5485 This assumes that the register number is odd when fregno is
5486 odd. */
5487 if (mode == TDmode && (cum->fregno % 2) == 1)
5488 cum->fregno++;
5489 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5490 }
4cc833b7
RH
5491
5492 if (TARGET_DEBUG_ARG)
5493 {
5494 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5495 cum->words, cum->fregno);
5496 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5497 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5498 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5499 named, align_words - start_words, depth);
4cc833b7
RH
5500 }
5501 }
4697a36c 5502}
a6c9bed4 5503
5504static rtx
5505spe_build_register_parallel (enum machine_mode mode, int gregno)
5506{
17caeff2 5507 rtx r1, r3, r5, r7;
f82f556d 5508
37409796 5509 switch (mode)
f82f556d 5510 {
37409796 5511 case DFmode:
4d4447b5 5512 case DDmode:
54b695e7
AH
5513 r1 = gen_rtx_REG (DImode, gregno);
5514 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5515 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5516
5517 case DCmode:
17caeff2 5518 case TFmode:
4d4447b5 5519 case TDmode:
54b695e7
AH
5520 r1 = gen_rtx_REG (DImode, gregno);
5521 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5522 r3 = gen_rtx_REG (DImode, gregno + 2);
5523 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5524 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5525
17caeff2
JM
5526 case TCmode:
5527 r1 = gen_rtx_REG (DImode, gregno);
5528 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5529 r3 = gen_rtx_REG (DImode, gregno + 2);
5530 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5531 r5 = gen_rtx_REG (DImode, gregno + 4);
5532 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5533 r7 = gen_rtx_REG (DImode, gregno + 6);
5534 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5535 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5536
37409796
NS
5537 default:
5538 gcc_unreachable ();
 5539 }
 5540}
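
/* Sketch of the RTL produced above for a hypothetical DCmode argument
   with GREGNO == 5: two 64-bit GPR pieces, the second one two registers
   and eight bytes further on, roughly
     (parallel [(expr_list (reg:DI 5) (const_int 0))
                (expr_list (reg:DI 7) (const_int 8))])  */
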
b78d48dd 5541
f82f556d 5542/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5543static rtx
f676971a 5544rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5545 tree type)
a6c9bed4 5546{
5547 int gregno = cum->sysv_gregno;
5548
 5549 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
 5550 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
5551 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5552 || mode == DDmode || mode == TDmode
5553 || mode == DCmode || mode == TCmode))
f82f556d 5554 {
b5870bee
AH
5555 int n_words = rs6000_arg_size (mode, type);
5556
f82f556d 5557 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4d4447b5 5558 if (mode == DFmode || mode == DDmode)
b5870bee 5559 gregno += (1 - gregno) & 1;
f82f556d 5560
5561 /* Multi-reg args are not split between registers and stack. */
5562 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5563 return NULL_RTX;
5564
5565 return spe_build_register_parallel (mode, gregno);
5566 }
a6c9bed4
AH
5567 if (cum->stdarg)
5568 {
c53bdcf5 5569 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5570
5571 /* SPE vectors are put in odd registers. */
5572 if (n_words == 2 && (gregno & 1) == 0)
5573 gregno += 1;
5574
5575 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5576 {
5577 rtx r1, r2;
5578 enum machine_mode m = SImode;
5579
5580 r1 = gen_rtx_REG (m, gregno);
5581 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5582 r2 = gen_rtx_REG (m, gregno + 1);
5583 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5584 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5585 }
5586 else
b78d48dd 5587 return NULL_RTX;
a6c9bed4
AH
5588 }
5589 else
5590 {
f82f556d
AH
5591 if (gregno <= GP_ARG_MAX_REG)
5592 return gen_rtx_REG (mode, gregno);
a6c9bed4 5593 else
b78d48dd 5594 return NULL_RTX;
a6c9bed4
AH
5595 }
5596}
5597
5598/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5599 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5600
0b5383eb 5601static void
bb8df8a6 5602rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5603 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5604{
0b5383eb
DJ
5605 enum machine_mode mode;
5606 unsigned int regno;
5607 unsigned int startbit, endbit;
5608 int this_regno, intregs, intoffset;
5609 rtx reg;
594a51fe 5610
0b5383eb
DJ
5611 if (cum->intoffset == -1)
5612 return;
5613
5614 intoffset = cum->intoffset;
5615 cum->intoffset = -1;
5616
5617 /* If this is the trailing part of a word, try to only load that
5618 much into the register. Otherwise load the whole register. Note
5619 that in the latter case we may pick up unwanted bits. It's not a
 5620 problem at the moment, but we may wish to revisit this. */
5621
5622 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5623 {
0b5383eb
DJ
5624 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5625 MODE_INT, 0);
5626 if (mode == BLKmode)
5627 {
5628 /* We couldn't find an appropriate mode, which happens,
5629 e.g., in packed structs when there are 3 bytes to load.
 5630 Move intoffset back to the beginning of the word in this
5631 case. */
5632 intoffset = intoffset & -BITS_PER_WORD;
5633 mode = word_mode;
5634 }
5635 }
5636 else
5637 mode = word_mode;
5638
5639 startbit = intoffset & -BITS_PER_WORD;
5640 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5641 intregs = (endbit - startbit) / BITS_PER_WORD;
5642 this_regno = cum->words + intoffset / BITS_PER_WORD;
5643
5644 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5645 cum->use_stack = 1;
bb8df8a6 5646
0b5383eb
DJ
5647 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5648 if (intregs <= 0)
5649 return;
5650
5651 intoffset /= BITS_PER_UNIT;
5652 do
5653 {
5654 regno = GP_ARG_MIN_REG + this_regno;
5655 reg = gen_rtx_REG (mode, regno);
5656 rvec[(*k)++] =
5657 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5658
5659 this_regno += 1;
5660 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5661 mode = word_mode;
5662 intregs -= 1;
5663 }
5664 while (intregs > 0);
5665}
5666
 5667/* Recursive workhorse for rs6000_darwin64_record_arg, below. */
5668
5669static void
586de218 5670rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5671 HOST_WIDE_INT startbitpos, rtx rvec[],
5672 int *k)
5673{
5674 tree f;
5675
5676 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5677 if (TREE_CODE (f) == FIELD_DECL)
5678 {
5679 HOST_WIDE_INT bitpos = startbitpos;
5680 tree ftype = TREE_TYPE (f);
70fb00df
AP
5681 enum machine_mode mode;
5682 if (ftype == error_mark_node)
5683 continue;
5684 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5685
5686 if (DECL_SIZE (f) != 0
5687 && host_integerp (bit_position (f), 1))
5688 bitpos += int_bit_position (f);
5689
5690 /* ??? FIXME: else assume zero offset. */
5691
5692 if (TREE_CODE (ftype) == RECORD_TYPE)
5693 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5694 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5695 {
0b5383eb
DJ
5696#if 0
5697 switch (mode)
594a51fe 5698 {
0b5383eb
DJ
5699 case SCmode: mode = SFmode; break;
5700 case DCmode: mode = DFmode; break;
5701 case TCmode: mode = TFmode; break;
5702 default: break;
594a51fe 5703 }
0b5383eb
DJ
5704#endif
5705 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5706 rvec[(*k)++]
bb8df8a6 5707 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5708 gen_rtx_REG (mode, cum->fregno++),
5709 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5710 if (mode == TFmode || mode == TDmode)
0b5383eb 5711 cum->fregno++;
594a51fe 5712 }
0b5383eb
DJ
5713 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5714 {
5715 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5716 rvec[(*k)++]
bb8df8a6
EC
5717 = gen_rtx_EXPR_LIST (VOIDmode,
5718 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5719 GEN_INT (bitpos / BITS_PER_UNIT));
5720 }
5721 else if (cum->intoffset == -1)
5722 cum->intoffset = bitpos;
5723 }
5724}
594a51fe 5725
 5726/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
 5727 the register(s) to be used for each field and subfield of a struct
 5728 being passed by value, along with the offset of where the
 5729 register's value may be found in the block. FP fields go in FP
 5730 registers, vector fields go in vector registers, and everything
 5731 else goes in int registers, packed as in memory.
 5732
 5733 This code is also used for function return values. RETVAL indicates
 5734 whether this is the case.
 5735
 5736 Much of this is taken from the SPARC V9 port, which has a similar
 5737 calling convention. */
594a51fe 5738
0b5383eb 5739static rtx
586de218 5740rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5741 int named, bool retval)
5742{
5743 rtx rvec[FIRST_PSEUDO_REGISTER];
5744 int k = 1, kbase = 1;
5745 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5746 /* This is a copy; modifications are not visible to our caller. */
5747 CUMULATIVE_ARGS copy_cum = *orig_cum;
5748 CUMULATIVE_ARGS *cum = &copy_cum;
5749
5750 /* Pad to 16 byte boundary if needed. */
5751 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5752 && (cum->words % 2) != 0)
5753 cum->words++;
5754
5755 cum->intoffset = 0;
5756 cum->use_stack = 0;
5757 cum->named = named;
5758
5759 /* Put entries into rvec[] for individual FP and vector fields, and
5760 for the chunks of memory that go in int regs. Note we start at
5761 element 1; 0 is reserved for an indication of using memory, and
5762 may or may not be filled in below. */
5763 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5764 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5765
5766 /* If any part of the struct went on the stack put all of it there.
5767 This hack is because the generic code for
5768 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5769 parts of the struct are not at the beginning. */
5770 if (cum->use_stack)
5771 {
5772 if (retval)
5773 return NULL_RTX; /* doesn't go in registers at all */
5774 kbase = 0;
5775 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5776 }
5777 if (k > 1 || cum->use_stack)
5778 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
5779 else
5780 return NULL_RTX;
5781}
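
/* Illustrative sketch for a hypothetical struct { double d; int i; }
   passed by value under darwin64: the recursion above emits one
   EXPR_LIST putting 'd' in the next FP register at byte offset 0, and
   the flush emits another putting the word that holds 'i' in a GPR at
   byte offset 8, all wrapped in a BLKmode PARALLEL.  If any part had
   to go on the stack, element 0 would instead be the NULL_RTX marker
   (or NULL_RTX would be returned for a return value).  */
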
5782
5783/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5784
5785static rtx
ec6376ab 5786rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5787{
ec6376ab
AM
5788 int n_units;
5789 int i, k;
5790 rtx rvec[GP_ARG_NUM_REG + 1];
5791
5792 if (align_words >= GP_ARG_NUM_REG)
5793 return NULL_RTX;
5794
5795 n_units = rs6000_arg_size (mode, type);
5796
5797 /* Optimize the simple case where the arg fits in one gpr, except in
5798 the case of BLKmode due to assign_parms assuming that registers are
5799 BITS_PER_WORD wide. */
5800 if (n_units == 0
5801 || (n_units == 1 && mode != BLKmode))
5802 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5803
5804 k = 0;
5805 if (align_words + n_units > GP_ARG_NUM_REG)
5806 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5807 using a magic NULL_RTX component.
5808 This is not strictly correct. Only some of the arg belongs in
5809 memory, not all of it. However, the normal scheme using
5810 function_arg_partial_nregs can result in unusual subregs, eg.
5811 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5812 store the whole arg to memory is often more efficient than code
5813 to store pieces, and we know that space is available in the right
5814 place for the whole arg. */
ec6376ab
AM
5815 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5816
5817 i = 0;
5818 do
36a454e1 5819 {
ec6376ab
AM
5820 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5821 rtx off = GEN_INT (i++ * 4);
5822 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5823 }
ec6376ab
AM
5824 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5825
5826 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
5827}
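
/* Worked example for the splitting above, assuming GPR argument words
   map onto r3..r10: a DImode argument arriving at ALIGN_WORDS == 7 has
   two SImode units but only one GPR left, so the result is, in outline,
     (parallel [(expr_list (nil) (const_int 0))
                (expr_list (reg:SI 10) (const_int 0))])
   i.e. the magic NULL_RTX element saying the whole argument also lives
   in memory, plus the one piece that does fit in a register.  */
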
5828
 5829/* Determine where to put an argument to a function.
 5830 Value is zero to push the argument on the stack,
 5831 or a hard register in which to store the argument.
 5832
 5833 MODE is the argument's machine mode.
 5834 TYPE is the data type of the argument (as a tree).
 5835 This is null for libcalls where that information may
 5836 not be available.
 5837 CUM is a variable of type CUMULATIVE_ARGS which gives info about
 5838 the preceding args and about the function being called. It is
 5839 not modified in this routine.
 5840 NAMED is nonzero if this argument is a named parameter
 5841 (otherwise it is an extra parameter matching an ellipsis).
 5842
 5843 On RS/6000 the first eight words of non-FP are normally in registers
 5844 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
 5845 Under V.4, the first 8 FP args are in registers.
 5846
 5847 If this is floating-point and no prototype is specified, we use
 5848 both an FP and integer register (or possibly FP reg and stack). Library
 5849 functions (when CALL_LIBCALL is set) always have the proper types for args,
 5850 so we can pass the FP value just in one register. emit_library_function
 5851 doesn't support PARALLEL anyway.
 5852
 5853 Note that for args passed by reference, function_arg will be called
 5854 with MODE and TYPE set to that of the pointer to the arg, not the arg
 5855 itself. */
 5856
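
/* An illustrative (hypothetical) call under the AIX ABI for
       void f (int a, double b, int c);
   'a' goes in the first GPR argument word, 'b' goes in the first FP
   argument register while (on a 32-bit target) its two parameter-area
   words are still counted against the GPRs, and 'c' therefore lands in
   the GPR word that follows those reserved words.  The function below
   works this out case by case.  */
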
9390387d 5857rtx
f676971a 5858function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5859 tree type, int named)
4697a36c 5860{
4cc833b7 5861 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5862
5863 /* Return a marker to indicate whether CR1 needs to set or clear the
5864 bit that V.4 uses to say fp args were passed in registers.
5865 Assume that we don't need the marker for software floating point,
5866 or compiler generated library calls. */
5867 if (mode == VOIDmode)
5868 {
f607bc57 5869 if (abi == ABI_V4
b9599e46 5870 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5871 && (cum->stdarg
5872 || (cum->nargs_prototype < 0
5873 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5874 {
5875 /* For the SPE, we need to crxor CR6 always. */
5876 if (TARGET_SPE_ABI)
5877 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5878 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5879 return GEN_INT (cum->call_cookie
5880 | ((cum->fregno == FP_ARG_MIN_REG)
5881 ? CALL_V4_SET_FP_ARGS
5882 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5883 }
4697a36c 5884
7509c759 5885 return GEN_INT (cum->call_cookie);
4697a36c
MM
5886 }
5887
0b5383eb
DJ
5888 if (rs6000_darwin64_abi && mode == BLKmode
5889 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5890 {
0b5383eb 5891 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5892 if (rslt != NULL_RTX)
5893 return rslt;
5894 /* Else fall through to usual handling. */
5895 }
5896
2858f73a 5897 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5898 if (TARGET_64BIT && ! cum->prototype)
5899 {
5900 /* Vector parameters get passed in vector register
5901 and also in GPRs or memory, in absence of prototype. */
5902 int align_words;
5903 rtx slot;
5904 align_words = (cum->words + 1) & ~1;
5905
5906 if (align_words >= GP_ARG_NUM_REG)
5907 {
5908 slot = NULL_RTX;
5909 }
5910 else
5911 {
5912 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5913 }
5914 return gen_rtx_PARALLEL (mode,
5915 gen_rtvec (2,
5916 gen_rtx_EXPR_LIST (VOIDmode,
5917 slot, const0_rtx),
5918 gen_rtx_EXPR_LIST (VOIDmode,
5919 gen_rtx_REG (mode, cum->vregno),
5920 const0_rtx)));
c72d6c26
HP
5921 }
5922 else
5923 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5924 else if (TARGET_ALTIVEC_ABI
5925 && (ALTIVEC_VECTOR_MODE (mode)
5926 || (type && TREE_CODE (type) == VECTOR_TYPE
5927 && int_size_in_bytes (type) == 16)))
0ac081f6 5928 {
2858f73a 5929 if (named || abi == ABI_V4)
a594a19c 5930 return NULL_RTX;
0ac081f6 5931 else
a594a19c
GK
5932 {
5933 /* Vector parameters to varargs functions under AIX or Darwin
5934 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5935 int align, align_words, n_words;
5936 enum machine_mode part_mode;
a594a19c
GK
5937
5938 /* Vector parameters must be 16-byte aligned. This places them at
5939 2 mod 4 in terms of words in 32-bit mode, since the parameter
5940 save area starts at offset 24 from the stack. In 64-bit mode,
5941 they just have to start on an even word, since the parameter
5942 save area is 16-byte aligned. */
5943 if (TARGET_32BIT)
4ed78545 5944 align = (2 - cum->words) & 3;
2858f73a
GK
5945 else
5946 align = cum->words & 1;
a594a19c
GK
5947 align_words = cum->words + align;
5948
5949 /* Out of registers? Memory, then. */
5950 if (align_words >= GP_ARG_NUM_REG)
5951 return NULL_RTX;
ec6376ab
AM
5952
5953 if (TARGET_32BIT && TARGET_POWERPC64)
5954 return rs6000_mixed_function_arg (mode, type, align_words);
5955
5956 /* The vector value goes in GPRs. Only the part of the
5957 value in GPRs is reported here. */
ec6376ab
AM
5958 part_mode = mode;
5959 n_words = rs6000_arg_size (mode, type);
5960 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5961 /* Fortunately, there are only two possibilities, the value
5962 is either wholly in GPRs or half in GPRs and half not. */
5963 part_mode = DImode;
ec6376ab
AM
5964
5965 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5966 }
0ac081f6 5967 }
f82f556d
AH
5968 else if (TARGET_SPE_ABI && TARGET_SPE
5969 && (SPE_VECTOR_MODE (mode)
18f63bfa 5970 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5971 || mode == DDmode
17caeff2
JM
5972 || mode == DCmode
5973 || mode == TFmode
7393f7f8 5974 || mode == TDmode
17caeff2 5975 || mode == TCmode))))
a6c9bed4 5976 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5977
f607bc57 5978 else if (abi == ABI_V4)
4697a36c 5979 {
a3170dc6 5980 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5981 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5982 || (mode == TFmode && !TARGET_IEEEQUAD)
5983 || mode == DDmode || mode == TDmode))
4cc833b7 5984 {
5985 /* _Decimal128 must use an even/odd register pair. This assumes
5986 that the register number is odd when fregno is odd. */
5987 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5988 cum->fregno++;
5989
5990 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5991 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5992 return gen_rtx_REG (mode, cum->fregno);
5993 else
b78d48dd 5994 return NULL_RTX;
4cc833b7
RH
5995 }
5996 else
5997 {
b2d04ecf 5998 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5999 int gregno = cum->sysv_gregno;
6000
6001 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6002 (r7,r8) or (r9,r10). As does any other 2 word item such
6003 as complex int due to a historical mistake. */
6004 if (n_words == 2)
6005 gregno += (1 - gregno) & 1;
4cc833b7 6006
4ed78545 6007 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6008 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6009 return NULL_RTX;
ec6376ab
AM
6010
6011 if (TARGET_32BIT && TARGET_POWERPC64)
6012 return rs6000_mixed_function_arg (mode, type,
6013 gregno - GP_ARG_MIN_REG);
6014 return gen_rtx_REG (mode, gregno);
4cc833b7 6015 }
4697a36c 6016 }
4cc833b7
RH
6017 else
6018 {
294bd182 6019 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6020
6021 /* _Decimal128 must be passed in an even/odd float register pair.
6022 This assumes that the register number is odd when fregno is odd. */
6023 if (mode == TDmode && (cum->fregno % 2) == 1)
6024 cum->fregno++;
6025
2858f73a 6026 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6027 {
ec6376ab
AM
6028 rtx rvec[GP_ARG_NUM_REG + 1];
6029 rtx r;
6030 int k;
c53bdcf5
AM
6031 bool needs_psave;
6032 enum machine_mode fmode = mode;
c53bdcf5
AM
6033 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6034
6035 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6036 {
6037 /* Currently, we only ever need one reg here because complex
6038 doubles are split. */
7393f7f8
BE
6039 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6040 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6041
6042 /* Long double or _Decimal128 split over regs and memory. */
6043 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6044 }
c53bdcf5
AM
6045
6046 /* Do we also need to pass this arg in the parameter save
6047 area? */
6048 needs_psave = (type
6049 && (cum->nargs_prototype <= 0
6050 || (DEFAULT_ABI == ABI_AIX
de17c25f 6051 && TARGET_XL_COMPAT
c53bdcf5
AM
6052 && align_words >= GP_ARG_NUM_REG)));
6053
6054 if (!needs_psave && mode == fmode)
ec6376ab 6055 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6056
ec6376ab 6057 k = 0;
c53bdcf5
AM
6058 if (needs_psave)
6059 {
ec6376ab 6060 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6061 This piece must come first, before the fprs. */
c53bdcf5
AM
6062 if (align_words < GP_ARG_NUM_REG)
6063 {
6064 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6065
6066 if (align_words + n_words > GP_ARG_NUM_REG
6067 || (TARGET_32BIT && TARGET_POWERPC64))
6068 {
6069 /* If this is partially on the stack, then we only
6070 include the portion actually in registers here. */
6071 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6072 rtx off;
79773478
AM
6073 int i = 0;
6074 if (align_words + n_words > GP_ARG_NUM_REG)
6075 /* Not all of the arg fits in gprs. Say that it
6076 goes in memory too, using a magic NULL_RTX
6077 component. Also see comment in
6078 rs6000_mixed_function_arg for why the normal
6079 function_arg_partial_nregs scheme doesn't work
6080 in this case. */
6081 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6082 const0_rtx);
ec6376ab
AM
6083 do
6084 {
6085 r = gen_rtx_REG (rmode,
6086 GP_ARG_MIN_REG + align_words);
2e6c9641 6087 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6088 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6089 }
6090 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6091 }
6092 else
6093 {
6094 /* The whole arg fits in gprs. */
6095 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6096 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6097 }
c53bdcf5 6098 }
ec6376ab
AM
6099 else
6100 /* It's entirely in memory. */
6101 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6102 }
6103
6104 /* Describe where this piece goes in the fprs. */
6105 r = gen_rtx_REG (fmode, cum->fregno);
6106 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6107
6108 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6109 }
6110 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6111 {
ec6376ab
AM
6112 if (TARGET_32BIT && TARGET_POWERPC64)
6113 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6114
4eeca74f
AM
6115 if (mode == BLKmode)
6116 mode = Pmode;
6117
b2d04ecf
AM
6118 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6119 }
4cc833b7
RH
6120 else
6121 return NULL_RTX;
4697a36c 6122 }
4697a36c
MM
6123}
6124\f
 6125/* For an arg passed partly in registers and partly in memory, this is
 6126 the number of bytes passed in registers. For args passed entirely in
 6127 registers or entirely in memory, zero. When an arg is described by a
 6128 PARALLEL, perhaps using more than one register type, this function
 6129 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6130
78a52f11
RH
6131static int
6132rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6133 tree type, bool named)
4697a36c 6134{
c53bdcf5 6135 int ret = 0;
ec6376ab 6136 int align_words;
c53bdcf5 6137
f607bc57 6138 if (DEFAULT_ABI == ABI_V4)
4697a36c 6139 return 0;
4697a36c 6140
c53bdcf5
AM
6141 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6142 && cum->nargs_prototype >= 0)
6143 return 0;
6144
6145 /* In this complicated case we just disable the partial_nregs code. */
6146 if (rs6000_darwin64_abi && mode == BLKmode
6147 && TREE_CODE (type) == RECORD_TYPE
6148 && int_size_in_bytes (type) > 0)
6149 return 0;
6150
294bd182 6151 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6152
79773478
AM
6153 if (USE_FP_FOR_ARG_P (cum, mode, type))
6154 {
6155 /* If we are passing this arg in the fixed parameter save area
6156 (gprs or memory) as well as fprs, then this function should
6157 return the number of partial bytes passed in the parameter
6158 save area rather than partial bytes passed in fprs. */
6159 if (type
6160 && (cum->nargs_prototype <= 0
6161 || (DEFAULT_ABI == ABI_AIX
6162 && TARGET_XL_COMPAT
6163 && align_words >= GP_ARG_NUM_REG)))
6164 return 0;
6165 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6166 > FP_ARG_MAX_REG + 1)
ac7e839c 6167 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6168 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6169 return 0;
6170 }
6171
ec6376ab
AM
6172 if (align_words < GP_ARG_NUM_REG
6173 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6174 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6175
c53bdcf5 6176 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6177 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6178
c53bdcf5 6179 return ret;
6180}
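
/* Worked example for the GPR case above, on a hypothetical 32-bit
   target where GP_ARG_NUM_REG is 8: an argument of four words whose
   first word falls at ALIGN_WORDS == 6 overflows the eight GPR
   argument words, so (GP_ARG_NUM_REG - align_words) * 4 = (8 - 6) * 4
   = 8 bytes are reported as passed in registers and the rest of the
   argument goes to memory.  */
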
6181\f
6182/* A C expression that indicates when an argument must be passed by
6183 reference. If nonzero for an argument, a copy of that argument is
6184 made in memory and a pointer to the argument is passed instead of
6185 the argument itself. The pointer is passed in whatever way is
6186 appropriate for passing a pointer to that type.
6187
 6188 Under V.4, aggregates and long double are passed by reference.
 6189
 6190 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
 6191 reference unless the AltiVec vector extension ABI is in force.
 6192
6193 As an extension to all ABIs, variable sized types are passed by
6194 reference. */
4697a36c 6195
8cd5a4e0 6196static bool
f676971a 6197rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6198 enum machine_mode mode, const_tree type,
bada2eb8 6199 bool named ATTRIBUTE_UNUSED)
4697a36c 6200{
602ea4d3 6201 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6202 {
6203 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6204 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6205 return 1;
6206 }
6207
6208 if (!type)
6209 return 0;
4697a36c 6210
bada2eb8
DE
6211 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6212 {
6213 if (TARGET_DEBUG_ARG)
6214 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6215 return 1;
6216 }
6217
6218 if (int_size_in_bytes (type) < 0)
6219 {
6220 if (TARGET_DEBUG_ARG)
6221 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6222 return 1;
6223 }
6224
6225 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6226 modes only exist for GCC vector types if -maltivec. */
6227 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6228 {
6229 if (TARGET_DEBUG_ARG)
6230 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6231 return 1;
6232 }
b693336b
PB
6233
6234 /* Pass synthetic vectors in memory. */
bada2eb8 6235 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6236 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6237 {
6238 static bool warned_for_pass_big_vectors = false;
6239 if (TARGET_DEBUG_ARG)
6240 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6241 if (!warned_for_pass_big_vectors)
6242 {
d4ee4d25 6243 warning (0, "GCC vector passed by reference: "
6244 "non-standard ABI extension with no compatibility guarantee");
6245 warned_for_pass_big_vectors = true;
6246 }
6247 return 1;
6248 }
6249
 6250 return 0;
 6251}
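
/* Illustrative sketch of how the tests above classify a few
   hypothetical parameter types.  */
#if 0
struct big { int a[8]; };   /* V.4: aggregate, passed by reference.  */
/* A 128-bit IEEE long double (TFmode with TARGET_IEEEQUAD) under V.4,
   a variable-size type, or a 32-byte synthetic GCC vector is likewise
   passed by reference; the last case also triggers the one-time
   "non-standard ABI extension" warning emitted above.  */
#endif
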
5985c7a6
FJ
6252
6253static void
2d9db8eb 6254rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6255{
6256 int i;
6257 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6258
6259 if (nregs == 0)
6260 return;
6261
c4ad648e 6262 for (i = 0; i < nregs; i++)
5985c7a6 6263 {
9390387d 6264 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6265 if (reload_completed)
c4ad648e
AM
6266 {
6267 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6268 tem = NULL_RTX;
6269 else
6270 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6271 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6272 }
5985c7a6
FJ
6273 else
6274 tem = replace_equiv_address (tem, XEXP (tem, 0));
6275
37409796 6276 gcc_assert (tem);
5985c7a6
FJ
6277
6278 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6279 }
6280}
 6281\f
 6282/* Perform any needed actions for a function that is receiving a
 6283 variable number of arguments.
 6284
6285 CUM is as above.
6286
6287 MODE and TYPE are the mode and type of the current parameter.
6288
6289 PRETEND_SIZE is a variable that should be set to the amount of stack
6290 that must be pushed by the prolog to pretend that our caller pushed
6291 it.
6292
6293 Normally, this macro will push all remaining incoming registers on the
6294 stack and set PRETEND_SIZE to the length of the registers pushed. */
6295
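
/* Illustrative (hypothetical) case for the function below under the
   V.4 ABI: for
       int f (int a, ...);
   the named argument consumes the first GPR argument word, so the
   remaining GPR argument registers (and, with hard float, the unused
   FP argument registers) may need to be dumped into the register save
   area, trimmed according to cfun->va_list_gpr_size and
   cfun->va_list_fpr_size as computed below.  */
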
c6e8c921 6296static void
f676971a 6297setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6298 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6299 int no_rtl)
4697a36c 6300{
4cc833b7
RH
6301 CUMULATIVE_ARGS next_cum;
6302 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6303 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6304 int first_reg_offset;
6305 alias_set_type set;
4697a36c 6306
f31bf321 6307 /* Skip the last named argument. */
d34c5b80 6308 next_cum = *cum;
594a51fe 6309 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6310
f607bc57 6311 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6312 {
5b667039
JJ
6313 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6314
60e2d0ca 6315 if (! no_rtl)
5b667039
JJ
6316 {
6317 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6318 HOST_WIDE_INT offset = 0;
6319
6320 /* Try to optimize the size of the varargs save area.
6321 The ABI requires that ap.reg_save_area is doubleword
6322 aligned, but we don't need to allocate space for all
6323 the bytes, only those into which we will actually save
6324 anything. */
6325 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6326 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6327 if (TARGET_HARD_FLOAT && TARGET_FPRS
6328 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6329 && cfun->va_list_fpr_size)
6330 {
6331 if (gpr_reg_num)
6332 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6333 * UNITS_PER_FP_WORD;
6334 if (cfun->va_list_fpr_size
6335 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6336 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6337 else
6338 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6339 * UNITS_PER_FP_WORD;
6340 }
6341 if (gpr_reg_num)
6342 {
6343 offset = -((first_reg_offset * reg_size) & ~7);
6344 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6345 {
6346 gpr_reg_num = cfun->va_list_gpr_size;
6347 if (reg_size == 4 && (first_reg_offset & 1))
6348 gpr_reg_num++;
6349 }
6350 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6351 }
6352 else if (fpr_size)
6353 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6354 * UNITS_PER_FP_WORD
6355 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6356
5b667039
JJ
6357 if (gpr_size + fpr_size)
6358 {
6359 rtx reg_save_area
6360 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6361 gcc_assert (GET_CODE (reg_save_area) == MEM);
6362 reg_save_area = XEXP (reg_save_area, 0);
6363 if (GET_CODE (reg_save_area) == PLUS)
6364 {
6365 gcc_assert (XEXP (reg_save_area, 0)
6366 == virtual_stack_vars_rtx);
6367 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6368 offset += INTVAL (XEXP (reg_save_area, 1));
6369 }
6370 else
6371 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6372 }
6373
6374 cfun->machine->varargs_save_offset = offset;
6375 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6376 }
4697a36c 6377 }
60e2d0ca 6378 else
4697a36c 6379 {
d34c5b80 6380 first_reg_offset = next_cum.words;
4cc833b7 6381 save_area = virtual_incoming_args_rtx;
4697a36c 6382
fe984136 6383 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6384 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6385 }
4697a36c 6386
dfafc897 6387 set = get_varargs_alias_set ();
9d30f3c1
JJ
6388 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6389 && cfun->va_list_gpr_size)
4cc833b7 6390 {
9d30f3c1
JJ
6391 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6392
6393 if (va_list_gpr_counter_field)
6394 {
6395 /* V4 va_list_gpr_size counts number of registers needed. */
6396 if (nregs > cfun->va_list_gpr_size)
6397 nregs = cfun->va_list_gpr_size;
6398 }
6399 else
6400 {
6401 /* char * va_list instead counts number of bytes needed. */
6402 if (nregs > cfun->va_list_gpr_size / reg_size)
6403 nregs = cfun->va_list_gpr_size / reg_size;
6404 }
6405
dfafc897 6406 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6407 plus_constant (save_area,
13e2e16e
DE
6408 first_reg_offset * reg_size));
6409 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6410 set_mem_alias_set (mem, set);
8ac61af7 6411 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6412
f676971a 6413 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6414 nregs);
4697a36c
MM
6415 }
6416
4697a36c 6417 /* Save FP registers if needed. */
f607bc57 6418 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6419 && TARGET_HARD_FLOAT && TARGET_FPRS
6420 && ! no_rtl
9d30f3c1
JJ
6421 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6422 && cfun->va_list_fpr_size)
4697a36c 6423 {
9d30f3c1 6424 int fregno = next_cum.fregno, nregs;
9ebbca7d 6425 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6426 rtx lab = gen_label_rtx ();
5b667039
JJ
6427 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6428 * UNITS_PER_FP_WORD);
4697a36c 6429
c4ad648e
AM
6430 emit_jump_insn
6431 (gen_rtx_SET (VOIDmode,
6432 pc_rtx,
6433 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6434 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6435 const0_rtx),
39403d82 6436 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6437 pc_rtx)));
6438
9d30f3c1
JJ
6439 for (nregs = 0;
6440 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6441 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6442 {
5496b36f 6443 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6444 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6445 set_mem_alias_set (mem, set);
94ff898d 6446 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6447 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6448 }
4cc833b7
RH
6449
6450 emit_label (lab);
4697a36c 6451 }
4697a36c 6452}
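/* For the V4 ABI, the register save area filled in above is laid out
   roughly like this (offsets from the start of the area; this is an
   illustration only, the code above is authoritative):

	0 ..				unnamed GP argument registers,
					reg_size bytes each
	GP_ARG_NUM_REG * reg_size ..	unnamed FP argument registers,
					UNITS_PER_FP_WORD bytes each

   Only as much of the area is allocated as va_list_gpr_size and
   va_list_fpr_size say a later va_arg can actually reach.  */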
4697a36c 6453
dfafc897 6454/* Create the va_list data type. */
2c4974b7 6455
c35d187f
RH
6456static tree
6457rs6000_build_builtin_va_list (void)
dfafc897 6458{
64c2816f 6459 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6460
9ebbca7d
GK
6461 /* For AIX, prefer 'char *' because that's what the system
6462 header files like. */
f607bc57 6463 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6464 return build_pointer_type (char_type_node);
dfafc897 6465
f1e639b1 6466 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6467 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6468
f676971a 6469 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6470 unsigned_char_type_node);
f676971a 6471 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6472 unsigned_char_type_node);
64c2816f
DT
6473 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6474 every user file. */
6475 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6476 short_unsigned_type_node);
dfafc897
FS
6477 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6478 ptr_type_node);
6479 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6480 ptr_type_node);
6481
9d30f3c1
JJ
6482 va_list_gpr_counter_field = f_gpr;
6483 va_list_fpr_counter_field = f_fpr;
6484
dfafc897
FS
6485 DECL_FIELD_CONTEXT (f_gpr) = record;
6486 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6487 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6488 DECL_FIELD_CONTEXT (f_ovf) = record;
6489 DECL_FIELD_CONTEXT (f_sav) = record;
6490
bab45a51
FS
6491 TREE_CHAIN (record) = type_decl;
6492 TYPE_NAME (record) = type_decl;
dfafc897
FS
6493 TYPE_FIELDS (record) = f_gpr;
6494 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6495 TREE_CHAIN (f_fpr) = f_res;
6496 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6497 TREE_CHAIN (f_ovf) = f_sav;
6498
6499 layout_type (record);
6500
6501 /* The correct type is an array type of one element. */
6502 return build_array_type (record, build_index_type (size_zero_node));
6503}
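/* For illustration only: under the V4 ABI the record built above
   corresponds roughly to this C declaration, with va_list being an
   array of one such element (the other ABIs simply use 'char *'):

	typedef struct __va_list_tag {
	  unsigned char gpr;
	  unsigned char fpr;
	  unsigned short reserved;
	  void *overflow_arg_area;
	  void *reg_save_area;
	} __va_list_tag;

   gpr and fpr count the GP and FP argument registers already
   consumed, overflow_arg_area points at the next stack-passed
   argument, and reg_save_area at the block spilled by
   setup_incoming_varargs.  */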
6504
6505/* Implement va_start. */
6506
d7bd8aeb 6507static void
a2369ed3 6508rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6509{
dfafc897 6510 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6511 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6512 tree gpr, fpr, ovf, sav, t;
2c4974b7 6513
dfafc897 6514 /* Only SVR4 needs something special. */
f607bc57 6515 if (DEFAULT_ABI != ABI_V4)
dfafc897 6516 {
e5faf155 6517 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6518 return;
6519 }
6520
973a648b 6521 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6522 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6523 f_res = TREE_CHAIN (f_fpr);
6524 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6525 f_sav = TREE_CHAIN (f_ovf);
6526
872a65b5 6527 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6528 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6529 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6530 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6531 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6532
6533 /* Count number of gp and fp argument registers used. */
4cc833b7 6534 words = current_function_args_info.words;
987732e0
DE
6535 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6536 GP_ARG_NUM_REG);
6537 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6538 FP_ARG_NUM_REG);
dfafc897
FS
6539
6540 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6541 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6542 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6543 words, n_gpr, n_fpr);
dfafc897 6544
9d30f3c1
JJ
6545 if (cfun->va_list_gpr_size)
6546 {
07beea0d 6547 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6548 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6549 TREE_SIDE_EFFECTS (t) = 1;
6550 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6551 }
58c8adc1 6552
9d30f3c1
JJ
6553 if (cfun->va_list_fpr_size)
6554 {
07beea0d 6555 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6556 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6557 TREE_SIDE_EFFECTS (t) = 1;
6558 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6559 }
dfafc897
FS
6560
6561 /* Find the overflow area. */
6562 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6563 if (words != 0)
5be014d5
AP
6564 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6565 size_int (words * UNITS_PER_WORD));
07beea0d 6566 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6567 TREE_SIDE_EFFECTS (t) = 1;
6568 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6569
9d30f3c1
JJ
6570 /* If there were no va_arg invocations, don't set up the register
6571 save area. */
6572 if (!cfun->va_list_gpr_size
6573 && !cfun->va_list_fpr_size
6574 && n_gpr < GP_ARG_NUM_REG
6575 && n_fpr < FP_ARG_V4_MAX_REG)
6576 return;
6577
dfafc897
FS
6578 /* Find the register save area. */
6579 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6580 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6581 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6582 size_int (cfun->machine->varargs_save_offset));
07beea0d 6583 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6584 TREE_SIDE_EFFECTS (t) = 1;
6585 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6586}
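/* In other words, for V4 the trees built above initialize the
   va_list roughly like this (pseudo-C, for illustration only):

	ap->gpr = <number of named GP argument registers used>;
	ap->fpr = <number of named FP argument registers used>;
	ap->overflow_arg_area = <incoming argument area>
				+ words * UNITS_PER_WORD;
	ap->reg_save_area = <frame base>
			    + cfun->machine->varargs_save_offset;

   The reg_save_area assignment is skipped in the trivial case where
   no va_arg can ever reach the register save area.  */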
6587
6588/* Implement va_arg. */
6589
23a60a04
JM
6590tree
6591rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6592{
cd3ce9b4
JM
6593 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6594 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6595 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6596 tree lab_false, lab_over, addr;
6597 int align;
6598 tree ptrtype = build_pointer_type (type);
7393f7f8 6599 int regalign = 0;
cd3ce9b4 6600
08b0dc1b
RH
6601 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6602 {
6603 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6604 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6605 }
6606
cd3ce9b4
JM
6607 if (DEFAULT_ABI != ABI_V4)
6608 {
08b0dc1b 6609 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6610 {
6611 tree elem_type = TREE_TYPE (type);
6612 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6613 int elem_size = GET_MODE_SIZE (elem_mode);
6614
6615 if (elem_size < UNITS_PER_WORD)
6616 {
23a60a04 6617 tree real_part, imag_part;
cd3ce9b4
JM
6618 tree post = NULL_TREE;
6619
23a60a04
JM
6620 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6621 &post);
6622 /* Copy the value into a temporary, lest the formal temporary
6623 be reused out from under us. */
6624 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6625 append_to_statement_list (post, pre_p);
6626
23a60a04
JM
6627 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6628 post_p);
cd3ce9b4 6629
47a25a46 6630 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6631 }
6632 }
6633
23a60a04 6634 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6635 }
6636
6637 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6638 f_fpr = TREE_CHAIN (f_gpr);
6639 f_res = TREE_CHAIN (f_fpr);
6640 f_ovf = TREE_CHAIN (f_res);
6641 f_sav = TREE_CHAIN (f_ovf);
6642
872a65b5 6643 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6644 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6645 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6646 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6647 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6648
6649 size = int_size_in_bytes (type);
6650 rsize = (size + 3) / 4;
6651 align = 1;
6652
08b0dc1b 6653 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6654 && (TYPE_MODE (type) == SFmode
6655 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6656 || TYPE_MODE (type) == TFmode
6657 || TYPE_MODE (type) == DDmode
6658 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6659 {
6660 /* FP args go in FP registers, if present. */
cd3ce9b4 6661 reg = fpr;
602ea4d3 6662 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6663 sav_ofs = 8*4;
6664 sav_scale = 8;
602ea4d3 6665 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6666 align = 8;
6667 }
6668 else
6669 {
6670 /* Otherwise into GP registers. */
cd3ce9b4
JM
6671 reg = gpr;
6672 n_reg = rsize;
6673 sav_ofs = 0;
6674 sav_scale = 4;
6675 if (n_reg == 2)
6676 align = 8;
6677 }
6678
6679 /* Pull the value out of the saved registers.... */
6680
6681 lab_over = NULL;
6682 addr = create_tmp_var (ptr_type_node, "addr");
6683 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6684
6685 /* AltiVec vectors never go in registers when -mabi=altivec. */
6686 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6687 align = 16;
6688 else
6689 {
6690 lab_false = create_artificial_label ();
6691 lab_over = create_artificial_label ();
6692
6693 /* Long long and SPE vectors are aligned in the registers.
6694 As is any other 2-GPR item, such as complex int, due to a
6695 historical mistake. */
6696 u = reg;
602ea4d3 6697 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6698 {
7393f7f8 6699 regalign = 1;
cd3ce9b4 6700 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6701 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6702 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6703 }
7393f7f8
BE
6704 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6705 reg number is 0 for f1, so we want to make it odd. */
6706 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6707 {
6708 regalign = 1;
6709 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6710 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6711 }
cd3ce9b4 6712
95674810 6713 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6714 t = build2 (GE_EXPR, boolean_type_node, u, t);
6715 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6716 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6717 gimplify_and_add (t, pre_p);
6718
6719 t = sav;
6720 if (sav_ofs)
5be014d5 6721 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6722
8fb632eb
ZD
6723 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6724 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6725 u = fold_convert (sizetype, u);
6726 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6727 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6728
07beea0d 6729 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6730 gimplify_and_add (t, pre_p);
6731
6732 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6733 gimplify_and_add (t, pre_p);
6734
6735 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6736 append_to_statement_list (t, pre_p);
6737
7393f7f8 6738 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6739 {
6740 /* Ensure that we don't find any more args in regs.
7393f7f8 6741 Alignment has already taken care of the special cases. */
07beea0d 6742 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6743 gimplify_and_add (t, pre_p);
6744 }
6745 }
6746
6747 /* ... otherwise out of the overflow area. */
6748
6749 /* Care for on-stack alignment if needed. */
6750 t = ovf;
6751 if (align != 1)
6752 {
5be014d5
AP
6753 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6754 t = fold_convert (sizetype, t);
4a90aeeb 6755 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6756 size_int (-align));
6757 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6758 }
6759 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6760
07beea0d 6761 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6762 gimplify_and_add (u, pre_p);
6763
5be014d5 6764 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6765 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6766 gimplify_and_add (t, pre_p);
6767
6768 if (lab_over)
6769 {
6770 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6771 append_to_statement_list (t, pre_p);
6772 }
6773
0cfbc62b
JM
6774 if (STRICT_ALIGNMENT
6775 && (TYPE_ALIGN (type)
6776 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6777 {
6778 /* The value (of type complex double, for example) may not be
6779 aligned in memory in the saved registers, so copy via a
6780 temporary. (This is the same code as used for SPARC.) */
6781 tree tmp = create_tmp_var (type, "va_arg_tmp");
6782 tree dest_addr = build_fold_addr_expr (tmp);
6783
5039610b
SL
6784 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6785 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6786
6787 gimplify_and_add (copy, pre_p);
6788 addr = dest_addr;
6789 }
6790
08b0dc1b 6791 addr = fold_convert (ptrtype, addr);
872a65b5 6792 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6793}
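/* Roughly, the GIMPLE built above for a V4 va_arg behaves like the
   following pseudo-C (an illustration only; the regalign and
   STRICT_ALIGNMENT special cases are omitted):

	if (reg + n_reg <= 8)
	  {
	    addr = sav + sav_ofs + reg * sav_scale;	-- from save area
	    reg += n_reg;
	  }
	else
	  {
	    reg = 8;
	    addr = (ovf + align - 1) & -align;		-- from overflow area
	    ovf = addr + size;
	  }
	return *(type *) addr;

   Here 'reg' is the gpr or fpr counter chosen above, 'sav' is
   reg_save_area and 'ovf' is overflow_arg_area.  */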
6794
0ac081f6
AH
6795/* Builtins. */
6796
58646b77
PB
6797static void
6798def_builtin (int mask, const char *name, tree type, int code)
6799{
96038623 6800 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6801 {
6802 if (rs6000_builtin_decls[code])
6803 abort ();
6804
6805 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6806 add_builtin_function (name, type, code, BUILT_IN_MD,
6807 NULL, NULL_TREE);
58646b77
PB
6808 }
6809}
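/* def_builtin is driven by the bdesc_* tables that follow; each entry
   supplies a target mask, an insn code, the user-visible name and the
   builtin code.  For example, the first bdesc_3arg entry registers
   __builtin_altivec_vmaddfp whenever MASK_ALTIVEC is set in
   target_flags (or when TARGET_PAIRED_FLOAT enables every builtin
   regardless of its mask).  */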
0ac081f6 6810
24408032
AH
6811/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6812
2212663f 6813static const struct builtin_description bdesc_3arg[] =
24408032
AH
6814{
6815 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6816 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6817 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6818 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6819 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6820 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6821 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6822 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6823 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6824 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6825 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6826 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6827 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6828 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6829 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6830 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6831 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6832 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6833 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6834 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6835 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6836 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6837 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6838
6839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6854
6855 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6856 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6857 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6858 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6859 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6860 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6861 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6862 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6863 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6864};
2212663f 6865
95385cbb
AH
6866/* DST operations: void foo (void *, const int, const char). */
6867
6868static const struct builtin_description bdesc_dst[] =
6869{
6870 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6871 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6872 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6873 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6874
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6879};
6880
2212663f 6881/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6882
a3170dc6 6883static struct builtin_description bdesc_2arg[] =
0ac081f6 6884{
f18c054f
DB
6885 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6886 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6887 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6888 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6889 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6890 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6891 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6892 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6893 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6894 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6895 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6896 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6897 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6898 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6899 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6900 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6901 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6902 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6903 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6904 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6905 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6906 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6907 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6908 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6909 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6910 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6911 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6912 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6913 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6914 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6915 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6916 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6917 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6918 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6919 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6920 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6921 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6922 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6923 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6924 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6925 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6926 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6927 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6928 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6929 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6930 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6931 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6932 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6933 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6934 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6935 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6936 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6937 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6938 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6939 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6940 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6941 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6942 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6943 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6944 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6945 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6946 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6947 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6948 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6949 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6950 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6951 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6952 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6953 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6954 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6955 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6957 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6958 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6959 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6968 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6969 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6970 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6971 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6972 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6973 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6974 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6975 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6976 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6977 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6978 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6979 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6980 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6981 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6982 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6983 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6984 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6985 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6986 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6987 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6988 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6989 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6990 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6991 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6992 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6993 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6994 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6995 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6996
58646b77
PB
6997 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6998 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6999 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7000 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7001 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7002 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7003 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7004 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7005 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7006 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7007 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7009 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7010 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7011 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7012 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7013 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7014 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7015 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7016 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7017 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7018 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7019 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7020 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7021 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7022 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7023 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7024 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7025 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7026 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7027 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7028 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7029 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7030 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7031 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7032 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7033 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7034 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7051 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7052 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7053 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7054 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7055 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7056 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7057 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7058 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7059 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7060 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7061 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7062 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7063 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7064 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7065 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7066 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7067 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7068 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7069 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7070 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7076 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7124
96038623
DE
7125 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7126 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7127 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7128 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7129 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7130 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7131 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7132 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7133 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7134 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7135
a3170dc6
AH
7136 /* Place-holder; leave as the first SPE builtin. */
7137 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7138 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7139 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7140 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7141 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7142 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7143 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7144 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7145 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7146 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7147 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7148 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7149 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7150 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7151 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7152 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7153 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7154 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7155 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7156 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7157 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7158 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7159 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7160 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7161 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7162 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7163 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7164 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7165 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7166 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7167 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7168 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7169 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7170 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7171 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7172 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7173 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7174 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7175 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7176 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7177 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7178 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7179 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7180 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7181 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7182 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7183 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7184 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7185 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7186 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7187 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7188 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7189 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7190 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7191 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7192 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7193 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7194 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7195 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7196 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7197 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7198 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7199 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7200 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7201 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7202 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7203 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7204 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7205 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7206 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7207 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7208 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7209 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7210 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7211 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7212 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7213 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7214 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7215 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7216 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7217 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7218 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7219 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7220 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7221 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7222 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7223 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7224 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7225 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7226 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7227 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7228 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7229 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7230 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7231 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7232 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7233 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7234 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7235 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7236 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7237 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7238 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7239 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7240 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7241 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7242 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7243 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7244 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7245 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7246
7247 /* SPE binary operations expecting a 5-bit unsigned literal. */
7248 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7249
7250 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7251 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7252 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7253 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7254 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7255 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7256 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7257 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7258 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7259 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7260 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7261 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7262 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7263 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7264 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7265 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7266 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7267 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7268 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7269 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7270 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7271 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7272 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7273 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7274 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7275 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7276
7277 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7278 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7279};
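/* A caller-side sketch of the 5-bit-literal entries above.  This is
   illustrative user code for an SPE target, not part of this file; the
   wrapper name is made up, and the assumption that the immediate is the
   second operand follows the check in rs6000_expand_binop_builtin below.  */
#if 0
#include <spe.h>

__ev64_opaque__
add_three_to_each_word (__ev64_opaque__ v)
{
  /* The second operand must be a compile-time literal in 0..31; anything
     else is rejected with "argument 2 must be a 5-bit unsigned literal".  */
  return __builtin_spe_evaddiw (v, 3);
}
#endif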
7280
7281/* AltiVec predicates. */
7282
7283struct builtin_description_predicates
7284{
7285 const unsigned int mask;
7286 const enum insn_code icode;
7287 const char *opcode;
7288 const char *const name;
7289 const enum rs6000_builtins code;
7290};
7291
7292static const struct builtin_description_predicates bdesc_altivec_preds[] =
7293{
7294 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7304 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7305 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7306 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7307
7308 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7309 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7310 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7311};
24408032 7312
a3170dc6
AH
7313/* SPE predicates. */
7314static struct builtin_description bdesc_spe_predicates[] =
7315{
7316 /* Place-holder. Leave as first. */
7317 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7318 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7319 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7320 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7321 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7322 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7323 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7324 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7325 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7326 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7327 /* Place-holder. Leave as last. */
7328 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7329};
7330
7331/* SPE evsel predicates. */
7332static struct builtin_description bdesc_spe_evsel[] =
7333{
7334 /* Place-holder. Leave as first. */
7335 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7336 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7337 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7338 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7339 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7340 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7341 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7342 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7343 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7344 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7345 /* Place-holder. Leave as last. */
7346 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7347};
7348
96038623
DE
7349/* PAIRED predicates. */
7350static const struct builtin_description bdesc_paired_preds[] =
7351{
7352 /* Place-holder. Leave as first. */
7353 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7354 /* Place-holder. Leave as last. */
7355 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7356};
7357
b6d08ca1 7358/* ABS* operations. */
100c4561
AH
7359
7360static const struct builtin_description bdesc_abs[] =
7361{
7362 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7363 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7364 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7365 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7366 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7367 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7368 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7369};
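/* These entries get their own table because their expander
   (altivec_expand_abs_builtin, below) passes two scratch registers to the
   pattern.  From user code they are reached through vec_abs and vec_abss in
   <altivec.h>; a minimal caller-side sketch (illustrative only, built with
   -maltivec):  */
#if 0
#include <altivec.h>

vector signed int
saturating_magnitude (vector signed int v)
{
  /* vec_abss saturates, so the most negative element maps to INT_MAX
     instead of wrapping back to itself.  */
  return vec_abss (v);
}
#endif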
7370
617e0e1d
DB
7371/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7372 foo (VECa). */
24408032 7373
a3170dc6 7374static struct builtin_description bdesc_1arg[] =
2212663f 7375{
617e0e1d
DB
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7377 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7378 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7379 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7380 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7381 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7382 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7383 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7384 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7385 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7386 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7387 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7388 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7389 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7390 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7391 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7392 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7393
58646b77
PB
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7413
a3170dc6
AH
7414 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7415 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7416 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7417 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7418 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7419 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7420 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7421 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7422 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7423 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7424 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7425 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7426 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7427 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7428 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7429 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7430 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7431 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7432 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7433 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7434 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7435 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7436 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7437 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7438 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7439 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7440 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7441 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7442 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7443 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7444
7445 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7446 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7447
7448 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7449 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7450 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7451 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7452 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7453};
7454
7455static rtx
5039610b 7456rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7457{
7458 rtx pat;
5039610b 7459 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7460 rtx op0 = expand_normal (arg0);
2212663f
DB
7461 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7462 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7463
0559cc77
DE
7464 if (icode == CODE_FOR_nothing)
7465 /* Builtin not supported on this processor. */
7466 return 0;
7467
20e26713
AH
7469 /* If we got invalid arguments, bail out before generating bad rtl. */
7469 if (arg0 == error_mark_node)
9a171fcd 7470 return const0_rtx;
20e26713 7471
0559cc77
DE
7472 if (icode == CODE_FOR_altivec_vspltisb
7473 || icode == CODE_FOR_altivec_vspltish
7474 || icode == CODE_FOR_altivec_vspltisw
7475 || icode == CODE_FOR_spe_evsplatfi
7476 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7477 {
7478 /* Only allow 5-bit *signed* literals. */
b44140e7 7479 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7480 || INTVAL (op0) > 15
7481 || INTVAL (op0) < -16)
b44140e7
AH
7482 {
7483 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7484 return const0_rtx;
b44140e7 7485 }
b44140e7
AH
7486 }
7487
c62f2db5 7488 if (target == 0
2212663f
DB
7489 || GET_MODE (target) != tmode
7490 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7491 target = gen_reg_rtx (tmode);
7492
7493 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7494 op0 = copy_to_mode_reg (mode0, op0);
7495
7496 pat = GEN_FCN (icode) (target, op0);
7497 if (! pat)
7498 return 0;
7499 emit_insn (pat);
0ac081f6 7500
2212663f
DB
7501 return target;
7502}
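/* Caller-side sketch of the 5-bit *signed* literal rule enforced above for
   the splat-immediate builtins (illustrative user code built with -maltivec;
   the function name is made up):  */
#if 0
#include <altivec.h>

vector signed int
splat_minus_one (void)
{
  /* -1 is within the accepted range [-16, 15].  A call such as
     __builtin_altivec_vspltisw (42) is rejected with
     "argument 1 must be a 5-bit signed literal".  */
  return __builtin_altivec_vspltisw (-1);
}
#endif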
ae4b4a02 7503
100c4561 7504static rtx
5039610b 7505altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7506{
7507 rtx pat, scratch1, scratch2;
5039610b 7508 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7509 rtx op0 = expand_normal (arg0);
100c4561
AH
7510 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7511 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7512
7513 /* If we have invalid arguments, bail out before generating bad rtl. */
7514 if (arg0 == error_mark_node)
9a171fcd 7515 return const0_rtx;
100c4561
AH
7516
7517 if (target == 0
7518 || GET_MODE (target) != tmode
7519 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7520 target = gen_reg_rtx (tmode);
7521
7522 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7523 op0 = copy_to_mode_reg (mode0, op0);
7524
7525 scratch1 = gen_reg_rtx (mode0);
7526 scratch2 = gen_reg_rtx (mode0);
7527
7528 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7529 if (! pat)
7530 return 0;
7531 emit_insn (pat);
7532
7533 return target;
7534}
7535
0ac081f6 7536static rtx
5039610b 7537rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7538{
7539 rtx pat;
5039610b
SL
7540 tree arg0 = CALL_EXPR_ARG (exp, 0);
7541 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7542 rtx op0 = expand_normal (arg0);
7543 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7544 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7545 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7546 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7547
0559cc77
DE
7548 if (icode == CODE_FOR_nothing)
7549 /* Builtin not supported on this processor. */
7550 return 0;
7551
20e26713
AH
7552 /* If we got invalid arguments, bail out before generating bad rtl. */
7553 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7554 return const0_rtx;
20e26713 7555
0559cc77
DE
7556 if (icode == CODE_FOR_altivec_vcfux
7557 || icode == CODE_FOR_altivec_vcfsx
7558 || icode == CODE_FOR_altivec_vctsxs
7559 || icode == CODE_FOR_altivec_vctuxs
7560 || icode == CODE_FOR_altivec_vspltb
7561 || icode == CODE_FOR_altivec_vsplth
7562 || icode == CODE_FOR_altivec_vspltw
7563 || icode == CODE_FOR_spe_evaddiw
7564 || icode == CODE_FOR_spe_evldd
7565 || icode == CODE_FOR_spe_evldh
7566 || icode == CODE_FOR_spe_evldw
7567 || icode == CODE_FOR_spe_evlhhesplat
7568 || icode == CODE_FOR_spe_evlhhossplat
7569 || icode == CODE_FOR_spe_evlhhousplat
7570 || icode == CODE_FOR_spe_evlwhe
7571 || icode == CODE_FOR_spe_evlwhos
7572 || icode == CODE_FOR_spe_evlwhou
7573 || icode == CODE_FOR_spe_evlwhsplat
7574 || icode == CODE_FOR_spe_evlwwsplat
7575 || icode == CODE_FOR_spe_evrlwi
7576 || icode == CODE_FOR_spe_evslwi
7577 || icode == CODE_FOR_spe_evsrwis
f5119d10 7578 || icode == CODE_FOR_spe_evsubifw
0559cc77 7579 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7580 {
7581 /* Only allow 5-bit unsigned literals. */
8bb418a3 7582 STRIP_NOPS (arg1);
b44140e7
AH
7583 if (TREE_CODE (arg1) != INTEGER_CST
7584 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7585 {
7586 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7587 return const0_rtx;
b44140e7 7588 }
b44140e7
AH
7589 }
7590
c62f2db5 7591 if (target == 0
0ac081f6
AH
7592 || GET_MODE (target) != tmode
7593 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7594 target = gen_reg_rtx (tmode);
7595
7596 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7597 op0 = copy_to_mode_reg (mode0, op0);
7598 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7599 op1 = copy_to_mode_reg (mode1, op1);
7600
7601 pat = GEN_FCN (icode) (target, op0, op1);
7602 if (! pat)
7603 return 0;
7604 emit_insn (pat);
7605
7606 return target;
7607}
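/* Caller-side sketch of the 5-bit *unsigned* literal rule enforced above,
   which covers the splat, convert and shift-immediate forms listed in the
   check (illustrative user code built with -maltivec; the function name is
   made up):  */
#if 0
#include <altivec.h>

vector float
fixed_point_to_float (vector signed int v)
{
  /* The scale operand of vcfsx must be a literal in 0..31; a variable or
     an out-of-range value is rejected with
     "argument 2 must be a 5-bit unsigned literal".  */
  return __builtin_altivec_vcfsx (v, 16);
}
#endif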
6525c0e7 7608
ae4b4a02 7609static rtx
f676971a 7610altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7611 tree exp, rtx target)
ae4b4a02
AH
7612{
7613 rtx pat, scratch;
5039610b
SL
7614 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7615 tree arg0 = CALL_EXPR_ARG (exp, 1);
7616 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7617 rtx op0 = expand_normal (arg0);
7618 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7619 enum machine_mode tmode = SImode;
7620 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7621 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7622 int cr6_form_int;
7623
7624 if (TREE_CODE (cr6_form) != INTEGER_CST)
7625 {
7626 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7627 return const0_rtx;
ae4b4a02
AH
7628 }
7629 else
7630 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7631
37409796 7632 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7633
7634 /* If we have invalid arguments, bail out before generating bad rtl. */
7635 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7636 return const0_rtx;
ae4b4a02
AH
7637
7638 if (target == 0
7639 || GET_MODE (target) != tmode
7640 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7641 target = gen_reg_rtx (tmode);
7642
7643 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7644 op0 = copy_to_mode_reg (mode0, op0);
7645 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7646 op1 = copy_to_mode_reg (mode1, op1);
7647
7648 scratch = gen_reg_rtx (mode0);
7649
7650 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7651 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7652 if (! pat)
7653 return 0;
7654 emit_insn (pat);
7655
7656 /* The vec_any* and vec_all* predicates use the same opcodes for two
7657 different operations, but the bits in CR6 will be different
7658 depending on what information we want. So we have to play tricks
7659 with CR6 to get the right bits out.
7660
7661 If you think this is disgusting, look at the specs for the
7662 AltiVec predicates. */
7663
c4ad648e
AM
7664 switch (cr6_form_int)
7665 {
7666 case 0:
7667 emit_insn (gen_cr6_test_for_zero (target));
7668 break;
7669 case 1:
7670 emit_insn (gen_cr6_test_for_zero_reverse (target));
7671 break;
7672 case 2:
7673 emit_insn (gen_cr6_test_for_lt (target));
7674 break;
7675 case 3:
7676 emit_insn (gen_cr6_test_for_lt_reverse (target));
7677 break;
7678 default:
7679 error ("argument 1 of __builtin_altivec_predicate is out of range");
7680 break;
7681 }
ae4b4a02
AH
7682
7683 return target;
7684}
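/* Caller-side sketch of the CR6 form argument decoded by the switch above:
   form 2 tests the CR6 "all true" bit and form 1 the inverted "all false"
   bit, which <altivec.h> names __CR6_LT and __CR6_EQ_REV when implementing
   vec_all_* and vec_any_*.  Illustrative user code built with -maltivec,
   assuming the predicate prototype (int, vector signed int, vector signed
   int) registered elsewhere in this file.  */
#if 0
#include <altivec.h>

int
all_words_equal (vector signed int a, vector signed int b)
{
  /* Form 2: the compare held for every element (cr6_test_for_lt).  */
  return __builtin_altivec_vcmpequw_p (2, a, b);
}

int
any_word_equal (vector signed int a, vector signed int b)
{
  /* Form 1: at least one element compared equal
     (cr6_test_for_zero_reverse).  */
  return __builtin_altivec_vcmpequw_p (1, a, b);
}
#endif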
7685
96038623
DE
7686static rtx
7687paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7688{
7689 rtx pat, addr;
7690 tree arg0 = CALL_EXPR_ARG (exp, 0);
7691 tree arg1 = CALL_EXPR_ARG (exp, 1);
7692 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7693 enum machine_mode mode0 = Pmode;
7694 enum machine_mode mode1 = Pmode;
7695 rtx op0 = expand_normal (arg0);
7696 rtx op1 = expand_normal (arg1);
7697
7698 if (icode == CODE_FOR_nothing)
7699 /* Builtin not supported on this processor. */
7700 return 0;
7701
7702 /* If we got invalid arguments, bail out before generating bad rtl. */
7703 if (arg0 == error_mark_node || arg1 == error_mark_node)
7704 return const0_rtx;
7705
7706 if (target == 0
7707 || GET_MODE (target) != tmode
7708 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7709 target = gen_reg_rtx (tmode);
7710
7711 op1 = copy_to_mode_reg (mode1, op1);
7712
7713 if (op0 == const0_rtx)
7714 {
7715 addr = gen_rtx_MEM (tmode, op1);
7716 }
7717 else
7718 {
7719 op0 = copy_to_mode_reg (mode0, op0);
7720 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7721 }
7722
7723 pat = GEN_FCN (icode) (target, addr);
7724
7725 if (! pat)
7726 return 0;
7727 emit_insn (pat);
7728
7729 return target;
7730}
7731
b4a62fa0 7732static rtx
5039610b 7733altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7734{
7735 rtx pat, addr;
5039610b
SL
7736 tree arg0 = CALL_EXPR_ARG (exp, 0);
7737 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7738 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7739 enum machine_mode mode0 = Pmode;
7740 enum machine_mode mode1 = Pmode;
84217346
MD
7741 rtx op0 = expand_normal (arg0);
7742 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7743
7744 if (icode == CODE_FOR_nothing)
7745 /* Builtin not supported on this processor. */
7746 return 0;
7747
7749 /* If we got invalid arguments, bail out before generating bad rtl. */
7749 if (arg0 == error_mark_node || arg1 == error_mark_node)
7750 return const0_rtx;
7751
7752 if (target == 0
7753 || GET_MODE (target) != tmode
7754 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7755 target = gen_reg_rtx (tmode);
7756
f676971a 7757 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7758
7759 if (op0 == const0_rtx)
7760 {
7761 addr = gen_rtx_MEM (tmode, op1);
7762 }
7763 else
7764 {
7765 op0 = copy_to_mode_reg (mode0, op0);
7766 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7767 }
7768
7769 pat = GEN_FCN (icode) (target, addr);
7770
7771 if (! pat)
7772 return 0;
7773 emit_insn (pat);
7774
7775 return target;
7776}
7777
61bea3b0 7778static rtx
5039610b 7779spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7780{
5039610b
SL
7781 tree arg0 = CALL_EXPR_ARG (exp, 0);
7782 tree arg1 = CALL_EXPR_ARG (exp, 1);
7783 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7784 rtx op0 = expand_normal (arg0);
7785 rtx op1 = expand_normal (arg1);
7786 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7787 rtx pat;
7788 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7789 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7790 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7791
7792 /* Invalid arguments; bail out before generating bad rtl. */
7793 if (arg0 == error_mark_node
7794 || arg1 == error_mark_node
7795 || arg2 == error_mark_node)
7796 return const0_rtx;
7797
7798 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7799 op0 = copy_to_mode_reg (mode2, op0);
7800 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7801 op1 = copy_to_mode_reg (mode0, op1);
7802 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7803 op2 = copy_to_mode_reg (mode1, op2);
7804
7805 pat = GEN_FCN (icode) (op1, op2, op0);
7806 if (pat)
7807 emit_insn (pat);
7808 return NULL_RTX;
7809}
7810
96038623
DE
7811static rtx
7812paired_expand_stv_builtin (enum insn_code icode, tree exp)
7813{
7814 tree arg0 = CALL_EXPR_ARG (exp, 0);
7815 tree arg1 = CALL_EXPR_ARG (exp, 1);
7816 tree arg2 = CALL_EXPR_ARG (exp, 2);
7817 rtx op0 = expand_normal (arg0);
7818 rtx op1 = expand_normal (arg1);
7819 rtx op2 = expand_normal (arg2);
7820 rtx pat, addr;
7821 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7822 enum machine_mode mode1 = Pmode;
7823 enum machine_mode mode2 = Pmode;
7824
7825 /* Invalid arguments; bail out before generating bad rtl. */
7826 if (arg0 == error_mark_node
7827 || arg1 == error_mark_node
7828 || arg2 == error_mark_node)
7829 return const0_rtx;
7830
7831 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7832 op0 = copy_to_mode_reg (tmode, op0);
7833
7834 op2 = copy_to_mode_reg (mode2, op2);
7835
7836 if (op1 == const0_rtx)
7837 {
7838 addr = gen_rtx_MEM (tmode, op2);
7839 }
7840 else
7841 {
7842 op1 = copy_to_mode_reg (mode1, op1);
7843 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7844 }
7845
7846 pat = GEN_FCN (icode) (addr, op0);
7847 if (pat)
7848 emit_insn (pat);
7849 return NULL_RTX;
7850}
7851
6525c0e7 7852static rtx
5039610b 7853altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7854{
5039610b
SL
7855 tree arg0 = CALL_EXPR_ARG (exp, 0);
7856 tree arg1 = CALL_EXPR_ARG (exp, 1);
7857 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7858 rtx op0 = expand_normal (arg0);
7859 rtx op1 = expand_normal (arg1);
7860 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7861 rtx pat, addr;
7862 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7863 enum machine_mode mode1 = Pmode;
7864 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7865
7866 /* Invalid arguments; bail out before generating bad rtl. */
7867 if (arg0 == error_mark_node
7868 || arg1 == error_mark_node
7869 || arg2 == error_mark_node)
9a171fcd 7870 return const0_rtx;
6525c0e7 7871
b4a62fa0
SB
7872 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7873 op0 = copy_to_mode_reg (tmode, op0);
7874
f676971a 7875 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7876
7877 if (op1 == const0_rtx)
7878 {
7879 addr = gen_rtx_MEM (tmode, op2);
7880 }
7881 else
7882 {
7883 op1 = copy_to_mode_reg (mode1, op1);
7884 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7885 }
6525c0e7 7886
b4a62fa0 7887 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7888 if (pat)
7889 emit_insn (pat);
7890 return NULL_RTX;
7891}
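/* The lv/stv expanders above build the effective address the same way: a
   single-register MEM when the offset argument is literal zero, otherwise
   base plus byte offset.  Caller-side sketch (illustrative user code built
   with -maltivec) using the vec_st wrapper from <altivec.h>, which reaches
   ALTIVEC_BUILTIN_STVX:  */
#if 0
#include <altivec.h>

void
store_pair (vector signed int v, vector signed int *p)
{
  vec_st (v, 0, p);    /* offset 0: address is just the register holding p */
  vec_st (v, 16, p);   /* byte offset 16: address is p + 16 */
}
#endif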
7892
2212663f 7893static rtx
5039610b 7894rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7895{
7896 rtx pat;
5039610b
SL
7897 tree arg0 = CALL_EXPR_ARG (exp, 0);
7898 tree arg1 = CALL_EXPR_ARG (exp, 1);
7899 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7900 rtx op0 = expand_normal (arg0);
7901 rtx op1 = expand_normal (arg1);
7902 rtx op2 = expand_normal (arg2);
2212663f
DB
7903 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7904 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7905 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7906 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7907
774b5662
DE
7908 if (icode == CODE_FOR_nothing)
7909 /* Builtin not supported on this processor. */
7910 return 0;
7911
20e26713
AH
7912 /* If we got invalid arguments, bail out before generating bad rtl. */
7913 if (arg0 == error_mark_node
7914 || arg1 == error_mark_node
7915 || arg2 == error_mark_node)
9a171fcd 7916 return const0_rtx;
20e26713 7917
aba5fb01
NS
7918 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7919 || icode == CODE_FOR_altivec_vsldoi_v4si
7920 || icode == CODE_FOR_altivec_vsldoi_v8hi
7921 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7922 {
7923 /* Only allow 4-bit unsigned literals. */
8bb418a3 7924 STRIP_NOPS (arg2);
b44140e7
AH
7925 if (TREE_CODE (arg2) != INTEGER_CST
7926 || TREE_INT_CST_LOW (arg2) & ~0xf)
7927 {
7928 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7929 return const0_rtx;
b44140e7 7930 }
b44140e7
AH
7931 }
7932
c62f2db5 7933 if (target == 0
2212663f
DB
7934 || GET_MODE (target) != tmode
7935 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7936 target = gen_reg_rtx (tmode);
7937
7938 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7939 op0 = copy_to_mode_reg (mode0, op0);
7940 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7941 op1 = copy_to_mode_reg (mode1, op1);
7942 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7943 op2 = copy_to_mode_reg (mode2, op2);
7944
49e39588
RE
7945 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
7946 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
7947 else
7948 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
7949 if (! pat)
7950 return 0;
7951 emit_insn (pat);
7952
7953 return target;
7954}
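/* Caller-side sketch of the 4-bit literal rule enforced above for the
   vsldoi variants, which <altivec.h> exposes as vec_sld (illustrative user
   code built with -maltivec; the function name is made up):  */
#if 0
#include <altivec.h>

vector signed int
rotate_left_one_word (vector signed int a)
{
  /* The byte shift count must be a literal in 0..15; anything else is
     rejected with "argument 3 must be a 4-bit unsigned literal".  */
  return vec_sld (a, a, 4);
}
#endif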
92898235 7955
3a9b8c7e 7956/* Expand the lvx builtins. */
0ac081f6 7957static rtx
a2369ed3 7958altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7959{
5039610b 7960 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7961 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7962 tree arg0;
7963 enum machine_mode tmode, mode0;
7c3abc73 7964 rtx pat, op0;
3a9b8c7e 7965 enum insn_code icode;
92898235 7966
0ac081f6
AH
7967 switch (fcode)
7968 {
f18c054f 7969 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7970 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7971 break;
f18c054f 7972 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7973 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7974 break;
7975 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7976 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7977 break;
7978 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7979 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7980 break;
7981 default:
7982 *expandedp = false;
7983 return NULL_RTX;
7984 }
0ac081f6 7985
3a9b8c7e 7986 *expandedp = true;
f18c054f 7987
5039610b 7988 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7989 op0 = expand_normal (arg0);
3a9b8c7e
AH
7990 tmode = insn_data[icode].operand[0].mode;
7991 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7992
3a9b8c7e
AH
7993 if (target == 0
7994 || GET_MODE (target) != tmode
7995 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7996 target = gen_reg_rtx (tmode);
24408032 7997
3a9b8c7e
AH
7998 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7999 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8000
3a9b8c7e
AH
8001 pat = GEN_FCN (icode) (target, op0);
8002 if (! pat)
8003 return 0;
8004 emit_insn (pat);
8005 return target;
8006}
f18c054f 8007
3a9b8c7e
AH
8008/* Expand the stvx builtins. */
8009static rtx
f676971a 8010altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8011 bool *expandedp)
3a9b8c7e 8012{
5039610b 8013 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8014 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8015 tree arg0, arg1;
8016 enum machine_mode mode0, mode1;
7c3abc73 8017 rtx pat, op0, op1;
3a9b8c7e 8018 enum insn_code icode;
f18c054f 8019
3a9b8c7e
AH
8020 switch (fcode)
8021 {
8022 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8023 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8024 break;
8025 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8026 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8027 break;
8028 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8029 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8030 break;
8031 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8032 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8033 break;
8034 default:
8035 *expandedp = false;
8036 return NULL_RTX;
8037 }
24408032 8038
5039610b
SL
8039 arg0 = CALL_EXPR_ARG (exp, 0);
8040 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8041 op0 = expand_normal (arg0);
8042 op1 = expand_normal (arg1);
3a9b8c7e
AH
8043 mode0 = insn_data[icode].operand[0].mode;
8044 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8045
3a9b8c7e
AH
8046 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8047 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8048 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8049 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8050
3a9b8c7e
AH
8051 pat = GEN_FCN (icode) (op0, op1);
8052 if (pat)
8053 emit_insn (pat);
f18c054f 8054
3a9b8c7e
AH
8055 *expandedp = true;
8056 return NULL_RTX;
8057}
f18c054f 8058
3a9b8c7e
AH
8059/* Expand the dst builtins. */
8060static rtx
f676971a 8061altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8062 bool *expandedp)
3a9b8c7e 8063{
5039610b 8064 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8065 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8066 tree arg0, arg1, arg2;
8067 enum machine_mode mode0, mode1, mode2;
7c3abc73 8068 rtx pat, op0, op1, op2;
586de218 8069 const struct builtin_description *d;
a3170dc6 8070 size_t i;
f18c054f 8071
3a9b8c7e 8072 *expandedp = false;
f18c054f 8073
3a9b8c7e 8074 /* Handle DST variants. */
586de218 8075 d = bdesc_dst;
3a9b8c7e
AH
8076 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8077 if (d->code == fcode)
8078 {
5039610b
SL
8079 arg0 = CALL_EXPR_ARG (exp, 0);
8080 arg1 = CALL_EXPR_ARG (exp, 1);
8081 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8082 op0 = expand_normal (arg0);
8083 op1 = expand_normal (arg1);
8084 op2 = expand_normal (arg2);
3a9b8c7e
AH
8085 mode0 = insn_data[d->icode].operand[0].mode;
8086 mode1 = insn_data[d->icode].operand[1].mode;
8087 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8088
3a9b8c7e
AH
8089 /* Invalid arguments; bail out before generating bad rtl. */
8090 if (arg0 == error_mark_node
8091 || arg1 == error_mark_node
8092 || arg2 == error_mark_node)
8093 return const0_rtx;
f18c054f 8094
86e7df90 8095 *expandedp = true;
8bb418a3 8096 STRIP_NOPS (arg2);
3a9b8c7e
AH
8097 if (TREE_CODE (arg2) != INTEGER_CST
8098 || TREE_INT_CST_LOW (arg2) & ~0x3)
8099 {
9e637a26 8100 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8101 return const0_rtx;
8102 }
f18c054f 8103
3a9b8c7e 8104 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8105 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8106 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8107 op1 = copy_to_mode_reg (mode1, op1);
24408032 8108
3a9b8c7e
AH
8109 pat = GEN_FCN (d->icode) (op0, op1, op2);
8110 if (pat != 0)
8111 emit_insn (pat);
f18c054f 8112
3a9b8c7e
AH
8113 return NULL_RTX;
8114 }
f18c054f 8115
3a9b8c7e
AH
8116 return NULL_RTX;
8117}
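/* Caller-side sketch of the 2-bit literal rule enforced above for the data
   stream touch (dst*) builtins, reached through vec_dst and friends in
   <altivec.h> (illustrative user code built with -maltivec):  */
#if 0
#include <altivec.h>

void
start_prefetch_stream (const int *p, int control)
{
  /* The last operand is the stream tag; it must be a literal in 0..3,
     otherwise the expander reports that the argument must be a 2-bit
     unsigned literal.  */
  vec_dst (p, control, 0);
}
#endif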
24408032 8118
7a4eca66
DE
8119/* Expand vec_init builtin. */
8120static rtx
5039610b 8121altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8122{
8123 enum machine_mode tmode = TYPE_MODE (type);
8124 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8125 int i, n_elt = GET_MODE_NUNITS (tmode);
8126 rtvec v = rtvec_alloc (n_elt);
8127
8128 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8129 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8130
5039610b 8131 for (i = 0; i < n_elt; ++i)
7a4eca66 8132 {
5039610b 8133 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8134 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8135 }
8136
7a4eca66
DE
8137 if (!target || !register_operand (target, tmode))
8138 target = gen_reg_rtx (tmode);
8139
8140 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8141 return target;
8142}
8143
8144/* Return the integer constant in ARG. Constrain it to be in the range
8145 of the subparts of VEC_TYPE; issue an error if not. */
8146
8147static int
8148get_element_number (tree vec_type, tree arg)
8149{
8150 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8151
8152 if (!host_integerp (arg, 1)
8153 || (elt = tree_low_cst (arg, 1), elt > max))
8154 {
8155 error ("selector must be an integer constant in the range 0..%wi", max);
8156 return 0;
8157 }
8158
8159 return elt;
8160}
8161
8162/* Expand vec_set builtin. */
8163static rtx
5039610b 8164altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8165{
8166 enum machine_mode tmode, mode1;
8167 tree arg0, arg1, arg2;
8168 int elt;
8169 rtx op0, op1;
8170
5039610b
SL
8171 arg0 = CALL_EXPR_ARG (exp, 0);
8172 arg1 = CALL_EXPR_ARG (exp, 1);
8173 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8174
8175 tmode = TYPE_MODE (TREE_TYPE (arg0));
8176 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8177 gcc_assert (VECTOR_MODE_P (tmode));
8178
8179 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8180 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8181 elt = get_element_number (TREE_TYPE (arg0), arg2);
8182
8183 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8184 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8185
8186 op0 = force_reg (tmode, op0);
8187 op1 = force_reg (mode1, op1);
8188
8189 rs6000_expand_vector_set (op0, op1, elt);
8190
8191 return op0;
8192}
8193
8194/* Expand vec_ext builtin. */
8195static rtx
5039610b 8196altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8197{
8198 enum machine_mode tmode, mode0;
8199 tree arg0, arg1;
8200 int elt;
8201 rtx op0;
8202
5039610b
SL
8203 arg0 = CALL_EXPR_ARG (exp, 0);
8204 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8205
84217346 8206 op0 = expand_normal (arg0);
7a4eca66
DE
8207 elt = get_element_number (TREE_TYPE (arg0), arg1);
8208
8209 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8210 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8211 gcc_assert (VECTOR_MODE_P (mode0));
8212
8213 op0 = force_reg (mode0, op0);
8214
8215 if (optimize || !target || !register_operand (target, tmode))
8216 target = gen_reg_rtx (tmode);
8217
8218 rs6000_expand_vector_extract (target, op0, elt);
8219
8220 return target;
8221}
8222
3a9b8c7e
AH
8223/* Expand the builtin in EXP and store the result in TARGET. Store
8224 true in *EXPANDEDP if we found a builtin to expand. */
8225static rtx
a2369ed3 8226altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8227{
586de218
KG
8228 const struct builtin_description *d;
8229 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8230 size_t i;
8231 enum insn_code icode;
5039610b 8232 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8233 tree arg0;
8234 rtx op0, pat;
8235 enum machine_mode tmode, mode0;
3a9b8c7e 8236 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8237
58646b77
PB
8238 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8239 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8240 {
8241 *expandedp = true;
ea40ba9c 8242 error ("unresolved overload for AltiVec builtin %qF", fndecl);
58646b77
PB
8243 return const0_rtx;
8244 }
8245
3a9b8c7e
AH
8246 target = altivec_expand_ld_builtin (exp, target, expandedp);
8247 if (*expandedp)
8248 return target;
0ac081f6 8249
3a9b8c7e
AH
8250 target = altivec_expand_st_builtin (exp, target, expandedp);
8251 if (*expandedp)
8252 return target;
8253
8254 target = altivec_expand_dst_builtin (exp, target, expandedp);
8255 if (*expandedp)
8256 return target;
8257
8258 *expandedp = true;
95385cbb 8259
3a9b8c7e
AH
8260 switch (fcode)
8261 {
6525c0e7 8262 case ALTIVEC_BUILTIN_STVX:
5039610b 8263 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8264 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8265 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8266 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8267 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8268 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8269 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8270 case ALTIVEC_BUILTIN_STVXL:
5039610b 8271 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8272
95385cbb
AH
8273 case ALTIVEC_BUILTIN_MFVSCR:
8274 icode = CODE_FOR_altivec_mfvscr;
8275 tmode = insn_data[icode].operand[0].mode;
8276
8277 if (target == 0
8278 || GET_MODE (target) != tmode
8279 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8280 target = gen_reg_rtx (tmode);
f676971a 8281
95385cbb 8282 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8283 if (! pat)
8284 return 0;
8285 emit_insn (pat);
95385cbb
AH
8286 return target;
8287
8288 case ALTIVEC_BUILTIN_MTVSCR:
8289 icode = CODE_FOR_altivec_mtvscr;
5039610b 8290 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8291 op0 = expand_normal (arg0);
95385cbb
AH
8292 mode0 = insn_data[icode].operand[0].mode;
8293
8294 /* If we got invalid arguments, bail out before generating bad rtl. */
8295 if (arg0 == error_mark_node)
9a171fcd 8296 return const0_rtx;
95385cbb
AH
8297
8298 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8299 op0 = copy_to_mode_reg (mode0, op0);
8300
8301 pat = GEN_FCN (icode) (op0);
8302 if (pat)
8303 emit_insn (pat);
8304 return NULL_RTX;
3a9b8c7e 8305
95385cbb
AH
8306 case ALTIVEC_BUILTIN_DSSALL:
8307 emit_insn (gen_altivec_dssall ());
8308 return NULL_RTX;
8309
8310 case ALTIVEC_BUILTIN_DSS:
8311 icode = CODE_FOR_altivec_dss;
5039610b 8312 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8313 STRIP_NOPS (arg0);
84217346 8314 op0 = expand_normal (arg0);
95385cbb
AH
8315 mode0 = insn_data[icode].operand[0].mode;
8316
8317 /* If we got invalid arguments, bail out before generating bad rtl. */
8318 if (arg0 == error_mark_node)
9a171fcd 8319 return const0_rtx;
95385cbb 8320
b44140e7
AH
8321 if (TREE_CODE (arg0) != INTEGER_CST
8322 || TREE_INT_CST_LOW (arg0) & ~0x3)
8323 {
8324 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8325 return const0_rtx;
b44140e7
AH
8326 }
8327
95385cbb
AH
8328 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8329 op0 = copy_to_mode_reg (mode0, op0);
8330
8331 emit_insn (gen_altivec_dss (op0));
0ac081f6 8332 return NULL_RTX;
7a4eca66
DE
8333
8334 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8335 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8336 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8337 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8338 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8339
8340 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8341 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8342 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8343 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8344 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8345
8346 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8347 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8348 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8349 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8350 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8351
8352 default:
8353 break;
8354 /* Fall through. */
0ac081f6 8355 }
24408032 8356
100c4561 8357 /* Expand abs* operations. */
586de218 8358 d = bdesc_abs;
ca7558fc 8359 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8360 if (d->code == fcode)
5039610b 8361 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8362
ae4b4a02 8363 /* Expand the AltiVec predicates. */
586de218 8364 dp = bdesc_altivec_preds;
ca7558fc 8365 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8366 if (dp->code == fcode)
c4ad648e 8367 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8368 exp, target);
ae4b4a02 8369
6525c0e7
AH
8370 /* LV* are funky. We initialized them differently. */
8371 switch (fcode)
8372 {
8373 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8374 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8375 exp, target);
6525c0e7 8376 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8377 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8378 exp, target);
6525c0e7 8379 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8380 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8381 exp, target);
6525c0e7 8382 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8383 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8384 exp, target);
6525c0e7 8385 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8386 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8387 exp, target);
6525c0e7 8388 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8389 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8390 exp, target);
6525c0e7 8391 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8392 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8393 exp, target);
6525c0e7
AH
8394 default:
8395 break;
8396 /* Fall through. */
8397 }
95385cbb 8398
92898235 8399 *expandedp = false;
0ac081f6
AH
8400 return NULL_RTX;
8401}
8402
96038623
DE
8403/* Expand the builtin in EXP and store the result in TARGET. Store
8404 true in *EXPANDEDP if we found a builtin to expand. */
8405static rtx
8406paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8407{
8408 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8409 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8410 const struct builtin_description *d;
96038623
DE
8411 size_t i;
8412
8413 *expandedp = true;
8414
8415 switch (fcode)
8416 {
8417 case PAIRED_BUILTIN_STX:
8418 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8419 case PAIRED_BUILTIN_LX:
8420 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8421 default:
8422 break;
8423 /* Fall through. */
8424 }
8425
8426 /* Expand the paired predicates. */
23a651fc 8427 d = bdesc_paired_preds;
96038623
DE
8428 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8429 if (d->code == fcode)
8430 return paired_expand_predicate_builtin (d->icode, exp, target);
8431
8432 *expandedp = false;
8433 return NULL_RTX;
8434}
8435
a3170dc6
AH
8436/* Binops that need to be initialized manually, but can be expanded
8437 automatically by rs6000_expand_binop_builtin. */
8438static struct builtin_description bdesc_2arg_spe[] =
8439{
8440 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8441 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8442 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8443 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8444 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8445 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8446 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8447 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8448 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8449 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8450 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8451 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8452 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8453 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8454 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8455 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8456 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8457 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8458 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8459 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8460 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8461 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8462};
8463
8464/* Expand the builtin in EXP and store the result in TARGET. Store
8465 true in *EXPANDEDP if we found a builtin to expand.
8466
8467 This expands the SPE builtins that are not simple unary and binary
8468 operations. */
8469static rtx
a2369ed3 8470spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8471{
5039610b 8472 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8473 tree arg1, arg0;
8474 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8475 enum insn_code icode;
8476 enum machine_mode tmode, mode0;
8477 rtx pat, op0;
8478 struct builtin_description *d;
8479 size_t i;
8480
8481 *expandedp = true;
8482
8483 /* Syntax check for a 5-bit unsigned immediate. */
8484 switch (fcode)
8485 {
8486 case SPE_BUILTIN_EVSTDD:
8487 case SPE_BUILTIN_EVSTDH:
8488 case SPE_BUILTIN_EVSTDW:
8489 case SPE_BUILTIN_EVSTWHE:
8490 case SPE_BUILTIN_EVSTWHO:
8491 case SPE_BUILTIN_EVSTWWE:
8492 case SPE_BUILTIN_EVSTWWO:
5039610b 8493 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8494 if (TREE_CODE (arg1) != INTEGER_CST
8495 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8496 {
8497 error ("argument 2 must be a 5-bit unsigned literal");
8498 return const0_rtx;
8499 }
8500 break;
8501 default:
8502 break;
8503 }
8504
00332c9f
AH
8505 /* The evsplat*i instructions are not quite generic. */
8506 switch (fcode)
8507 {
8508 case SPE_BUILTIN_EVSPLATFI:
8509 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8510 exp, target);
00332c9f
AH
8511 case SPE_BUILTIN_EVSPLATI:
8512 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8513 exp, target);
00332c9f
AH
8514 default:
8515 break;
8516 }
8517
a3170dc6
AH
8518 d = (struct builtin_description *) bdesc_2arg_spe;
8519 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8520 if (d->code == fcode)
5039610b 8521 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8522
8523 d = (struct builtin_description *) bdesc_spe_predicates;
8524 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8525 if (d->code == fcode)
5039610b 8526 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8527
8528 d = (struct builtin_description *) bdesc_spe_evsel;
8529 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8530 if (d->code == fcode)
5039610b 8531 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8532
8533 switch (fcode)
8534 {
8535 case SPE_BUILTIN_EVSTDDX:
5039610b 8536 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8537 case SPE_BUILTIN_EVSTDHX:
5039610b 8538 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8539 case SPE_BUILTIN_EVSTDWX:
5039610b 8540 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8541 case SPE_BUILTIN_EVSTWHEX:
5039610b 8542 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8543 case SPE_BUILTIN_EVSTWHOX:
5039610b 8544 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8545 case SPE_BUILTIN_EVSTWWEX:
5039610b 8546 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8547 case SPE_BUILTIN_EVSTWWOX:
5039610b 8548 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8549 case SPE_BUILTIN_EVSTDD:
5039610b 8550 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8551 case SPE_BUILTIN_EVSTDH:
5039610b 8552 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8553 case SPE_BUILTIN_EVSTDW:
5039610b 8554 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8555 case SPE_BUILTIN_EVSTWHE:
5039610b 8556 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8557 case SPE_BUILTIN_EVSTWHO:
5039610b 8558 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8559 case SPE_BUILTIN_EVSTWWE:
5039610b 8560 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8561 case SPE_BUILTIN_EVSTWWO:
5039610b 8562 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8563 case SPE_BUILTIN_MFSPEFSCR:
8564 icode = CODE_FOR_spe_mfspefscr;
8565 tmode = insn_data[icode].operand[0].mode;
8566
8567 if (target == 0
8568 || GET_MODE (target) != tmode
8569 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8570 target = gen_reg_rtx (tmode);
f676971a 8571
a3170dc6
AH
8572 pat = GEN_FCN (icode) (target);
8573 if (! pat)
8574 return 0;
8575 emit_insn (pat);
8576 return target;
8577 case SPE_BUILTIN_MTSPEFSCR:
8578 icode = CODE_FOR_spe_mtspefscr;
5039610b 8579 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8580 op0 = expand_normal (arg0);
a3170dc6
AH
8581 mode0 = insn_data[icode].operand[0].mode;
8582
8583 if (arg0 == error_mark_node)
8584 return const0_rtx;
8585
8586 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8587 op0 = copy_to_mode_reg (mode0, op0);
8588
8589 pat = GEN_FCN (icode) (op0);
8590 if (pat)
8591 emit_insn (pat);
8592 return NULL_RTX;
8593 default:
8594 break;
8595 }
8596
8597 *expandedp = false;
8598 return NULL_RTX;
8599}
8600
96038623
DE
8601static rtx
8602paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8603{
8604 rtx pat, scratch, tmp;
8605 tree form = CALL_EXPR_ARG (exp, 0);
8606 tree arg0 = CALL_EXPR_ARG (exp, 1);
8607 tree arg1 = CALL_EXPR_ARG (exp, 2);
8608 rtx op0 = expand_normal (arg0);
8609 rtx op1 = expand_normal (arg1);
8610 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8611 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8612 int form_int;
8613 enum rtx_code code;
8614
8615 if (TREE_CODE (form) != INTEGER_CST)
8616 {
8617 error ("argument 1 of __builtin_paired_predicate must be a constant");
8618 return const0_rtx;
8619 }
8620 else
8621 form_int = TREE_INT_CST_LOW (form);
8622
8623 gcc_assert (mode0 == mode1);
8624
8625 if (arg0 == error_mark_node || arg1 == error_mark_node)
8626 return const0_rtx;
8627
8628 if (target == 0
8629 || GET_MODE (target) != SImode
8630 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8631 target = gen_reg_rtx (SImode);
8632 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8633 op0 = copy_to_mode_reg (mode0, op0);
8634 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8635 op1 = copy_to_mode_reg (mode1, op1);
8636
8637 scratch = gen_reg_rtx (CCFPmode);
8638
8639 pat = GEN_FCN (icode) (scratch, op0, op1);
8640 if (!pat)
8641 return const0_rtx;
8642
8643 emit_insn (pat);
8644
8645 switch (form_int)
8646 {
8647 /* LT bit. */
8648 case 0:
8649 code = LT;
8650 break;
8651 /* GT bit. */
8652 case 1:
8653 code = GT;
8654 break;
8655 /* EQ bit. */
8656 case 2:
8657 code = EQ;
8658 break;
8659 /* UN bit. */
8660 case 3:
8661 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8662 return target;
8663 default:
8664 error ("argument 1 of __builtin_paired_predicate is out of range");
8665 return const0_rtx;
8666 }
8667
8668 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8669 emit_move_insn (target, tmp);
8670 return target;
8671}
8672
a3170dc6 8673static rtx
5039610b 8674spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8675{
8676 rtx pat, scratch, tmp;
5039610b
SL
8677 tree form = CALL_EXPR_ARG (exp, 0);
8678 tree arg0 = CALL_EXPR_ARG (exp, 1);
8679 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8680 rtx op0 = expand_normal (arg0);
8681 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8682 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8683 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8684 int form_int;
8685 enum rtx_code code;
8686
8687 if (TREE_CODE (form) != INTEGER_CST)
8688 {
8689 error ("argument 1 of __builtin_spe_predicate must be a constant");
8690 return const0_rtx;
8691 }
8692 else
8693 form_int = TREE_INT_CST_LOW (form);
8694
37409796 8695 gcc_assert (mode0 == mode1);
a3170dc6
AH
8696
8697 if (arg0 == error_mark_node || arg1 == error_mark_node)
8698 return const0_rtx;
8699
8700 if (target == 0
8701 || GET_MODE (target) != SImode
8702 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8703 target = gen_reg_rtx (SImode);
8704
8705 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8706 op0 = copy_to_mode_reg (mode0, op0);
8707 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8708 op1 = copy_to_mode_reg (mode1, op1);
8709
8710 scratch = gen_reg_rtx (CCmode);
8711
8712 pat = GEN_FCN (icode) (scratch, op0, op1);
8713 if (! pat)
8714 return const0_rtx;
8715 emit_insn (pat);
8716
8717 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8718 _lower_. We use one compare, but look in different bits of the
8719 CR for each variant.
8720
8721 There are 2 elements in each SPE simd type (upper/lower). The CR
8722 bits are set as follows:
8723
8724 BIT0 | BIT 1 | BIT 2 | BIT 3
8725 U | L | (U | L) | (U & L)
8726
8727 So, for an "all" relationship, BIT 3 would be set.
8728 For an "any" relationship, BIT 2 would be set. Etc.
8729
8730 Following traditional nomenclature, these bits map to:
8731
8732 BIT0 | BIT 1 | BIT 2 | BIT 3
8733 LT | GT | EQ | OV
8734
8735 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
8736 */
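  /* Editor's worked example (illustrative only, derived from the table
     above): for a signed greater-than compare of {5, 1} against {3, 2},
     the upper relation holds (5 > 3) and the lower one does not (1 > 2),
     so U = 1 and L = 0.  The CR field then reads LT=1, GT=0, EQ=(U|L)=1,
     OV=(U&L)=0: the "any" form (EQ bit) yields 1, the "all" form (OV bit)
     yields 0, "upper" (LT bit) yields 1 and "lower" (GT bit) yields 0.  */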
8737
8738 switch (form_int)
8739 {
8740 /* All variant. OV bit. */
8741 case 0:
8742 /* We need to get to the OV bit, which is the ORDERED bit. We
8743 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8744 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8745 So let's just use another pattern. */
8746 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8747 return target;
8748 /* Any variant. EQ bit. */
8749 case 1:
8750 code = EQ;
8751 break;
8752 /* Upper variant. LT bit. */
8753 case 2:
8754 code = LT;
8755 break;
8756 /* Lower variant. GT bit. */
8757 case 3:
8758 code = GT;
8759 break;
8760 default:
8761 error ("argument 1 of __builtin_spe_predicate is out of range");
8762 return const0_rtx;
8763 }
8764
8765 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8766 emit_move_insn (target, tmp);
8767
8768 return target;
8769}
8770
8771/* The evsel builtins look like this:
8772
8773 e = __builtin_spe_evsel_OP (a, b, c, d);
8774
8775 and work like this:
8776
8777 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8778 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8779*/
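/* Editor's worked example (illustrative only): take OP to be a signed
   greater-than compare, with a = {4, 1}, b = {2, 3}, c = {10, 11} and
   d = {20, 21}.  The upper compare 4 > 2 holds and the lower compare
   1 > 3 does not, so e takes c's upper element and d's lower element:
   e = {10, 21}.  */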
8780
8781static rtx
5039610b 8782spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8783{
8784 rtx pat, scratch;
5039610b
SL
8785 tree arg0 = CALL_EXPR_ARG (exp, 0);
8786 tree arg1 = CALL_EXPR_ARG (exp, 1);
8787 tree arg2 = CALL_EXPR_ARG (exp, 2);
8788 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8789 rtx op0 = expand_normal (arg0);
8790 rtx op1 = expand_normal (arg1);
8791 rtx op2 = expand_normal (arg2);
8792 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8793 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8794 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8795
37409796 8796 gcc_assert (mode0 == mode1);
a3170dc6
AH
8797
8798 if (arg0 == error_mark_node || arg1 == error_mark_node
8799 || arg2 == error_mark_node || arg3 == error_mark_node)
8800 return const0_rtx;
8801
8802 if (target == 0
8803 || GET_MODE (target) != mode0
8804 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8805 target = gen_reg_rtx (mode0);
8806
8807 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8808 op0 = copy_to_mode_reg (mode0, op0);
8809 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8810 op1 = copy_to_mode_reg (mode0, op1);
8811 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8812 op2 = copy_to_mode_reg (mode0, op2);
8813 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8814 op3 = copy_to_mode_reg (mode0, op3);
8815
8816 /* Generate the compare. */
8817 scratch = gen_reg_rtx (CCmode);
8818 pat = GEN_FCN (icode) (scratch, op0, op1);
8819 if (! pat)
8820 return const0_rtx;
8821 emit_insn (pat);
8822
8823 if (mode0 == V2SImode)
8824 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8825 else
8826 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8827
8828 return target;
8829}
8830
0ac081f6
AH
8831/* Expand an expression EXP that calls a built-in function,
8832 with result going to TARGET if that's convenient
8833 (and in mode MODE if that's convenient).
8834 SUBTARGET may be used as the target for computing one of EXP's operands.
8835 IGNORE is nonzero if the value is to be ignored. */
8836
8837static rtx
a2369ed3 8838rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8839 enum machine_mode mode ATTRIBUTE_UNUSED,
8840 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8841{
5039610b 8842 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8843 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8844 const struct builtin_description *d;
92898235
AH
8845 size_t i;
8846 rtx ret;
8847 bool success;
f676971a 8848
9c78b944
DE
8849 if (fcode == RS6000_BUILTIN_RECIP)
8850 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8851
8852 if (fcode == RS6000_BUILTIN_RECIPF)
8853 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8854
8855 if (fcode == RS6000_BUILTIN_RSQRTF)
8856 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8857
7ccf35ed
DN
8858 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8859 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8860 {
8861 int icode = (int) CODE_FOR_altivec_lvsr;
8862 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8863 enum machine_mode mode = insn_data[icode].operand[1].mode;
8864 tree arg;
8865 rtx op, addr, pat;
8866
37409796 8867 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8868
5039610b 8869 arg = CALL_EXPR_ARG (exp, 0);
37409796 8870 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8871 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8872 addr = memory_address (mode, op);
8873 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8874 op = addr;
8875 else
8876 {
8877 /* For the load case we need to negate the address. */
8878 op = gen_reg_rtx (GET_MODE (addr));
8879 emit_insn (gen_rtx_SET (VOIDmode, op,
8880 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8881 }
7ccf35ed
DN
8882 op = gen_rtx_MEM (mode, op);
8883
8884 if (target == 0
8885 || GET_MODE (target) != tmode
8886 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8887 target = gen_reg_rtx (tmode);
8888
8889 /*pat = gen_altivec_lvsr (target, op);*/
8890 pat = GEN_FCN (icode) (target, op);
8891 if (!pat)
8892 return 0;
8893 emit_insn (pat);
8894
8895 return target;
8896 }
5039610b
SL
8897
8898 /* FIXME: There's got to be a nicer way to handle this case than
8899 constructing a new CALL_EXPR. */
f57d17f1
TM
8900 if (fcode == ALTIVEC_BUILTIN_VCFUX
8901 || fcode == ALTIVEC_BUILTIN_VCFSX)
8902 {
5039610b
SL
8903 if (call_expr_nargs (exp) == 1)
8904 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8905 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8906 }
7ccf35ed 8907
0ac081f6 8908 if (TARGET_ALTIVEC)
92898235
AH
8909 {
8910 ret = altivec_expand_builtin (exp, target, &success);
8911
a3170dc6
AH
8912 if (success)
8913 return ret;
8914 }
8915 if (TARGET_SPE)
8916 {
8917 ret = spe_expand_builtin (exp, target, &success);
8918
92898235
AH
8919 if (success)
8920 return ret;
8921 }
96038623
DE
8922 if (TARGET_PAIRED_FLOAT)
8923 {
8924 ret = paired_expand_builtin (exp, target, &success);
8925
8926 if (success)
8927 return ret;
8928 }
92898235 8929
96038623 8930 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 8931
37409796
NS
8932 /* Handle simple unary operations. */
8933 d = (struct builtin_description *) bdesc_1arg;
8934 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8935 if (d->code == fcode)
5039610b 8936 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8937
37409796
NS
8938 /* Handle simple binary operations. */
8939 d = (struct builtin_description *) bdesc_2arg;
8940 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8941 if (d->code == fcode)
5039610b 8942 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8943
37409796 8944 /* Handle simple ternary operations. */
586de218 8945 d = bdesc_3arg;
37409796
NS
8946 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8947 if (d->code == fcode)
5039610b 8948 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8949
37409796 8950 gcc_unreachable ();
0ac081f6
AH
8951}
8952
7c62e993
PB
8953static tree
8954build_opaque_vector_type (tree node, int nunits)
8955{
8956 node = copy_node (node);
8957 TYPE_MAIN_VARIANT (node) = node;
8958 return build_vector_type (node, nunits);
8959}
8960
0ac081f6 8961static void
863d938c 8962rs6000_init_builtins (void)
0ac081f6 8963{
4a5eab38
PB
8964 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8965 V2SF_type_node = build_vector_type (float_type_node, 2);
8966 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8967 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8968 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8969 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8970 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8971
8972 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8973 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8974 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8975
7c62e993
PB
8976 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8977 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8978 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8979 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8980
8bb418a3
ZL
8981 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8982 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8983 'vector unsigned short'. */
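  /* Editor's illustration (not from the original source): because the
     copies below are distinct types, declarations such as

         __vector __bool char bc;
         __vector unsigned char uc;

     name two different types even though both are 8-bit unsigned under
     the hood, so C++ overload resolution and diagnostics can tell them
     apart.  */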
8984
8dd16ecc
NS
8985 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8986 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8987 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8988 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8989
58646b77
PB
8990 long_integer_type_internal_node = long_integer_type_node;
8991 long_unsigned_type_internal_node = long_unsigned_type_node;
8992 intQI_type_internal_node = intQI_type_node;
8993 uintQI_type_internal_node = unsigned_intQI_type_node;
8994 intHI_type_internal_node = intHI_type_node;
8995 uintHI_type_internal_node = unsigned_intHI_type_node;
8996 intSI_type_internal_node = intSI_type_node;
8997 uintSI_type_internal_node = unsigned_intSI_type_node;
8998 float_type_internal_node = float_type_node;
8999 void_type_internal_node = void_type_node;
9000
8bb418a3
ZL
9001 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9002 get_identifier ("__bool char"),
9003 bool_char_type_node));
9004 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9005 get_identifier ("__bool short"),
9006 bool_short_type_node));
9007 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9008 get_identifier ("__bool int"),
9009 bool_int_type_node));
9010 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9011 get_identifier ("__pixel"),
9012 pixel_type_node));
9013
4a5eab38
PB
9014 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9015 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9016 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9017 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9018
9019 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9020 get_identifier ("__vector unsigned char"),
9021 unsigned_V16QI_type_node));
9022 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9023 get_identifier ("__vector signed char"),
9024 V16QI_type_node));
9025 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9026 get_identifier ("__vector __bool char"),
9027 bool_V16QI_type_node));
9028
9029 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9030 get_identifier ("__vector unsigned short"),
9031 unsigned_V8HI_type_node));
9032 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9033 get_identifier ("__vector signed short"),
9034 V8HI_type_node));
9035 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9036 get_identifier ("__vector __bool short"),
9037 bool_V8HI_type_node));
9038
9039 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9040 get_identifier ("__vector unsigned int"),
9041 unsigned_V4SI_type_node));
9042 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9043 get_identifier ("__vector signed int"),
9044 V4SI_type_node));
9045 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9046 get_identifier ("__vector __bool int"),
9047 bool_V4SI_type_node));
9048
9049 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9050 get_identifier ("__vector float"),
9051 V4SF_type_node));
9052 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9053 get_identifier ("__vector __pixel"),
9054 pixel_V8HI_type_node));
9055
96038623
DE
9056 if (TARGET_PAIRED_FLOAT)
9057 paired_init_builtins ();
a3170dc6 9058 if (TARGET_SPE)
3fdaa45a 9059 spe_init_builtins ();
0ac081f6
AH
9060 if (TARGET_ALTIVEC)
9061 altivec_init_builtins ();
96038623 9062 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9063 rs6000_common_init_builtins ();
9c78b944
DE
9064 if (TARGET_PPC_GFXOPT)
9065 {
9066 tree ftype = build_function_type_list (float_type_node,
9067 float_type_node,
9068 float_type_node,
9069 NULL_TREE);
9070 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9071 RS6000_BUILTIN_RECIPF);
9072
9073 ftype = build_function_type_list (float_type_node,
9074 float_type_node,
9075 NULL_TREE);
9076 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9077 RS6000_BUILTIN_RSQRTF);
9078 }
9079 if (TARGET_POPCNTB)
9080 {
9081 tree ftype = build_function_type_list (double_type_node,
9082 double_type_node,
9083 double_type_node,
9084 NULL_TREE);
9085 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9086 RS6000_BUILTIN_RECIP);
9087
9088 }
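  /* Editor's cross-reference (informational): rs6000_expand_builtin above
     maps RS6000_BUILTIN_RECIPF to CODE_FOR_recipsf3, RS6000_BUILTIN_RSQRTF
     to CODE_FOR_rsqrtsf2 and RS6000_BUILTIN_RECIP to CODE_FOR_recipdf3, so
     a call such as __builtin_recipdivf (x, y) expands as a two-input
     recipsf3 pattern operating on x and y.  */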
69ca3549
DE
9089
9090#if TARGET_XCOFF
9091 /* AIX libm provides clog as __clog. */
9092 if (built_in_decls [BUILT_IN_CLOG])
9093 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9094#endif
0ac081f6
AH
9095}
9096
a3170dc6
AH
9097/* Search through a set of builtins and enable the mask bits.
9098 DESC is an array of builtins.
b6d08ca1 9099 SIZE is the total number of builtins.
a3170dc6
AH
9100 START is the builtin enum at which to start.
9101 END is the builtin enum at which to end. */
0ac081f6 9102static void
a2369ed3 9103enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9104 enum rs6000_builtins start,
a2369ed3 9105 enum rs6000_builtins end)
a3170dc6
AH
9106{
9107 int i;
9108
9109 for (i = 0; i < size; ++i)
9110 if (desc[i].code == start)
9111 break;
9112
9113 if (i == size)
9114 return;
9115
9116 for (; i < size; ++i)
9117 {
9118 /* Enable the builtin by using the current target_flags as its mask. */
9119 desc[i].mask = target_flags;
9120 if (desc[i].code == end)
9121 break;
9122 }
9123}
9124
9125static void
863d938c 9126spe_init_builtins (void)
0ac081f6 9127{
a3170dc6
AH
9128 tree endlink = void_list_node;
9129 tree puint_type_node = build_pointer_type (unsigned_type_node);
9130 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9131 struct builtin_description *d;
0ac081f6
AH
9132 size_t i;
9133
a3170dc6
AH
9134 tree v2si_ftype_4_v2si
9135 = build_function_type
3fdaa45a
AH
9136 (opaque_V2SI_type_node,
9137 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9138 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9139 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9140 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9141 endlink)))));
9142
9143 tree v2sf_ftype_4_v2sf
9144 = build_function_type
3fdaa45a
AH
9145 (opaque_V2SF_type_node,
9146 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9147 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9148 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9149 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9150 endlink)))));
9151
9152 tree int_ftype_int_v2si_v2si
9153 = build_function_type
9154 (integer_type_node,
9155 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9156 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9157 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9158 endlink))));
9159
9160 tree int_ftype_int_v2sf_v2sf
9161 = build_function_type
9162 (integer_type_node,
9163 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9164 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9165 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9166 endlink))));
9167
9168 tree void_ftype_v2si_puint_int
9169 = build_function_type (void_type_node,
3fdaa45a 9170 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9171 tree_cons (NULL_TREE, puint_type_node,
9172 tree_cons (NULL_TREE,
9173 integer_type_node,
9174 endlink))));
9175
9176 tree void_ftype_v2si_puint_char
9177 = build_function_type (void_type_node,
3fdaa45a 9178 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9179 tree_cons (NULL_TREE, puint_type_node,
9180 tree_cons (NULL_TREE,
9181 char_type_node,
9182 endlink))));
9183
9184 tree void_ftype_v2si_pv2si_int
9185 = build_function_type (void_type_node,
3fdaa45a 9186 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9187 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9188 tree_cons (NULL_TREE,
9189 integer_type_node,
9190 endlink))));
9191
9192 tree void_ftype_v2si_pv2si_char
9193 = build_function_type (void_type_node,
3fdaa45a 9194 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9195 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9196 tree_cons (NULL_TREE,
9197 char_type_node,
9198 endlink))));
9199
9200 tree void_ftype_int
9201 = build_function_type (void_type_node,
9202 tree_cons (NULL_TREE, integer_type_node, endlink));
9203
9204 tree int_ftype_void
36e8d515 9205 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9206
9207 tree v2si_ftype_pv2si_int
3fdaa45a 9208 = build_function_type (opaque_V2SI_type_node,
6035d635 9209 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9210 tree_cons (NULL_TREE, integer_type_node,
9211 endlink)));
9212
9213 tree v2si_ftype_puint_int
3fdaa45a 9214 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9215 tree_cons (NULL_TREE, puint_type_node,
9216 tree_cons (NULL_TREE, integer_type_node,
9217 endlink)));
9218
9219 tree v2si_ftype_pushort_int
3fdaa45a 9220 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9221 tree_cons (NULL_TREE, pushort_type_node,
9222 tree_cons (NULL_TREE, integer_type_node,
9223 endlink)));
9224
00332c9f
AH
9225 tree v2si_ftype_signed_char
9226 = build_function_type (opaque_V2SI_type_node,
9227 tree_cons (NULL_TREE, signed_char_type_node,
9228 endlink));
9229
a3170dc6
AH
9230 /* The initialization of the simple binary and unary builtins is
9231 done in rs6000_common_init_builtins, but we have to enable the
9232 mask bits here manually because we have run out of `target_flags'
9233 bits. We really need to redesign this mask business. */
9234
9235 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9236 ARRAY_SIZE (bdesc_2arg),
9237 SPE_BUILTIN_EVADDW,
9238 SPE_BUILTIN_EVXOR);
9239 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9240 ARRAY_SIZE (bdesc_1arg),
9241 SPE_BUILTIN_EVABS,
9242 SPE_BUILTIN_EVSUBFUSIAAW);
9243 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9244 ARRAY_SIZE (bdesc_spe_predicates),
9245 SPE_BUILTIN_EVCMPEQ,
9246 SPE_BUILTIN_EVFSTSTLT);
9247 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9248 ARRAY_SIZE (bdesc_spe_evsel),
9249 SPE_BUILTIN_EVSEL_CMPGTS,
9250 SPE_BUILTIN_EVSEL_FSTSTEQ);
9251
36252949
AH
9252 (*lang_hooks.decls.pushdecl)
9253 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9254 opaque_V2SI_type_node));
9255
a3170dc6 9256 /* Initialize irregular SPE builtins. */
f676971a 9257
a3170dc6
AH
9258 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9259 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9260 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9261 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9262 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9263 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9264 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9265 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9266 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9267 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9268 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9269 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9270 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9271 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9272 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9273 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9274 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9275 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9276
9277 /* Loads. */
9278 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9279 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9280 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9281 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9282 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9283 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9284 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9285 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9286 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9287 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9288 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9289 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9290 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9291 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9292 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9293 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9294 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9295 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9296 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9297 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9298 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9299 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9300
9301 /* Predicates. */
9302 d = (struct builtin_description *) bdesc_spe_predicates;
9303 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9304 {
9305 tree type;
9306
9307 switch (insn_data[d->icode].operand[1].mode)
9308 {
9309 case V2SImode:
9310 type = int_ftype_int_v2si_v2si;
9311 break;
9312 case V2SFmode:
9313 type = int_ftype_int_v2sf_v2sf;
9314 break;
9315 default:
37409796 9316 gcc_unreachable ();
a3170dc6
AH
9317 }
9318
9319 def_builtin (d->mask, d->name, type, d->code);
9320 }
9321
9322 /* Evsel predicates. */
9323 d = (struct builtin_description *) bdesc_spe_evsel;
9324 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9325 {
9326 tree type;
9327
9328 switch (insn_data[d->icode].operand[1].mode)
9329 {
9330 case V2SImode:
9331 type = v2si_ftype_4_v2si;
9332 break;
9333 case V2SFmode:
9334 type = v2sf_ftype_4_v2sf;
9335 break;
9336 default:
37409796 9337 gcc_unreachable ();
a3170dc6
AH
9338 }
9339
9340 def_builtin (d->mask, d->name, type, d->code);
9341 }
9342}
9343
96038623
DE
9344static void
9345paired_init_builtins (void)
9346{
23a651fc 9347 const struct builtin_description *d;
96038623
DE
9348 size_t i;
9349 tree endlink = void_list_node;
9350
9351 tree int_ftype_int_v2sf_v2sf
9352 = build_function_type
9353 (integer_type_node,
9354 tree_cons (NULL_TREE, integer_type_node,
9355 tree_cons (NULL_TREE, V2SF_type_node,
9356 tree_cons (NULL_TREE, V2SF_type_node,
9357 endlink))));
9358 tree pcfloat_type_node =
9359 build_pointer_type (build_qualified_type
9360 (float_type_node, TYPE_QUAL_CONST));
9361
9362 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9363 long_integer_type_node,
9364 pcfloat_type_node,
9365 NULL_TREE);
9366 tree void_ftype_v2sf_long_pcfloat =
9367 build_function_type_list (void_type_node,
9368 V2SF_type_node,
9369 long_integer_type_node,
9370 pcfloat_type_node,
9371 NULL_TREE);
9372
9373
9374 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9375 PAIRED_BUILTIN_LX);
9376
9377
9378 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9379 PAIRED_BUILTIN_STX);
9380
9381 /* Predicates. */
23a651fc 9382 d = bdesc_paired_preds;
96038623
DE
9383 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9384 {
9385 tree type;
9386
9387 switch (insn_data[d->icode].operand[1].mode)
9388 {
9389 case V2SFmode:
9390 type = int_ftype_int_v2sf_v2sf;
9391 break;
9392 default:
9393 gcc_unreachable ();
9394 }
9395
9396 def_builtin (d->mask, d->name, type, d->code);
9397 }
9398}
9399
a3170dc6 9400static void
863d938c 9401altivec_init_builtins (void)
a3170dc6 9402{
586de218
KG
9403 const struct builtin_description *d;
9404 const struct builtin_description_predicates *dp;
a3170dc6 9405 size_t i;
7a4eca66
DE
9406 tree ftype;
9407
a3170dc6
AH
9408 tree pfloat_type_node = build_pointer_type (float_type_node);
9409 tree pint_type_node = build_pointer_type (integer_type_node);
9410 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9411 tree pchar_type_node = build_pointer_type (char_type_node);
9412
9413 tree pvoid_type_node = build_pointer_type (void_type_node);
9414
0dbc3651
ZW
9415 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9416 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9417 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9418 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9419
9420 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9421
58646b77
PB
9422 tree int_ftype_opaque
9423 = build_function_type_list (integer_type_node,
9424 opaque_V4SI_type_node, NULL_TREE);
9425
9426 tree opaque_ftype_opaque_int
9427 = build_function_type_list (opaque_V4SI_type_node,
9428 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9429 tree opaque_ftype_opaque_opaque_int
9430 = build_function_type_list (opaque_V4SI_type_node,
9431 opaque_V4SI_type_node, opaque_V4SI_type_node,
9432 integer_type_node, NULL_TREE);
9433 tree int_ftype_int_opaque_opaque
9434 = build_function_type_list (integer_type_node,
9435 integer_type_node, opaque_V4SI_type_node,
9436 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9437 tree int_ftype_int_v4si_v4si
9438 = build_function_type_list (integer_type_node,
9439 integer_type_node, V4SI_type_node,
9440 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9441 tree v4sf_ftype_pcfloat
9442 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9443 tree void_ftype_pfloat_v4sf
b4de2f7d 9444 = build_function_type_list (void_type_node,
a3170dc6 9445 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9446 tree v4si_ftype_pcint
9447 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9448 tree void_ftype_pint_v4si
b4de2f7d
AH
9449 = build_function_type_list (void_type_node,
9450 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9451 tree v8hi_ftype_pcshort
9452 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9453 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9454 = build_function_type_list (void_type_node,
9455 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9456 tree v16qi_ftype_pcchar
9457 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9458 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9459 = build_function_type_list (void_type_node,
9460 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9461 tree void_ftype_v4si
b4de2f7d 9462 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9463 tree v8hi_ftype_void
9464 = build_function_type (V8HI_type_node, void_list_node);
9465 tree void_ftype_void
9466 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9467 tree void_ftype_int
9468 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9469
58646b77
PB
9470 tree opaque_ftype_long_pcvoid
9471 = build_function_type_list (opaque_V4SI_type_node,
9472 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9473 tree v16qi_ftype_long_pcvoid
a3170dc6 9474 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9475 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9476 tree v8hi_ftype_long_pcvoid
a3170dc6 9477 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9478 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9479 tree v4si_ftype_long_pcvoid
a3170dc6 9480 = build_function_type_list (V4SI_type_node,
b4a62fa0 9481 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9482
58646b77
PB
9483 tree void_ftype_opaque_long_pvoid
9484 = build_function_type_list (void_type_node,
9485 opaque_V4SI_type_node, long_integer_type_node,
9486 pvoid_type_node, NULL_TREE);
b4a62fa0 9487 tree void_ftype_v4si_long_pvoid
b4de2f7d 9488 = build_function_type_list (void_type_node,
b4a62fa0 9489 V4SI_type_node, long_integer_type_node,
b4de2f7d 9490 pvoid_type_node, NULL_TREE);
b4a62fa0 9491 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9492 = build_function_type_list (void_type_node,
b4a62fa0 9493 V16QI_type_node, long_integer_type_node,
b4de2f7d 9494 pvoid_type_node, NULL_TREE);
b4a62fa0 9495 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9496 = build_function_type_list (void_type_node,
b4a62fa0 9497 V8HI_type_node, long_integer_type_node,
b4de2f7d 9498 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9499 tree int_ftype_int_v8hi_v8hi
9500 = build_function_type_list (integer_type_node,
9501 integer_type_node, V8HI_type_node,
9502 V8HI_type_node, NULL_TREE);
9503 tree int_ftype_int_v16qi_v16qi
9504 = build_function_type_list (integer_type_node,
9505 integer_type_node, V16QI_type_node,
9506 V16QI_type_node, NULL_TREE);
9507 tree int_ftype_int_v4sf_v4sf
9508 = build_function_type_list (integer_type_node,
9509 integer_type_node, V4SF_type_node,
9510 V4SF_type_node, NULL_TREE);
9511 tree v4si_ftype_v4si
9512 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9513 tree v8hi_ftype_v8hi
9514 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9515 tree v16qi_ftype_v16qi
9516 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9517 tree v4sf_ftype_v4sf
9518 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9519 tree void_ftype_pcvoid_int_int
a3170dc6 9520 = build_function_type_list (void_type_node,
0dbc3651 9521 pcvoid_type_node, integer_type_node,
8bb418a3 9522 integer_type_node, NULL_TREE);
8bb418a3 9523
0dbc3651
ZW
9524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9525 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9526 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9527 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9528 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9529 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9530 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9531 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9532 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9533 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9534 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9535 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9536 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9537 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9538 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9539 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9540 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9541 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9542 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9543 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9544 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9545 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9546 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9547 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9548 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9549 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9550 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9551 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9552 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9553 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9554 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9555 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9556 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9557 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9558 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9559 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9560 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9561 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9562 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9563 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9564 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9565 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9566 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9567 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9568 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9569 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9570
9571 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9572
9573 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9574 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9575 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9576 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9577 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9578 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9579 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9580 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9581 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9582 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9583
a3170dc6 9584 /* Add the DST variants. */
586de218 9585 d = bdesc_dst;
a3170dc6 9586 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9587 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9588
9589 /* Initialize the predicates. */
586de218 9590 dp = bdesc_altivec_preds;
a3170dc6
AH
9591 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9592 {
9593 enum machine_mode mode1;
9594 tree type;
58646b77
PB
9595 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9596 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9597
58646b77
PB
9598 if (is_overloaded)
9599 mode1 = VOIDmode;
9600 else
9601 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9602
9603 switch (mode1)
9604 {
58646b77
PB
9605 case VOIDmode:
9606 type = int_ftype_int_opaque_opaque;
9607 break;
a3170dc6
AH
9608 case V4SImode:
9609 type = int_ftype_int_v4si_v4si;
9610 break;
9611 case V8HImode:
9612 type = int_ftype_int_v8hi_v8hi;
9613 break;
9614 case V16QImode:
9615 type = int_ftype_int_v16qi_v16qi;
9616 break;
9617 case V4SFmode:
9618 type = int_ftype_int_v4sf_v4sf;
9619 break;
9620 default:
37409796 9621 gcc_unreachable ();
a3170dc6 9622 }
f676971a 9623
a3170dc6
AH
9624 def_builtin (dp->mask, dp->name, type, dp->code);
9625 }
9626
9627 /* Initialize the abs* operators. */
586de218 9628 d = bdesc_abs;
a3170dc6
AH
9629 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9630 {
9631 enum machine_mode mode0;
9632 tree type;
9633
9634 mode0 = insn_data[d->icode].operand[0].mode;
9635
9636 switch (mode0)
9637 {
9638 case V4SImode:
9639 type = v4si_ftype_v4si;
9640 break;
9641 case V8HImode:
9642 type = v8hi_ftype_v8hi;
9643 break;
9644 case V16QImode:
9645 type = v16qi_ftype_v16qi;
9646 break;
9647 case V4SFmode:
9648 type = v4sf_ftype_v4sf;
9649 break;
9650 default:
37409796 9651 gcc_unreachable ();
a3170dc6 9652 }
f676971a 9653
a3170dc6
AH
9654 def_builtin (d->mask, d->name, type, d->code);
9655 }
7ccf35ed 9656
13c62176
DN
9657 if (TARGET_ALTIVEC)
9658 {
9659 tree decl;
9660
9661 /* Initialize target builtin that implements
9662 targetm.vectorize.builtin_mask_for_load. */
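      /* Editor's note (a sketch of the intent, inferred from the expansion
         of ALTIVEC_BUILTIN_MASK_FOR_LOAD in rs6000_expand_builtin above):
         the autovectorizer queries this hook for a builtin whose result is
         the permutation mask used to realign misaligned vector loads; the
         expansion implements it as lvsr of the negated address.  */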
9663
c79efc4d
RÁE
9664 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9665 v16qi_ftype_long_pcvoid,
9666 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9667 BUILT_IN_MD, NULL, NULL_TREE);
9668 TREE_READONLY (decl) = 1;
13c62176
DN
9669 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9670 altivec_builtin_mask_for_load = decl;
13c62176 9671 }
7a4eca66
DE
9672
9673 /* Access to the vec_init patterns. */
9674 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9675 integer_type_node, integer_type_node,
9676 integer_type_node, NULL_TREE);
9677 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9678 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9679
9680 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9681 short_integer_type_node,
9682 short_integer_type_node,
9683 short_integer_type_node,
9684 short_integer_type_node,
9685 short_integer_type_node,
9686 short_integer_type_node,
9687 short_integer_type_node, NULL_TREE);
9688 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9689 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9690
9691 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9692 char_type_node, char_type_node,
9693 char_type_node, char_type_node,
9694 char_type_node, char_type_node,
9695 char_type_node, char_type_node,
9696 char_type_node, char_type_node,
9697 char_type_node, char_type_node,
9698 char_type_node, char_type_node,
9699 char_type_node, NULL_TREE);
9700 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9701 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9702
9703 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9704 float_type_node, float_type_node,
9705 float_type_node, NULL_TREE);
9706 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9707 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
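  /* Editor's illustration (hedged, based only on the names and signatures
     registered above): __builtin_vec_init_v4si (1, 2, 3, 4) is expected to
     build a V4SI vector whose four elements are 1, 2, 3 and 4, and the
     v8hi/v16qi/v4sf variants likewise take one scalar argument per vector
     element.  */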
9708
9709 /* Access to the vec_set patterns. */
9710 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9711 intSI_type_node,
9712 integer_type_node, NULL_TREE);
9713 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9714 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9715
9716 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9717 intHI_type_node,
9718 integer_type_node, NULL_TREE);
9719 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9720 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9721
9722 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9723 intQI_type_node,
9724 integer_type_node, NULL_TREE);
9725 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9726 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9727
9728 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9729 float_type_node,
9730 integer_type_node, NULL_TREE);
9731 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9732 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9733
9734 /* Access to the vec_extract patterns. */
9735 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9736 integer_type_node, NULL_TREE);
9737 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9738 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9739
9740 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9741 integer_type_node, NULL_TREE);
9742 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9743 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9744
9745 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9746 integer_type_node, NULL_TREE);
9747 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9748 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9749
9750 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9751 integer_type_node, NULL_TREE);
9752 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9753 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9754}
9755
9756static void
863d938c 9757rs6000_common_init_builtins (void)
a3170dc6 9758{
586de218 9759 const struct builtin_description *d;
a3170dc6
AH
9760 size_t i;
9761
96038623
DE
9762 tree v2sf_ftype_v2sf_v2sf_v2sf
9763 = build_function_type_list (V2SF_type_node,
9764 V2SF_type_node, V2SF_type_node,
9765 V2SF_type_node, NULL_TREE);
9766
a3170dc6
AH
9767 tree v4sf_ftype_v4sf_v4sf_v16qi
9768 = build_function_type_list (V4SF_type_node,
9769 V4SF_type_node, V4SF_type_node,
9770 V16QI_type_node, NULL_TREE);
9771 tree v4si_ftype_v4si_v4si_v16qi
9772 = build_function_type_list (V4SI_type_node,
9773 V4SI_type_node, V4SI_type_node,
9774 V16QI_type_node, NULL_TREE);
9775 tree v8hi_ftype_v8hi_v8hi_v16qi
9776 = build_function_type_list (V8HI_type_node,
9777 V8HI_type_node, V8HI_type_node,
9778 V16QI_type_node, NULL_TREE);
9779 tree v16qi_ftype_v16qi_v16qi_v16qi
9780 = build_function_type_list (V16QI_type_node,
9781 V16QI_type_node, V16QI_type_node,
9782 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9783 tree v4si_ftype_int
9784 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9785 tree v8hi_ftype_int
9786 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9787 tree v16qi_ftype_int
9788 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9789 tree v8hi_ftype_v16qi
9790 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9791 tree v4sf_ftype_v4sf
9792 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9793
9794 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9795 = build_function_type_list (opaque_V2SI_type_node,
9796 opaque_V2SI_type_node,
9797 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9798
96038623 9799 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9800 = build_function_type_list (opaque_V2SF_type_node,
9801 opaque_V2SF_type_node,
9802 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9803
96038623
DE
9804 tree v2sf_ftype_v2sf_v2sf
9805 = build_function_type_list (V2SF_type_node,
9806 V2SF_type_node,
9807 V2SF_type_node, NULL_TREE);
9808
9809
a3170dc6 9810 tree v2si_ftype_int_int
2abe3e28 9811 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9812 integer_type_node, integer_type_node,
9813 NULL_TREE);
9814
58646b77
PB
9815 tree opaque_ftype_opaque
9816 = build_function_type_list (opaque_V4SI_type_node,
9817 opaque_V4SI_type_node, NULL_TREE);
9818
a3170dc6 9819 tree v2si_ftype_v2si
2abe3e28
AH
9820 = build_function_type_list (opaque_V2SI_type_node,
9821 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9822
96038623 9823 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9824 = build_function_type_list (opaque_V2SF_type_node,
9825 opaque_V2SF_type_node, NULL_TREE);
f676971a 9826
96038623
DE
9827 tree v2sf_ftype_v2sf
9828 = build_function_type_list (V2SF_type_node,
9829 V2SF_type_node, NULL_TREE);
9830
a3170dc6 9831 tree v2sf_ftype_v2si
2abe3e28
AH
9832 = build_function_type_list (opaque_V2SF_type_node,
9833 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9834
9835 tree v2si_ftype_v2sf
2abe3e28
AH
9836 = build_function_type_list (opaque_V2SI_type_node,
9837 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9838
9839 tree v2si_ftype_v2si_char
2abe3e28
AH
9840 = build_function_type_list (opaque_V2SI_type_node,
9841 opaque_V2SI_type_node,
9842 char_type_node, NULL_TREE);
a3170dc6
AH
9843
9844 tree v2si_ftype_int_char
2abe3e28 9845 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9846 integer_type_node, char_type_node, NULL_TREE);
9847
9848 tree v2si_ftype_char
2abe3e28
AH
9849 = build_function_type_list (opaque_V2SI_type_node,
9850 char_type_node, NULL_TREE);
a3170dc6
AH
9851
9852 tree int_ftype_int_int
9853 = build_function_type_list (integer_type_node,
9854 integer_type_node, integer_type_node,
9855 NULL_TREE);
95385cbb 9856
58646b77
PB
9857 tree opaque_ftype_opaque_opaque
9858 = build_function_type_list (opaque_V4SI_type_node,
9859 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9860 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9861 = build_function_type_list (V4SI_type_node,
9862 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9863 tree v4sf_ftype_v4si_int
b4de2f7d 9864 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9865 V4SI_type_node, integer_type_node, NULL_TREE);
9866 tree v4si_ftype_v4sf_int
b4de2f7d 9867 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9868 V4SF_type_node, integer_type_node, NULL_TREE);
9869 tree v4si_ftype_v4si_int
b4de2f7d 9870 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9871 V4SI_type_node, integer_type_node, NULL_TREE);
9872 tree v8hi_ftype_v8hi_int
b4de2f7d 9873 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9874 V8HI_type_node, integer_type_node, NULL_TREE);
9875 tree v16qi_ftype_v16qi_int
b4de2f7d 9876 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9877 V16QI_type_node, integer_type_node, NULL_TREE);
9878 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9879 = build_function_type_list (V16QI_type_node,
9880 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9881 integer_type_node, NULL_TREE);
9882 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9883 = build_function_type_list (V8HI_type_node,
9884 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9885 integer_type_node, NULL_TREE);
9886 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9887 = build_function_type_list (V4SI_type_node,
9888 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9889 integer_type_node, NULL_TREE);
9890 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9891 = build_function_type_list (V4SF_type_node,
9892 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9893 integer_type_node, NULL_TREE);
0ac081f6 9894 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9895 = build_function_type_list (V4SF_type_node,
9896 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9897 tree opaque_ftype_opaque_opaque_opaque
9898 = build_function_type_list (opaque_V4SI_type_node,
9899 opaque_V4SI_type_node, opaque_V4SI_type_node,
9900 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9901 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9902 = build_function_type_list (V4SF_type_node,
9903 V4SF_type_node, V4SF_type_node,
9904 V4SI_type_node, NULL_TREE);
2212663f 9905 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9906 = build_function_type_list (V4SF_type_node,
9907 V4SF_type_node, V4SF_type_node,
9908 V4SF_type_node, NULL_TREE);
f676971a 9909 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9910 = build_function_type_list (V4SI_type_node,
9911 V4SI_type_node, V4SI_type_node,
9912 V4SI_type_node, NULL_TREE);
0ac081f6 9913 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9914 = build_function_type_list (V8HI_type_node,
9915 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9916 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9917 = build_function_type_list (V8HI_type_node,
9918 V8HI_type_node, V8HI_type_node,
9919 V8HI_type_node, NULL_TREE);
c4ad648e 9920 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9921 = build_function_type_list (V4SI_type_node,
9922 V8HI_type_node, V8HI_type_node,
9923 V4SI_type_node, NULL_TREE);
c4ad648e 9924 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9925 = build_function_type_list (V4SI_type_node,
9926 V16QI_type_node, V16QI_type_node,
9927 V4SI_type_node, NULL_TREE);
0ac081f6 9928 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9929 = build_function_type_list (V16QI_type_node,
9930 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9931 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9932 = build_function_type_list (V4SI_type_node,
9933 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9934 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9935 = build_function_type_list (V8HI_type_node,
9936 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9937 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9938 = build_function_type_list (V4SI_type_node,
9939 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9940 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9941 = build_function_type_list (V8HI_type_node,
9942 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9943 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9944 = build_function_type_list (V16QI_type_node,
9945 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9946 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9947 = build_function_type_list (V4SI_type_node,
9948 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9949 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9950 = build_function_type_list (V4SI_type_node,
9951 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9952 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9953 = build_function_type_list (V4SI_type_node,
9954 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9955 tree v4si_ftype_v8hi
9956 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9957 tree int_ftype_v4si_v4si
9958 = build_function_type_list (integer_type_node,
9959 V4SI_type_node, V4SI_type_node, NULL_TREE);
9960 tree int_ftype_v4sf_v4sf
9961 = build_function_type_list (integer_type_node,
9962 V4SF_type_node, V4SF_type_node, NULL_TREE);
9963 tree int_ftype_v16qi_v16qi
9964 = build_function_type_list (integer_type_node,
9965 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9966 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9967 = build_function_type_list (integer_type_node,
9968 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9969
6f317ef3 9970 /* Add the simple ternary operators. */
586de218 9971 d = bdesc_3arg;
ca7558fc 9972 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9973 {
2212663f
DB
9974 enum machine_mode mode0, mode1, mode2, mode3;
9975 tree type;
58646b77
PB
9976 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9977 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9978
58646b77
PB
9979 if (is_overloaded)
9980 {
9981 mode0 = VOIDmode;
9982 mode1 = VOIDmode;
9983 mode2 = VOIDmode;
9984 mode3 = VOIDmode;
9985 }
9986 else
9987 {
9988 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9989 continue;
f676971a 9990
58646b77
PB
9991 mode0 = insn_data[d->icode].operand[0].mode;
9992 mode1 = insn_data[d->icode].operand[1].mode;
9993 mode2 = insn_data[d->icode].operand[2].mode;
9994 mode3 = insn_data[d->icode].operand[3].mode;
9995 }
bb8df8a6 9996
2212663f
DB
9997 /* When all four are of the same mode. */
9998 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9999 {
10000 switch (mode0)
10001 {
58646b77
PB
10002 case VOIDmode:
10003 type = opaque_ftype_opaque_opaque_opaque;
10004 break;
617e0e1d
DB
10005 case V4SImode:
10006 type = v4si_ftype_v4si_v4si_v4si;
10007 break;
2212663f
DB
10008 case V4SFmode:
10009 type = v4sf_ftype_v4sf_v4sf_v4sf;
10010 break;
10011 case V8HImode:
10012 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10013 break;
2212663f
DB
10014 case V16QImode:
10015 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10016 break;
96038623
DE
10017 case V2SFmode:
10018 type = v2sf_ftype_v2sf_v2sf_v2sf;
10019 break;
2212663f 10020 default:
37409796 10021 gcc_unreachable ();
2212663f
DB
10022 }
10023 }
10024 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10025 {
2212663f
DB
10026 switch (mode0)
10027 {
10028 case V4SImode:
10029 type = v4si_ftype_v4si_v4si_v16qi;
10030 break;
10031 case V4SFmode:
10032 type = v4sf_ftype_v4sf_v4sf_v16qi;
10033 break;
10034 case V8HImode:
10035 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10036 break;
2212663f
DB
10037 case V16QImode:
10038 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10039 break;
2212663f 10040 default:
37409796 10041 gcc_unreachable ();
2212663f
DB
10042 }
10043 }
f676971a 10044 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10045 && mode3 == V4SImode)
24408032 10046 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10047 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10048 && mode3 == V4SImode)
24408032 10049 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10050 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10051 && mode3 == V4SImode)
24408032
AH
10052 type = v4sf_ftype_v4sf_v4sf_v4si;
10053
a7b376ee 10054 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10055 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10056 && mode3 == QImode)
b9e4e5d1 10057 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10058
a7b376ee 10059 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10060 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10061 && mode3 == QImode)
b9e4e5d1 10062 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10063
a7b376ee 10064 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10065 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10066 && mode3 == QImode)
b9e4e5d1 10067 type = v4si_ftype_v4si_v4si_int;
24408032 10068
a7b376ee 10069 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10070 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10071 && mode3 == QImode)
b9e4e5d1 10072 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10073
2212663f 10074 else
37409796 10075 gcc_unreachable ();
2212663f
DB
10076
10077 def_builtin (d->mask, d->name, type, d->code);
10078 }
10079
0ac081f6 10080 /* Add the simple binary operators. */
00b960c7 10081 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10082 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10083 {
10084 enum machine_mode mode0, mode1, mode2;
10085 tree type;
58646b77
PB
10086 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10087 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10088
58646b77
PB
10089 if (is_overloaded)
10090 {
10091 mode0 = VOIDmode;
10092 mode1 = VOIDmode;
10093 mode2 = VOIDmode;
10094 }
10095 else
bb8df8a6 10096 {
58646b77
PB
10097 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10098 continue;
f676971a 10099
58646b77
PB
10100 mode0 = insn_data[d->icode].operand[0].mode;
10101 mode1 = insn_data[d->icode].operand[1].mode;
10102 mode2 = insn_data[d->icode].operand[2].mode;
10103 }
0ac081f6
AH
10104
10105 /* When all three operands are of the same mode. */
10106 if (mode0 == mode1 && mode1 == mode2)
10107 {
10108 switch (mode0)
10109 {
58646b77
PB
10110 case VOIDmode:
10111 type = opaque_ftype_opaque_opaque;
10112 break;
0ac081f6
AH
10113 case V4SFmode:
10114 type = v4sf_ftype_v4sf_v4sf;
10115 break;
10116 case V4SImode:
10117 type = v4si_ftype_v4si_v4si;
10118 break;
10119 case V16QImode:
10120 type = v16qi_ftype_v16qi_v16qi;
10121 break;
10122 case V8HImode:
10123 type = v8hi_ftype_v8hi_v8hi;
10124 break;
a3170dc6
AH
10125 case V2SImode:
10126 type = v2si_ftype_v2si_v2si;
10127 break;
96038623
DE
10128 case V2SFmode:
10129 if (TARGET_PAIRED_FLOAT)
10130 type = v2sf_ftype_v2sf_v2sf;
10131 else
10132 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10133 break;
10134 case SImode:
10135 type = int_ftype_int_int;
10136 break;
0ac081f6 10137 default:
37409796 10138 gcc_unreachable ();
0ac081f6
AH
10139 }
10140 }
10141
10142 /* A few other combos we really don't want to do manually. */
10143
10144 /* vint, vfloat, vfloat. */
10145 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10146 type = v4si_ftype_v4sf_v4sf;
10147
10148 /* vshort, vchar, vchar. */
10149 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10150 type = v8hi_ftype_v16qi_v16qi;
10151
10152 /* vint, vshort, vshort. */
10153 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10154 type = v4si_ftype_v8hi_v8hi;
10155
10156 /* vshort, vint, vint. */
10157 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10158 type = v8hi_ftype_v4si_v4si;
10159
10160 /* vchar, vshort, vshort. */
10161 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10162 type = v16qi_ftype_v8hi_v8hi;
10163
10164 /* vint, vchar, vint. */
10165 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10166 type = v4si_ftype_v16qi_v4si;
10167
fa066a23
AH
10168 /* vint, vchar, vchar. */
10169 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10170 type = v4si_ftype_v16qi_v16qi;
10171
0ac081f6
AH
10172 /* vint, vshort, vint. */
10173 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10174 type = v4si_ftype_v8hi_v4si;
f676971a 10175
a7b376ee 10176 /* vint, vint, 5-bit literal. */
2212663f 10177 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10178 type = v4si_ftype_v4si_int;
f676971a 10179
a7b376ee 10180 /* vshort, vshort, 5-bit literal. */
2212663f 10181 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10182 type = v8hi_ftype_v8hi_int;
f676971a 10183
a7b376ee 10184 /* vchar, vchar, 5-bit literal. */
2212663f 10185 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10186 type = v16qi_ftype_v16qi_int;
0ac081f6 10187
a7b376ee 10188 /* vfloat, vint, 5-bit literal. */
617e0e1d 10189 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10190 type = v4sf_ftype_v4si_int;
f676971a 10191
a7b376ee 10192 /* vint, vfloat, 5-bit literal. */
617e0e1d 10193 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10194 type = v4si_ftype_v4sf_int;
617e0e1d 10195
a3170dc6
AH
10196 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10197 type = v2si_ftype_int_int;
10198
10199 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10200 type = v2si_ftype_v2si_char;
10201
10202 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10203 type = v2si_ftype_int_char;
10204
37409796 10205 else
0ac081f6 10206 {
37409796
NS
10207 /* int, x, x. */
10208 gcc_assert (mode0 == SImode);
0ac081f6
AH
10209 switch (mode1)
10210 {
10211 case V4SImode:
10212 type = int_ftype_v4si_v4si;
10213 break;
10214 case V4SFmode:
10215 type = int_ftype_v4sf_v4sf;
10216 break;
10217 case V16QImode:
10218 type = int_ftype_v16qi_v16qi;
10219 break;
10220 case V8HImode:
10221 type = int_ftype_v8hi_v8hi;
10222 break;
10223 default:
37409796 10224 gcc_unreachable ();
0ac081f6
AH
10225 }
10226 }
10227
2212663f
DB
10228 def_builtin (d->mask, d->name, type, d->code);
10229 }
24408032 10230
2212663f
DB
10231 /* Add the simple unary operators. */
10232 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10233 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10234 {
10235 enum machine_mode mode0, mode1;
10236 tree type;
58646b77
PB
10237 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10238 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10239
10240 if (is_overloaded)
10241 {
10242 mode0 = VOIDmode;
10243 mode1 = VOIDmode;
10244 }
10245 else
10246 {
10247 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10248 continue;
bb8df8a6 10249
58646b77
PB
10250 mode0 = insn_data[d->icode].operand[0].mode;
10251 mode1 = insn_data[d->icode].operand[1].mode;
10252 }
2212663f
DB
10253
10254 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10255 type = v4si_ftype_int;
2212663f 10256 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10257 type = v8hi_ftype_int;
2212663f 10258 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10259 type = v16qi_ftype_int;
58646b77
PB
10260 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10261 type = opaque_ftype_opaque;
617e0e1d
DB
10262 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10263 type = v4sf_ftype_v4sf;
20e26713
AH
10264 else if (mode0 == V8HImode && mode1 == V16QImode)
10265 type = v8hi_ftype_v16qi;
10266 else if (mode0 == V4SImode && mode1 == V8HImode)
10267 type = v4si_ftype_v8hi;
a3170dc6
AH
10268 else if (mode0 == V2SImode && mode1 == V2SImode)
10269 type = v2si_ftype_v2si;
10270 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10271 {
10272 if (TARGET_PAIRED_FLOAT)
10273 type = v2sf_ftype_v2sf;
10274 else
10275 type = v2sf_ftype_v2sf_spe;
10276 }
a3170dc6
AH
10277 else if (mode0 == V2SFmode && mode1 == V2SImode)
10278 type = v2sf_ftype_v2si;
10279 else if (mode0 == V2SImode && mode1 == V2SFmode)
10280 type = v2si_ftype_v2sf;
10281 else if (mode0 == V2SImode && mode1 == QImode)
10282 type = v2si_ftype_char;
2212663f 10283 else
37409796 10284 gcc_unreachable ();
2212663f 10285
0ac081f6
AH
10286 def_builtin (d->mask, d->name, type, d->code);
10287 }
10288}
10289
c15c90bb
ZW
10290static void
10291rs6000_init_libfuncs (void)
10292{
602ea4d3
JJ
10293 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10294 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10295 {
602ea4d3
JJ
10296 /* AIX library routines for float->int conversion. */
10297 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10298 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10299 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10300 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10301 }
c15c90bb 10302
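  /* When long double is the IBM extended (double-double) format
     (!TARGET_IEEEQUAD), TFmode arithmetic goes through the __gcc_q*
     helpers in libgcc, or the _xlq* entry points when staying
     compatible with the XL compilers; the else branch further below
     instead wires up the 32-bit SVR4 _q_* quad routines.  */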
602ea4d3 10303 if (!TARGET_IEEEQUAD)
98c41d98 10304 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10305 if (!TARGET_XL_COMPAT)
10306 {
10307 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10308 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10309 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10310 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10311
17caeff2 10312 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10313 {
10314 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10315 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10316 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10317 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10318 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10319 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10320 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10321
10322 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10323 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10324 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10325 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10326 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10327 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10328 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10329 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10330 }
b26941b4
JM
10331
10332 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10333 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10334 }
10335 else
10336 {
10337 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10338 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10339 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10340 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10341 }
c9034561 10342 else
c15c90bb 10343 {
c9034561 10344 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10345
10346 set_optab_libfunc (add_optab, TFmode, "_q_add");
10347 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10348 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10349 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10350 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10351 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10352 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10353
c9034561
ZW
10354 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10355 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10356 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10357 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10358 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10359 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10360
85363ca0
ZW
10361 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10362 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10363 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10364 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10365 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10366 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10367 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10368 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10369 }
10370}
fba73eb1
DE
10371
10372\f
10373/* Expand a block clear operation, and return 1 if successful. Return 0
10374 if we should let the compiler generate normal code.
10375
10376 operands[0] is the destination
10377 operands[1] is the length
57e84f18 10378 operands[3] is the alignment */
fba73eb1
DE
10379
10380int
10381expand_block_clear (rtx operands[])
10382{
10383 rtx orig_dest = operands[0];
10384 rtx bytes_rtx = operands[1];
57e84f18 10385 rtx align_rtx = operands[3];
5514620a
GK
10386 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10387 HOST_WIDE_INT align;
10388 HOST_WIDE_INT bytes;
fba73eb1
DE
10389 int offset;
10390 int clear_bytes;
5514620a 10391 int clear_step;
fba73eb1
DE
10392
10393 /* If this is not a fixed size clear, just call memset.  */
10394 if (! constp)
10395 return 0;
10396
37409796
NS
10397 /* This must be a fixed size alignment */
10398 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10399 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10400
10401 /* Anything to clear? */
10402 bytes = INTVAL (bytes_rtx);
10403 if (bytes <= 0)
10404 return 1;
10405
5514620a
GK
10406 /* Use the builtin memset after a point, to avoid huge code bloat.
10407 When optimize_size, avoid any significant code bloat; calling
10408 memset is about 4 instructions, so allow for one instruction to
10409 load zero and three to do clearing. */
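  /* With clear_step chosen below (e.g. 4 on an unaligned 32-bit
     target), that works out to clearing at most 3*clear_step == 12
     bytes inline when optimizing for size and at most
     8*clear_step == 32 bytes otherwise, before giving up and letting
     memset handle it.  */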
10410 if (TARGET_ALTIVEC && align >= 128)
10411 clear_step = 16;
10412 else if (TARGET_POWERPC64 && align >= 32)
10413 clear_step = 8;
21d818ff
NF
10414 else if (TARGET_SPE && align >= 64)
10415 clear_step = 8;
5514620a
GK
10416 else
10417 clear_step = 4;
fba73eb1 10418
5514620a
GK
10419 if (optimize_size && bytes > 3 * clear_step)
10420 return 0;
10421 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10422 return 0;
10423
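  /* Clear in the widest chunks the alignment allows: 16-byte vector
     stores when AltiVec can be used, 8-byte stores on SPE or 64-bit
     targets, then 4-, 2- and 1-byte stores for whatever remains.  */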
10424 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10425 {
fba73eb1
DE
10426 enum machine_mode mode = BLKmode;
10427 rtx dest;
f676971a 10428
5514620a
GK
10429 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10430 {
10431 clear_bytes = 16;
10432 mode = V4SImode;
10433 }
21d818ff
NF
10434 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10435 {
10436 clear_bytes = 8;
10437 mode = V2SImode;
10438 }
5514620a 10439 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10440 /* 64-bit loads and stores require word-aligned
10441 displacements. */
10442 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10443 {
10444 clear_bytes = 8;
10445 mode = DImode;
fba73eb1 10446 }
5514620a 10447 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10448 { /* move 4 bytes */
10449 clear_bytes = 4;
10450 mode = SImode;
fba73eb1 10451 }
ec53fc93 10452 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10453 { /* move 2 bytes */
10454 clear_bytes = 2;
10455 mode = HImode;
fba73eb1
DE
10456 }
10457 else /* move 1 byte at a time */
10458 {
10459 clear_bytes = 1;
10460 mode = QImode;
fba73eb1 10461 }
f676971a 10462
fba73eb1 10463 dest = adjust_address (orig_dest, mode, offset);
f676971a 10464
5514620a 10465 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10466 }
10467
10468 return 1;
10469}
10470
35aff10b 10471\f
7e69e155
MM
10472/* Expand a block move operation, and return 1 if successful. Return 0
10473 if we should let the compiler generate normal code.
10474
10475 operands[0] is the destination
10476 operands[1] is the source
10477 operands[2] is the length
10478 operands[3] is the alignment */
10479
3933e0e1
MM
10480#define MAX_MOVE_REG 4
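/* Loads are emitted as soon as they are generated, while the matching
   stores are queued in a small buffer and flushed MAX_MOVE_REG at a
   time (see the stores[] array below), presumably so that several
   loads can be scheduled ahead of the dependent stores.  */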
10481
7e69e155 10482int
a2369ed3 10483expand_block_move (rtx operands[])
7e69e155 10484{
b6c9286a
MM
10485 rtx orig_dest = operands[0];
10486 rtx orig_src = operands[1];
7e69e155 10487 rtx bytes_rtx = operands[2];
7e69e155 10488 rtx align_rtx = operands[3];
3933e0e1 10489 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10490 int align;
3933e0e1
MM
10491 int bytes;
10492 int offset;
7e69e155 10493 int move_bytes;
cabfd258
GK
10494 rtx stores[MAX_MOVE_REG];
10495 int num_reg = 0;
7e69e155 10496
3933e0e1 10497 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10498 if (! constp)
3933e0e1
MM
10499 return 0;
10500
37409796
NS
10501 /* This must be a fixed size alignment */
10502 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10503 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10504
7e69e155 10505 /* Anything to move? */
3933e0e1
MM
10506 bytes = INTVAL (bytes_rtx);
10507 if (bytes <= 0)
7e69e155
MM
10508 return 1;
10509
ea9982a8 10510 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10511 reg_parm_stack_space. */
ea9982a8 10512 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10513 return 0;
10514
cabfd258 10515 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10516 {
cabfd258 10517 union {
70128ad9 10518 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10519 rtx (*mov) (rtx, rtx);
cabfd258
GK
10520 } gen_func;
10521 enum machine_mode mode = BLKmode;
10522 rtx src, dest;
f676971a 10523
5514620a
GK
10524 /* Altivec first, since it will be faster than a string move
10525 when it applies, and usually not significantly larger. */
10526 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10527 {
10528 move_bytes = 16;
10529 mode = V4SImode;
10530 gen_func.mov = gen_movv4si;
10531 }
21d818ff
NF
10532 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10533 {
10534 move_bytes = 8;
10535 mode = V2SImode;
10536 gen_func.mov = gen_movv2si;
10537 }
5514620a 10538 else if (TARGET_STRING
cabfd258
GK
10539 && bytes > 24 /* move up to 32 bytes at a time */
10540 && ! fixed_regs[5]
10541 && ! fixed_regs[6]
10542 && ! fixed_regs[7]
10543 && ! fixed_regs[8]
10544 && ! fixed_regs[9]
10545 && ! fixed_regs[10]
10546 && ! fixed_regs[11]
10547 && ! fixed_regs[12])
7e69e155 10548 {
cabfd258 10549 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10550 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10551 }
10552 else if (TARGET_STRING
10553 && bytes > 16 /* move up to 24 bytes at a time */
10554 && ! fixed_regs[5]
10555 && ! fixed_regs[6]
10556 && ! fixed_regs[7]
10557 && ! fixed_regs[8]
10558 && ! fixed_regs[9]
10559 && ! fixed_regs[10])
10560 {
10561 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10562 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10563 }
10564 else if (TARGET_STRING
10565 && bytes > 8 /* move up to 16 bytes at a time */
10566 && ! fixed_regs[5]
10567 && ! fixed_regs[6]
10568 && ! fixed_regs[7]
10569 && ! fixed_regs[8])
10570 {
10571 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10572 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10573 }
10574 else if (bytes >= 8 && TARGET_POWERPC64
10575 /* 64-bit loads and stores require word-aligned
10576 displacements. */
fba73eb1 10577 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10578 {
10579 move_bytes = 8;
10580 mode = DImode;
10581 gen_func.mov = gen_movdi;
10582 }
10583 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10584 { /* move up to 8 bytes at a time */
10585 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10586 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10587 }
cd7d9ca4 10588 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10589 { /* move 4 bytes */
10590 move_bytes = 4;
10591 mode = SImode;
10592 gen_func.mov = gen_movsi;
10593 }
ec53fc93 10594 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10595 { /* move 2 bytes */
10596 move_bytes = 2;
10597 mode = HImode;
10598 gen_func.mov = gen_movhi;
10599 }
10600 else if (TARGET_STRING && bytes > 1)
10601 { /* move up to 4 bytes at a time */
10602 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10603 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10604 }
10605 else /* move 1 byte at a time */
10606 {
10607 move_bytes = 1;
10608 mode = QImode;
10609 gen_func.mov = gen_movqi;
10610 }
f676971a 10611
cabfd258
GK
10612 src = adjust_address (orig_src, mode, offset);
10613 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10614
10615 if (mode != BLKmode)
cabfd258
GK
10616 {
10617 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10618
cabfd258
GK
10619 emit_insn ((*gen_func.mov) (tmp_reg, src));
10620 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10621 }
3933e0e1 10622
cabfd258
GK
10623 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10624 {
10625 int i;
10626 for (i = 0; i < num_reg; i++)
10627 emit_insn (stores[i]);
10628 num_reg = 0;
10629 }
35aff10b 10630
cabfd258 10631 if (mode == BLKmode)
7e69e155 10632 {
70128ad9 10633 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10634 patterns require zero offset. */
10635 if (!REG_P (XEXP (src, 0)))
b6c9286a 10636 {
cabfd258
GK
10637 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10638 src = replace_equiv_address (src, src_reg);
b6c9286a 10639 }
cabfd258 10640 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10641
cabfd258 10642 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10643 {
cabfd258
GK
10644 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10645 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10646 }
cabfd258 10647 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10648
70128ad9 10649 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10650 GEN_INT (move_bytes & 31),
10651 align_rtx));
7e69e155 10652 }
7e69e155
MM
10653 }
10654
10655 return 1;
10656}
10657
d62294f5 10658\f
9caa3eb2
DE
10659/* Return a string to perform a load_multiple operation.
10660 operands[0] is the vector.
10661 operands[1] is the source address.
10662 operands[2] is the first destination register. */
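/* If the address register is also one of the destination registers,
   the code below loads the overlapping word last: when it is the last
   destination, an lswi of the leading words is followed by a final
   lwz into it; when it is the first, the address is advanced past it
   before the lswi and the first word is reloaded afterwards;
   otherwise each word is loaded individually with the overlapping one
   last.  Only when there is no overlap does a single {lsi|lswi} cover
   the whole vector.  */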
10663
10664const char *
a2369ed3 10665rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10666{
10667 /* We have to handle the case where the pseudo used to contain the address
10668 is assigned to one of the output registers. */
10669 int i, j;
10670 int words = XVECLEN (operands[0], 0);
10671 rtx xop[10];
10672
10673 if (XVECLEN (operands[0], 0) == 1)
10674 return "{l|lwz} %2,0(%1)";
10675
10676 for (i = 0; i < words; i++)
10677 if (refers_to_regno_p (REGNO (operands[2]) + i,
10678 REGNO (operands[2]) + i + 1, operands[1], 0))
10679 {
10680 if (i == words-1)
10681 {
10682 xop[0] = GEN_INT (4 * (words-1));
10683 xop[1] = operands[1];
10684 xop[2] = operands[2];
10685 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10686 return "";
10687 }
10688 else if (i == 0)
10689 {
10690 xop[0] = GEN_INT (4 * (words-1));
10691 xop[1] = operands[1];
10692 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10693 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10694 return "";
10695 }
10696 else
10697 {
10698 for (j = 0; j < words; j++)
10699 if (j != i)
10700 {
10701 xop[0] = GEN_INT (j * 4);
10702 xop[1] = operands[1];
10703 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10704 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10705 }
10706 xop[0] = GEN_INT (i * 4);
10707 xop[1] = operands[1];
10708 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10709 return "";
10710 }
10711 }
10712
10713 return "{lsi|lswi} %2,%1,%N0";
10714}
10715
9878760c 10716\f
a4f6c312
SS
10717/* A validation routine: say whether CODE, a condition code, and MODE
10718 match. The other alternatives either don't make sense or should
10719 never be generated. */
39a10a29 10720
48d72335 10721void
a2369ed3 10722validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10723{
37409796
NS
10724 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10725 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10726 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10727
10728 /* These don't make sense. */
37409796
NS
10729 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10730 || mode != CCUNSmode);
39a10a29 10731
37409796
NS
10732 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10733 || mode == CCUNSmode);
39a10a29 10734
37409796
NS
10735 gcc_assert (mode == CCFPmode
10736 || (code != ORDERED && code != UNORDERED
10737 && code != UNEQ && code != LTGT
10738 && code != UNGT && code != UNLT
10739 && code != UNGE && code != UNLE));
f676971a
EC
10740
10741 /* These should never be generated except for
bc9ec0e0 10742 flag_finite_math_only. */
37409796
NS
10743 gcc_assert (mode != CCFPmode
10744 || flag_finite_math_only
10745 || (code != LE && code != GE
10746 && code != UNEQ && code != LTGT
10747 && code != UNGT && code != UNLT));
39a10a29
GK
10748
10749 /* These are invalid; the information is not there. */
37409796 10750 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10751}
10752
9878760c
RK
10753\f
10754/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10755 mask required to convert the result of a rotate insn into a shift
b1765bde 10756 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
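/* For example, with SHIFTOP == 3 the shift mask is 0xfffffff8, so an
   ANDOP of 0xfffffff0 is accepted (its low three bits are clear)
   while 0x0000000c is rejected.  */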
9878760c
RK
10757
10758int
a2369ed3 10759includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10760{
e2c953b6
DE
10761 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10762
10763 shift_mask <<= INTVAL (shiftop);
9878760c 10764
b1765bde 10765 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10766}
10767
10768/* Similar, but for right shift. */
10769
10770int
a2369ed3 10771includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10772{
a7653a2c 10773 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10774
10775 shift_mask >>= INTVAL (shiftop);
10776
b1765bde 10777 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10778}
10779
c5059423
AM
10780/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10781 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10782 significant 0's, then one or more 1's, then zero or more 0's. */
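/* For example, with SHIFTOP == 4 a CONST_INT ANDOP of 0xfff0 or
   0x0ff0 is accepted (exactly four trailing zeros, then one
   contiguous run of ones), while 0xff00 (too many trailing zeros)
   and 0xf0f0 (ones in two runs) are rejected.  */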
e2c953b6
DE
10783
10784int
a2369ed3 10785includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10786{
c5059423
AM
10787 if (GET_CODE (andop) == CONST_INT)
10788 {
02071907 10789 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10790
c5059423 10791 c = INTVAL (andop);
02071907 10792 if (c == 0 || c == ~0)
c5059423 10793 return 0;
e2c953b6 10794
02071907 10795 shift_mask = ~0;
c5059423
AM
10796 shift_mask <<= INTVAL (shiftop);
10797
b6d08ca1 10798 /* Find the least significant one bit. */
c5059423
AM
10799 lsb = c & -c;
10800
10801 /* It must coincide with the LSB of the shift mask. */
10802 if (-lsb != shift_mask)
10803 return 0;
e2c953b6 10804
c5059423
AM
10805 /* Invert to look for the next transition (if any). */
10806 c = ~c;
10807
10808 /* Remove the low group of ones (originally low group of zeros). */
10809 c &= -lsb;
10810
10811 /* Again find the lsb, and check we have all 1's above. */
10812 lsb = c & -c;
10813 return c == -lsb;
10814 }
10815 else if (GET_CODE (andop) == CONST_DOUBLE
10816 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10817 {
02071907
AM
10818 HOST_WIDE_INT low, high, lsb;
10819 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10820
10821 low = CONST_DOUBLE_LOW (andop);
10822 if (HOST_BITS_PER_WIDE_INT < 64)
10823 high = CONST_DOUBLE_HIGH (andop);
10824
10825 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10826 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10827 return 0;
10828
10829 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10830 {
02071907 10831 shift_mask_high = ~0;
c5059423
AM
10832 if (INTVAL (shiftop) > 32)
10833 shift_mask_high <<= INTVAL (shiftop) - 32;
10834
10835 lsb = high & -high;
10836
10837 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10838 return 0;
10839
10840 high = ~high;
10841 high &= -lsb;
10842
10843 lsb = high & -high;
10844 return high == -lsb;
10845 }
10846
02071907 10847 shift_mask_low = ~0;
c5059423
AM
10848 shift_mask_low <<= INTVAL (shiftop);
10849
10850 lsb = low & -low;
10851
10852 if (-lsb != shift_mask_low)
10853 return 0;
10854
10855 if (HOST_BITS_PER_WIDE_INT < 64)
10856 high = ~high;
10857 low = ~low;
10858 low &= -lsb;
10859
10860 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10861 {
10862 lsb = high & -high;
10863 return high == -lsb;
10864 }
10865
10866 lsb = low & -low;
10867 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10868 }
10869 else
10870 return 0;
10871}
e2c953b6 10872
c5059423
AM
10873/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10874 to perform a left shift. It must have SHIFTOP or more least
c1207243 10875 significant 0's, with the remainder of the word 1's. */
e2c953b6 10876
c5059423 10877int
a2369ed3 10878includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10879{
e2c953b6 10880 if (GET_CODE (andop) == CONST_INT)
c5059423 10881 {
02071907 10882 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10883
02071907 10884 shift_mask = ~0;
c5059423
AM
10885 shift_mask <<= INTVAL (shiftop);
10886 c = INTVAL (andop);
10887
c1207243 10888 /* Find the least significant one bit. */
c5059423
AM
10889 lsb = c & -c;
10890
10891 /* It must be covered by the shift mask.
a4f6c312 10892 This test also rejects c == 0. */
c5059423
AM
10893 if ((lsb & shift_mask) == 0)
10894 return 0;
10895
10896 /* Check we have all 1's above the transition, and reject all 1's. */
10897 return c == -lsb && lsb != 1;
10898 }
10899 else if (GET_CODE (andop) == CONST_DOUBLE
10900 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10901 {
02071907 10902 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10903
10904 low = CONST_DOUBLE_LOW (andop);
10905
10906 if (HOST_BITS_PER_WIDE_INT < 64)
10907 {
02071907 10908 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10909
10910 high = CONST_DOUBLE_HIGH (andop);
10911
10912 if (low == 0)
10913 {
02071907 10914 shift_mask_high = ~0;
c5059423
AM
10915 if (INTVAL (shiftop) > 32)
10916 shift_mask_high <<= INTVAL (shiftop) - 32;
10917
10918 lsb = high & -high;
10919
10920 if ((lsb & shift_mask_high) == 0)
10921 return 0;
10922
10923 return high == -lsb;
10924 }
10925 if (high != ~0)
10926 return 0;
10927 }
10928
02071907 10929 shift_mask_low = ~0;
c5059423
AM
10930 shift_mask_low <<= INTVAL (shiftop);
10931
10932 lsb = low & -low;
10933
10934 if ((lsb & shift_mask_low) == 0)
10935 return 0;
10936
10937 return low == -lsb && lsb != 1;
10938 }
e2c953b6 10939 else
c5059423 10940 return 0;
9878760c 10941}
35068b43 10942
11ac38b2
DE
10943/* Return 1 if the operands will generate valid arguments for an rlwimi
10944instruction doing an insert with right shift in 64-bit mode.  The mask may
10945not start on the first bit or stop on the last bit because the wrap-around
10946effects of the instruction do not correspond to the semantics of the RTL insn. */
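/* For example, SIZEOP == 8, STARTOP == 40 and SHIFTOP == 8 satisfy
   every test below, while STARTOP == 32 or SIZEOP + STARTOP == 64
   would be rejected because the field would touch the first or the
   last bit.  */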
10947
10948int
10949insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10950{
429ec7dc
DE
10951 if (INTVAL (startop) > 32
10952 && INTVAL (startop) < 64
10953 && INTVAL (sizeop) > 1
10954 && INTVAL (sizeop) + INTVAL (startop) < 64
10955 && INTVAL (shiftop) > 0
10956 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10957 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10958 return 1;
10959
10960 return 0;
10961}
10962
35068b43 10963/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10964 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10965
10966int
a2369ed3 10967registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10968{
10969 /* We might have been passed a SUBREG. */
f676971a 10970 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10971 return 0;
f676971a 10972
90f81f99
AP
10973 /* We might have been passed non floating point registers. */
10974 if (!FP_REGNO_P (REGNO (reg1))
10975 || !FP_REGNO_P (REGNO (reg2)))
10976 return 0;
35068b43
RK
10977
10978 return (REGNO (reg1) == REGNO (reg2) - 1);
10979}
10980
a4f6c312
SS
10981/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10982 addr1 and addr2 must be in consecutive memory locations
10983 (addr2 == addr1 + 8). */
35068b43
RK
10984
10985int
90f81f99 10986mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10987{
90f81f99 10988 rtx addr1, addr2;
bb8df8a6
EC
10989 unsigned int reg1, reg2;
10990 int offset1, offset2;
35068b43 10991
90f81f99
AP
10992 /* The mems cannot be volatile. */
10993 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10994 return 0;
f676971a 10995
90f81f99
AP
10996 addr1 = XEXP (mem1, 0);
10997 addr2 = XEXP (mem2, 0);
10998
35068b43
RK
10999 /* Extract an offset (if used) from the first addr. */
11000 if (GET_CODE (addr1) == PLUS)
11001 {
11002 /* If not a REG, return zero. */
11003 if (GET_CODE (XEXP (addr1, 0)) != REG)
11004 return 0;
11005 else
11006 {
c4ad648e 11007 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11008 /* The offset must be constant! */
11009 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11010 return 0;
11011 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11012 }
11013 }
11014 else if (GET_CODE (addr1) != REG)
11015 return 0;
11016 else
11017 {
11018 reg1 = REGNO (addr1);
11019 /* This was a simple (mem (reg)) expression. Offset is 0. */
11020 offset1 = 0;
11021 }
11022
bb8df8a6
EC
11023 /* And now for the second addr. */
11024 if (GET_CODE (addr2) == PLUS)
11025 {
11026 /* If not a REG, return zero. */
11027 if (GET_CODE (XEXP (addr2, 0)) != REG)
11028 return 0;
11029 else
11030 {
11031 reg2 = REGNO (XEXP (addr2, 0));
11032 /* The offset must be constant. */
11033 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11034 return 0;
11035 offset2 = INTVAL (XEXP (addr2, 1));
11036 }
11037 }
11038 else if (GET_CODE (addr2) != REG)
35068b43 11039 return 0;
bb8df8a6
EC
11040 else
11041 {
11042 reg2 = REGNO (addr2);
11043 /* This was a simple (mem (reg)) expression. Offset is 0. */
11044 offset2 = 0;
11045 }
35068b43 11046
bb8df8a6
EC
11047 /* Both of these must have the same base register. */
11048 if (reg1 != reg2)
35068b43
RK
11049 return 0;
11050
11051 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11052 if (offset2 != offset1 + 8)
35068b43
RK
11053 return 0;
11054
11055 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11056 instructions. */
11057 return 1;
11058}
9878760c
RK
11059\f
11060/* Return the register class of a scratch register needed to copy IN into
11061 or out of a register in CLASS in MODE. If it can be done directly,
11062 NO_REGS is returned. */
11063
11064enum reg_class
3c4774e0
R
11065rs6000_secondary_reload_class (enum reg_class class,
11066 enum machine_mode mode ATTRIBUTE_UNUSED,
11067 rtx in)
9878760c 11068{
5accd822 11069 int regno;
9878760c 11070
ab82a49f
AP
11071 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11072#if TARGET_MACHO
c4ad648e 11073 && MACHOPIC_INDIRECT
ab82a49f 11074#endif
c4ad648e 11075 ))
46fad5b7
DJ
11076 {
11077 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11078 other than BASE_REGS for TARGET_ELF. So indicate that a
11079 register from BASE_REGS is needed as an intermediate
11080 register.
f676971a 11081
46fad5b7
DJ
11082 On Darwin, pic addresses require a load from memory, which
11083 needs a base register. */
11084 if (class != BASE_REGS
c4ad648e
AM
11085 && (GET_CODE (in) == SYMBOL_REF
11086 || GET_CODE (in) == HIGH
11087 || GET_CODE (in) == LABEL_REF
11088 || GET_CODE (in) == CONST))
11089 return BASE_REGS;
46fad5b7 11090 }
e7b7998a 11091
5accd822
DE
11092 if (GET_CODE (in) == REG)
11093 {
11094 regno = REGNO (in);
11095 if (regno >= FIRST_PSEUDO_REGISTER)
11096 {
11097 regno = true_regnum (in);
11098 if (regno >= FIRST_PSEUDO_REGISTER)
11099 regno = -1;
11100 }
11101 }
11102 else if (GET_CODE (in) == SUBREG)
11103 {
11104 regno = true_regnum (in);
11105 if (regno >= FIRST_PSEUDO_REGISTER)
11106 regno = -1;
11107 }
11108 else
11109 regno = -1;
11110
9878760c
RK
11111 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11112 into anything. */
11113 if (class == GENERAL_REGS || class == BASE_REGS
11114 || (regno >= 0 && INT_REGNO_P (regno)))
11115 return NO_REGS;
11116
11117 /* Constants, memory, and FP registers can go into FP registers. */
11118 if ((regno == -1 || FP_REGNO_P (regno))
11119 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
11120 return NO_REGS;
11121
0ac081f6
AH
11122 /* Memory and AltiVec registers can go into AltiVec registers. */
11123 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11124 && class == ALTIVEC_REGS)
11125 return NO_REGS;
11126
9878760c
RK
11127 /* We can copy among the CR registers. */
11128 if ((class == CR_REGS || class == CR0_REGS)
11129 && regno >= 0 && CR_REGNO_P (regno))
11130 return NO_REGS;
11131
11132 /* Otherwise, we need GENERAL_REGS. */
11133 return GENERAL_REGS;
11134}
11135\f
11136/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11137 know this is a valid comparison.
9878760c
RK
11138
11139 SCC_P is 1 if this is for an scc. That means that %D will have been
11140 used instead of %C, so the bits will be in different places.
11141
b4ac57ab 11142 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11143
11144int
a2369ed3 11145ccr_bit (rtx op, int scc_p)
9878760c
RK
11146{
11147 enum rtx_code code = GET_CODE (op);
11148 enum machine_mode cc_mode;
11149 int cc_regnum;
11150 int base_bit;
9ebbca7d 11151 rtx reg;
9878760c 11152
ec8e098d 11153 if (!COMPARISON_P (op))
9878760c
RK
11154 return -1;
11155
9ebbca7d
GK
11156 reg = XEXP (op, 0);
11157
37409796 11158 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11159
11160 cc_mode = GET_MODE (reg);
11161 cc_regnum = REGNO (reg);
11162 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11163
39a10a29 11164 validate_condition_mode (code, cc_mode);
c5defebb 11165
b7053a3f
GK
11166 /* When generating a sCOND operation, only positive conditions are
11167 allowed. */
37409796
NS
11168 gcc_assert (!scc_p
11169 || code == EQ || code == GT || code == LT || code == UNORDERED
11170 || code == GTU || code == LTU);
f676971a 11171
9878760c
RK
11172 switch (code)
11173 {
11174 case NE:
11175 return scc_p ? base_bit + 3 : base_bit + 2;
11176 case EQ:
11177 return base_bit + 2;
1c882ea4 11178 case GT: case GTU: case UNLE:
9878760c 11179 return base_bit + 1;
1c882ea4 11180 case LT: case LTU: case UNGE:
9878760c 11181 return base_bit;
1c882ea4
GK
11182 case ORDERED: case UNORDERED:
11183 return base_bit + 3;
9878760c
RK
11184
11185 case GE: case GEU:
39a10a29 11186 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11187 unordered position. So test that bit. For integer, this is ! LT
11188 unless this is an scc insn. */
39a10a29 11189 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11190
11191 case LE: case LEU:
39a10a29 11192 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11193
9878760c 11194 default:
37409796 11195 gcc_unreachable ();
9878760c
RK
11196 }
11197}
1ff7789b 11198\f
8d30c4ee 11199/* Return the GOT register. */
1ff7789b 11200
9390387d 11201rtx
a2369ed3 11202rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11203{
a4f6c312
SS
11204 /* The second flow pass currently (June 1999) can't update
11205 regs_ever_live without disturbing other parts of the compiler, so
11206 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11207 if (!can_create_pseudo_p ()
11208 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11209 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11210
8d30c4ee 11211 current_function_uses_pic_offset_table = 1;
3cb999d8 11212
1ff7789b
MM
11213 return pic_offset_table_rtx;
11214}
a7df97e6 11215\f
e2500fed
GK
11216/* Function to init struct machine_function.
11217 This will be called, via a pointer variable,
11218 from push_function_context. */
a7df97e6 11219
e2500fed 11220static struct machine_function *
863d938c 11221rs6000_init_machine_status (void)
a7df97e6 11222{
e2500fed 11223 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11224}
9878760c 11225\f
0ba1b2ff
AM
11226/* These macros test for integers and extract the low-order bits. */
11227#define INT_P(X) \
11228((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11229 && GET_MODE (X) == VOIDmode)
11230
11231#define INT_LOWPART(X) \
11232 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11233
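/* Both functions number bits the PowerPC way, with bit 0 at the most
   significant end.  For the mask 0x0ffffff0, for instance, extract_MB
   returns 4 and extract_ME returns 27; for a wrap-around mask such as
   0xf000000f they return 28 and 3.  */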
11234int
a2369ed3 11235extract_MB (rtx op)
0ba1b2ff
AM
11236{
11237 int i;
11238 unsigned long val = INT_LOWPART (op);
11239
11240 /* If the high bit is zero, the value is the first 1 bit we find
11241 from the left. */
11242 if ((val & 0x80000000) == 0)
11243 {
37409796 11244 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11245
11246 i = 1;
11247 while (((val <<= 1) & 0x80000000) == 0)
11248 ++i;
11249 return i;
11250 }
11251
11252 /* If the high bit is set and the low bit is not, or the mask is all
11253 1's, the value is zero. */
11254 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11255 return 0;
11256
11257 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11258 from the right. */
11259 i = 31;
11260 while (((val >>= 1) & 1) != 0)
11261 --i;
11262
11263 return i;
11264}
11265
11266int
a2369ed3 11267extract_ME (rtx op)
0ba1b2ff
AM
11268{
11269 int i;
11270 unsigned long val = INT_LOWPART (op);
11271
11272 /* If the low bit is zero, the value is the first 1 bit we find from
11273 the right. */
11274 if ((val & 1) == 0)
11275 {
37409796 11276 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11277
11278 i = 30;
11279 while (((val >>= 1) & 1) == 0)
11280 --i;
11281
11282 return i;
11283 }
11284
11285 /* If the low bit is set and the high bit is not, or the mask is all
11286 1's, the value is 31. */
11287 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11288 return 31;
11289
11290 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11291 from the left. */
11292 i = 0;
11293 while (((val <<= 1) & 0x80000000) != 0)
11294 ++i;
11295
11296 return i;
11297}
11298
c4501e62
JJ
11299/* Locate some local-dynamic symbol still in use by this function
11300 so that we can print its name in some tls_ld pattern. */
11301
11302static const char *
863d938c 11303rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11304{
11305 rtx insn;
11306
11307 if (cfun->machine->some_ld_name)
11308 return cfun->machine->some_ld_name;
11309
11310 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11311 if (INSN_P (insn)
11312 && for_each_rtx (&PATTERN (insn),
11313 rs6000_get_some_local_dynamic_name_1, 0))
11314 return cfun->machine->some_ld_name;
11315
37409796 11316 gcc_unreachable ();
c4501e62
JJ
11317}
11318
11319/* Helper function for rs6000_get_some_local_dynamic_name. */
11320
11321static int
a2369ed3 11322rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11323{
11324 rtx x = *px;
11325
11326 if (GET_CODE (x) == SYMBOL_REF)
11327 {
11328 const char *str = XSTR (x, 0);
11329 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11330 {
11331 cfun->machine->some_ld_name = str;
11332 return 1;
11333 }
11334 }
11335
11336 return 0;
11337}
11338
85b776df
AM
11339/* Write out a function code label. */
11340
11341void
11342rs6000_output_function_entry (FILE *file, const char *fname)
11343{
11344 if (fname[0] != '.')
11345 {
11346 switch (DEFAULT_ABI)
11347 {
11348 default:
37409796 11349 gcc_unreachable ();
85b776df
AM
11350
11351 case ABI_AIX:
11352 if (DOT_SYMBOLS)
11353 putc ('.', file);
11354 else
11355 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11356 break;
11357
11358 case ABI_V4:
11359 case ABI_DARWIN:
11360 break;
11361 }
11362 }
11363 if (TARGET_AIX)
11364 RS6000_OUTPUT_BASENAME (file, fname);
11365 else
11366 assemble_name (file, fname);
11367}
11368
9878760c
RK
11369/* Print an operand. Recognize special options, documented below. */
11370
38c1f2d7 11371#if TARGET_ELF
d9407988 11372#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11373#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11374#else
11375#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11376#define SMALL_DATA_REG 0
ba5e43aa
MM
11377#endif
11378
9878760c 11379void
a2369ed3 11380print_operand (FILE *file, rtx x, int code)
9878760c
RK
11381{
11382 int i;
a260abc9 11383 HOST_WIDE_INT val;
0ba1b2ff 11384 unsigned HOST_WIDE_INT uval;
9878760c
RK
11385
11386 switch (code)
11387 {
a8b3aeda 11388 case '.':
a85d226b
RK
11389 /* Write out an instruction after the call which may be replaced
11390 with glue code by the loader. This depends on the AIX version. */
11391 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11392 return;
11393
81eace42
GK
11394 /* %a is output_address. */
11395
9854d9ed
RK
11396 case 'A':
11397 /* If X is a constant integer whose low-order 5 bits are zero,
11398 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11399 in the AIX assembler where "sri" with a zero shift count
20e26713 11400 writes a trash instruction. */
9854d9ed 11401 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11402 putc ('l', file);
9854d9ed 11403 else
76229ac8 11404 putc ('r', file);
9854d9ed
RK
11405 return;
11406
11407 case 'b':
e2c953b6
DE
11408 /* If constant, low-order 16 bits of constant, unsigned.
11409 Otherwise, write normally. */
11410 if (INT_P (x))
11411 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11412 else
11413 print_operand (file, x, 0);
cad12a8d
RK
11414 return;
11415
a260abc9
DE
11416 case 'B':
11417 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11418 for 64-bit mask direction. */
9390387d 11419 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11420 return;
a260abc9 11421
81eace42
GK
11422 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11423 output_operand. */
11424
423c1189
AH
11425 case 'c':
11426 /* X is a CR register. Print the number of the GT bit of the CR. */
11427 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11428 output_operand_lossage ("invalid %%c value");
11429 else
11430 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11431 return;
11432
11433 case 'D':
cef6b86c 11434 /* Like 'J' but get to the GT bit only. */
37409796 11435 gcc_assert (GET_CODE (x) == REG);
423c1189 11436
cef6b86c
EB
11437 /* Bit 1 is GT bit. */
11438 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11439
cef6b86c
EB
11440 /* Add one for shift count in rlinm for scc. */
11441 fprintf (file, "%d", i + 1);
423c1189
AH
11442 return;
11443
9854d9ed 11444 case 'E':
39a10a29 11445 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11446 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11447 output_operand_lossage ("invalid %%E value");
78fbdbf7 11448 else
39a10a29 11449 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11450 return;
9854d9ed
RK
11451
11452 case 'f':
11453 /* X is a CR register. Print the shift count needed to move it
11454 to the high-order four bits. */
11455 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11456 output_operand_lossage ("invalid %%f value");
11457 else
9ebbca7d 11458 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11459 return;
11460
11461 case 'F':
11462 /* Similar, but print the count for the rotate in the opposite
11463 direction. */
11464 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11465 output_operand_lossage ("invalid %%F value");
11466 else
9ebbca7d 11467 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11468 return;
11469
11470 case 'G':
11471 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11472 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11473 if (GET_CODE (x) != CONST_INT)
11474 output_operand_lossage ("invalid %%G value");
11475 else if (INTVAL (x) >= 0)
76229ac8 11476 putc ('z', file);
9854d9ed 11477 else
76229ac8 11478 putc ('m', file);
9854d9ed 11479 return;
e2c953b6 11480
9878760c 11481 case 'h':
a4f6c312
SS
11482 /* If constant, output low-order five bits. Otherwise, write
11483 normally. */
9878760c 11484 if (INT_P (x))
5f59ecb7 11485 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11486 else
11487 print_operand (file, x, 0);
11488 return;
11489
64305719 11490 case 'H':
a4f6c312
SS
11491 /* If constant, output low-order six bits. Otherwise, write
11492 normally. */
64305719 11493 if (INT_P (x))
5f59ecb7 11494 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11495 else
11496 print_operand (file, x, 0);
11497 return;
11498
9854d9ed
RK
11499 case 'I':
11500 /* Print `i' if this is a constant, else nothing. */
9878760c 11501 if (INT_P (x))
76229ac8 11502 putc ('i', file);
9878760c
RK
11503 return;
11504
9854d9ed
RK
11505 case 'j':
11506 /* Write the bit number in CCR for jump. */
11507 i = ccr_bit (x, 0);
11508 if (i == -1)
11509 output_operand_lossage ("invalid %%j code");
9878760c 11510 else
9854d9ed 11511 fprintf (file, "%d", i);
9878760c
RK
11512 return;
11513
9854d9ed
RK
11514 case 'J':
11515 /* Similar, but add one for shift count in rlinm for scc and pass
11516 scc flag to `ccr_bit'. */
11517 i = ccr_bit (x, 1);
11518 if (i == -1)
11519 output_operand_lossage ("invalid %%J code");
11520 else
a0466a68
RK
11521 /* If we want bit 31, write a shift count of zero, not 32. */
11522 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11523 return;
11524
9854d9ed
RK
11525 case 'k':
11526 /* X must be a constant. Write the 1's complement of the
11527 constant. */
9878760c 11528 if (! INT_P (x))
9854d9ed 11529 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11530 else
11531 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11532 return;
11533
81eace42 11534 case 'K':
9ebbca7d
GK
11535 /* X must be a symbolic constant on ELF. Write an
11536 expression suitable for an 'addi' that adds in the low 16
11537 bits of the MEM. */
11538 if (GET_CODE (x) != CONST)
11539 {
11540 print_operand_address (file, x);
11541 fputs ("@l", file);
11542 }
11543 else
11544 {
11545 if (GET_CODE (XEXP (x, 0)) != PLUS
11546 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11547 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11548 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11549 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11550 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11551 fputs ("@l", file);
ed8d2920
MM
11552 /* For GNU as, there must be a non-alphanumeric character
11553 between 'l' and the number. The '-' is added by
11554 print_operand() already. */
11555 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11556 fputs ("+", file);
9ebbca7d
GK
11557 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11558 }
81eace42
GK
11559 return;
11560
11561 /* %l is output_asm_label. */
9ebbca7d 11562
9854d9ed
RK
11563 case 'L':
11564 /* Write second word of DImode or DFmode reference. Works on register
11565 or non-indexed memory only. */
11566 if (GET_CODE (x) == REG)
fb5c67a7 11567 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11568 else if (GET_CODE (x) == MEM)
11569 {
11570 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11571 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11572 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11573 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11574 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11575 UNITS_PER_WORD));
6fb5fa3c
DB
11576 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11577 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11578 UNITS_PER_WORD));
9854d9ed 11579 else
d7624dc0
RK
11580 output_address (XEXP (adjust_address_nv (x, SImode,
11581 UNITS_PER_WORD),
11582 0));
ed8908e7 11583
ba5e43aa 11584 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11585 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11586 reg_names[SMALL_DATA_REG]);
9854d9ed 11587 }
9878760c 11588 return;
f676971a 11589
9878760c
RK
11590 case 'm':
11591 /* MB value for a mask operand. */
b1765bde 11592 if (! mask_operand (x, SImode))
9878760c
RK
11593 output_operand_lossage ("invalid %%m value");
11594
0ba1b2ff 11595 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11596 return;
11597
11598 case 'M':
11599 /* ME value for a mask operand. */
b1765bde 11600 if (! mask_operand (x, SImode))
a260abc9 11601 output_operand_lossage ("invalid %%M value");
9878760c 11602
0ba1b2ff 11603 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11604 return;
11605
81eace42
GK
11606 /* %n outputs the negative of its operand. */
11607
9878760c
RK
11608 case 'N':
11609 /* Write the number of elements in the vector times 4. */
11610 if (GET_CODE (x) != PARALLEL)
11611 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11612 else
11613 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11614 return;
11615
11616 case 'O':
11617 /* Similar, but subtract 1 first. */
11618 if (GET_CODE (x) != PARALLEL)
1427100a 11619 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11620 else
11621 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11622 return;
11623
9854d9ed
RK
11624 case 'p':
11625 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11626 if (! INT_P (x)
2bfcf297 11627 || INT_LOWPART (x) < 0
9854d9ed
RK
11628 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11629 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11630 else
11631 fprintf (file, "%d", i);
9854d9ed
RK
11632 return;
11633
9878760c
RK
11634 case 'P':
11635 /* The operand must be an indirect memory reference. The result
8bb418a3 11636 is the register name. */
9878760c
RK
11637 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11638 || REGNO (XEXP (x, 0)) >= 32)
11639 output_operand_lossage ("invalid %%P value");
e2c953b6 11640 else
fb5c67a7 11641 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11642 return;
11643
dfbdccdb
GK
11644 case 'q':
11645 /* This outputs the logical code corresponding to a boolean
11646 expression. The expression may have one or both operands
39a10a29 11647 negated (if one, only the first one). For condition register
c4ad648e
AM
11648 logical operations, it will also treat the negated
11649 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11650 {
63bc1d05 11651 const char *const *t = 0;
dfbdccdb
GK
11652 const char *s;
11653 enum rtx_code code = GET_CODE (x);
11654 static const char * const tbl[3][3] = {
11655 { "and", "andc", "nor" },
11656 { "or", "orc", "nand" },
11657 { "xor", "eqv", "xor" } };
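	/* The row is picked by the code (AND, IOR, XOR) and the column by
	   which operands are negated, so e.g. (and (not a) b) prints
	   "andc" and (ior (not a) (not b)) prints "nand".  */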
11658
11659 if (code == AND)
11660 t = tbl[0];
11661 else if (code == IOR)
11662 t = tbl[1];
11663 else if (code == XOR)
11664 t = tbl[2];
11665 else
11666 output_operand_lossage ("invalid %%q value");
11667
11668 if (GET_CODE (XEXP (x, 0)) != NOT)
11669 s = t[0];
11670 else
11671 {
11672 if (GET_CODE (XEXP (x, 1)) == NOT)
11673 s = t[2];
11674 else
11675 s = t[1];
11676 }
f676971a 11677
dfbdccdb
GK
11678 fputs (s, file);
11679 }
11680 return;
11681
2c4a9cff
DE
11682 case 'Q':
11683 if (TARGET_MFCRF)
3b6ce0af 11684 fputc (',', file);
5efb1046 11685 /* FALLTHRU */
2c4a9cff
DE
11686 else
11687 return;
11688
9854d9ed
RK
11689 case 'R':
11690 /* X is a CR register. Print the mask for `mtcrf'. */
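	 /* The mtcrf FXM field has one bit per CR field, with the most
	    significant bit selecting CR0, so CR<n> maps to 128 >> n;
	    for example CR2 yields a mask of 32.  */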
11691 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11692 output_operand_lossage ("invalid %%R value");
11693 else
9ebbca7d 11694 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11695 return;
9854d9ed
RK
11696
11697 case 's':
11698 /* Low 5 bits of 32 - value */
11699 if (! INT_P (x))
11700 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11701 else
11702 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11703 return;
9854d9ed 11704
a260abc9 11705 case 'S':
0ba1b2ff 11706 /* PowerPC64 mask position. A mask of all zeros is excluded.
a260abc9
DE
11707 CONST_INT 32-bit mask is considered sign-extended so any
11708 transition must occur within the CONST_INT, not on the boundary. */
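	 /* Concretely (IBM bit numbering, bit 0 = most significant): a
	    "clear left" mask such as 0xff has ones in bits 56..63 and
	    prints 56, the position of its first one bit; a "clear
	    right" mask such as 0xff00000000000000 has ones in bits 0..7
	    and prints 7, the position of its last one bit.  */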
1990cd79 11709 if (! mask64_operand (x, DImode))
a260abc9
DE
11710 output_operand_lossage ("invalid %%S value");
11711
0ba1b2ff 11712 uval = INT_LOWPART (x);
a260abc9 11713
0ba1b2ff 11714 if (uval & 1) /* Clear Left */
a260abc9 11715 {
f099d360
GK
11716#if HOST_BITS_PER_WIDE_INT > 64
11717 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11718#endif
0ba1b2ff 11719 i = 64;
a260abc9 11720 }
0ba1b2ff 11721 else /* Clear Right */
a260abc9 11722 {
0ba1b2ff 11723 uval = ~uval;
f099d360
GK
11724#if HOST_BITS_PER_WIDE_INT > 64
11725 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11726#endif
0ba1b2ff 11727 i = 63;
a260abc9 11728 }
0ba1b2ff
AM
11729 while (uval != 0)
11730 --i, uval >>= 1;
37409796 11731 gcc_assert (i >= 0);
0ba1b2ff
AM
11732 fprintf (file, "%d", i);
11733 return;
a260abc9 11734
a3170dc6
AH
11735 case 't':
11736 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11737 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11738
11739 /* Bit 3 is OV bit. */
11740 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11741
11742 /* If we want bit 31, write a shift count of zero, not 32. */
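	 /* (The value printed is a rotate-left count: rotating left by
	    i + 1 modulo 32 brings CR bit i into the least significant
	    position.)  */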
11743 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11744 return;
11745
cccf3bdc
DE
11746 case 'T':
11747 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11748 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11749 && REGNO (x) != CTR_REGNO))
cccf3bdc 11750 output_operand_lossage ("invalid %%T value");
1de43f85 11751 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11752 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11753 else
11754 fputs ("ctr", file);
11755 return;
11756
9854d9ed 11757 case 'u':
802a0058 11758 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11759 if (! INT_P (x))
11760 output_operand_lossage ("invalid %%u value");
e2c953b6 11761 else
f676971a 11762 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11763 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11764 return;
11765
802a0058
MM
11766 case 'v':
11767 /* High-order 16 bits of constant for use in signed operand. */
11768 if (! INT_P (x))
11769 output_operand_lossage ("invalid %%v value");
e2c953b6 11770 else
134c32f6
DE
11771 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11772 (INT_LOWPART (x) >> 16) & 0xffff);
11773 return;
802a0058 11774
9854d9ed
RK
11775 case 'U':
11776 /* Print `u' if this has an auto-increment or auto-decrement. */
11777 if (GET_CODE (x) == MEM
11778 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11779 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11780 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11781 putc ('u', file);
9854d9ed 11782 return;
9878760c 11783
e0cd0770
JC
11784 case 'V':
11785 /* Print the trap code for this operand. */
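	 /* The numbers in the comments below are the TO-field encodings:
	    16 = signed less than, 8 = signed greater than, 4 = equal,
	    2 = unsigned less than, 1 = unsigned greater than; compound
	    conditions OR these together (e.g. "ne" is 16 + 8 = 24).  */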
11786 switch (GET_CODE (x))
11787 {
11788 case EQ:
11789 fputs ("eq", file); /* 4 */
11790 break;
11791 case NE:
11792 fputs ("ne", file); /* 24 */
11793 break;
11794 case LT:
11795 fputs ("lt", file); /* 16 */
11796 break;
11797 case LE:
11798 fputs ("le", file); /* 20 */
11799 break;
11800 case GT:
11801 fputs ("gt", file); /* 8 */
11802 break;
11803 case GE:
11804 fputs ("ge", file); /* 12 */
11805 break;
11806 case LTU:
11807 fputs ("llt", file); /* 2 */
11808 break;
11809 case LEU:
11810 fputs ("lle", file); /* 6 */
11811 break;
11812 case GTU:
11813 fputs ("lgt", file); /* 1 */
11814 break;
11815 case GEU:
11816 fputs ("lge", file); /* 5 */
11817 break;
11818 default:
37409796 11819 gcc_unreachable ();
e0cd0770
JC
11820 }
11821 break;
11822
9854d9ed
RK
11823 case 'w':
11824 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11825 normally. */
11826 if (INT_P (x))
f676971a 11827 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11828 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11829 else
11830 print_operand (file, x, 0);
9878760c
RK
11831 return;
11832
9854d9ed 11833 case 'W':
e2c953b6 11834 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11835 val = (GET_CODE (x) == CONST_INT
11836 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11837
11838 if (val < 0)
11839 i = -1;
9854d9ed 11840 else
e2c953b6
DE
11841 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11842 if ((val <<= 1) < 0)
11843 break;
11844
11845#if HOST_BITS_PER_WIDE_INT == 32
11846 if (GET_CODE (x) == CONST_INT && i >= 0)
11847 i += 32; /* zero-extend high-part was all 0's */
11848 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11849 {
11850 val = CONST_DOUBLE_LOW (x);
11851
37409796
NS
11852 gcc_assert (val);
11853 if (val < 0)
e2c953b6
DE
11854 --i;
11855 else
11856 for ( ; i < 64; i++)
11857 if ((val <<= 1) < 0)
11858 break;
11859 }
11860#endif
11861
11862 fprintf (file, "%d", i + 1);
9854d9ed 11863 return;
9878760c 11864
9854d9ed
RK
11865 case 'X':
11866 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11867 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11868 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11869 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11870 putc ('x', file);
9854d9ed 11871 return;
9878760c 11872
9854d9ed
RK
11873 case 'Y':
11874 /* Like 'L', for third word of TImode */
11875 if (GET_CODE (x) == REG)
fb5c67a7 11876 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11877 else if (GET_CODE (x) == MEM)
9878760c 11878 {
9854d9ed
RK
11879 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11880 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11881 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11882 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11883 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11884 else
d7624dc0 11885 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11886 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11887 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11888 reg_names[SMALL_DATA_REG]);
9878760c
RK
11889 }
11890 return;
f676971a 11891
9878760c 11892 case 'z':
b4ac57ab
RS
11893 /* X is a SYMBOL_REF. Write out the name preceded by a
11894 period and without any trailing data in brackets. Used for function
4d30c363
MM
11895 names. If we are configured for System V (or the embedded ABI) on
11896 the PowerPC, do not emit the period, since those systems do not use
11897 TOCs and the like. */
37409796 11898 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11899
c4ad648e
AM
11900 /* Mark the decl as referenced so that cgraph will output the
11901 function. */
9bf6462a 11902 if (SYMBOL_REF_DECL (x))
c4ad648e 11903 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11904
85b776df 11905 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11906 if (TARGET_MACHO)
11907 {
11908 const char *name = XSTR (x, 0);
a031e781 11909#if TARGET_MACHO
3b48085e 11910 if (MACHOPIC_INDIRECT
11abc112
MM
11911 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11912 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11913#endif
11914 assemble_name (file, name);
11915 }
85b776df 11916 else if (!DOT_SYMBOLS)
9739c90c 11917 assemble_name (file, XSTR (x, 0));
85b776df
AM
11918 else
11919 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11920 return;
11921
9854d9ed
RK
11922 case 'Z':
11923 /* Like 'L', for last word of TImode. */
11924 if (GET_CODE (x) == REG)
fb5c67a7 11925 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11926 else if (GET_CODE (x) == MEM)
11927 {
11928 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11929 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11930 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11931 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11932 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11933 else
d7624dc0 11934 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11935 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11936 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11937 reg_names[SMALL_DATA_REG]);
9854d9ed 11938 }
5c23c401 11939 return;
0ac081f6 11940
a3170dc6 11941 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11942 case 'y':
11943 {
11944 rtx tmp;
11945
37409796 11946 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11947
11948 tmp = XEXP (x, 0);
11949
90d3ff1c 11950 /* Ugly hack because %y is overloaded. */
8ef65e3d 11951 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11952 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11953 || GET_MODE (x) == TFmode
11954 || GET_MODE (x) == TImode))
a3170dc6
AH
11955 {
11956 /* Handle [reg]. */
11957 if (GET_CODE (tmp) == REG)
11958 {
11959 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11960 break;
11961 }
11962 /* Handle [reg+UIMM]. */
11963 else if (GET_CODE (tmp) == PLUS &&
11964 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11965 {
11966 int x;
11967
37409796 11968 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11969
11970 x = INTVAL (XEXP (tmp, 1));
11971 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11972 break;
11973 }
11974
11975 /* Fall through. Must be [reg+reg]. */
11976 }
850e8d3d
DN
11977 if (TARGET_ALTIVEC
11978 && GET_CODE (tmp) == AND
11979 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11980 && INTVAL (XEXP (tmp, 1)) == -16)
11981 tmp = XEXP (tmp, 0);
0ac081f6 11982 if (GET_CODE (tmp) == REG)
c62f2db5 11983 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11984 else
0ac081f6 11985 {
37409796 11986 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11987 && REG_P (XEXP (tmp, 0))
11988 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11989
0ac081f6
AH
11990 if (REGNO (XEXP (tmp, 0)) == 0)
11991 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11992 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11993 else
11994 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11995 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11996 }
0ac081f6
AH
11997 break;
11998 }
f676971a 11999
9878760c
RK
12000 case 0:
12001 if (GET_CODE (x) == REG)
12002 fprintf (file, "%s", reg_names[REGNO (x)]);
12003 else if (GET_CODE (x) == MEM)
12004 {
12005 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12006 know the width from the mode. */
12007 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12008 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12009 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12010 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12011 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12012 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12013 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12014 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12015 else
a54d04b7 12016 output_address (XEXP (x, 0));
9878760c
RK
12017 }
12018 else
a54d04b7 12019 output_addr_const (file, x);
a85d226b 12020 return;
9878760c 12021
c4501e62
JJ
12022 case '&':
12023 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12024 return;
12025
9878760c
RK
12026 default:
12027 output_operand_lossage ("invalid %%xn code");
12028 }
12029}
12030\f
12031/* Print the address of an operand. */
12032
12033void
a2369ed3 12034print_operand_address (FILE *file, rtx x)
9878760c
RK
12035{
12036 if (GET_CODE (x) == REG)
4697a36c 12037 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12038 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12039 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12040 {
12041 output_addr_const (file, x);
ba5e43aa 12042 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12043 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12044 reg_names[SMALL_DATA_REG]);
37409796
NS
12045 else
12046 gcc_assert (!TARGET_TOC);
9878760c
RK
12047 }
12048 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12049 {
9024f4b8 12050 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12051 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12052 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12053 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12054 else
4697a36c
MM
12055 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12056 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12057 }
12058 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12059 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12060 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12061#if TARGET_ELF
12062 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12063 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12064 {
12065 output_addr_const (file, XEXP (x, 1));
12066 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12067 }
c859cda6
DJ
12068#endif
12069#if TARGET_MACHO
12070 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12071 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12072 {
12073 fprintf (file, "lo16(");
12074 output_addr_const (file, XEXP (x, 1));
12075 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12076 }
3cb999d8 12077#endif
4d588c14 12078 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12079 {
2bfcf297 12080 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12081 {
2bfcf297
DB
12082 rtx contains_minus = XEXP (x, 1);
12083 rtx minus, symref;
12084 const char *name;
f676971a 12085
9ebbca7d 12086 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12087 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12088 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12089 contains_minus = XEXP (contains_minus, 0);
12090
2bfcf297
DB
12091 minus = XEXP (contains_minus, 0);
12092 symref = XEXP (minus, 0);
12093 XEXP (contains_minus, 0) = symref;
12094 if (TARGET_ELF)
12095 {
12096 char *newname;
12097
12098 name = XSTR (symref, 0);
12099 newname = alloca (strlen (name) + sizeof ("@toc"));
12100 strcpy (newname, name);
12101 strcat (newname, "@toc");
12102 XSTR (symref, 0) = newname;
12103 }
12104 output_addr_const (file, XEXP (x, 1));
12105 if (TARGET_ELF)
12106 XSTR (symref, 0) = name;
9ebbca7d
GK
12107 XEXP (contains_minus, 0) = minus;
12108 }
12109 else
12110 output_addr_const (file, XEXP (x, 1));
12111
12112 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12113 }
9878760c 12114 else
37409796 12115 gcc_unreachable ();
9878760c
RK
12116}
12117\f
88cad84b 12118/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12119 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12120 is defined. It also needs to handle DI-mode objects on 64-bit
12121 targets. */
12122
12123static bool
a2369ed3 12124rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12125{
f4f4921e 12126#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12127 /* Special handling for SI values. */
84dcde01 12128 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12129 {
301d03af 12130 static int recurse = 0;
f676971a 12131
301d03af
RS
12132 /* For -mrelocatable, we mark all addresses that need to be fixed up
12133 in the .fixup section. */
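      /* Roughly, for an address expression X the code below emits

	     .LCPn:	.long	(X)@fixup
		     .section ".fixup","aw"
		     .align	2
		     .long	.LCPn
		     .previous

	 so that the word at .LCPn is listed for fixing up at load time.
	 (.LCPn stands for the internal label generated from "LCP" and
	 fixuplabelno.)  */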
12134 if (TARGET_RELOCATABLE
d6b5193b
RS
12135 && in_section != toc_section
12136 && in_section != text_section
4325ca90 12137 && !unlikely_text_section_p (in_section)
301d03af
RS
12138 && !recurse
12139 && GET_CODE (x) != CONST_INT
12140 && GET_CODE (x) != CONST_DOUBLE
12141 && CONSTANT_P (x))
12142 {
12143 char buf[256];
12144
12145 recurse = 1;
12146 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12147 fixuplabelno++;
12148 ASM_OUTPUT_LABEL (asm_out_file, buf);
12149 fprintf (asm_out_file, "\t.long\t(");
12150 output_addr_const (asm_out_file, x);
12151 fprintf (asm_out_file, ")@fixup\n");
12152 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12153 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12154 fprintf (asm_out_file, "\t.long\t");
12155 assemble_name (asm_out_file, buf);
12156 fprintf (asm_out_file, "\n\t.previous\n");
12157 recurse = 0;
12158 return true;
12159 }
12160 /* Remove initial .'s to turn a -mcall-aixdesc function
12161 address into the address of the descriptor, not the function
12162 itself. */
12163 else if (GET_CODE (x) == SYMBOL_REF
12164 && XSTR (x, 0)[0] == '.'
12165 && DEFAULT_ABI == ABI_AIX)
12166 {
12167 const char *name = XSTR (x, 0);
12168 while (*name == '.')
12169 name++;
12170
12171 fprintf (asm_out_file, "\t.long\t%s\n", name);
12172 return true;
12173 }
12174 }
f4f4921e 12175#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12176 return default_assemble_integer (x, size, aligned_p);
12177}
93638d7a
AM
12178
12179#ifdef HAVE_GAS_HIDDEN
12180/* Emit an assembler directive to set symbol visibility for DECL to
12181 VISIBILITY_TYPE. */
12182
5add3202 12183static void
a2369ed3 12184rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12185{
93638d7a
AM
12186 /* Functions need to have their entry point symbol visibility set as
12187 well as their descriptor symbol visibility. */
85b776df
AM
12188 if (DEFAULT_ABI == ABI_AIX
12189 && DOT_SYMBOLS
12190 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12191 {
25fdb4dc 12192 static const char * const visibility_types[] = {
c4ad648e 12193 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12194 };
12195
12196 const char *name, *type;
93638d7a
AM
12197
12198 name = ((* targetm.strip_name_encoding)
12199 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12200 type = visibility_types[vis];
93638d7a 12201
25fdb4dc
RH
12202 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12203 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12204 }
25fdb4dc
RH
12205 else
12206 default_assemble_visibility (decl, vis);
93638d7a
AM
12207}
12208#endif
301d03af 12209\f
39a10a29 12210enum rtx_code
a2369ed3 12211rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12212{
 12213 /* Reversal of FP compares needs care -- an ordered compare
12214 becomes an unordered compare and vice versa. */
f676971a 12215 if (mode == CCFPmode
bc9ec0e0
GK
12216 && (!flag_finite_math_only
12217 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12218 || code == UNEQ || code == LTGT))
bab6226b 12219 return reverse_condition_maybe_unordered (code);
39a10a29 12220 else
bab6226b 12221 return reverse_condition (code);
39a10a29
GK
12222}
12223
39a10a29
GK
12224/* Generate a compare for CODE. Return a brand-new rtx that
12225 represents the result of the compare. */
a4f6c312 12226
39a10a29 12227static rtx
a2369ed3 12228rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12229{
12230 enum machine_mode comp_mode;
12231 rtx compare_result;
12232
12233 if (rs6000_compare_fp_p)
12234 comp_mode = CCFPmode;
12235 else if (code == GTU || code == LTU
c4ad648e 12236 || code == GEU || code == LEU)
39a10a29 12237 comp_mode = CCUNSmode;
60934f9c
NS
12238 else if ((code == EQ || code == NE)
12239 && GET_CODE (rs6000_compare_op0) == SUBREG
12240 && GET_CODE (rs6000_compare_op1) == SUBREG
12241 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12242 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12243 /* These are unsigned values, perhaps there will be a later
12244 ordering compare that can be shared with this one.
12245 Unfortunately we cannot detect the signedness of the operands
12246 for non-subregs. */
12247 comp_mode = CCUNSmode;
39a10a29
GK
12248 else
12249 comp_mode = CCmode;
12250
12251 /* First, the compare. */
12252 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12253
cef6b86c 12254 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12255 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12256 && rs6000_compare_fp_p)
a3170dc6 12257 {
64022b5d 12258 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12259 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12260
12261 if (op_mode == VOIDmode)
12262 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12263
cef6b86c
EB
12264 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12265 This explains the following mess. */
423c1189 12266
a3170dc6
AH
12267 switch (code)
12268 {
423c1189 12269 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12270 switch (op_mode)
12271 {
12272 case SFmode:
12273 cmp = flag_unsafe_math_optimizations
12274 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12275 rs6000_compare_op1)
12276 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12277 rs6000_compare_op1);
12278 break;
12279
12280 case DFmode:
12281 cmp = flag_unsafe_math_optimizations
12282 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12283 rs6000_compare_op1)
12284 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12285 rs6000_compare_op1);
12286 break;
12287
17caeff2
JM
12288 case TFmode:
12289 cmp = flag_unsafe_math_optimizations
12290 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12291 rs6000_compare_op1)
12292 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12293 rs6000_compare_op1);
12294 break;
12295
37409796
NS
12296 default:
12297 gcc_unreachable ();
12298 }
a3170dc6 12299 break;
bb8df8a6 12300
423c1189 12301 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12302 switch (op_mode)
12303 {
12304 case SFmode:
12305 cmp = flag_unsafe_math_optimizations
12306 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12307 rs6000_compare_op1)
12308 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12309 rs6000_compare_op1);
12310 break;
bb8df8a6 12311
37409796
NS
12312 case DFmode:
12313 cmp = flag_unsafe_math_optimizations
12314 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12315 rs6000_compare_op1)
12316 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12317 rs6000_compare_op1);
12318 break;
12319
17caeff2
JM
12320 case TFmode:
12321 cmp = flag_unsafe_math_optimizations
12322 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12323 rs6000_compare_op1)
12324 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12325 rs6000_compare_op1);
12326 break;
12327
37409796
NS
12328 default:
12329 gcc_unreachable ();
12330 }
a3170dc6 12331 break;
bb8df8a6 12332
423c1189 12333 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12334 switch (op_mode)
12335 {
12336 case SFmode:
12337 cmp = flag_unsafe_math_optimizations
12338 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12339 rs6000_compare_op1)
12340 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12341 rs6000_compare_op1);
12342 break;
bb8df8a6 12343
37409796
NS
12344 case DFmode:
12345 cmp = flag_unsafe_math_optimizations
12346 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12347 rs6000_compare_op1)
12348 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12349 rs6000_compare_op1);
12350 break;
12351
17caeff2
JM
12352 case TFmode:
12353 cmp = flag_unsafe_math_optimizations
12354 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12355 rs6000_compare_op1)
12356 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12357 rs6000_compare_op1);
12358 break;
12359
37409796
NS
12360 default:
12361 gcc_unreachable ();
12362 }
a3170dc6 12363 break;
4d4cbc0e 12364 default:
37409796 12365 gcc_unreachable ();
a3170dc6
AH
12366 }
12367
12368 /* Synthesize LE and GE from LT/GT || EQ. */
12369 if (code == LE || code == GE || code == LEU || code == GEU)
12370 {
a3170dc6
AH
12371 emit_insn (cmp);
12372
12373 switch (code)
12374 {
12375 case LE: code = LT; break;
12376 case GE: code = GT; break;
12377 case LEU: code = LT; break;
12378 case GEU: code = GT; break;
37409796 12379 default: gcc_unreachable ();
a3170dc6
AH
12380 }
12381
a3170dc6
AH
12382 compare_result2 = gen_reg_rtx (CCFPmode);
12383
12384 /* Do the EQ. */
37409796
NS
12385 switch (op_mode)
12386 {
12387 case SFmode:
12388 cmp = flag_unsafe_math_optimizations
12389 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12390 rs6000_compare_op1)
12391 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12392 rs6000_compare_op1);
12393 break;
12394
12395 case DFmode:
12396 cmp = flag_unsafe_math_optimizations
12397 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12398 rs6000_compare_op1)
12399 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12400 rs6000_compare_op1);
12401 break;
12402
17caeff2
JM
12403 case TFmode:
12404 cmp = flag_unsafe_math_optimizations
12405 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12406 rs6000_compare_op1)
12407 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12408 rs6000_compare_op1);
12409 break;
12410
37409796
NS
12411 default:
12412 gcc_unreachable ();
12413 }
a3170dc6
AH
12414 emit_insn (cmp);
12415
a3170dc6 12416 /* OR them together. */
64022b5d
AH
12417 or_result = gen_reg_rtx (CCFPmode);
12418 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12419 compare_result2);
a3170dc6
AH
12420 compare_result = or_result;
12421 code = EQ;
12422 }
12423 else
12424 {
a3170dc6 12425 if (code == NE || code == LTGT)
a3170dc6 12426 code = NE;
423c1189
AH
12427 else
12428 code = EQ;
a3170dc6
AH
12429 }
12430
12431 emit_insn (cmp);
12432 }
12433 else
de17c25f
DE
12434 {
12435 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12436 CLOBBERs to match cmptf_internal2 pattern. */
12437 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12438 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12439 && !TARGET_IEEEQUAD
de17c25f
DE
12440 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12441 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12442 gen_rtvec (9,
12443 gen_rtx_SET (VOIDmode,
12444 compare_result,
12445 gen_rtx_COMPARE (comp_mode,
12446 rs6000_compare_op0,
12447 rs6000_compare_op1)),
12448 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12449 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12450 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12451 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12452 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12453 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12454 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12455 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12456 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12457 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12458 {
12459 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12460 comp_mode = CCEQmode;
12461 compare_result = gen_reg_rtx (CCEQmode);
12462 if (TARGET_64BIT)
12463 emit_insn (gen_stack_protect_testdi (compare_result,
12464 rs6000_compare_op0, op1));
12465 else
12466 emit_insn (gen_stack_protect_testsi (compare_result,
12467 rs6000_compare_op0, op1));
12468 }
de17c25f
DE
12469 else
12470 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12471 gen_rtx_COMPARE (comp_mode,
12472 rs6000_compare_op0,
12473 rs6000_compare_op1)));
12474 }
f676971a 12475
ca5adc63 12476 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12477 under flag_finite_math_only we don't bother. */
39a10a29 12478 if (rs6000_compare_fp_p
e7108df9 12479 && !flag_finite_math_only
8ef65e3d 12480 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12481 && (code == LE || code == GE
12482 || code == UNEQ || code == LTGT
12483 || code == UNGT || code == UNLT))
12484 {
12485 enum rtx_code or1, or2;
12486 rtx or1_rtx, or2_rtx, compare2_rtx;
12487 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12488
39a10a29
GK
12489 switch (code)
12490 {
12491 case LE: or1 = LT; or2 = EQ; break;
12492 case GE: or1 = GT; or2 = EQ; break;
12493 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12494 case LTGT: or1 = LT; or2 = GT; break;
12495 case UNGT: or1 = UNORDERED; or2 = GT; break;
12496 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12497 default: gcc_unreachable ();
39a10a29
GK
12498 }
12499 validate_condition_mode (or1, comp_mode);
12500 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12501 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12502 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12503 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12504 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12505 const_true_rtx);
12506 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12507
12508 compare_result = or_result;
12509 code = EQ;
12510 }
12511
12512 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12513
1c563bed 12514 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12515}
12516
12517
12518/* Emit the RTL for an sCOND pattern. */
12519
12520void
a2369ed3 12521rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12522{
12523 rtx condition_rtx;
12524 enum machine_mode op_mode;
b7053a3f 12525 enum rtx_code cond_code;
39a10a29
GK
12526
12527 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12528 cond_code = GET_CODE (condition_rtx);
12529
8ef65e3d 12530 if (rs6000_compare_fp_p
423c1189
AH
12531 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12532 {
12533 rtx t;
12534
12535 PUT_MODE (condition_rtx, SImode);
12536 t = XEXP (condition_rtx, 0);
12537
37409796 12538 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12539
12540 if (cond_code == NE)
64022b5d 12541 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12542
64022b5d 12543 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12544 return;
12545 }
12546
b7053a3f
GK
12547 if (cond_code == NE
12548 || cond_code == GE || cond_code == LE
12549 || cond_code == GEU || cond_code == LEU
12550 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12551 {
12552 rtx not_result = gen_reg_rtx (CCEQmode);
12553 rtx not_op, rev_cond_rtx;
12554 enum machine_mode cc_mode;
f676971a 12555
b7053a3f
GK
12556 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12557
1c563bed 12558 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12559 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12560 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12561 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12562 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12563 }
39a10a29
GK
12564
12565 op_mode = GET_MODE (rs6000_compare_op0);
12566 if (op_mode == VOIDmode)
12567 op_mode = GET_MODE (rs6000_compare_op1);
12568
12569 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12570 {
12571 PUT_MODE (condition_rtx, DImode);
12572 convert_move (result, condition_rtx, 0);
12573 }
12574 else
12575 {
12576 PUT_MODE (condition_rtx, SImode);
12577 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12578 }
12579}
12580
39a10a29
GK
12581/* Emit a branch of kind CODE to location LOC. */
12582
12583void
a2369ed3 12584rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12585{
12586 rtx condition_rtx, loc_ref;
12587
12588 condition_rtx = rs6000_generate_compare (code);
12589 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12590 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12591 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12592 loc_ref, pc_rtx)));
12593}
12594
12a4e8c5
GK
12595/* Return the string to output a conditional branch to LABEL, which is
12596 the operand number of the label, or -1 if the branch is really a
f676971a 12597 conditional return.
12a4e8c5
GK
12598
12599 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12600 condition code register and its mode specifies what kind of
12601 comparison we made.
12602
a0ab749a 12603 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12604
12605 INSN is the insn. */
12606
12607char *
a2369ed3 12608output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12609{
12610 static char string[64];
12611 enum rtx_code code = GET_CODE (op);
12612 rtx cc_reg = XEXP (op, 0);
12613 enum machine_mode mode = GET_MODE (cc_reg);
12614 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12615 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12616 int really_reversed = reversed ^ need_longbranch;
12617 char *s = string;
12618 const char *ccode;
12619 const char *pred;
12620 rtx note;
12621
39a10a29
GK
12622 validate_condition_mode (code, mode);
12623
12624 /* Work out which way this really branches. We could use
12625 reverse_condition_maybe_unordered here always but this
12626 makes the resulting assembler clearer. */
12a4e8c5 12627 if (really_reversed)
de40e1df
DJ
12628 {
 12629 /* Reversal of FP compares needs care -- an ordered compare
12630 becomes an unordered compare and vice versa. */
12631 if (mode == CCFPmode)
12632 code = reverse_condition_maybe_unordered (code);
12633 else
12634 code = reverse_condition (code);
12635 }
12a4e8c5 12636
8ef65e3d 12637 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12638 {
12639 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12640 to the GT bit. */
37409796
NS
12641 switch (code)
12642 {
12643 case EQ:
12644 /* Opposite of GT. */
12645 code = GT;
12646 break;
12647
12648 case NE:
12649 code = UNLE;
12650 break;
12651
12652 default:
12653 gcc_unreachable ();
12654 }
a3170dc6
AH
12655 }
12656
39a10a29 12657 switch (code)
12a4e8c5
GK
12658 {
12659 /* Not all of these are actually distinct opcodes, but
12660 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12661 case NE: case LTGT:
12662 ccode = "ne"; break;
12663 case EQ: case UNEQ:
12664 ccode = "eq"; break;
f676971a 12665 case GE: case GEU:
50a0b056 12666 ccode = "ge"; break;
f676971a 12667 case GT: case GTU: case UNGT:
50a0b056 12668 ccode = "gt"; break;
f676971a 12669 case LE: case LEU:
50a0b056 12670 ccode = "le"; break;
f676971a 12671 case LT: case LTU: case UNLT:
50a0b056 12672 ccode = "lt"; break;
12a4e8c5
GK
12673 case UNORDERED: ccode = "un"; break;
12674 case ORDERED: ccode = "nu"; break;
12675 case UNGE: ccode = "nl"; break;
12676 case UNLE: ccode = "ng"; break;
12677 default:
37409796 12678 gcc_unreachable ();
12a4e8c5 12679 }
f676971a
EC
12680
12681 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12682 The old mnemonics don't have a way to specify this information. */
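  /* A trailing "+" on the mnemonic hints the branch taken and "-" hints
     it not taken; the (prob > 0) ^ need_longbranch test below flips the
     hint when a long-branch sequence is used, since the conditional
     branch is then inverted to jump around an unconditional branch.  */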
f4857b9b 12683 pred = "";
12a4e8c5
GK
12684 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12685 if (note != NULL_RTX)
12686 {
12687 /* PROB is the difference from 50%. */
12688 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12689
12690 /* Only hint for highly probable/improbable branches on newer
12691 cpus as static prediction overrides processor dynamic
12692 prediction. For older cpus we may as well always hint, but
12693 assume not taken for branches that are very close to 50% as a
12694 mispredicted taken branch is more expensive than a
f676971a 12695 mispredicted not-taken branch. */
ec507f2d 12696 if (rs6000_always_hint
2c9e13f3
JH
12697 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12698 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12699 {
12700 if (abs (prob) > REG_BR_PROB_BASE / 20
12701 && ((prob > 0) ^ need_longbranch))
c4ad648e 12702 pred = "+";
f4857b9b
AM
12703 else
12704 pred = "-";
12705 }
12a4e8c5 12706 }
12a4e8c5
GK
12707
12708 if (label == NULL)
94a54f47 12709 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12710 else
94a54f47 12711 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12712
37c67319 12713 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12714 Assume they'd only be the first character.... */
37c67319
GK
12715 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12716 *s++ = '%';
94a54f47 12717 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12718
12719 if (label != NULL)
12720 {
12721 /* If the branch distance was too far, we may have to use an
12722 unconditional branch to go the distance. */
12723 if (need_longbranch)
44518ddd 12724 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12725 else
12726 s += sprintf (s, ",%s", label);
12727 }
12728
12729 return string;
12730}
50a0b056 12731
64022b5d 12732/* Return the string to flip the GT bit on a CR. */
423c1189 12733char *
64022b5d 12734output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12735{
12736 static char string[64];
12737 int a, b;
12738
37409796
NS
12739 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12740 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12741
64022b5d
AH
12742 /* GT bit. */
12743 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12744 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12745
12746 sprintf (string, "crnot %d,%d", a, b);
12747 return string;
12748}
12749
21213b4c
DP
 12750/* Return the insn index of the vector compare instruction for the given CODE,
 12751 DEST_MODE and OP_MODE. Return INSN_NOT_AVAILABLE if no suitable insn is
 12752 available. */
12753
12754static int
94ff898d 12755get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12756 enum machine_mode dest_mode,
12757 enum machine_mode op_mode)
12758{
12759 if (!TARGET_ALTIVEC)
12760 return INSN_NOT_AVAILABLE;
12761
12762 switch (code)
12763 {
12764 case EQ:
12765 if (dest_mode == V16QImode && op_mode == V16QImode)
12766 return UNSPEC_VCMPEQUB;
12767 if (dest_mode == V8HImode && op_mode == V8HImode)
12768 return UNSPEC_VCMPEQUH;
12769 if (dest_mode == V4SImode && op_mode == V4SImode)
12770 return UNSPEC_VCMPEQUW;
12771 if (dest_mode == V4SImode && op_mode == V4SFmode)
12772 return UNSPEC_VCMPEQFP;
12773 break;
12774 case GE:
12775 if (dest_mode == V4SImode && op_mode == V4SFmode)
12776 return UNSPEC_VCMPGEFP;
12777 case GT:
12778 if (dest_mode == V16QImode && op_mode == V16QImode)
12779 return UNSPEC_VCMPGTSB;
12780 if (dest_mode == V8HImode && op_mode == V8HImode)
12781 return UNSPEC_VCMPGTSH;
12782 if (dest_mode == V4SImode && op_mode == V4SImode)
12783 return UNSPEC_VCMPGTSW;
12784 if (dest_mode == V4SImode && op_mode == V4SFmode)
12785 return UNSPEC_VCMPGTFP;
12786 break;
12787 case GTU:
12788 if (dest_mode == V16QImode && op_mode == V16QImode)
12789 return UNSPEC_VCMPGTUB;
12790 if (dest_mode == V8HImode && op_mode == V8HImode)
12791 return UNSPEC_VCMPGTUH;
12792 if (dest_mode == V4SImode && op_mode == V4SImode)
12793 return UNSPEC_VCMPGTUW;
12794 break;
12795 default:
12796 break;
12797 }
12798 return INSN_NOT_AVAILABLE;
12799}
12800
12801/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12802 DMODE is expected destination mode. This is a recursive function. */
12803
12804static rtx
12805rs6000_emit_vector_compare (enum rtx_code rcode,
12806 rtx op0, rtx op1,
12807 enum machine_mode dmode)
12808{
12809 int vec_cmp_insn;
12810 rtx mask;
12811 enum machine_mode dest_mode;
12812 enum machine_mode op_mode = GET_MODE (op1);
12813
37409796
NS
12814 gcc_assert (TARGET_ALTIVEC);
12815 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12816
 12817 /* Floating-point vector compare instructions use a V4SImode destination.
 12818 The result is moved to the appropriate mode later. */
12819 if (dmode == V4SFmode)
12820 dest_mode = V4SImode;
12821 else
12822 dest_mode = dmode;
12823
12824 mask = gen_reg_rtx (dest_mode);
12825 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12826
12827 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12828 {
12829 bool swap_operands = false;
12830 bool try_again = false;
12831 switch (rcode)
12832 {
12833 case LT:
12834 rcode = GT;
12835 swap_operands = true;
12836 try_again = true;
12837 break;
12838 case LTU:
12839 rcode = GTU;
12840 swap_operands = true;
12841 try_again = true;
12842 break;
12843 case NE:
370df7db
JC
12844 case UNLE:
12845 case UNLT:
12846 case UNGE:
12847 case UNGT:
12848 /* Invert condition and try again.
12849 e.g., A != B becomes ~(A==B). */
21213b4c 12850 {
370df7db 12851 enum rtx_code rev_code;
21213b4c 12852 enum insn_code nor_code;
d1123cde 12853 rtx eq_rtx;
370df7db
JC
12854
12855 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12856 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12857 dest_mode);
94ff898d 12858
166cdb08 12859 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12860 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12861 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12862
12863 if (dmode != dest_mode)
12864 {
12865 rtx temp = gen_reg_rtx (dest_mode);
12866 convert_move (temp, mask, 0);
12867 return temp;
12868 }
12869 return mask;
12870 }
12871 break;
12872 case GE:
12873 case GEU:
12874 case LE:
12875 case LEU:
12876 /* Try GT/GTU/LT/LTU OR EQ */
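	    /* e.g. a >= b is computed as (a > b) | (a == b), using two
	       vector compares and a vector OR.  */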
12877 {
12878 rtx c_rtx, eq_rtx;
12879 enum insn_code ior_code;
12880 enum rtx_code new_code;
12881
37409796
NS
12882 switch (rcode)
12883 {
12884 case GE:
12885 new_code = GT;
12886 break;
12887
12888 case GEU:
12889 new_code = GTU;
12890 break;
12891
12892 case LE:
12893 new_code = LT;
12894 break;
12895
12896 case LEU:
12897 new_code = LTU;
12898 break;
12899
12900 default:
12901 gcc_unreachable ();
12902 }
21213b4c
DP
12903
12904 c_rtx = rs6000_emit_vector_compare (new_code,
12905 op0, op1, dest_mode);
12906 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12907 dest_mode);
12908
166cdb08 12909 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12910 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12911 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12912 if (dmode != dest_mode)
12913 {
12914 rtx temp = gen_reg_rtx (dest_mode);
12915 convert_move (temp, mask, 0);
12916 return temp;
12917 }
12918 return mask;
12919 }
12920 break;
12921 default:
37409796 12922 gcc_unreachable ();
21213b4c
DP
12923 }
12924
12925 if (try_again)
12926 {
12927 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12928 /* You only get two chances. */
12929 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12930 }
12931
12932 if (swap_operands)
12933 {
12934 rtx tmp;
12935 tmp = op0;
12936 op0 = op1;
12937 op1 = tmp;
12938 }
12939 }
12940
915167f5
GK
12941 emit_insn (gen_rtx_SET (VOIDmode, mask,
12942 gen_rtx_UNSPEC (dest_mode,
12943 gen_rtvec (2, op0, op1),
12944 vec_cmp_insn)));
21213b4c
DP
12945 if (dmode != dest_mode)
12946 {
12947 rtx temp = gen_reg_rtx (dest_mode);
12948 convert_move (temp, mask, 0);
12949 return temp;
12950 }
12951 return mask;
12952}
12953
 12954/* Return the vector select instruction for MODE. Return INSN_NOT_AVAILABLE
 12955 if no valid insn exists for the given mode. */
12956
12957static int
12958get_vsel_insn (enum machine_mode mode)
12959{
12960 switch (mode)
12961 {
12962 case V4SImode:
12963 return UNSPEC_VSEL4SI;
12964 break;
12965 case V4SFmode:
12966 return UNSPEC_VSEL4SF;
12967 break;
12968 case V8HImode:
12969 return UNSPEC_VSEL8HI;
12970 break;
12971 case V16QImode:
12972 return UNSPEC_VSEL16QI;
12973 break;
12974 default:
12975 return INSN_NOT_AVAILABLE;
12976 break;
12977 }
12978 return INSN_NOT_AVAILABLE;
12979}
12980
12981/* Emit vector select insn where DEST is destination using
12982 operands OP1, OP2 and MASK. */
12983
12984static void
12985rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12986{
12987 rtx t, temp;
12988 enum machine_mode dest_mode = GET_MODE (dest);
12989 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12990
12991 temp = gen_reg_rtx (dest_mode);
94ff898d 12992
bb8df8a6 12993 /* For each vector element, select op1 when mask is 1, otherwise
19f1ebc7 12994 select op2. */
915167f5
GK
12995 t = gen_rtx_SET (VOIDmode, temp,
12996 gen_rtx_UNSPEC (dest_mode,
12997 gen_rtvec (3, op2, op1, mask),
12998 vsel_insn_index));
21213b4c
DP
12999 emit_insn (t);
13000 emit_move_insn (dest, temp);
13001 return;
13002}
13003
94ff898d 13004/* Emit vector conditional expression.
21213b4c
DP
13005 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13006 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13007
13008int
13009rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13010 rtx cond, rtx cc_op0, rtx cc_op1)
13011{
13012 enum machine_mode dest_mode = GET_MODE (dest);
13013 enum rtx_code rcode = GET_CODE (cond);
13014 rtx mask;
13015
13016 if (!TARGET_ALTIVEC)
13017 return 0;
13018
13019 /* Get the vector mask for the given relational operations. */
13020 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13021
13022 rs6000_emit_vector_select (dest, op1, op2, mask);
13023
13024 return 1;
13025}
13026
50a0b056
GK
13027/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13028 operands of the last comparison is nonzero/true, FALSE_COND if it
13029 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13030
50a0b056 13031int
a2369ed3 13032rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13033{
13034 enum rtx_code code = GET_CODE (op);
13035 rtx op0 = rs6000_compare_op0;
13036 rtx op1 = rs6000_compare_op1;
13037 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13038 enum machine_mode compare_mode = GET_MODE (op0);
13039 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13040 rtx temp;
add2402e 13041 bool is_against_zero;
50a0b056 13042
a3c9585f 13043 /* These modes should always match. */
a3170dc6
AH
13044 if (GET_MODE (op1) != compare_mode
13045 /* In the isel case however, we can use a compare immediate, so
13046 op1 may be a small constant. */
13047 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13048 return 0;
178c3eff 13049 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13050 return 0;
178c3eff 13051 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13052 return 0;
13053
50a0b056 13054 /* First, work out if the hardware can do this at all, or
a3c9585f 13055 if it's too slow.... */
50a0b056 13056 if (! rs6000_compare_fp_p)
a3170dc6
AH
13057 {
13058 if (TARGET_ISEL)
13059 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13060 return 0;
13061 }
8ef65e3d 13062 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13063 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13064 return 0;
50a0b056 13065
add2402e 13066 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13067
add2402e
GK
13068 /* A floating-point subtract might overflow, underflow, or produce
13069 an inexact result, thus changing the floating-point flags, so it
13070 can't be generated if we care about that. It's safe if one side
13071 of the construct is zero, since then no subtract will be
13072 generated. */
ebb109ad 13073 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13074 && flag_trapping_math && ! is_against_zero)
13075 return 0;
13076
50a0b056
GK
13077 /* Eliminate half of the comparisons by switching operands, this
13078 makes the remaining code simpler. */
13079 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13080 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13081 {
13082 code = reverse_condition_maybe_unordered (code);
13083 temp = true_cond;
13084 true_cond = false_cond;
13085 false_cond = temp;
13086 }
13087
13088 /* UNEQ and LTGT take four instructions for a comparison with zero,
13089 it'll probably be faster to use a branch here too. */
bc9ec0e0 13090 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13091 return 0;
f676971a 13092
50a0b056
GK
13093 if (GET_CODE (op1) == CONST_DOUBLE)
13094 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13095
b6d08ca1 13096 /* We're going to try to implement comparisons by performing
50a0b056
GK
13097 a subtract, then comparing against zero. Unfortunately,
13098 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13099 know that the operand is finite and the comparison
50a0b056 13100 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13101 if (HONOR_INFINITIES (compare_mode)
50a0b056 13102 && code != GT && code != UNGE
045572c7 13103 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13104 /* Constructs of the form (a OP b ? a : b) are safe. */
13105 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13106 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13107 && ! rtx_equal_p (op1, true_cond))))
13108 return 0;
add2402e 13109
50a0b056
GK
13110 /* At this point we know we can use fsel. */
13111
13112 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13113 if (! is_against_zero)
13114 {
13115 temp = gen_reg_rtx (compare_mode);
13116 emit_insn (gen_rtx_SET (VOIDmode, temp,
13117 gen_rtx_MINUS (compare_mode, op0, op1)));
13118 op0 = temp;
13119 op1 = CONST0_RTX (compare_mode);
13120 }
50a0b056
GK
13121
13122 /* If we don't care about NaNs we can reduce some of the comparisons
13123 down to faster ones. */
bc9ec0e0 13124 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13125 switch (code)
13126 {
13127 case GT:
13128 code = LE;
13129 temp = true_cond;
13130 true_cond = false_cond;
13131 false_cond = temp;
13132 break;
13133 case UNGE:
13134 code = GE;
13135 break;
13136 case UNEQ:
13137 code = EQ;
13138 break;
13139 default:
13140 break;
13141 }
13142
13143 /* Now, reduce everything down to a GE. */
13144 switch (code)
13145 {
13146 case GE:
13147 break;
13148
13149 case LE:
3148ad6d
DJ
13150 temp = gen_reg_rtx (compare_mode);
13151 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13152 op0 = temp;
13153 break;
13154
13155 case ORDERED:
3148ad6d
DJ
13156 temp = gen_reg_rtx (compare_mode);
13157 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13158 op0 = temp;
13159 break;
13160
13161 case EQ:
3148ad6d 13162 temp = gen_reg_rtx (compare_mode);
f676971a 13163 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13164 gen_rtx_NEG (compare_mode,
13165 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13166 op0 = temp;
13167 break;
13168
13169 case UNGE:
bc9ec0e0 13170 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13171 temp = gen_reg_rtx (result_mode);
50a0b056 13172 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13173 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13174 gen_rtx_GE (VOIDmode,
13175 op0, op1),
13176 true_cond, false_cond)));
bc9ec0e0
GK
13177 false_cond = true_cond;
13178 true_cond = temp;
50a0b056 13179
3148ad6d
DJ
13180 temp = gen_reg_rtx (compare_mode);
13181 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13182 op0 = temp;
13183 break;
13184
13185 case GT:
bc9ec0e0 13186 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13187 temp = gen_reg_rtx (result_mode);
50a0b056 13188 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13189 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13190 gen_rtx_GE (VOIDmode,
13191 op0, op1),
13192 true_cond, false_cond)));
bc9ec0e0
GK
13193 true_cond = false_cond;
13194 false_cond = temp;
50a0b056 13195
3148ad6d
DJ
13196 temp = gen_reg_rtx (compare_mode);
13197 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13198 op0 = temp;
13199 break;
13200
13201 default:
37409796 13202 gcc_unreachable ();
50a0b056
GK
13203 }
13204
13205 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13206 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13207 gen_rtx_GE (VOIDmode,
13208 op0, op1),
13209 true_cond, false_cond)));
13210 return 1;
13211}
13212
a3170dc6
AH
13213/* Same as above, but for ints (isel). */
13214
13215static int
a2369ed3 13216rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13217{
13218 rtx condition_rtx, cr;
13219
 13220 /* All isel implementations thus far are 32-bit. */
13221 if (GET_MODE (rs6000_compare_op0) != SImode)
13222 return 0;
13223
13224 /* We still have to do the compare, because isel doesn't do a
13225 compare, it just looks at the CRx bits set by a previous compare
13226 instruction. */
13227 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13228 cr = XEXP (condition_rtx, 0);
13229
13230 if (GET_MODE (cr) == CCmode)
13231 emit_insn (gen_isel_signed (dest, condition_rtx,
13232 true_cond, false_cond, cr));
13233 else
13234 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13235 true_cond, false_cond, cr));
13236
13237 return 1;
13238}
13239
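/* Output the isel instruction for OPERANDS.  isel RT,RA,RB,BC copies RA
   to RT when CR bit BC is set and RB otherwise; conditions that have no
   single CR bit (GE, GEU, LE, LEU, NE) are printed using the opposite
   bit with the two source operands swapped.  */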
13240const char *
a2369ed3 13241output_isel (rtx *operands)
a3170dc6
AH
13242{
13243 enum rtx_code code;
13244
13245 code = GET_CODE (operands[1]);
13246 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13247 {
13248 PUT_CODE (operands[1], reverse_condition (code));
13249 return "isel %0,%3,%2,%j1";
13250 }
13251 else
13252 return "isel %0,%2,%3,%j1";
13253}
13254
50a0b056 13255void
a2369ed3 13256rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13257{
13258 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13259 enum rtx_code c;
50a0b056 13260 rtx target;
5dc8d536
AH
13261
13262 if (code == SMAX || code == SMIN)
13263 c = GE;
13264 else
13265 c = GEU;
13266
50a0b056 13267 if (code == SMAX || code == UMAX)
f676971a 13268 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13269 op0, op1, mode, 0);
13270 else
f676971a 13271 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13272 op1, op0, mode, 0);
37409796 13273 gcc_assert (target);
50a0b056
GK
13274 if (target != dest)
13275 emit_move_insn (dest, target);
13276}
46c07df8 13277
915167f5
GK
13278/* Emit instructions to perform a load-reserved/store-conditional operation.
13279 The operation performed is an atomic
13280 (set M (CODE:MODE M OP))
13281 If not NULL, BEFORE is atomically set to M before the operation, and
13282 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13283 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13284 Either OP or M may be wrapped in a NOT operation. */
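/* Note: the lwarx/stwcx. retry loop itself comes from the insn patterns
   that match the UNSPEC_SYNC_OP parallel constructed below; this function
   only builds that RTL plus the subword adjustments around it.  */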
13285
13286void
13287rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13288 rtx m, rtx op, rtx before_param, rtx after_param,
13289 bool sync_p)
13290{
13291 enum machine_mode used_mode;
13292 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13293 rtx used_m;
13294 rtvec vec;
13295 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13296 rtx shift = NULL_RTX;
bb8df8a6 13297
915167f5
GK
13298 if (sync_p)
13299 emit_insn (gen_memory_barrier ());
bb8df8a6 13300
915167f5
GK
13301 if (GET_CODE (m) == NOT)
13302 used_m = XEXP (m, 0);
13303 else
13304 used_m = m;
13305
13306 /* If this is smaller than SImode, we'll have to use SImode with
13307 adjustments. */
13308 if (mode == QImode || mode == HImode)
13309 {
13310 rtx newop, oldop;
13311
13312 if (MEM_ALIGN (used_m) >= 32)
13313 {
13314 int ishift = 0;
13315 if (BYTES_BIG_ENDIAN)
13316 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13317
915167f5 13318 shift = GEN_INT (ishift);
c75c6d11 13319 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13320 }
13321 else
13322 {
13323 rtx addrSI, aligned_addr;
a9c9d3fa 13324 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13325
c75c6d11
JJ
13326 addrSI = gen_lowpart_common (SImode,
13327 force_reg (Pmode, XEXP (used_m, 0)));
13328 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13329 shift = gen_reg_rtx (SImode);
13330
13331 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13332 GEN_INT (shift_mask)));
13333 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13334
13335 aligned_addr = expand_binop (Pmode, and_optab,
13336 XEXP (used_m, 0),
13337 GEN_INT (-4), NULL_RTX,
13338 1, OPTAB_LIB_WIDEN);
13339 used_m = change_address (used_m, SImode, aligned_addr);
13340 set_mem_align (used_m, 32);
915167f5 13341 }
c75c6d11
JJ
13342 /* It's safe to keep the old alias set of USED_M, because
13343 the operation is atomic and only affects the original
13344 USED_M. */
13345 if (GET_CODE (m) == NOT)
13346 m = gen_rtx_NOT (SImode, used_m);
13347 else
13348 m = used_m;
915167f5
GK
13349
13350 if (GET_CODE (op) == NOT)
13351 {
13352 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13353 oldop = gen_rtx_NOT (SImode, oldop);
13354 }
13355 else
13356 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13357
915167f5
GK
13358 switch (code)
13359 {
13360 case IOR:
13361 case XOR:
13362 newop = expand_binop (SImode, and_optab,
13363 oldop, GEN_INT (imask), NULL_RTX,
13364 1, OPTAB_LIB_WIDEN);
13365 emit_insn (gen_ashlsi3 (newop, newop, shift));
13366 break;
13367
13368 case AND:
13369 newop = expand_binop (SImode, ior_optab,
13370 oldop, GEN_INT (~imask), NULL_RTX,
13371 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13372 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13373 break;
13374
13375 case PLUS:
9f0076e5 13376 case MINUS:
915167f5
GK
13377 {
13378 rtx mask;
bb8df8a6 13379
915167f5
GK
13380 newop = expand_binop (SImode, and_optab,
13381 oldop, GEN_INT (imask), NULL_RTX,
13382 1, OPTAB_LIB_WIDEN);
13383 emit_insn (gen_ashlsi3 (newop, newop, shift));
13384
13385 mask = gen_reg_rtx (SImode);
13386 emit_move_insn (mask, GEN_INT (imask));
13387 emit_insn (gen_ashlsi3 (mask, mask, shift));
13388
9f0076e5
DE
13389 if (code == PLUS)
13390 newop = gen_rtx_PLUS (SImode, m, newop);
13391 else
13392 newop = gen_rtx_MINUS (SImode, m, newop);
13393 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13394 newop = gen_rtx_IOR (SImode, newop,
13395 gen_rtx_AND (SImode,
13396 gen_rtx_NOT (SImode, mask),
13397 m));
13398 break;
13399 }
13400
13401 default:
13402 gcc_unreachable ();
13403 }
13404
a9c9d3fa
GK
13405 if (GET_CODE (m) == NOT)
13406 {
13407 rtx mask, xorm;
13408
13409 mask = gen_reg_rtx (SImode);
13410 emit_move_insn (mask, GEN_INT (imask));
13411 emit_insn (gen_ashlsi3 (mask, mask, shift));
13412
13413 xorm = gen_rtx_XOR (SImode, used_m, mask);
13414 /* Depending on the value of 'op', the XOR or the operation may
13415 be simplified away. */
13416 newop = simplify_gen_binary (code, SImode, xorm, newop);
13417 }
915167f5
GK
13418 op = newop;
13419 used_mode = SImode;
13420 before = gen_reg_rtx (used_mode);
13421 after = gen_reg_rtx (used_mode);
13422 }
13423 else
13424 {
13425 used_mode = mode;
13426 before = before_param;
13427 after = after_param;
13428
13429 if (before == NULL_RTX)
13430 before = gen_reg_rtx (used_mode);
13431 if (after == NULL_RTX)
13432 after = gen_reg_rtx (used_mode);
13433 }
bb8df8a6 13434
9f0076e5
DE
13435 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13436 && used_mode != mode)
915167f5
GK
13437 the_op = op; /* Computed above. */
13438 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13439 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13440 else
13441 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13442
13443 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13444 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13445 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13446 gen_rtx_UNSPEC (used_mode,
13447 gen_rtvec (1, the_op),
13448 UNSPEC_SYNC_OP));
915167f5
GK
13449 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13450
9f0076e5 13451 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13452 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13453 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13454 else
13455 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13456 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13457
13458 /* Shift and mask the return values properly. */
13459 if (used_mode != mode && before_param)
13460 {
13461 emit_insn (gen_lshrsi3 (before, before, shift));
13462 convert_move (before_param, before, 1);
13463 }
13464
13465 if (used_mode != mode && after_param)
13466 {
13467 emit_insn (gen_lshrsi3 (after, after, shift));
13468 convert_move (after_param, after, 1);
13469 }
13470
13471 /* The previous sequence will end with a branch that's dependent on
13472 the conditional store, so placing an isync will ensure that no
13473 other instructions (especially, no load or store instructions)
13474 can start before the atomic operation completes. */
13475 if (sync_p)
13476 emit_insn (gen_isync ());
13477}
13478
b52110d4
DE
13479/* A subroutine of the atomic operation splitters. Jump to LABEL if
13480 COND is true. Mark the jump as unlikely to be taken. */
13481
13482static void
13483emit_unlikely_jump (rtx cond, rtx label)
13484{
13485 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13486 rtx x;
13487
13488 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13489 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13490 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13491}
13492
13493/* A subroutine of the atomic operation splitters. Emit a load-locked
13494 instruction in MODE. */
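/* On rs6000 these named patterns are assumed to expand to the PowerPC
   load-reserved instructions, i.e. lwarx for SImode and ldarx for DImode.  */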
13495
13496static void
13497emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13498{
13499 rtx (*fn) (rtx, rtx) = NULL;
13500 if (mode == SImode)
13501 fn = gen_load_locked_si;
13502 else if (mode == DImode)
13503 fn = gen_load_locked_di;
13504 emit_insn (fn (reg, mem));
13505}
13506
13507/* A subroutine of the atomic operation splitters. Emit a store-conditional
13508 instruction in MODE. */
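/* On rs6000 these named patterns are assumed to expand to the PowerPC
   store-conditional instructions, i.e. stwcx. for SImode and stdcx. for
   DImode.  */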
13509
13510static void
13511emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13512{
13513 rtx (*fn) (rtx, rtx, rtx) = NULL;
13514 if (mode == SImode)
13515 fn = gen_store_conditional_si;
13516 else if (mode == DImode)
13517 fn = gen_store_conditional_di;
13518
9f0076e5 13519 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13520 if (PPC405_ERRATUM77)
13521 emit_insn (gen_memory_barrier ());
13522
13523 emit_insn (fn (res, mem, val));
13524}
13525
ea2c620c 13526/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13527 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13528 operand of the binary operator. BEFORE and AFTER are optional locations to
13529 return the value of MEM either before or after the operation. SCRATCH is
13530 a scratch register. */
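/* Rough sketch of the sequence this splitter emits (illustrative only; the
   actual insns come from the memory_barrier, load_locked_* and
   store_conditional_* patterns):

	sync
     1:	lwarx/ldarx	BEFORE,0,MEM	(BEFORE defaults to SCRATCH)
	<CODE>		SCRATCH,BEFORE,VAL
	stwcx./stdcx.	SCRATCH,0,MEM
	bne-		1b
	isync

   AFTER, when supplied, also receives the computed result.  */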
13531
13532void
13533rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13534 rtx before, rtx after, rtx scratch)
13535{
13536 enum machine_mode mode = GET_MODE (mem);
13537 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13538
13539 emit_insn (gen_memory_barrier ());
13540
13541 label = gen_label_rtx ();
13542 emit_label (label);
13543 label = gen_rtx_LABEL_REF (VOIDmode, label);
13544
13545 if (before == NULL_RTX)
13546 before = scratch;
13547 emit_load_locked (mode, before, mem);
13548
13549 if (code == NOT)
13550 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13551 else if (code == AND)
13552 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13553 else
13554 x = gen_rtx_fmt_ee (code, mode, before, val);
13555
13556 if (after != NULL_RTX)
13557 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13558 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13559
13560 emit_store_conditional (mode, cond, mem, scratch);
13561
13562 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13563 emit_unlikely_jump (x, label);
13564
13565 emit_insn (gen_isync ());
13566}
13567
b52110d4
DE
13568/* Expand an atomic compare and swap operation. MEM is the memory on which
13569 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13570 value to be stored. SCRATCH is a scratch GPR. */
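/* Rough sketch of the emitted sequence (illustrative only):

	sync
     1:	lwarx/ldarx	RETVAL,0,MEM
	cmp		RETVAL,OLDVAL
	bne-		2f
	stwcx./stdcx.	NEWVAL,0,MEM	(via SCRATCH)
	bne-		1b
	isync
     2:							*/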
13571
13572void
13573rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13574 rtx scratch)
13575{
13576 enum machine_mode mode = GET_MODE (mem);
13577 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13578
13579 emit_insn (gen_memory_barrier ());
13580
13581 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13582 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13583 emit_label (XEXP (label1, 0));
13584
13585 emit_load_locked (mode, retval, mem);
13586
13587 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13588 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13589
13590 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13591 emit_unlikely_jump (x, label2);
13592
13593 emit_move_insn (scratch, newval);
13594 emit_store_conditional (mode, cond, mem, scratch);
13595
13596 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13597 emit_unlikely_jump (x, label1);
13598
13599 emit_insn (gen_isync ());
13600 emit_label (XEXP (label2, 0));
13601}
13602
13603/* Expand an atomic test and set operation. MEM is the memory on which
13604 to operate. VAL is the new value to be stored. SCRATCH is a scratch GPR. */
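/* This is the same load-reserved/store-conditional retry loop as above,
   except that VAL is stored unconditionally; RETVAL receives the previous
   contents of MEM.  (Presumably this backs the __sync_lock_test_and_set
   style builtins, judging from the function name.)  */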
13605
13606void
13607rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13608{
13609 enum machine_mode mode = GET_MODE (mem);
13610 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13611
13612 emit_insn (gen_memory_barrier ());
13613
13614 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13615 emit_label (XEXP (label, 0));
13616
13617 emit_load_locked (mode, retval, mem);
13618 emit_move_insn (scratch, val);
13619 emit_store_conditional (mode, cond, mem, scratch);
13620
13621 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13622 emit_unlikely_jump (x, label);
13623
13624 emit_insn (gen_isync ());
13625}
13626
9fc75b97
DE
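/* Expand a compare-and-swap on a QImode or HImode MEM.  OLDVAL and NEWVAL
   are masked and shifted into their position within the containing aligned
   SImode word, and the word-sized pattern emitted at the end performs the
   actual atomic loop; DST is set from the low part of the word-sized
   result.  */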
13627void
13628rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13629{
13630 enum machine_mode mode = GET_MODE (mem);
13631 rtx addrSI, align, wdst, shift, mask;
13632 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13633 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13634
13635 /* Shift amount for subword relative to aligned word. */
13636 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13637 shift = gen_reg_rtx (SImode);
13638 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13639 GEN_INT (shift_mask)));
13640 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13641
13642 /* Shift and mask old value into position within word. */
13643 oldval = convert_modes (SImode, mode, oldval, 1);
13644 oldval = expand_binop (SImode, and_optab,
13645 oldval, GEN_INT (imask), NULL_RTX,
13646 1, OPTAB_LIB_WIDEN);
13647 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13648
13649 /* Shift and mask new value into position within word. */
13650 newval = convert_modes (SImode, mode, newval, 1);
13651 newval = expand_binop (SImode, and_optab,
13652 newval, GEN_INT (imask), NULL_RTX,
13653 1, OPTAB_LIB_WIDEN);
13654 emit_insn (gen_ashlsi3 (newval, newval, shift));
13655
13656 /* Mask for insertion. */
13657 mask = gen_reg_rtx (SImode);
13658 emit_move_insn (mask, GEN_INT (imask));
13659 emit_insn (gen_ashlsi3 (mask, mask, shift));
13660
13661 /* Address of aligned word containing subword. */
13662 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13663 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13664 mem = change_address (mem, SImode, align);
13665 set_mem_align (mem, 32);
13666 MEM_VOLATILE_P (mem) = 1;
13667
13668 wdst = gen_reg_rtx (SImode);
13669 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13670 oldval, newval, mem));
13671
13672 emit_move_insn (dst, gen_lowpart (mode, wdst));
13673}
13674
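/* Split the subword compare-and-swap emitted by the expander above.  MASK
   selects the subword within the SImode word at MEM; OLDVAL and NEWVAL are
   already shifted into position.  DEST receives the masked old contents of
   the word and SCRATCH is a word-sized temporary used for the
   store-conditional.  */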
13675void
13676rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13677 rtx oldval, rtx newval, rtx mem,
13678 rtx scratch)
13679{
13680 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13681
13682 emit_insn (gen_memory_barrier ());
13683 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13684 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13685 emit_label (XEXP (label1, 0));
13686
13687 emit_load_locked (SImode, scratch, mem);
13688
13689 /* Mask subword within loaded value for comparison with oldval.
13690 Use UNSPEC_AND to avoid a clobber. */
13691 emit_insn (gen_rtx_SET (SImode, dest,
13692 gen_rtx_UNSPEC (SImode,
13693 gen_rtvec (2, scratch, mask),
13694 UNSPEC_AND)));
13695
13696 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13697 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13698
13699 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13700 emit_unlikely_jump (x, label2);
13701
13702 /* Clear subword within loaded value for insertion of new value. */
13703 emit_insn (gen_rtx_SET (SImode, scratch,
13704 gen_rtx_AND (SImode,
13705 gen_rtx_NOT (SImode, mask), scratch)));
13706 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13707 emit_store_conditional (SImode, cond, mem, scratch);
13708
13709 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13710 emit_unlikely_jump (x, label1);
13711
13712 emit_insn (gen_isync ());
13713 emit_label (XEXP (label2, 0));
13714}
13715
13716
b52110d4 13717 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13718 multi-register moves. It will emit at most one instruction for
13719 each register that is accessed; that is, it won't emit li/lis pairs
13720 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13721 register. */
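/* For example (illustration only): on a 32-bit target a DImode move
   between general registers becomes two SImode subreg moves; when the
   source and destination ranges overlap destructively, the subwords are
   moved starting from the highest-numbered register.  */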
46c07df8 13722
46c07df8 13723void
a9baceb1 13724rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13725{
a9baceb1
GK
13726 /* The register number of the first register being moved. */
13727 int reg;
13728 /* The mode that is to be moved. */
13729 enum machine_mode mode;
13730 /* The mode that the move is being done in, and its size. */
13731 enum machine_mode reg_mode;
13732 int reg_mode_size;
13733 /* The number of registers that will be moved. */
13734 int nregs;
13735
13736 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13737 mode = GET_MODE (dst);
c8b622ff 13738 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13739 if (FP_REGNO_P (reg))
7393f7f8 13740 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13741 else if (ALTIVEC_REGNO_P (reg))
13742 reg_mode = V16QImode;
4d4447b5
PB
13743 else if (TARGET_E500_DOUBLE && (mode == TFmode || mode == TDmode))
13744 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13745 else
13746 reg_mode = word_mode;
13747 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13748
37409796 13749 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13750
a9baceb1
GK
13751 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13752 {
13753 /* Move register range backwards, if we might have destructive
13754 overlap. */
13755 int i;
13756 for (i = nregs - 1; i >= 0; i--)
f676971a 13757 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13758 simplify_gen_subreg (reg_mode, dst, mode,
13759 i * reg_mode_size),
13760 simplify_gen_subreg (reg_mode, src, mode,
13761 i * reg_mode_size)));
13762 }
46c07df8
HP
13763 else
13764 {
a9baceb1
GK
13765 int i;
13766 int j = -1;
13767 bool used_update = false;
46c07df8 13768
c1e55850 13769 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13770 {
13771 rtx breg;
3a1f863f 13772
a9baceb1
GK
13773 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13774 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13775 {
13776 rtx delta_rtx;
a9baceb1 13777 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13778 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13779 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13780 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13781 emit_insn (TARGET_32BIT
13782 ? gen_addsi3 (breg, breg, delta_rtx)
13783 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13784 src = replace_equiv_address (src, breg);
3a1f863f 13785 }
d04b6e6e 13786 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13787 {
13e2e16e 13788 rtx basereg;
c1e55850
GK
13789 basereg = gen_rtx_REG (Pmode, reg);
13790 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13791 src = replace_equiv_address (src, basereg);
c1e55850 13792 }
3a1f863f 13793
0423421f
AM
13794 breg = XEXP (src, 0);
13795 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13796 breg = XEXP (breg, 0);
13797
13798 /* If the base register we are using to address memory is
13799 also a destination reg, then change that register last. */
13800 if (REG_P (breg)
13801 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13802 && REGNO (breg) < REGNO (dst) + nregs)
13803 j = REGNO (breg) - REGNO (dst);
c4ad648e 13804 }
46c07df8 13805
a9baceb1 13806 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13807 {
13808 rtx breg;
13809
a9baceb1
GK
13810 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13811 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13812 {
13813 rtx delta_rtx;
a9baceb1 13814 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13815 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13816 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13817 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13818
13819 /* We have to update the breg before doing the store.
13820 Use store with update, if available. */
13821
13822 if (TARGET_UPDATE)
13823 {
a9baceb1 13824 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13825 emit_insn (TARGET_32BIT
13826 ? (TARGET_POWERPC64
13827 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13828 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13829 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13830 used_update = true;
3a1f863f
DE
13831 }
13832 else
a9baceb1
GK
13833 emit_insn (TARGET_32BIT
13834 ? gen_addsi3 (breg, breg, delta_rtx)
13835 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13836 dst = replace_equiv_address (dst, breg);
3a1f863f 13837 }
37409796 13838 else
d04b6e6e 13839 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13840 }
13841
46c07df8 13842 for (i = 0; i < nregs; i++)
f676971a 13843 {
3a1f863f
DE
13844 /* Calculate index to next subword. */
13845 ++j;
f676971a 13846 if (j == nregs)
3a1f863f 13847 j = 0;
46c07df8 13848
112cdef5 13849 /* If the compiler has already emitted the move of the first word by
a9baceb1 13850 store with update, there is no need to do anything. */
3a1f863f 13851 if (j == 0 && used_update)
a9baceb1 13852 continue;
f676971a 13853
a9baceb1
GK
13854 emit_insn (gen_rtx_SET (VOIDmode,
13855 simplify_gen_subreg (reg_mode, dst, mode,
13856 j * reg_mode_size),
13857 simplify_gen_subreg (reg_mode, src, mode,
13858 j * reg_mode_size)));
3a1f863f 13859 }
46c07df8
HP
13860 }
13861}
13862
12a4e8c5 13863\f
a4f6c312
SS
13864/* This page contains routines that are used to determine what the
13865 function prologue and epilogue code will do and write them out. */
9878760c 13866
a4f6c312
SS
13867/* Return the first fixed-point register that is required to be
13868 saved. 32 if none. */
9878760c
RK
13869
13870int
863d938c 13871first_reg_to_save (void)
9878760c
RK
13872{
13873 int first_reg;
13874
13875 /* Find lowest numbered live register. */
13876 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13877 if (df_regs_ever_live_p (first_reg)
a38d360d 13878 && (! call_used_regs[first_reg]
1db02437 13879 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13880 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13881 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13882 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13883 break;
13884
ee890fe2 13885#if TARGET_MACHO
93638d7a
AM
13886 if (flag_pic
13887 && current_function_uses_pic_offset_table
13888 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13889 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13890#endif
13891
9878760c
RK
13892 return first_reg;
13893}
13894
13895/* Similar, for FP regs. */
13896
13897int
863d938c 13898first_fp_reg_to_save (void)
9878760c
RK
13899{
13900 int first_reg;
13901
13902 /* Find lowest numbered live register. */
13903 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13904 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13905 break;
13906
13907 return first_reg;
13908}
00b960c7
AH
13909
13910/* Similar, for AltiVec regs. */
13911
13912static int
863d938c 13913first_altivec_reg_to_save (void)
00b960c7
AH
13914{
13915 int i;
13916
13917 /* Stack frame remains as is unless we are in AltiVec ABI. */
13918 if (! TARGET_ALTIVEC_ABI)
13919 return LAST_ALTIVEC_REGNO + 1;
13920
22fa69da 13921 /* On Darwin, the unwind routines are compiled without
982afe02 13922 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13923 altivec registers when necessary. */
13924 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13925 && ! TARGET_ALTIVEC)
13926 return FIRST_ALTIVEC_REGNO + 20;
13927
00b960c7
AH
13928 /* Find lowest numbered live register. */
13929 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13930 if (df_regs_ever_live_p (i))
00b960c7
AH
13931 break;
13932
13933 return i;
13934}
13935
13936/* Return a 32-bit mask of the AltiVec registers we need to set in
13937 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
13938 the 32-bit word is 0. */
13939
13940static unsigned int
863d938c 13941compute_vrsave_mask (void)
00b960c7
AH
13942{
13943 unsigned int i, mask = 0;
13944
22fa69da 13945 /* On Darwin, the unwind routines are compiled without
982afe02 13946 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13947 call-saved altivec registers when necessary. */
13948 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13949 && ! TARGET_ALTIVEC)
13950 mask |= 0xFFF;
13951
00b960c7
AH
13952 /* First, find out if we use _any_ altivec registers. */
13953 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13954 if (df_regs_ever_live_p (i))
00b960c7
AH
13955 mask |= ALTIVEC_REG_BIT (i);
13956
13957 if (mask == 0)
13958 return mask;
13959
00b960c7
AH
13960 /* Next, remove the argument registers from the set. These must
13961 be in the VRSAVE mask set by the caller, so we don't need to add
13962 them in again. More importantly, the mask we compute here is
13963 used to generate CLOBBERs in the set_vrsave insn, and we do not
13964 wish the argument registers to die. */
a6cf80f2 13965 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13966 mask &= ~ALTIVEC_REG_BIT (i);
13967
13968 /* Similarly, remove the return value from the set. */
13969 {
13970 bool yes = false;
13971 diddle_return_value (is_altivec_return_reg, &yes);
13972 if (yes)
13973 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13974 }
13975
13976 return mask;
13977}
13978
d62294f5 13979/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13980 size of prologues/epilogues by calling our own save/restore-the-world
13981 routines. */
d62294f5
FJ
13982
13983static void
f57fe068
AM
13984compute_save_world_info (rs6000_stack_t *info_ptr)
13985{
13986 info_ptr->world_save_p = 1;
13987 info_ptr->world_save_p
13988 = (WORLD_SAVE_P (info_ptr)
13989 && DEFAULT_ABI == ABI_DARWIN
13990 && ! (current_function_calls_setjmp && flag_exceptions)
13991 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13992 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13993 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13994 && info_ptr->cr_save_p);
f676971a 13995
d62294f5
FJ
13996 /* This will not work in conjunction with sibcalls. Make sure there
13997 are none. (This check is expensive, but seldom executed.) */
f57fe068 13998 if (WORLD_SAVE_P (info_ptr))
f676971a 13999 {
d62294f5
FJ
14000 rtx insn;
14001 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14002 if ( GET_CODE (insn) == CALL_INSN
14003 && SIBLING_CALL_P (insn))
14004 {
14005 info_ptr->world_save_p = 0;
14006 break;
14007 }
d62294f5 14008 }
f676971a 14009
f57fe068 14010 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14011 {
14012 /* Even if we're not touching VRsave, make sure there's room on the
14013 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14014 will attempt to save it. */
d62294f5
FJ
14015 info_ptr->vrsave_size = 4;
14016
14017 /* "Save" the VRsave register too if we're saving the world. */
14018 if (info_ptr->vrsave_mask == 0)
c4ad648e 14019 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14020
14021 /* Because the Darwin register save/restore routines only handle
c4ad648e 14022 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14023 check. */
37409796
NS
14024 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14025 && (info_ptr->first_altivec_reg_save
14026 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14027 }
f676971a 14028 return;
d62294f5
FJ
14029}
14030
14031
00b960c7 14032static void
a2369ed3 14033is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14034{
14035 bool *yes = (bool *) xyes;
14036 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14037 *yes = true;
14038}
14039
4697a36c
MM
14040\f
14041/* Calculate the stack information for the current function. This is
14042 complicated by having two separate calling sequences, the AIX calling
14043 sequence and the V.4 calling sequence.
14044
592696dd 14045 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14046 32-bit 64-bit
4697a36c 14047 SP----> +---------------------------------------+
a260abc9 14048 | back chain to caller | 0 0
4697a36c 14049 +---------------------------------------+
a260abc9 14050 | saved CR | 4 8 (8-11)
4697a36c 14051 +---------------------------------------+
a260abc9 14052 | saved LR | 8 16
4697a36c 14053 +---------------------------------------+
a260abc9 14054 | reserved for compilers | 12 24
4697a36c 14055 +---------------------------------------+
a260abc9 14056 | reserved for binders | 16 32
4697a36c 14057 +---------------------------------------+
a260abc9 14058 | saved TOC pointer | 20 40
4697a36c 14059 +---------------------------------------+
a260abc9 14060 | Parameter save area (P) | 24 48
4697a36c 14061 +---------------------------------------+
a260abc9 14062 | Alloca space (A) | 24+P etc.
802a0058 14063 +---------------------------------------+
a7df97e6 14064 | Local variable space (L) | 24+P+A
4697a36c 14065 +---------------------------------------+
a7df97e6 14066 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14067 +---------------------------------------+
00b960c7
AH
14068 | Save area for AltiVec registers (W) | 24+P+A+L+X
14069 +---------------------------------------+
14070 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14071 +---------------------------------------+
14072 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14073 +---------------------------------------+
00b960c7
AH
14074 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14075 +---------------------------------------+
14074 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
4697a36c
MM
14077 +---------------------------------------+
14078 old SP->| back chain to caller's caller |
14079 +---------------------------------------+
14080
5376a30c
KR
14081 The required alignment for AIX configurations is two words (i.e., 8
14082 or 16 bytes).
14083
14084
4697a36c
MM
14085 V.4 stack frames look like:
14086
14087 SP----> +---------------------------------------+
14088 | back chain to caller | 0
14089 +---------------------------------------+
5eb387b8 14090 | caller's saved LR | 4
4697a36c
MM
14091 +---------------------------------------+
14092 | Parameter save area (P) | 8
14093 +---------------------------------------+
a7df97e6 14094 | Alloca space (A) | 8+P
f676971a 14095 +---------------------------------------+
a7df97e6 14096 | Varargs save area (V) | 8+P+A
f676971a 14097 +---------------------------------------+
a7df97e6 14098 | Local variable space (L) | 8+P+A+V
f676971a 14099 +---------------------------------------+
a7df97e6 14100 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14101 +---------------------------------------+
00b960c7
AH
14102 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14103 +---------------------------------------+
14104 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14105 +---------------------------------------+
14106 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14107 +---------------------------------------+
c4ad648e
AM
14108 | SPE: area for 64-bit GP registers |
14109 +---------------------------------------+
14110 | SPE alignment padding |
14111 +---------------------------------------+
00b960c7 14112 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14113 +---------------------------------------+
00b960c7 14114 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14115 +---------------------------------------+
00b960c7 14116 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14117 +---------------------------------------+
14118 old SP->| back chain to caller's caller |
14119 +---------------------------------------+
b6c9286a 14120
5376a30c
KR
14121 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14122 given. (But note below and in sysv4.h that we require only 8 and
14123 may round up the size of our stack frame anyways. The historical
14124 reason is early versions of powerpc-linux which didn't properly
14125 align the stack at program startup. A happy side-effect is that
14126 -mno-eabi libraries can be used with -meabi programs.)
14127
50d440bc 14128 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14129 the stack alignment requirements may differ. If -mno-eabi is not
14130 given, the required stack alignment is 8 bytes; if -mno-eabi is
14131 given, the required alignment is 16 bytes. (But see V.4 comment
14132 above.) */
4697a36c 14133
61b2fbe7
MM
14134#ifndef ABI_STACK_BOUNDARY
14135#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14136#endif
14137
d1d0c603 14138static rs6000_stack_t *
863d938c 14139rs6000_stack_info (void)
4697a36c 14140{
022123e6 14141 static rs6000_stack_t info;
4697a36c 14142 rs6000_stack_t *info_ptr = &info;
327e5343 14143 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14144 int ehrd_size;
64045029 14145 int save_align;
8070c91a 14146 int first_gp;
44688022 14147 HOST_WIDE_INT non_fixed_size;
4697a36c 14148
022123e6 14149 memset (&info, 0, sizeof (info));
4697a36c 14150
c19de7aa
AH
14151 if (TARGET_SPE)
14152 {
14153 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14154 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14155 cfun->machine->insn_chain_scanned_p
14156 = spe_func_has_64bit_regs_p () + 1;
14157 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14158 }
14159
a4f6c312 14160 /* Select which calling sequence. */
178274da 14161 info_ptr->abi = DEFAULT_ABI;
9878760c 14162
a4f6c312 14163 /* Calculate which registers need to be saved & save area size. */
4697a36c 14164 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14165 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14166 even if it currently looks like we won't. Reload may need it to
14167 get at a constant; if so, it will have already created a constant
14168 pool entry for it. */
2bfcf297 14169 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14170 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14171 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14172 && current_function_uses_const_pool
1db02437 14173 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14174 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14175 else
8070c91a
DJ
14176 first_gp = info_ptr->first_gp_reg_save;
14177
14178 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14179
a3170dc6
AH
14180 /* For the SPE, we have an additional upper 32-bits on each GPR.
14181 Ideally we should save the entire 64-bits only when the upper
14182 half is used in SIMD instructions. Since we only record
14183 registers live (not the size they are used in), this proves
14184 difficult because we'd have to traverse the instruction chain at
14185 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14186 so we opt to save the GPRs in 64-bits whenever even one register
14187 gets used in 64-bits. Otherwise, all the registers in the frame
14188 get saved in 32-bits.
a3170dc6 14189
c19de7aa 14190 So, since in that case we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14191 traditional GP save area will be empty. */
c19de7aa 14192 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14193 info_ptr->gp_size = 0;
14194
4697a36c
MM
14195 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14196 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14197
00b960c7
AH
14198 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14199 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14200 - info_ptr->first_altivec_reg_save);
14201
592696dd 14202 /* Does this function call anything? */
71f123ca
FS
14203 info_ptr->calls_p = (! current_function_is_leaf
14204 || cfun->machine->ra_needs_full_frame);
b6c9286a 14205
a4f6c312 14206 /* Determine if we need to save the link register. */
022123e6
AM
14207 if ((DEFAULT_ABI == ABI_AIX
14208 && current_function_profile
14209 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14210#ifdef TARGET_RELOCATABLE
14211 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14212#endif
14213 || (info_ptr->first_fp_reg_save != 64
14214 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14215 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14216 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14217 || info_ptr->calls_p
14218 || rs6000_ra_ever_killed ())
4697a36c
MM
14219 {
14220 info_ptr->lr_save_p = 1;
1de43f85 14221 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14222 }
14223
9ebbca7d 14224 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14225 if (df_regs_ever_live_p (CR2_REGNO)
14226 || df_regs_ever_live_p (CR3_REGNO)
14227 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14228 {
14229 info_ptr->cr_save_p = 1;
178274da 14230 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14231 info_ptr->cr_size = reg_size;
14232 }
14233
83720594
RH
14234 /* If the current function calls __builtin_eh_return, then we need
14235 to allocate stack space for registers that will hold data for
14236 the exception handler. */
14237 if (current_function_calls_eh_return)
14238 {
14239 unsigned int i;
14240 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14241 continue;
a3170dc6
AH
14242
14243 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14244 ehrd_size = i * (TARGET_SPE_ABI
14245 && info_ptr->spe_64bit_regs_used != 0
14246 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14247 }
14248 else
14249 ehrd_size = 0;
14250
592696dd 14251 /* Determine various sizes. */
4697a36c
MM
14252 info_ptr->reg_size = reg_size;
14253 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14254 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14255 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14256 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14257 if (FRAME_GROWS_DOWNWARD)
14258 info_ptr->vars_size
5b667039
JJ
14259 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14260 + info_ptr->parm_size,
7d5175e1 14261 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14262 - (info_ptr->fixed_size + info_ptr->vars_size
14263 + info_ptr->parm_size);
00b960c7 14264
c19de7aa 14265 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14266 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14267 else
14268 info_ptr->spe_gp_size = 0;
14269
4d774ff8
HP
14270 if (TARGET_ALTIVEC_ABI)
14271 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14272 else
4d774ff8
HP
14273 info_ptr->vrsave_mask = 0;
14274
14275 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14276 info_ptr->vrsave_size = 4;
14277 else
14278 info_ptr->vrsave_size = 0;
b6c9286a 14279
d62294f5
FJ
14280 compute_save_world_info (info_ptr);
14281
592696dd 14282 /* Calculate the offsets. */
178274da 14283 switch (DEFAULT_ABI)
4697a36c 14284 {
b6c9286a 14285 case ABI_NONE:
24d304eb 14286 default:
37409796 14287 gcc_unreachable ();
b6c9286a
MM
14288
14289 case ABI_AIX:
ee890fe2 14290 case ABI_DARWIN:
b6c9286a
MM
14291 info_ptr->fp_save_offset = - info_ptr->fp_size;
14292 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14293
14294 if (TARGET_ALTIVEC_ABI)
14295 {
14296 info_ptr->vrsave_save_offset
14297 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14298
982afe02 14299 /* Align stack so vector save area is on a quadword boundary.
9278121c 14300 The padding goes above the vectors. */
00b960c7
AH
14301 if (info_ptr->altivec_size != 0)
14302 info_ptr->altivec_padding_size
9278121c 14303 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14304 else
14305 info_ptr->altivec_padding_size = 0;
14306
14307 info_ptr->altivec_save_offset
14308 = info_ptr->vrsave_save_offset
14309 - info_ptr->altivec_padding_size
14310 - info_ptr->altivec_size;
9278121c
GK
14311 gcc_assert (info_ptr->altivec_size == 0
14312 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14313
14314 /* Adjust for AltiVec case. */
14315 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14316 }
14317 else
14318 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14319 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14320 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14321 break;
14322
14323 case ABI_V4:
b6c9286a
MM
14324 info_ptr->fp_save_offset = - info_ptr->fp_size;
14325 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14326 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14327
c19de7aa 14328 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14329 {
14330 /* Align stack so SPE GPR save area is aligned on a
14331 double-word boundary. */
14332 if (info_ptr->spe_gp_size != 0)
14333 info_ptr->spe_padding_size
14334 = 8 - (-info_ptr->cr_save_offset % 8);
14335 else
14336 info_ptr->spe_padding_size = 0;
14337
14338 info_ptr->spe_gp_save_offset
14339 = info_ptr->cr_save_offset
14340 - info_ptr->spe_padding_size
14341 - info_ptr->spe_gp_size;
14342
14343 /* Adjust for SPE case. */
022123e6 14344 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14345 }
a3170dc6 14346 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14347 {
14348 info_ptr->vrsave_save_offset
14349 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14350
14351 /* Align stack so vector save area is on a quadword boundary. */
14352 if (info_ptr->altivec_size != 0)
14353 info_ptr->altivec_padding_size
14354 = 16 - (-info_ptr->vrsave_save_offset % 16);
14355 else
14356 info_ptr->altivec_padding_size = 0;
14357
14358 info_ptr->altivec_save_offset
14359 = info_ptr->vrsave_save_offset
14360 - info_ptr->altivec_padding_size
14361 - info_ptr->altivec_size;
14362
14363 /* Adjust for AltiVec case. */
022123e6 14364 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14365 }
14366 else
022123e6
AM
14367 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14368 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14369 info_ptr->lr_save_offset = reg_size;
14370 break;
4697a36c
MM
14371 }
14372
64045029 14373 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14374 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14375 + info_ptr->gp_size
14376 + info_ptr->altivec_size
14377 + info_ptr->altivec_padding_size
a3170dc6
AH
14378 + info_ptr->spe_gp_size
14379 + info_ptr->spe_padding_size
00b960c7
AH
14380 + ehrd_size
14381 + info_ptr->cr_size
022123e6 14382 + info_ptr->vrsave_size,
64045029 14383 save_align);
00b960c7 14384
44688022 14385 non_fixed_size = (info_ptr->vars_size
ff381587 14386 + info_ptr->parm_size
5b667039 14387 + info_ptr->save_size);
ff381587 14388
44688022
AM
14389 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14390 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14391
14392 /* Determine if we need to allocate any stack frame:
14393
a4f6c312
SS
14394 For AIX we need to push the stack if a frame pointer is needed
14395 (because the stack might be dynamically adjusted), if we are
14396 debugging, if we make calls, or if the sum of fp_save, gp_save,
14397 and local variables is more than the space needed to save all
14398 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14399 + 18*8 = 288 (GPR13 reserved).
ff381587 14400
a4f6c312
SS
14401 For V.4 we don't have the stack cushion that AIX uses, but assume
14402 that the debugger can handle stackless frames. */
ff381587
MM
14403
14404 if (info_ptr->calls_p)
14405 info_ptr->push_p = 1;
14406
178274da 14407 else if (DEFAULT_ABI == ABI_V4)
44688022 14408 info_ptr->push_p = non_fixed_size != 0;
ff381587 14409
178274da
AM
14410 else if (frame_pointer_needed)
14411 info_ptr->push_p = 1;
14412
14413 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14414 info_ptr->push_p = 1;
14415
ff381587 14416 else
44688022 14417 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14418
a4f6c312 14419 /* Zero offsets if we're not saving those registers. */
8dda1a21 14420 if (info_ptr->fp_size == 0)
4697a36c
MM
14421 info_ptr->fp_save_offset = 0;
14422
8dda1a21 14423 if (info_ptr->gp_size == 0)
4697a36c
MM
14424 info_ptr->gp_save_offset = 0;
14425
00b960c7
AH
14426 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14427 info_ptr->altivec_save_offset = 0;
14428
14429 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14430 info_ptr->vrsave_save_offset = 0;
14431
c19de7aa
AH
14432 if (! TARGET_SPE_ABI
14433 || info_ptr->spe_64bit_regs_used == 0
14434 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14435 info_ptr->spe_gp_save_offset = 0;
14436
c81fc13e 14437 if (! info_ptr->lr_save_p)
4697a36c
MM
14438 info_ptr->lr_save_offset = 0;
14439
c81fc13e 14440 if (! info_ptr->cr_save_p)
4697a36c
MM
14441 info_ptr->cr_save_offset = 0;
14442
14443 return info_ptr;
14444}
14445
c19de7aa
AH
14446/* Return true if the current function uses any GPRs in 64-bit SIMD
14447 mode. */
14448
14449static bool
863d938c 14450spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14451{
14452 rtx insns, insn;
14453
14454 /* Functions that save and restore all the call-saved registers will
14455 need to save/restore the registers in 64-bits. */
14456 if (current_function_calls_eh_return
14457 || current_function_calls_setjmp
14458 || current_function_has_nonlocal_goto)
14459 return true;
14460
14461 insns = get_insns ();
14462
14463 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14464 {
14465 if (INSN_P (insn))
14466 {
14467 rtx i;
14468
b5a5beb9
AH
14469 /* FIXME: This should be implemented with attributes...
14470
14471 (set_attr "spe64" "true")....then,
14472 if (get_spe64(insn)) return true;
14473
14474 It's the only reliable way to do the stuff below. */
14475
c19de7aa 14476 i = PATTERN (insn);
f82f556d
AH
14477 if (GET_CODE (i) == SET)
14478 {
14479 enum machine_mode mode = GET_MODE (SET_SRC (i));
14480
14481 if (SPE_VECTOR_MODE (mode))
14482 return true;
4d4447b5
PB
14483 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
14484 || mode == DDmode || mode == TDmode))
f82f556d
AH
14485 return true;
14486 }
c19de7aa
AH
14487 }
14488 }
14489
14490 return false;
14491}
14492
d1d0c603 14493static void
a2369ed3 14494debug_stack_info (rs6000_stack_t *info)
9878760c 14495{
d330fd93 14496 const char *abi_string;
24d304eb 14497
c81fc13e 14498 if (! info)
4697a36c
MM
14499 info = rs6000_stack_info ();
14500
14501 fprintf (stderr, "\nStack information for function %s:\n",
14502 ((current_function_decl && DECL_NAME (current_function_decl))
14503 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14504 : "<unknown>"));
14505
24d304eb
RK
14506 switch (info->abi)
14507 {
b6c9286a
MM
14508 default: abi_string = "Unknown"; break;
14509 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14510 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14511 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14512 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14513 }
14514
14515 fprintf (stderr, "\tABI = %5s\n", abi_string);
14516
00b960c7
AH
14517 if (TARGET_ALTIVEC_ABI)
14518 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14519
a3170dc6
AH
14520 if (TARGET_SPE_ABI)
14521 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14522
4697a36c
MM
14523 if (info->first_gp_reg_save != 32)
14524 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14525
14526 if (info->first_fp_reg_save != 64)
14527 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14528
00b960c7
AH
14529 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14530 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14531 info->first_altivec_reg_save);
14532
4697a36c
MM
14533 if (info->lr_save_p)
14534 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14535
4697a36c
MM
14536 if (info->cr_save_p)
14537 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14538
00b960c7
AH
14539 if (info->vrsave_mask)
14540 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14541
4697a36c
MM
14542 if (info->push_p)
14543 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14544
14545 if (info->calls_p)
14546 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14547
4697a36c
MM
14548 if (info->gp_save_offset)
14549 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14550
14551 if (info->fp_save_offset)
14552 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14553
00b960c7
AH
14554 if (info->altivec_save_offset)
14555 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14556 info->altivec_save_offset);
14557
a3170dc6
AH
14558 if (info->spe_gp_save_offset)
14559 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14560 info->spe_gp_save_offset);
14561
00b960c7
AH
14562 if (info->vrsave_save_offset)
14563 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14564 info->vrsave_save_offset);
14565
4697a36c
MM
14566 if (info->lr_save_offset)
14567 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14568
14569 if (info->cr_save_offset)
14570 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14571
14572 if (info->varargs_save_offset)
14573 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14574
14575 if (info->total_size)
d1d0c603
JJ
14576 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14577 info->total_size);
4697a36c 14578
4697a36c 14579 if (info->vars_size)
d1d0c603
JJ
14580 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14581 info->vars_size);
4697a36c
MM
14582
14583 if (info->parm_size)
14584 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14585
14586 if (info->fixed_size)
14587 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14588
14589 if (info->gp_size)
14590 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14591
a3170dc6
AH
14592 if (info->spe_gp_size)
14593 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14594
4697a36c
MM
14595 if (info->fp_size)
14596 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14597
00b960c7
AH
14598 if (info->altivec_size)
14599 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14600
14601 if (info->vrsave_size)
14602 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14603
14604 if (info->altivec_padding_size)
14605 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14606 info->altivec_padding_size);
14607
a3170dc6
AH
14608 if (info->spe_padding_size)
14609 fprintf (stderr, "\tspe_padding_size = %5d\n",
14610 info->spe_padding_size);
14611
4697a36c
MM
14612 if (info->cr_size)
14613 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14614
14615 if (info->save_size)
14616 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14617
14618 if (info->reg_size != 4)
14619 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14620
14621 fprintf (stderr, "\n");
9878760c 14622}
71f123ca
FS
14623
14624rtx
a2369ed3 14625rs6000_return_addr (int count, rtx frame)
71f123ca 14626{
a4f6c312
SS
14627 /* Currently we don't optimize very well between prolog and body
14628 code, and for PIC code the result can actually be quite bad, so
14629 don't try to be too clever here. */
f1384257 14630 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14631 {
14632 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14633
14634 return
14635 gen_rtx_MEM
14636 (Pmode,
14637 memory_address
14638 (Pmode,
14639 plus_constant (copy_to_reg
14640 (gen_rtx_MEM (Pmode,
14641 memory_address (Pmode, frame))),
14642 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14643 }
14644
8c29550d 14645 cfun->machine->ra_need_lr = 1;
1de43f85 14646 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14647}
14648
5e1bf043
DJ
14649/* Say whether a function is a candidate for sibcall handling or not.
14650 We do not allow indirect calls to be optimized into sibling calls.
14651 Also, we can't do it if there are any vector parameters; there's
14652 nowhere to put the VRsave code so that it works; note that functions with
14653 vector parameters are required to have a prototype, so the argument
14654 type info must be available here. (The tail recursion case can work
14655 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14656static bool
a2369ed3 14657rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14658{
14659 tree type;
4977bab6 14660 if (decl)
5e1bf043
DJ
14661 {
14662 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14663 {
4977bab6 14664 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14665 type; type = TREE_CHAIN (type))
14666 {
c15b529f 14667 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14668 return false;
5e1bf043 14669 }
c4ad648e 14670 }
5e1bf043 14671 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14672 || ((*targetm.binds_local_p) (decl)
14673 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14674 {
4977bab6 14675 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14676
14677 if (!lookup_attribute ("longcall", attr_list)
14678 || lookup_attribute ("shortcall", attr_list))
4977bab6 14679 return true;
2bcc50d0 14680 }
5e1bf043 14681 }
4977bab6 14682 return false;
5e1bf043
DJ
14683}
14684
e7e64a25
AS
14685/* NULL if INSN is valid within a low-overhead loop.
14686 Otherwise return why doloop cannot be applied.
9419649c
DE
14687 PowerPC uses the COUNT register for branch on table instructions. */
14688
e7e64a25 14689static const char *
3101faab 14690rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14691{
14692 if (CALL_P (insn))
e7e64a25 14693 return "Function call in the loop.";
9419649c
DE
14694
14695 if (JUMP_P (insn)
14696 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14697 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14698 return "Computed branch in the loop.";
9419649c 14699
e7e64a25 14700 return NULL;
9419649c
DE
14701}
14702
71f123ca 14703static int
863d938c 14704rs6000_ra_ever_killed (void)
71f123ca
FS
14705{
14706 rtx top;
5e1bf043
DJ
14707 rtx reg;
14708 rtx insn;
71f123ca 14709
dd292d0a 14710 if (current_function_is_thunk)
71f123ca 14711 return 0;
eb0424da 14712
36f7e964
AH
14713 /* regs_ever_live has LR marked as used if any sibcalls are present,
14714 but this should not force saving and restoring in the
14715 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14716 clobbers LR, so that is inappropriate. */
36f7e964 14717
5e1bf043
DJ
14718 /* Also, the prologue can generate a store into LR that
14719 doesn't really count, like this:
36f7e964 14720
5e1bf043
DJ
14721 move LR->R0
14722 bcl to set PIC register
14723 move LR->R31
14724 move R0->LR
36f7e964
AH
14725
14726 When we're called from the epilogue, we need to avoid counting
14727 this as a store. */
f676971a 14728
71f123ca
FS
14729 push_topmost_sequence ();
14730 top = get_insns ();
14731 pop_topmost_sequence ();
1de43f85 14732 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14733
5e1bf043
DJ
14734 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14735 {
14736 if (INSN_P (insn))
14737 {
022123e6
AM
14738 if (CALL_P (insn))
14739 {
14740 if (!SIBLING_CALL_P (insn))
14741 return 1;
14742 }
1de43f85 14743 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14744 return 1;
36f7e964
AH
14745 else if (set_of (reg, insn) != NULL_RTX
14746 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14747 return 1;
14748 }
14749 }
14750 return 0;
71f123ca 14751}
4697a36c 14752\f
9ebbca7d 14753/* Emit instructions needed to load the TOC register.
c7ca610e 14754 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14755 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14756
14757void
a2369ed3 14758rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14759{
6fb5fa3c 14760 rtx dest;
1db02437 14761 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14762
7f970b70 14763 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14764 {
7f970b70 14765 char buf[30];
e65a3857 14766 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14767
14768 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14769 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14770 if (flag_pic == 2)
14771 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14772 else
14773 got = rs6000_got_sym ();
14774 tmp1 = tmp2 = dest;
14775 if (!fromprolog)
14776 {
14777 tmp1 = gen_reg_rtx (Pmode);
14778 tmp2 = gen_reg_rtx (Pmode);
14779 }
6fb5fa3c
DB
14780 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14781 emit_move_insn (tmp1,
1de43f85 14782 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14783 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14784 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14785 }
14786 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14787 {
6fb5fa3c 14788 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14789 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14790 }
14791 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14792 {
14793 char buf[30];
20b71b17
AM
14794 rtx temp0 = (fromprolog
14795 ? gen_rtx_REG (Pmode, 0)
14796 : gen_reg_rtx (Pmode));
20b71b17 14797
20b71b17
AM
14798 if (fromprolog)
14799 {
ccbca5e4 14800 rtx symF, symL;
38c1f2d7 14801
20b71b17
AM
14802 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14803 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14804
20b71b17
AM
14805 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14806 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14807
6fb5fa3c
DB
14808 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14809 emit_move_insn (dest,
1de43f85 14810 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14811 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14812 }
14813 else
20b71b17
AM
14814 {
14815 rtx tocsym;
20b71b17
AM
14816
14817 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14818 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14819 emit_move_insn (dest,
1de43f85 14820 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14821 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14822 }
6fb5fa3c 14823 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14824 }
20b71b17
AM
14825 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14826 {
14827 /* This is for AIX code running in non-PIC ELF32. */
14828 char buf[30];
14829 rtx realsym;
14830 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14831 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14832
6fb5fa3c
DB
14833 emit_insn (gen_elf_high (dest, realsym));
14834 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14835 }
37409796 14836 else
9ebbca7d 14837 {
37409796 14838 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14839
9ebbca7d 14840 if (TARGET_32BIT)
6fb5fa3c 14841 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14842 else
6fb5fa3c 14843 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14844 }
14845}
14846
d1d0c603
JJ
14847/* Emit instructions to restore the link register after determining where
14848 its value has been stored. */
14849
14850void
14851rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14852{
14853 rs6000_stack_t *info = rs6000_stack_info ();
14854 rtx operands[2];
14855
14856 operands[0] = source;
14857 operands[1] = scratch;
14858
14859 if (info->lr_save_p)
14860 {
14861 rtx frame_rtx = stack_pointer_rtx;
14862 HOST_WIDE_INT sp_offset = 0;
14863 rtx tmp;
14864
14865 if (frame_pointer_needed
14866 || current_function_calls_alloca
14867 || info->total_size > 32767)
14868 {
0be76840 14869 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14870 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14871 frame_rtx = operands[1];
14872 }
14873 else if (info->push_p)
14874 sp_offset = info->total_size;
14875
14876 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14877 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14878 emit_move_insn (tmp, operands[0]);
14879 }
14880 else
1de43f85 14881 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14882}
14883
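/* Alias set used for TOC references; created lazily by get_TOC_alias_set
   below.  */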
4862826d 14884static GTY(()) alias_set_type set = -1;
f103e34d 14885
4862826d 14886alias_set_type
863d938c 14887get_TOC_alias_set (void)
9ebbca7d 14888{
f103e34d
GK
14889 if (set == -1)
14890 set = new_alias_set ();
14891 return set;
f676971a 14892}
9ebbca7d 14893
c1207243 14894/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14895 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14896 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14897#if TARGET_ELF
3c9eb5f4 14898static int
f676971a 14899uses_TOC (void)
9ebbca7d 14900{
c4501e62 14901 rtx insn;
38c1f2d7 14902
c4501e62
JJ
14903 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14904 if (INSN_P (insn))
14905 {
14906 rtx pat = PATTERN (insn);
14907 int i;
9ebbca7d 14908
f676971a 14909 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14910 for (i = 0; i < XVECLEN (pat, 0); i++)
14911 {
14912 rtx sub = XVECEXP (pat, 0, i);
14913 if (GET_CODE (sub) == USE)
14914 {
14915 sub = XEXP (sub, 0);
14916 if (GET_CODE (sub) == UNSPEC
14917 && XINT (sub, 1) == UNSPEC_TOC)
14918 return 1;
14919 }
14920 }
14921 }
14922 return 0;
9ebbca7d 14923}
c954844a 14924#endif
38c1f2d7 14925
9ebbca7d 14926rtx
f676971a 14927create_TOC_reference (rtx symbol)
9ebbca7d 14928{
b3a13419 14929 if (!can_create_pseudo_p ())
6fb5fa3c 14930 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14931 return gen_rtx_PLUS (Pmode,
a8a05998 14932 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14933 gen_rtx_CONST (Pmode,
14934 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14935 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14936}
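/* Added note (not in the original source): the reference built above has the
   shape

     (plus:P (reg:P TOC_REGISTER)
             (const:P (minus:P (symbol_ref SYMBOL)
                               (symbol_ref toc_label_name))))

   i.e. SYMBOL is addressed as a fixed displacement from the TOC anchor
   label, applied to the TOC base register.  */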
38c1f2d7 14937
fc4767bb
JJ
14938/* If _Unwind_* has been called from within the same module,
14939   the TOC register is not guaranteed to be saved to 40(1) on function
14940 entry. Save it there in that case. */
c7ca610e 14941
9ebbca7d 14942void
863d938c 14943rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14944{
14945 rtx mem;
14946 rtx stack_top = gen_reg_rtx (Pmode);
14947 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14948 rtx opcode = gen_reg_rtx (SImode);
14949 rtx tocompare = gen_reg_rtx (SImode);
14950 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14951
8308679f 14952 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14953 emit_move_insn (stack_top, mem);
14954
8308679f
DE
14955 mem = gen_frame_mem (Pmode,
14956 gen_rtx_PLUS (Pmode, stack_top,
14957 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14958 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14959 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14960 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14961 : 0xE8410028, SImode));
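  /* Added note: the magic constants are the AIX TOC-restore instructions,
     0x80410014 == "lwz r2,20(r1)" and 0xE8410028 == "ld r2,40(r1)".  The
     word fetched above is the instruction at the return address saved in
     the frame; if that instruction is already the TOC reload, the TOC save
     slot is in use and the store below is skipped.  */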
9ebbca7d 14962
fc4767bb 14963 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14964 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14965 no_toc_save_needed);
9ebbca7d 14966
8308679f
DE
14967 mem = gen_frame_mem (Pmode,
14968 gen_rtx_PLUS (Pmode, stack_top,
14969 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14970 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14971 emit_label (no_toc_save_needed);
9ebbca7d 14972}
38c1f2d7 14973\f
0be76840
DE
14974/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14975 and the change to the stack pointer. */
ba4828e0 14976
9ebbca7d 14977static void
863d938c 14978rs6000_emit_stack_tie (void)
9ebbca7d 14979{
0be76840
DE
14980 rtx mem = gen_frame_mem (BLKmode,
14981 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14982
9ebbca7d
GK
14983 emit_insn (gen_stack_tie (mem));
14984}
38c1f2d7 14985
9ebbca7d
GK
14986/* Emit the correct code for allocating stack space, as insns.
14987 If COPY_R12, make sure a copy of the old frame is left in r12.
14988 The generated code may use hard register 0 as a temporary. */
14989
14990static void
a2369ed3 14991rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14992{
9ebbca7d
GK
14993 rtx insn;
14994 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14995 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14996 rtx todec = gen_int_mode (-size, Pmode);
14997
14998 if (INTVAL (todec) != -size)
14999 {
d4ee4d25 15000 warning (0, "stack frame too large");
61168ff1
RS
15001 emit_insn (gen_trap ());
15002 return;
15003 }
a157febd
GK
15004
15005 if (current_function_limit_stack)
15006 {
15007 if (REG_P (stack_limit_rtx)
f676971a 15008 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15009 && REGNO (stack_limit_rtx) <= 31)
15010 {
5b71a4e7 15011 emit_insn (TARGET_32BIT
9ebbca7d
GK
15012 ? gen_addsi3 (tmp_reg,
15013 stack_limit_rtx,
15014 GEN_INT (size))
15015 : gen_adddi3 (tmp_reg,
15016 stack_limit_rtx,
15017 GEN_INT (size)));
5b71a4e7 15018
9ebbca7d
GK
15019 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15020 const0_rtx));
a157febd
GK
15021 }
15022 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15023 && TARGET_32BIT
f607bc57 15024 && DEFAULT_ABI == ABI_V4)
a157febd 15025 {
9ebbca7d 15026 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15027 gen_rtx_PLUS (Pmode,
15028 stack_limit_rtx,
9ebbca7d 15029 GEN_INT (size)));
5b71a4e7 15030
9ebbca7d
GK
15031 emit_insn (gen_elf_high (tmp_reg, toload));
15032 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15033 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15034 const0_rtx));
a157febd
GK
15035 }
15036 else
d4ee4d25 15037 warning (0, "stack limit expression is not supported");
a157febd
GK
15038 }
15039
9ebbca7d
GK
15040 if (copy_r12 || ! TARGET_UPDATE)
15041 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15042
38c1f2d7
MM
15043 if (TARGET_UPDATE)
15044 {
9ebbca7d 15045 if (size > 32767)
38c1f2d7 15046 {
9ebbca7d 15047 /* Need a note here so that try_split doesn't get confused. */
9390387d 15048 if (get_last_insn () == NULL_RTX)
2e040219 15049 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15050 insn = emit_move_insn (tmp_reg, todec);
15051 try_split (PATTERN (insn), insn, 0);
15052 todec = tmp_reg;
38c1f2d7 15053 }
5b71a4e7
DE
15054
15055 insn = emit_insn (TARGET_32BIT
15056 ? gen_movsi_update (stack_reg, stack_reg,
15057 todec, stack_reg)
c4ad648e 15058 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15059 todec, stack_reg));
38c1f2d7
MM
15060 }
15061 else
15062 {
5b71a4e7
DE
15063 insn = emit_insn (TARGET_32BIT
15064 ? gen_addsi3 (stack_reg, stack_reg, todec)
15065 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
15066 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15067 gen_rtx_REG (Pmode, 12));
15068 }
f676971a 15069
9ebbca7d 15070 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15071 REG_NOTES (insn) =
9ebbca7d 15072 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15073 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15074 gen_rtx_PLUS (Pmode, stack_reg,
15075 GEN_INT (-size))),
15076 REG_NOTES (insn));
15077}
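
/* Added illustrative sketch, not part of rs6000.c and not compiled (hence
   the "#if 0"): the two size checks above come down to representability
   limits.  The helper below is a hypothetical, self-contained restatement of
   that logic; its name, the enum and the 32-bit assumption are inventions
   for illustration only.  */
#if 0
enum frame_size_class { FRAME_IMMEDIATE, FRAME_NEEDS_TEMP, FRAME_TOO_BIG };

static enum frame_size_class
classify_frame_size (long long size)
{
  /* -size would wrap a 32-bit Pmode: the "stack frame too large" case.  */
  if (size > 0x7fffffffLL)
    return FRAME_TOO_BIG;
  /* -size does not fit the signed 16-bit displacement of stwu/stdu, so the
     constant must first be built in a temporary register.  */
  if (size > 32767)
    return FRAME_NEEDS_TEMP;
  /* A single store-with-update with an immediate displacement suffices.  */
  return FRAME_IMMEDIATE;
}
#endif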
15078
a4f6c312
SS
15079/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15080 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15081 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15082 deduce these equivalences by itself so it wasn't necessary to hold
15083 its hand so much. */
9ebbca7d
GK
15084
15085static void
f676971a 15086rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15087 rtx reg2, rtx rreg)
9ebbca7d
GK
15088{
15089 rtx real, temp;
15090
e56c4463
JL
15091 /* copy_rtx will not make unique copies of registers, so we need to
15092 ensure we don't have unwanted sharing here. */
15093 if (reg == reg2)
15094 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15095
15096 if (reg == rreg)
15097 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15098
9ebbca7d
GK
15099 real = copy_rtx (PATTERN (insn));
15100
89e7058f
AH
15101 if (reg2 != NULL_RTX)
15102 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15103
15104 real = replace_rtx (real, reg,
9ebbca7d
GK
15105 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15106 STACK_POINTER_REGNUM),
15107 GEN_INT (val)));
f676971a 15108
9ebbca7d
GK
15109 /* We expect that 'real' is either a SET or a PARALLEL containing
15110 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15111 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15112
15113 if (GET_CODE (real) == SET)
15114 {
15115 rtx set = real;
f676971a 15116
9ebbca7d
GK
15117 temp = simplify_rtx (SET_SRC (set));
15118 if (temp)
15119 SET_SRC (set) = temp;
15120 temp = simplify_rtx (SET_DEST (set));
15121 if (temp)
15122 SET_DEST (set) = temp;
15123 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15124 {
9ebbca7d
GK
15125 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15126 if (temp)
15127 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15128 }
38c1f2d7 15129 }
37409796 15130 else
9ebbca7d
GK
15131 {
15132 int i;
37409796
NS
15133
15134 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15135 for (i = 0; i < XVECLEN (real, 0); i++)
15136 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15137 {
15138 rtx set = XVECEXP (real, 0, i);
f676971a 15139
9ebbca7d
GK
15140 temp = simplify_rtx (SET_SRC (set));
15141 if (temp)
15142 SET_SRC (set) = temp;
15143 temp = simplify_rtx (SET_DEST (set));
15144 if (temp)
15145 SET_DEST (set) = temp;
15146 if (GET_CODE (SET_DEST (set)) == MEM)
15147 {
15148 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15149 if (temp)
15150 XEXP (SET_DEST (set), 0) = temp;
15151 }
15152 RTX_FRAME_RELATED_P (set) = 1;
15153 }
15154 }
c19de7aa
AH
15155
15156 if (TARGET_SPE)
15157 real = spe_synthesize_frame_save (real);
15158
9ebbca7d
GK
15159 RTX_FRAME_RELATED_P (insn) = 1;
15160 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15161 real,
15162 REG_NOTES (insn));
38c1f2d7
MM
15163}
15164
c19de7aa
AH
15165/* Given an SPE frame note, return a PARALLEL of SETs with the
15166 original note, plus a synthetic register save. */
15167
15168static rtx
a2369ed3 15169spe_synthesize_frame_save (rtx real)
c19de7aa
AH
15170{
15171 rtx synth, offset, reg, real2;
15172
15173 if (GET_CODE (real) != SET
15174 || GET_MODE (SET_SRC (real)) != V2SImode)
15175 return real;
15176
 15177   /* For the SPE, registers saved in 64 bits get a PARALLEL for their
 15178      frame-related note.  The parallel contains a set of the register
41f3a930 15179 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
15180 This is so we can differentiate between 64-bit and 32-bit saves.
15181 Words cannot describe this nastiness. */
15182
37409796
NS
15183 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
15184 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
15185 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
15186
15187 /* Transform:
15188 (set (mem (plus (reg x) (const y)))
15189 (reg z))
15190 into:
15191 (set (mem (plus (reg x) (const y+4)))
41f3a930 15192 (reg z+1200))
c19de7aa
AH
15193 */
15194
15195 real2 = copy_rtx (real);
15196 PUT_MODE (SET_DEST (real2), SImode);
15197 reg = SET_SRC (real2);
15198 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
15199 synth = copy_rtx (real2);
15200
15201 if (BYTES_BIG_ENDIAN)
15202 {
15203 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
15204 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
15205 }
15206
15207 reg = SET_SRC (synth);
41f3a930 15208
c19de7aa 15209 synth = replace_rtx (synth, reg,
41f3a930 15210 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
15211
15212 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
15213 synth = replace_rtx (synth, offset,
15214 GEN_INT (INTVAL (offset)
15215 + (BYTES_BIG_ENDIAN ? 0 : 4)));
15216
15217 RTX_FRAME_RELATED_P (synth) = 1;
15218 RTX_FRAME_RELATED_P (real2) = 1;
15219 if (BYTES_BIG_ENDIAN)
15220 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
15221 else
15222 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
15223
15224 return real;
15225}
15226
00b960c7
AH
15227/* Returns an insn that has a vrsave set operation with the
15228 appropriate CLOBBERs. */
15229
15230static rtx
a2369ed3 15231generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15232{
15233 int nclobs, i;
15234 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15235 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15236
a004eb82
AH
15237 clobs[0]
15238 = gen_rtx_SET (VOIDmode,
15239 vrsave,
15240 gen_rtx_UNSPEC_VOLATILE (SImode,
15241 gen_rtvec (2, reg, vrsave),
3aca4bff 15242 UNSPECV_SET_VRSAVE));
00b960c7
AH
15243
15244 nclobs = 1;
15245
9aa86737
AH
15246 /* We need to clobber the registers in the mask so the scheduler
15247 does not move sets to VRSAVE before sets of AltiVec registers.
15248
15249 However, if the function receives nonlocal gotos, reload will set
15250 all call saved registers live. We will end up with:
15251
15252 (set (reg 999) (mem))
15253 (parallel [ (set (reg vrsave) (unspec blah))
15254 (clobber (reg 999))])
15255
15256 The clobber will cause the store into reg 999 to be dead, and
15257 flow will attempt to delete an epilogue insn. In this case, we
15258 need an unspec use/set of the register. */
00b960c7
AH
15259
15260 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15261 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15262 {
15263 if (!epiloguep || call_used_regs [i])
15264 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15265 gen_rtx_REG (V4SImode, i));
15266 else
15267 {
15268 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15269
15270 clobs[nclobs++]
a004eb82
AH
15271 = gen_rtx_SET (VOIDmode,
15272 reg,
15273 gen_rtx_UNSPEC (V4SImode,
15274 gen_rtvec (1, reg), 27));
9aa86737
AH
15275 }
15276 }
00b960c7
AH
15277
15278 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15279
15280 for (i = 0; i < nclobs; ++i)
15281 XVECEXP (insn, 0, i) = clobs[i];
15282
15283 return insn;
15284}
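
/* Added illustrative sketch, not part of rs6000.c and not compiled: the
   vrsave_mask consumed above is a 32-bit bit vector with one bit per AltiVec
   register, numbered from the most significant bit, which is the convention
   the VRSAVE SPR itself uses.  The helper below is a hypothetical
   restatement of how such a mask is built; its name and arguments are
   inventions for illustration only.  */
#if 0
static unsigned int
build_vrsave_mask (const int *used_vr, int n_used)
{
  unsigned int mask = 0;
  int i;

  for (i = 0; i < n_used; i++)
    /* Bit 0 (the MSB) corresponds to v0, bit 31 to v31.  */
    mask |= 0x80000000u >> used_vr[i];
  return mask;
}
#endif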
15285
89e7058f
AH
15286/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15287 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15288
15289static void
f676971a 15290emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15291 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15292{
15293 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15294 rtx replacea, replaceb;
15295
15296 int_rtx = GEN_INT (offset);
15297
15298 /* Some cases that need register indexed addressing. */
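  /* Added note: AltiVec memory instructions only accept a [reg+reg] address,
     and the E500/SPE 64-bit loads and stores have only a small immediate
     offset field, so in these cases the offset is first materialized in a
     register (r11, see below).  */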
15299 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4447b5 15300 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode))
a3170dc6
AH
15301 || (TARGET_SPE_ABI
15302 && SPE_VECTOR_MODE (mode)
15303 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15304 {
 15305	 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15306 flow path of instructions in the prologue. */
89e7058f
AH
15307 offset_rtx = gen_rtx_REG (Pmode, 11);
15308 emit_move_insn (offset_rtx, int_rtx);
15309
15310 replacea = offset_rtx;
15311 replaceb = int_rtx;
15312 }
15313 else
15314 {
15315 offset_rtx = int_rtx;
15316 replacea = NULL_RTX;
15317 replaceb = NULL_RTX;
15318 }
15319
15320 reg = gen_rtx_REG (mode, regno);
15321 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15322 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15323
15324 insn = emit_move_insn (mem, reg);
15325
15326 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15327}
15328
a3170dc6
AH
15329/* Emit an offset memory reference suitable for a frame store, while
15330 converting to a valid addressing mode. */
15331
15332static rtx
a2369ed3 15333gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15334{
15335 rtx int_rtx, offset_rtx;
15336
15337 int_rtx = GEN_INT (offset);
15338
4d4cbc0e 15339 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4d4447b5 15340 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == DDmode)))
a3170dc6
AH
15341 {
15342 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15343 emit_move_insn (offset_rtx, int_rtx);
15344 }
15345 else
15346 offset_rtx = int_rtx;
15347
0be76840 15348 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15349}
15350
6d0a8091
DJ
15351/* Look for user-defined global regs. We should not save and restore these,
 15352   and cannot use stmw/lmw if there are any in the range they cover.  */
15353
15354static bool
15355no_global_regs_above (int first_greg)
15356{
15357 int i;
15358 for (i = 0; i < 32 - first_greg; i++)
15359 if (global_regs[first_greg + i])
15360 return false;
15361 return true;
15362}
15363
699c914a
MS
15364#ifndef TARGET_FIX_AND_CONTINUE
15365#define TARGET_FIX_AND_CONTINUE 0
15366#endif
15367
52ff33d0
NF
15368/* Determine whether the gp REG is really used. */
15369
15370static bool
15371rs6000_reg_live_or_pic_offset_p (int reg)
15372{
6fb5fa3c 15373 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15374 && (!call_used_regs[reg]
15375 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15376 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15377 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15378 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15379 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15380}
15381
9ebbca7d
GK
15382/* Emit function prologue as insns. */
15383
9878760c 15384void
863d938c 15385rs6000_emit_prologue (void)
9878760c 15386{
4697a36c 15387 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15388 enum machine_mode reg_mode = Pmode;
327e5343 15389 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15390 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15391 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15392 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15393 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15394 rtx insn;
15395 int saving_FPRs_inline;
15396 int using_store_multiple;
15397 HOST_WIDE_INT sp_offset = 0;
f676971a 15398
699c914a
MS
15399 if (TARGET_FIX_AND_CONTINUE)
15400 {
15401 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15402 address by modifying the first 5 instructions of the function
699c914a
MS
15403 to branch to the overriding function. This is necessary to
15404 permit function pointers that point to the old function to
15405 actually forward to the new function. */
15406 emit_insn (gen_nop ());
15407 emit_insn (gen_nop ());
de2ab0ca 15408 emit_insn (gen_nop ());
699c914a
MS
15409 emit_insn (gen_nop ());
15410 emit_insn (gen_nop ());
15411 }
15412
15413 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15414 {
15415 reg_mode = V2SImode;
15416 reg_size = 8;
15417 }
a3170dc6 15418
9ebbca7d 15419 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15420 && (!TARGET_SPE_ABI
15421 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15422 && info->first_gp_reg_save < 31
15423 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15424 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15425 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15426 || current_function_calls_eh_return
8c29550d 15427 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15428
15429 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15430 if (! WORLD_SAVE_P (info)
15431 && info->push_p
acd0b319
AM
15432 && (DEFAULT_ABI == ABI_V4
15433 || current_function_calls_eh_return))
9ebbca7d
GK
15434 {
15435 if (info->total_size < 32767)
15436 sp_offset = info->total_size;
15437 else
15438 frame_reg_rtx = frame_ptr_rtx;
f676971a 15439 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15440 (frame_reg_rtx != sp_reg_rtx
15441 && (info->cr_save_p
15442 || info->lr_save_p
15443 || info->first_fp_reg_save < 64
15444 || info->first_gp_reg_save < 32
15445 )));
15446 if (frame_reg_rtx != sp_reg_rtx)
15447 rs6000_emit_stack_tie ();
15448 }
15449
d62294f5 15450 /* Handle world saves specially here. */
f57fe068 15451 if (WORLD_SAVE_P (info))
d62294f5
FJ
15452 {
15453 int i, j, sz;
15454 rtx treg;
15455 rtvec p;
22fa69da 15456 rtx reg0;
d62294f5
FJ
15457
15458 /* save_world expects lr in r0. */
22fa69da 15459 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15460 if (info->lr_save_p)
c4ad648e 15461 {
22fa69da 15462 insn = emit_move_insn (reg0,
1de43f85 15463 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15464 RTX_FRAME_RELATED_P (insn) = 1;
15465 }
d62294f5
FJ
15466
15467 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15468 assumptions about the offsets of various bits of the stack
992d08b1 15469 frame. */
37409796
NS
15470 gcc_assert (info->gp_save_offset == -220
15471 && info->fp_save_offset == -144
15472 && info->lr_save_offset == 8
15473 && info->cr_save_offset == 4
15474 && info->push_p
15475 && info->lr_save_p
15476 && (!current_function_calls_eh_return
15477 || info->ehrd_offset == -432)
15478 && info->vrsave_save_offset == -224
22fa69da 15479 && info->altivec_save_offset == -416);
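      /* Added note: the offsets asserted above must match the frame layout
	 that the out-of-line *save_world/*rest_world helpers referenced
	 below hard-code, so any mismatch is caught at compile time rather
	 than as silent corruption at run time.  */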
d62294f5
FJ
15480
15481 treg = gen_rtx_REG (SImode, 11);
15482 emit_move_insn (treg, GEN_INT (-info->total_size));
15483
15484 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15485 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15486
 15487       /* Preserve CR2 for save_world prologues.  */
22fa69da 15488 sz = 5;
d62294f5
FJ
15489 sz += 32 - info->first_gp_reg_save;
15490 sz += 64 - info->first_fp_reg_save;
15491 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15492 p = rtvec_alloc (sz);
15493 j = 0;
15494 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15495 gen_rtx_REG (SImode,
1de43f85 15496 LR_REGNO));
d62294f5 15497 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15498 gen_rtx_SYMBOL_REF (Pmode,
15499 "*save_world"));
d62294f5 15500 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15501 properly. */
15502 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15503 {
15504 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15505 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15506 GEN_INT (info->fp_save_offset
15507 + sp_offset + 8 * i));
0be76840 15508 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15509
15510 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15511 }
d62294f5 15512 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15513 {
15514 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15515 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15516 GEN_INT (info->altivec_save_offset
15517 + sp_offset + 16 * i));
0be76840 15518 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15519
15520 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15521 }
d62294f5 15522 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15523 {
15524 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15525 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15526 GEN_INT (info->gp_save_offset
15527 + sp_offset + reg_size * i));
0be76840 15528 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15529
15530 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15531 }
15532
15533 {
15534 /* CR register traditionally saved as CR2. */
15535 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15536 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15537 GEN_INT (info->cr_save_offset
15538 + sp_offset));
0be76840 15539 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15540
15541 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15542 }
22fa69da
GK
15543 /* Explain about use of R0. */
15544 if (info->lr_save_p)
15545 {
15546 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15547 GEN_INT (info->lr_save_offset
15548 + sp_offset));
15549 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15550
22fa69da
GK
15551 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15552 }
15553 /* Explain what happens to the stack pointer. */
15554 {
15555 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15556 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15557 }
d62294f5
FJ
15558
15559 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15560 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15561 treg, GEN_INT (-info->total_size));
15562 sp_offset = info->total_size;
d62294f5
FJ
15563 }
15564
9ebbca7d 15565 /* If we use the link register, get it into r0. */
f57fe068 15566 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15567 {
52ff33d0
NF
15568 rtx addr, reg, mem;
15569
f8a57be8 15570 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15571 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15572 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15573
15574 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15575 GEN_INT (info->lr_save_offset + sp_offset));
15576 reg = gen_rtx_REG (Pmode, 0);
15577 mem = gen_rtx_MEM (Pmode, addr);
15578 /* This should not be of rs6000_sr_alias_set, because of
15579 __builtin_return_address. */
15580
15581 insn = emit_move_insn (mem, reg);
15582 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15583 NULL_RTX, NULL_RTX);
f8a57be8 15584 }
9ebbca7d
GK
15585
15586 /* If we need to save CR, put it into r12. */
f57fe068 15587 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15588 {
f8a57be8 15589 rtx set;
f676971a 15590
9ebbca7d 15591 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15592 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15593 RTX_FRAME_RELATED_P (insn) = 1;
15594 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15595 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15596 But that's OK. All we have to do is specify that _one_ condition
15597 code register is saved in this stack slot. The thrower's epilogue
15598 will then restore all the call-saved registers.
15599 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15600 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15601 gen_rtx_REG (SImode, CR2_REGNO));
15602 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15603 set,
15604 REG_NOTES (insn));
9ebbca7d
GK
15605 }
15606
a4f6c312
SS
15607 /* Do any required saving of fpr's. If only one or two to save, do
15608 it ourselves. Otherwise, call function. */
f57fe068 15609 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15610 {
15611 int i;
15612 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15613 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15614 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15615 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15616 info->first_fp_reg_save + i,
15617 info->fp_save_offset + sp_offset + 8 * i,
15618 info->total_size);
9ebbca7d 15619 }
f57fe068 15620 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15621 {
15622 int i;
15623 char rname[30];
520a57c8 15624 const char *alloc_rname;
9ebbca7d
GK
15625 rtvec p;
15626 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15627
15628 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15629 gen_rtx_REG (Pmode,
1de43f85 15630 LR_REGNO));
9ebbca7d
GK
15631 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15632 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15633 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15634 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15635 gen_rtx_SYMBOL_REF (Pmode,
15636 alloc_rname));
15637 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15638 {
15639 rtx addr, reg, mem;
15640 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15641 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15642 GEN_INT (info->fp_save_offset
9ebbca7d 15643 + sp_offset + 8*i));
0be76840 15644 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15645
15646 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15647 }
15648 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15649 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15650 NULL_RTX, NULL_RTX);
15651 }
b6c9286a 15652
9ebbca7d
GK
15653 /* Save GPRs. This is done as a PARALLEL if we are using
15654 the store-multiple instructions. */
f57fe068 15655 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15656 {
308c142a 15657 rtvec p;
9ebbca7d
GK
15658 int i;
15659 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15660 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15661 {
15662 rtx addr, reg, mem;
15663 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15664 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15665 GEN_INT (info->gp_save_offset
15666 + sp_offset
9ebbca7d 15667 + reg_size * i));
0be76840 15668 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15669
15670 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15671 }
15672 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15673 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15674 NULL_RTX, NULL_RTX);
b6c9286a 15675 }
52ff33d0
NF
15676 else if (!WORLD_SAVE_P (info)
15677 && TARGET_SPE_ABI
15678 && info->spe_64bit_regs_used != 0
15679 && info->first_gp_reg_save != 32)
15680 {
15681 int i;
15682 rtx spe_save_area_ptr;
15683 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15684 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15685 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15686
15687 /* Determine whether we can address all of the registers that need
15688 to be saved with an offset from the stack pointer that fits in
15689 the small const field for SPE memory instructions. */
15690 int spe_regs_addressable_via_sp
15691 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15692 + (32 - info->first_gp_reg_save - 1) * reg_size);
15693 int spe_offset;
15694
15695 if (spe_regs_addressable_via_sp)
15696 {
30895f30 15697 spe_save_area_ptr = frame_reg_rtx;
52ff33d0
NF
15698 spe_offset = info->spe_gp_save_offset + sp_offset;
15699 }
15700 else
15701 {
15702 /* Make r11 point to the start of the SPE save area. We need
15703 to be careful here if r11 is holding the static chain. If
15704 it is, then temporarily save it in r0. We would use r0 as
15705 our base register here, but using r0 as a base register in
15706 loads and stores means something different from what we
15707 would like. */
15708 if (using_static_chain_p)
15709 {
15710 rtx r0 = gen_rtx_REG (Pmode, 0);
15711
15712 gcc_assert (info->first_gp_reg_save > 11);
15713
15714 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15715 }
15716
15717 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
30895f30 15718 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
52ff33d0
NF
15719 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15720
15721 spe_offset = 0;
15722 }
15723
15724 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15725 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15726 {
15727 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15728 rtx offset, addr, mem;
15729
15730 /* We're doing all this to ensure that the offset fits into
 15731		 the immediate offset field of 'evstdd'.  */
15732 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
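	      /* Added note: evstdd encodes its offset as a 5-bit unsigned
		 count of doublewords, so only offsets 0..248 that are
		 multiples of 8 are representable; that is the constraint
		 SPE_CONST_OFFSET_OK checks.  */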
15733
15734 offset = GEN_INT (reg_size * i + spe_offset);
15735 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15736 mem = gen_rtx_MEM (V2SImode, addr);
15737
15738 insn = emit_move_insn (mem, reg);
15739
15740 rs6000_frame_related (insn, spe_save_area_ptr,
15741 info->spe_gp_save_offset
15742 + sp_offset + reg_size * i,
15743 offset, const0_rtx);
15744 }
15745
15746 /* Move the static chain pointer back. */
15747 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15748 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15749 }
f57fe068 15750 else if (!WORLD_SAVE_P (info))
b6c9286a 15751 {
9ebbca7d
GK
15752 int i;
15753 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15754 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15755 {
15756 rtx addr, reg, mem;
15757 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15758
52ff33d0
NF
15759 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15760 GEN_INT (info->gp_save_offset
15761 + sp_offset
15762 + reg_size * i));
15763 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15764
52ff33d0
NF
15765 insn = emit_move_insn (mem, reg);
15766 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15767 NULL_RTX, NULL_RTX);
15768 }
9ebbca7d
GK
15769 }
15770
83720594
RH
15771 /* ??? There's no need to emit actual instructions here, but it's the
15772 easiest way to get the frame unwind information emitted. */
22fa69da 15773 if (current_function_calls_eh_return)
83720594 15774 {
78e1b90d
DE
15775 unsigned int i, regno;
15776
fc4767bb
JJ
15777 /* In AIX ABI we need to pretend we save r2 here. */
15778 if (TARGET_AIX)
15779 {
15780 rtx addr, reg, mem;
15781
15782 reg = gen_rtx_REG (reg_mode, 2);
15783 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15784 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15785 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15786
15787 insn = emit_move_insn (mem, reg);
f676971a 15788 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15789 NULL_RTX, NULL_RTX);
15790 PATTERN (insn) = gen_blockage ();
15791 }
15792
83720594
RH
15793 for (i = 0; ; ++i)
15794 {
83720594
RH
15795 regno = EH_RETURN_DATA_REGNO (i);
15796 if (regno == INVALID_REGNUM)
15797 break;
15798
89e7058f
AH
15799 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15800 info->ehrd_offset + sp_offset
15801 + reg_size * (int) i,
15802 info->total_size);
83720594
RH
15803 }
15804 }
15805
9ebbca7d 15806 /* Save CR if we use any that must be preserved. */
f57fe068 15807 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15808 {
15809 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15810 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15811 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15812 /* See the large comment above about why CR2_REGNO is used. */
15813 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15814
9ebbca7d
GK
15815 /* If r12 was used to hold the original sp, copy cr into r0 now
15816 that it's free. */
15817 if (REGNO (frame_reg_rtx) == 12)
15818 {
f8a57be8
GK
15819 rtx set;
15820
9ebbca7d 15821 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15822 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15823 RTX_FRAME_RELATED_P (insn) = 1;
15824 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15825 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15826 set,
15827 REG_NOTES (insn));
f676971a 15828
9ebbca7d
GK
15829 }
15830 insn = emit_move_insn (mem, cr_save_rtx);
15831
f676971a 15832 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15833 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15834 }
15835
f676971a 15836 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15837 for which it was done previously. */
f57fe068 15838 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15839 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15840 {
bcb2d701 15841 if (info->total_size < 32767)
2b2c2fe5 15842 sp_offset = info->total_size;
bcb2d701
EC
15843 else
15844 frame_reg_rtx = frame_ptr_rtx;
15845 rs6000_emit_allocate_stack (info->total_size,
15846 (frame_reg_rtx != sp_reg_rtx
15847 && ((info->altivec_size != 0)
15848 || (info->vrsave_mask != 0)
15849 )));
15850 if (frame_reg_rtx != sp_reg_rtx)
15851 rs6000_emit_stack_tie ();
2b2c2fe5 15852 }
9ebbca7d
GK
15853
15854 /* Set frame pointer, if needed. */
15855 if (frame_pointer_needed)
15856 {
7d5175e1 15857 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15858 sp_reg_rtx);
15859 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15860 }
9878760c 15861
2b2c2fe5
EC
15862 /* Save AltiVec registers if needed. Save here because the red zone does
15863 not include AltiVec registers. */
15864 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15865 {
15866 int i;
15867
 15868	 /* There should be a non-inline version of this, for when we
15869 are saving lots of vector registers. */
15870 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15871 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15872 {
15873 rtx areg, savereg, mem;
15874 int offset;
15875
15876 offset = info->altivec_save_offset + sp_offset
15877 + 16 * (i - info->first_altivec_reg_save);
15878
15879 savereg = gen_rtx_REG (V4SImode, i);
15880
15881 areg = gen_rtx_REG (Pmode, 0);
15882 emit_move_insn (areg, GEN_INT (offset));
15883
15884 /* AltiVec addressing mode is [reg+reg]. */
15885 mem = gen_frame_mem (V4SImode,
15886 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15887
15888 insn = emit_move_insn (mem, savereg);
15889
15890 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15891 areg, GEN_INT (offset));
15892 }
15893 }
15894
15895 /* VRSAVE is a bit vector representing which AltiVec registers
15896 are used. The OS uses this to determine which vector
15897 registers to save on a context switch. We need to save
15898 VRSAVE on the stack frame, add whatever AltiVec registers we
15899 used in this function, and do the corresponding magic in the
15900 epilogue. */
15901
15902 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15903 && info->vrsave_mask != 0)
15904 {
15905 rtx reg, mem, vrsave;
15906 int offset;
15907
15908 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15909 as frame_reg_rtx and r11 as the static chain pointer for
15910 nested functions. */
15911 reg = gen_rtx_REG (SImode, 0);
15912 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15913 if (TARGET_MACHO)
15914 emit_insn (gen_get_vrsave_internal (reg));
15915 else
15916 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15917
15918 if (!WORLD_SAVE_P (info))
15919 {
15920 /* Save VRSAVE. */
15921 offset = info->vrsave_save_offset + sp_offset;
15922 mem = gen_frame_mem (SImode,
15923 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15924 GEN_INT (offset)));
15925 insn = emit_move_insn (mem, reg);
15926 }
15927
15928 /* Include the registers in the mask. */
15929 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15930
15931 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15932 }
15933
1db02437 15934 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15935 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15936 || (DEFAULT_ABI == ABI_V4
15937 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15938 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15939 {
15940 /* If emit_load_toc_table will use the link register, we need to save
15941 it. We use R12 for this purpose because emit_load_toc_table
15942 can use register 0. This allows us to use a plain 'blr' to return
15943 from the procedure more often. */
15944 int save_LR_around_toc_setup = (TARGET_ELF
15945 && DEFAULT_ABI != ABI_AIX
15946 && flag_pic
15947 && ! info->lr_save_p
15948 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15949 if (save_LR_around_toc_setup)
15950 {
1de43f85 15951 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15952
c4ad648e 15953 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15954 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15955
c4ad648e 15956 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15957
c4ad648e 15958 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15959 RTX_FRAME_RELATED_P (insn) = 1;
15960 }
15961 else
15962 rs6000_emit_load_toc_table (TRUE);
15963 }
ee890fe2 15964
fcce224d 15965#if TARGET_MACHO
ee890fe2
SS
15966 if (DEFAULT_ABI == ABI_DARWIN
15967 && flag_pic && current_function_uses_pic_offset_table)
15968 {
1de43f85 15969 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15970 rtx src = machopic_function_base_sym ();
ee890fe2 15971
6d0a8091
DJ
15972 /* Save and restore LR locally around this call (in R0). */
15973 if (!info->lr_save_p)
6fb5fa3c 15974 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15975
6fb5fa3c 15976 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15977
6fb5fa3c
DB
15978 emit_move_insn (gen_rtx_REG (Pmode,
15979 RS6000_PIC_OFFSET_TABLE_REGNUM),
15980 lr);
6d0a8091
DJ
15981
15982 if (!info->lr_save_p)
6fb5fa3c 15983 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15984 }
fcce224d 15985#endif
9ebbca7d
GK
15986}
15987
9ebbca7d 15988/* Write function prologue. */
a4f6c312 15989
08c148a8 15990static void
f676971a 15991rs6000_output_function_prologue (FILE *file,
a2369ed3 15992 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15993{
15994 rs6000_stack_t *info = rs6000_stack_info ();
15995
4697a36c
MM
15996 if (TARGET_DEBUG_STACK)
15997 debug_stack_info (info);
9878760c 15998
a4f6c312
SS
15999 /* Write .extern for any function we will call to save and restore
16000 fp values. */
16001 if (info->first_fp_reg_save < 64
16002 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16003 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16004 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
16005 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
16006 RESTORE_FP_SUFFIX);
9878760c 16007
c764f757
RK
16008 /* Write .extern for AIX common mode routines, if needed. */
16009 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16010 {
f6709c70
JW
16011 fputs ("\t.extern __mulh\n", file);
16012 fputs ("\t.extern __mull\n", file);
16013 fputs ("\t.extern __divss\n", file);
16014 fputs ("\t.extern __divus\n", file);
16015 fputs ("\t.extern __quoss\n", file);
16016 fputs ("\t.extern __quous\n", file);
c764f757
RK
16017 common_mode_defined = 1;
16018 }
9878760c 16019
9ebbca7d 16020 if (! HAVE_prologue)
979721f8 16021 {
9ebbca7d 16022 start_sequence ();
9dda4cc8 16023
a4f6c312
SS
16024 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16025 the "toplevel" insn chain. */
2e040219 16026 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16027 rs6000_emit_prologue ();
2e040219 16028 emit_note (NOTE_INSN_DELETED);
178c3eff 16029
a3c9585f 16030 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16031 {
16032 rtx insn;
16033 unsigned addr = 0;
16034 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16035 {
16036 INSN_ADDRESSES_NEW (insn, addr);
16037 addr += 4;
16038 }
16039 }
9dda4cc8 16040
9ebbca7d 16041 if (TARGET_DEBUG_STACK)
a4f6c312 16042 debug_rtx_list (get_insns (), 100);
c9d691e9 16043 final (get_insns (), file, FALSE);
9ebbca7d 16044 end_sequence ();
979721f8
MM
16045 }
16046
9ebbca7d
GK
16047 rs6000_pic_labelno++;
16048}
f676971a 16049
9ebbca7d 16050/* Emit function epilogue as insns.
9878760c 16051
9ebbca7d
GK
16052 At present, dwarf2out_frame_debug_expr doesn't understand
16053 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16054 anywhere in the epilogue. Most of the insns below would in any case
16055 need special notes to explain where r11 is in relation to the stack. */
9878760c 16056
9ebbca7d 16057void
a2369ed3 16058rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16059{
16060 rs6000_stack_t *info;
16061 int restoring_FPRs_inline;
16062 int using_load_multiple;
d296e02e 16063 int using_mtcr_multiple;
9ebbca7d
GK
16064 int use_backchain_to_restore_sp;
16065 int sp_offset = 0;
16066 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16067 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16068 enum machine_mode reg_mode = Pmode;
327e5343 16069 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16070 int i;
16071
c19de7aa
AH
16072 info = rs6000_stack_info ();
16073
16074 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16075 {
16076 reg_mode = V2SImode;
16077 reg_size = 8;
16078 }
16079
9ebbca7d 16080 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16081 && (!TARGET_SPE_ABI
16082 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16083 && info->first_gp_reg_save < 31
16084 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16085 restoring_FPRs_inline = (sibcall
83720594 16086 || current_function_calls_eh_return
9ebbca7d
GK
16087 || info->first_fp_reg_save == 64
16088 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16089 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16090 || current_function_calls_alloca
16091 || info->total_size > 32767);
d296e02e 16092 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16093 || rs6000_cpu == PROCESSOR_PPC603
16094 || rs6000_cpu == PROCESSOR_PPC750
16095 || optimize_size);
16096
f57fe068 16097 if (WORLD_SAVE_P (info))
d62294f5
FJ
16098 {
16099 int i, j;
16100 char rname[30];
16101 const char *alloc_rname;
16102 rtvec p;
16103
16104 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16105 stack slot (which is not likely to be our caller.)
16106 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16107 rest_world is similar, except any R10 parameter is ignored.
16108 The exception-handling stuff that was here in 2.95 is no
16109 longer necessary. */
d62294f5
FJ
16110
16111 p = rtvec_alloc (9
16112 + 1
f676971a 16113 + 32 - info->first_gp_reg_save
c4ad648e
AM
16114 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16115 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16116
c4ad648e
AM
16117 strcpy (rname, ((current_function_calls_eh_return) ?
16118 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16119 alloc_rname = ggc_strdup (rname);
16120
16121 j = 0;
16122 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16123 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16124 gen_rtx_REG (Pmode,
1de43f85 16125 LR_REGNO));
d62294f5 16126 RTVEC_ELT (p, j++)
c4ad648e 16127 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16128 /* The instruction pattern requires a clobber here;
c4ad648e 16129 it is shared with the restVEC helper. */
d62294f5 16130 RTVEC_ELT (p, j++)
c4ad648e 16131 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16132
16133 {
c4ad648e
AM
16134 /* CR register traditionally saved as CR2. */
16135 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16136 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16137 GEN_INT (info->cr_save_offset));
0be76840 16138 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16139
16140 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16141 }
16142
16143 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16144 {
16145 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16146 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16147 GEN_INT (info->gp_save_offset
16148 + reg_size * i));
0be76840 16149 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16150
16151 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16152 }
d62294f5 16153 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16154 {
16155 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16156 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16157 GEN_INT (info->altivec_save_offset
16158 + 16 * i));
0be76840 16159 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16160
16161 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16162 }
d62294f5 16163 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16164 {
16165 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16166 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16167 GEN_INT (info->fp_save_offset
16168 + 8 * i));
0be76840 16169 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16170
16171 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16172 }
d62294f5 16173 RTVEC_ELT (p, j++)
c4ad648e 16174 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16175 RTVEC_ELT (p, j++)
c4ad648e 16176 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16177 RTVEC_ELT (p, j++)
c4ad648e 16178 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16179 RTVEC_ELT (p, j++)
c4ad648e 16180 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16181 RTVEC_ELT (p, j++)
c4ad648e 16182 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16183 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16184
16185 return;
16186 }
16187
45b194f8
AM
16188 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16189 if (info->push_p)
2b2c2fe5 16190 sp_offset = info->total_size;
f676971a 16191
9aa86737
AH
16192 /* Restore AltiVec registers if needed. */
16193 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16194 {
16195 int i;
16196
16197 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16198 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16199 {
16200 rtx addr, areg, mem;
16201
16202 areg = gen_rtx_REG (Pmode, 0);
16203 emit_move_insn
16204 (areg, GEN_INT (info->altivec_save_offset
16205 + sp_offset
16206 + 16 * (i - info->first_altivec_reg_save)));
16207
16208 /* AltiVec addressing mode is [reg+reg]. */
16209 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16210 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16211
16212 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16213 }
16214 }
16215
2b2c2fe5
EC
16216 /* If we have a frame pointer, a call to alloca, or a large stack
16217 frame, restore the old stack pointer using the backchain. Otherwise,
16218 we know what size to update it with. */
16219 if (use_backchain_to_restore_sp)
16220 {
16221 /* Under V.4, don't reset the stack pointer until after we're done
16222 loading the saved registers. */
16223 if (DEFAULT_ABI == ABI_V4)
16224 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16225
16226 emit_move_insn (frame_reg_rtx,
16227 gen_rtx_MEM (Pmode, sp_reg_rtx));
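      /* Added note: word 0 of every frame holds the back chain (the previous
	 stack pointer) in both the AIX and V.4 ABIs, so a single load
	 recovers the caller's stack pointer even when the frame size is not
	 known statically (alloca, very large frames).  */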
45b194f8 16228 sp_offset = 0;
2b2c2fe5 16229 }
45b194f8
AM
16230 else if (info->push_p
16231 && DEFAULT_ABI != ABI_V4
16232 && !current_function_calls_eh_return)
2b2c2fe5 16233 {
45b194f8
AM
16234 emit_insn (TARGET_32BIT
16235 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16236 GEN_INT (info->total_size))
16237 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16238 GEN_INT (info->total_size)));
16239 sp_offset = 0;
2b2c2fe5
EC
16240 }
16241
554c2941
AM
16242 /* Restore VRSAVE if needed. */
16243 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16244 && info->vrsave_mask != 0)
16245 {
16246 rtx addr, mem, reg;
16247
16248 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16249 GEN_INT (info->vrsave_save_offset + sp_offset));
16250 mem = gen_frame_mem (SImode, addr);
16251 reg = gen_rtx_REG (SImode, 12);
16252 emit_move_insn (reg, mem);
16253
16254 emit_insn (generate_set_vrsave (reg, info, 1));
16255 }
16256
9ebbca7d
GK
16257 /* Get the old lr if we saved it. */
16258 if (info->lr_save_p)
b6c9286a 16259 {
a3170dc6
AH
16260 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16261 info->lr_save_offset + sp_offset);
ba4828e0 16262
9ebbca7d 16263 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16264 }
f676971a 16265
9ebbca7d
GK
16266 /* Get the old cr if we saved it. */
16267 if (info->cr_save_p)
16268 {
16269 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16270 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16271 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16272
9ebbca7d
GK
16273 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16274 }
f676971a 16275
9ebbca7d 16276 /* Set LR here to try to overlap restores below. */
4697a36c 16277 if (info->lr_save_p)
1de43f85 16278 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16279 gen_rtx_REG (Pmode, 0));
f676971a 16280
83720594
RH
16281 /* Load exception handler data registers, if needed. */
16282 if (current_function_calls_eh_return)
16283 {
78e1b90d
DE
16284 unsigned int i, regno;
16285
fc4767bb
JJ
16286 if (TARGET_AIX)
16287 {
16288 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16289 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16290 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16291
16292 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16293 }
16294
83720594
RH
16295 for (i = 0; ; ++i)
16296 {
a3170dc6 16297 rtx mem;
83720594
RH
16298
16299 regno = EH_RETURN_DATA_REGNO (i);
16300 if (regno == INVALID_REGNUM)
16301 break;
16302
a3170dc6
AH
16303 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16304 info->ehrd_offset + sp_offset
16305 + reg_size * (int) i);
83720594
RH
16306
16307 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16308 }
16309 }
f676971a 16310
9ebbca7d
GK
16311 /* Restore GPRs. This is done as a PARALLEL if we are using
16312 the load-multiple instructions. */
16313 if (using_load_multiple)
979721f8 16314 {
9ebbca7d
GK
16315 rtvec p;
16316 p = rtvec_alloc (32 - info->first_gp_reg_save);
16317 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16318 {
f676971a
EC
16319 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16320 GEN_INT (info->gp_save_offset
16321 + sp_offset
9ebbca7d 16322 + reg_size * i));
0be76840 16323 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16324
f676971a 16325 RTVEC_ELT (p, i) =
9ebbca7d
GK
16326 gen_rtx_SET (VOIDmode,
16327 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16328 mem);
979721f8 16329 }
9ebbca7d 16330 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16331 }
52ff33d0
NF
16332 else if (TARGET_SPE_ABI
16333 && info->spe_64bit_regs_used != 0
16334 && info->first_gp_reg_save != 32)
16335 {
52ff33d0
NF
16336 /* Determine whether we can address all of the registers that need
 16337	 to be restored with an offset from the stack pointer that fits in
16338 the small const field for SPE memory instructions. */
16339 int spe_regs_addressable_via_sp
16340 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16341 + (32 - info->first_gp_reg_save - 1) * reg_size);
16342 int spe_offset;
16343
16344 if (spe_regs_addressable_via_sp)
45b194f8 16345 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16346 else
16347 {
45b194f8 16348 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16349 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16350 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16351 There's no need to worry here because the static chain is passed
16352 anew to every function. */
45b194f8
AM
16353 if (frame_reg_rtx == sp_reg_rtx)
16354 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16355 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16356 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16357 /* Keep the invariant that frame_reg_rtx + sp_offset points
16358 at the top of the stack frame. */
16359 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16360
16361 spe_offset = 0;
16362 }
16363
16364 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16365 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16366 {
16367 rtx offset, addr, mem;
16368
16369 /* We're doing all this to ensure that the immediate offset
16370 fits into the immediate field of 'evldd'. */
16371 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16372
16373 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16374 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16375 mem = gen_rtx_MEM (V2SImode, addr);
16376
16377 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16378 mem);
16379 }
16380 }
9ebbca7d
GK
16381 else
16382 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16383 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16384 {
f676971a
EC
16385 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16386 GEN_INT (info->gp_save_offset
16387 + sp_offset
9ebbca7d 16388 + reg_size * i));
0be76840 16389 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16390
f676971a 16391 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16392 info->first_gp_reg_save + i), mem);
9ebbca7d 16393 }
9878760c 16394
9ebbca7d
GK
16395 /* Restore fpr's if we need to do it without calling a function. */
16396 if (restoring_FPRs_inline)
16397 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16398 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16399 && ! call_used_regs[info->first_fp_reg_save+i]))
16400 {
16401 rtx addr, mem;
16402 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16403 GEN_INT (info->fp_save_offset
16404 + sp_offset
a4f6c312 16405 + 8 * i));
0be76840 16406 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16407
f676971a 16408 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16409 info->first_fp_reg_save + i),
16410 mem);
16411 }
8d30c4ee 16412
9ebbca7d
GK
16413 /* If we saved cr, restore it here. Just those that were used. */
16414 if (info->cr_save_p)
979721f8 16415 {
9ebbca7d 16416 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16417 int count = 0;
f676971a 16418
d296e02e 16419 if (using_mtcr_multiple)
979721f8 16420 {
9ebbca7d 16421 for (i = 0; i < 8; i++)
6fb5fa3c 16422 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16423 count++;
37409796 16424 gcc_assert (count);
e35b9579
GK
16425 }
16426
d296e02e 16427 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16428 {
16429 rtvec p;
16430 int ndx;
f676971a 16431
e35b9579 16432 p = rtvec_alloc (count);
9ebbca7d 16433
e35b9579 16434 ndx = 0;
9ebbca7d 16435 for (i = 0; i < 8; i++)
6fb5fa3c 16436 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16437 {
16438 rtvec r = rtvec_alloc (2);
16439 RTVEC_ELT (r, 0) = r12_rtx;
16440 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16441 RTVEC_ELT (p, ndx) =
f676971a 16442 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16443 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16444 ndx++;
9ebbca7d
GK
16445 }
16446 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16447 gcc_assert (ndx == count);
979721f8
MM
16448 }
16449 else
9ebbca7d 16450 for (i = 0; i < 8; i++)
6fb5fa3c 16451 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16452 {
f676971a 16453 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16454 CR0_REGNO+i),
16455 r12_rtx));
979721f8 16456 }
979721f8
MM
16457 }
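      /* Editor's note, added for illustration (not part of the original
	 source): with the 1 << (7-i) convention used above, CR field 0
	 maps to mtcrf mask bit 0x80, so restoring only CR2 and CR3 would
	 use the masks 0x20 and 0x10 respectively.  */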
16458
9ebbca7d 16459 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16460 have been done. */
16461 if (frame_reg_rtx != sp_reg_rtx)
16462 {
16463 /* This blockage is needed so that sched doesn't decide to move
16464 the sp change before the register restores. */
16465 rs6000_emit_stack_tie ();
45b194f8
AM
16466 if (sp_offset != 0)
16467 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16468 GEN_INT (sp_offset)));
52ff33d0
NF
16469 else
16470 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16471 }
16472 else if (sp_offset != 0)
16473 emit_insn (TARGET_32BIT
16474 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16475 GEN_INT (sp_offset))
16476 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16477 GEN_INT (sp_offset)));
b6c9286a 16478
83720594
RH
16479 if (current_function_calls_eh_return)
16480 {
16481 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16482 emit_insn (TARGET_32BIT
83720594
RH
16483 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16484 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16485 }
16486
9ebbca7d
GK
16487 if (!sibcall)
16488 {
16489 rtvec p;
16490 if (! restoring_FPRs_inline)
16491 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16492 else
16493 p = rtvec_alloc (2);
b6c9286a 16494
e35b9579 16495 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16496 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16497 gen_rtx_REG (Pmode,
1de43f85 16498 LR_REGNO));
9ebbca7d
GK
16499
16500 /* If we have to restore more than two FP registers, branch to the
16501 restore function. It will return to our caller. */
16502 if (! restoring_FPRs_inline)
16503 {
16504 int i;
16505 char rname[30];
520a57c8 16506 const char *alloc_rname;
979721f8 16507
f676971a 16508 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16509 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16510 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16511 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16512 gen_rtx_SYMBOL_REF (Pmode,
16513 alloc_rname));
b6c9286a 16514
9ebbca7d
GK
16515 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16516 {
16517 rtx addr, mem;
16518 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16519 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16520 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16521
f676971a 16522 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16523 gen_rtx_SET (VOIDmode,
16524 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16525 mem);
b6c9286a
MM
16526 }
16527 }
f676971a 16528
9ebbca7d 16529 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16530 }
9878760c
RK
16531}
16532
16533/* Write function epilogue. */
16534
08c148a8 16535static void
f676971a 16536rs6000_output_function_epilogue (FILE *file,
a2369ed3 16537 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16538{
9ebbca7d 16539 if (! HAVE_epilogue)
9878760c 16540 {
9ebbca7d
GK
16541 rtx insn = get_last_insn ();
16542 /* If the last insn was a BARRIER, we don't have to write anything except
16543 the trace table. */
16544 if (GET_CODE (insn) == NOTE)
16545 insn = prev_nonnote_insn (insn);
16546 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16547 {
9ebbca7d
GK
16548 /* This is slightly ugly, but at least we don't have two
16549 copies of the epilogue-emitting code. */
16550 start_sequence ();
16551
16552 /* A NOTE_INSN_DELETED is supposed to be at the start
16553 and end of the "toplevel" insn chain. */
2e040219 16554 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16555 rs6000_emit_epilogue (FALSE);
2e040219 16556 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16557
a3c9585f 16558 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16559 {
16560 rtx insn;
16561 unsigned addr = 0;
16562 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16563 {
16564 INSN_ADDRESSES_NEW (insn, addr);
16565 addr += 4;
16566 }
16567 }
16568
9ebbca7d 16569 if (TARGET_DEBUG_STACK)
a4f6c312 16570 debug_rtx_list (get_insns (), 100);
c9d691e9 16571 final (get_insns (), file, FALSE);
9ebbca7d 16572 end_sequence ();
4697a36c 16573 }
9878760c 16574 }
b4ac57ab 16575
efdba735
SH
16576#if TARGET_MACHO
16577 macho_branch_islands ();
0e5da0be
GK
16578 /* Mach-O doesn't support labels at the end of objects, so if
16579 it looks like we might want one, insert a NOP. */
16580 {
16581 rtx insn = get_last_insn ();
16582 while (insn
16583 && NOTE_P (insn)
a38e7aa5 16584 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16585 insn = PREV_INSN (insn);
f676971a
EC
16586 if (insn
16587 && (LABEL_P (insn)
0e5da0be 16588 || (NOTE_P (insn)
a38e7aa5 16589 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16590 fputs ("\tnop\n", file);
16591 }
16592#endif
16593
9b30bae2 16594 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16595 on its format.
16596
16597 We don't output a traceback table if -finhibit-size-directive was
16598 used. The documentation for -finhibit-size-directive reads
16599 ``don't output a @code{.size} assembler directive, or anything
16600 else that would cause trouble if the function is split in the
16601 middle, and the two halves are placed at locations far apart in
16602 memory.'' The traceback table has this property, since it
16603 includes the offset from the start of the function to the
4d30c363
MM
16604 traceback table itself.
16605
16606 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16607 different traceback table. */
57ac7be9 16608 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16609 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16610 {
69c75916 16611 const char *fname = NULL;
3ac88239 16612 const char *language_string = lang_hooks.name;
6041bf2f 16613 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16614 int i;
57ac7be9 16615 int optional_tbtab;
8097c268 16616 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16617
16618 if (rs6000_traceback == traceback_full)
16619 optional_tbtab = 1;
16620 else if (rs6000_traceback == traceback_part)
16621 optional_tbtab = 0;
16622 else
16623 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16624
69c75916
AM
16625 if (optional_tbtab)
16626 {
16627 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16628 while (*fname == '.') /* V.4 encodes . in the name */
16629 fname++;
16630
16631 /* Need label immediately before tbtab, so we can compute
16632 its offset from the function start. */
16633 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16634 ASM_OUTPUT_LABEL (file, fname);
16635 }
314fc5a9
ILT
16636
16637 /* The .tbtab pseudo-op can only be used for the first eight
16638 expressions, since it can't handle the possibly variable
16639 length fields that follow. However, if you omit the optional
16640 fields, the assembler outputs zeros for all optional fields
16641	 anyway, giving each variable length field its minimum length
16642	 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16643 pseudo-op at all. */
16644
16645 /* An all-zero word flags the start of the tbtab, for debuggers
16646 that have to find it by searching forward from the entry
16647 point or from the current pc. */
19d2d16f 16648 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16649
16650 /* Tbtab format type. Use format type 0. */
19d2d16f 16651 fputs ("\t.byte 0,", file);
314fc5a9 16652
5fc921c1
DE
16653 /* Language type. Unfortunately, there does not seem to be any
16654 official way to discover the language being compiled, so we
16655 use language_string.
16656 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16657 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16658 a number, so for now use 9. */
5fc921c1 16659 if (! strcmp (language_string, "GNU C"))
314fc5a9 16660 i = 0;
6de9cd9a
DN
16661 else if (! strcmp (language_string, "GNU F77")
16662 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16663 i = 1;
8b83775b 16664 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16665 i = 2;
5fc921c1
DE
16666 else if (! strcmp (language_string, "GNU Ada"))
16667 i = 3;
56438901
AM
16668 else if (! strcmp (language_string, "GNU C++")
16669 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16670 i = 9;
9517ead8
AG
16671 else if (! strcmp (language_string, "GNU Java"))
16672 i = 13;
5fc921c1
DE
16673 else if (! strcmp (language_string, "GNU Objective-C"))
16674 i = 14;
314fc5a9 16675 else
37409796 16676 gcc_unreachable ();
314fc5a9
ILT
16677 fprintf (file, "%d,", i);
16678
16679 /* 8 single bit fields: global linkage (not set for C extern linkage,
16680 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16681 from start of procedure stored in tbtab, internal function, function
16682 has controlled storage, function has no toc, function uses fp,
16683 function logs/aborts fp operations. */
16684 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16685 fprintf (file, "%d,",
16686 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16687
16688 /* 6 bitfields: function is interrupt handler, name present in
16689 proc table, function calls alloca, on condition directives
16690 (controls stack walks, 3 bits), saves condition reg, saves
16691 link reg. */
16692 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16693 set up as a frame pointer, even when there is no alloca call. */
16694 fprintf (file, "%d,",
6041bf2f
DE
16695 ((optional_tbtab << 6)
16696 | ((optional_tbtab & frame_pointer_needed) << 5)
16697 | (info->cr_save_p << 1)
16698 | (info->lr_save_p)));
314fc5a9 16699
6041bf2f 16700 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16701 (6 bits). */
16702 fprintf (file, "%d,",
4697a36c 16703 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16704
16705 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16706 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16707
6041bf2f
DE
16708 if (optional_tbtab)
16709 {
16710 /* Compute the parameter info from the function decl argument
16711 list. */
16712 tree decl;
16713 int next_parm_info_bit = 31;
314fc5a9 16714
6041bf2f
DE
16715 for (decl = DECL_ARGUMENTS (current_function_decl);
16716 decl; decl = TREE_CHAIN (decl))
16717 {
16718 rtx parameter = DECL_INCOMING_RTL (decl);
16719 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16720
6041bf2f
DE
16721 if (GET_CODE (parameter) == REG)
16722 {
ebb109ad 16723 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16724 {
16725 int bits;
16726
16727 float_parms++;
16728
37409796
NS
16729 switch (mode)
16730 {
16731 case SFmode:
16732 bits = 0x2;
16733 break;
16734
16735 case DFmode:
7393f7f8 16736 case DDmode:
37409796 16737 case TFmode:
7393f7f8 16738 case TDmode:
37409796
NS
16739 bits = 0x3;
16740 break;
16741
16742 default:
16743 gcc_unreachable ();
16744 }
6041bf2f
DE
16745
16746 /* If only one bit will fit, don't or in this entry. */
16747 if (next_parm_info_bit > 0)
16748 parm_info |= (bits << (next_parm_info_bit - 1));
16749 next_parm_info_bit -= 2;
16750 }
16751 else
16752 {
16753 fixed_parms += ((GET_MODE_SIZE (mode)
16754 + (UNITS_PER_WORD - 1))
16755 / UNITS_PER_WORD);
16756 next_parm_info_bit -= 1;
16757 }
16758 }
16759 }
16760 }
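	  /* Editor's note, added for illustration (not in the original
	     file): for a function whose register arguments are
	     (int, double, float), the loop above leaves fixed_parms == 1
	     and float_parms == 2, and packs parm_info from bit 31 downward
	     as "0" (fixed), "11" (double), "10" (single float), giving
	     parm_info == 0x70000000.  */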
314fc5a9
ILT
16761
16762 /* Number of fixed point parameters. */
16763 /* This is actually the number of words of fixed point parameters; thus
16764 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16765 fprintf (file, "%d,", fixed_parms);
16766
16767 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16768 all on stack. */
16769 /* This is actually the number of fp registers that hold parameters;
16770 and thus the maximum value is 13. */
16771 /* Set parameters on stack bit if parameters are not in their original
16772 registers, regardless of whether they are on the stack? Xlc
16773 seems to set the bit when not optimizing. */
16774 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16775
6041bf2f
DE
16776 if (! optional_tbtab)
16777 return;
16778
314fc5a9
ILT
16779 /* Optional fields follow. Some are variable length. */
16780
16781 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16782 11 double float. */
16783 /* There is an entry for each parameter in a register, in the order that
16784 they occur in the parameter list. Any intervening arguments on the
16785 stack are ignored. If the list overflows a long (max possible length
16786 34 bits) then completely leave off all elements that don't fit. */
16787 /* Only emit this long if there was at least one parameter. */
16788 if (fixed_parms || float_parms)
16789 fprintf (file, "\t.long %d\n", parm_info);
16790
16791 /* Offset from start of code to tb table. */
19d2d16f 16792 fputs ("\t.long ", file);
314fc5a9 16793 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16794 if (TARGET_AIX)
16795 RS6000_OUTPUT_BASENAME (file, fname);
16796 else
16797 assemble_name (file, fname);
16798 putc ('-', file);
16799 rs6000_output_function_entry (file, fname);
19d2d16f 16800 putc ('\n', file);
314fc5a9
ILT
16801
16802 /* Interrupt handler mask. */
16803 /* Omit this long, since we never set the interrupt handler bit
16804 above. */
16805
16806 /* Number of CTL (controlled storage) anchors. */
16807 /* Omit this long, since the has_ctl bit is never set above. */
16808
16809 /* Displacement into stack of each CTL anchor. */
16810 /* Omit this list of longs, because there are no CTL anchors. */
16811
16812 /* Length of function name. */
69c75916
AM
16813 if (*fname == '*')
16814 ++fname;
296b8152 16815 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16816
16817 /* Function name. */
16818 assemble_string (fname, strlen (fname));
16819
16820 /* Register for alloca automatic storage; this is always reg 31.
16821 Only emit this if the alloca bit was set above. */
16822 if (frame_pointer_needed)
19d2d16f 16823 fputs ("\t.byte 31\n", file);
b1765bde
DE
16824
16825 fputs ("\t.align 2\n", file);
9b30bae2 16826 }
9878760c 16827}
17167fd8 16828\f
a4f6c312
SS
16829/* A C compound statement that outputs the assembler code for a thunk
16830 function, used to implement C++ virtual function calls with
16831 multiple inheritance. The thunk acts as a wrapper around a virtual
16832 function, adjusting the implicit object parameter before handing
16833 control off to the real function.
16834
16835 First, emit code to add the integer DELTA to the location that
16836 contains the incoming first argument. Assume that this argument
16837 contains a pointer, and is the one used to pass the `this' pointer
16838 in C++. This is the incoming argument *before* the function
16839 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16840 values of all other incoming arguments.
17167fd8
MM
16841
16842 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16843 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16844 not touch the return address. Hence returning from FUNCTION will
16845 return to whoever called the current `thunk'.
17167fd8 16846
a4f6c312
SS
16847 The effect must be as if FUNCTION had been called directly with the
16848 adjusted first argument. This macro is responsible for emitting
16849 all of the code for a thunk function; output_function_prologue()
16850 and output_function_epilogue() are not invoked.
17167fd8 16851
a4f6c312
SS
16852 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16853 been extracted from it.) It might possibly be useful on some
16854 targets, but probably not.
17167fd8 16855
a4f6c312
SS
16856 If you do not define this macro, the target-independent code in the
16857 C++ frontend will generate a less efficient heavyweight thunk that
16858 calls FUNCTION instead of jumping to it. The generic approach does
16859 not support varargs. */
17167fd8 16860
3961e8fe 16861static void
f676971a
EC
16862rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16863 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16864 tree function)
17167fd8 16865{
5b71a4e7 16866 rtx this, insn, funexp;
17167fd8 16867
5b71a4e7 16868 reload_completed = 1;
fe3ad572 16869 epilogue_completed = 1;
56a7189a 16870
5b71a4e7 16871 /* Mark the end of the (empty) prologue. */
2e040219 16872 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16873
5b71a4e7
DE
16874 /* Find the "this" pointer. If the function returns a structure,
16875 the structure return pointer is in r3. */
61f71b34 16876 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16877 this = gen_rtx_REG (Pmode, 4);
56a7189a 16878 else
5b71a4e7 16879 this = gen_rtx_REG (Pmode, 3);
17167fd8 16880
5b71a4e7
DE
16881 /* Apply the constant offset, if required. */
16882 if (delta)
16883 {
16884 rtx delta_rtx = GEN_INT (delta);
16885 emit_insn (TARGET_32BIT
16886 ? gen_addsi3 (this, this, delta_rtx)
16887 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16888 }
16889
5b71a4e7
DE
16890 /* Apply the offset from the vtable, if required. */
16891 if (vcall_offset)
17167fd8 16892 {
5b71a4e7
DE
16893 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16894 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16895
5b71a4e7 16896 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16897 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16898 {
16899 emit_insn (TARGET_32BIT
16900 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16901 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16902 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16903 }
16904 else
16905 {
16906 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16907
16908 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16909 }
5b71a4e7
DE
16910 emit_insn (TARGET_32BIT
16911 ? gen_addsi3 (this, this, tmp)
16912 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16913 }
16914
5b71a4e7
DE
16915 /* Generate a tail call to the target function. */
16916 if (!TREE_USED (function))
16917 {
16918 assemble_external (function);
16919 TREE_USED (function) = 1;
16920 }
16921 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16922 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16923
16924#if TARGET_MACHO
ab82a49f 16925 if (MACHOPIC_INDIRECT)
5b71a4e7 16926 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16927#endif
5b71a4e7
DE
16928
16929 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16930 generate sibcall RTL explicitly. */
5b71a4e7
DE
16931 insn = emit_call_insn (
16932 gen_rtx_PARALLEL (VOIDmode,
16933 gen_rtvec (4,
16934 gen_rtx_CALL (VOIDmode,
16935 funexp, const0_rtx),
16936 gen_rtx_USE (VOIDmode, const0_rtx),
16937 gen_rtx_USE (VOIDmode,
16938 gen_rtx_REG (SImode,
1de43f85 16939 LR_REGNO)),
5b71a4e7
DE
16940 gen_rtx_RETURN (VOIDmode))));
16941 SIBLING_CALL_P (insn) = 1;
16942 emit_barrier ();
16943
16944 /* Run just enough of rest_of_compilation to get the insns emitted.
16945 There's not really enough bulk here to make other passes such as
16946 instruction scheduling worth while. Note that use_thunk calls
16947 assemble_start_function and assemble_end_function. */
16948 insn = get_insns ();
55e092c4 16949 insn_locators_alloc ();
5b71a4e7
DE
16950 shorten_branches (insn);
16951 final_start_function (insn, file, 1);
c9d691e9 16952 final (insn, file, 1);
5b71a4e7
DE
16953 final_end_function ();
16954
16955 reload_completed = 0;
fe3ad572 16956 epilogue_completed = 0;
9ebbca7d 16957}
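/* Editor's addition (illustration only, fenced off from compilation): a
   rough C-level sketch of the pointer adjustment the emitted thunk
   performs, assuming an Itanium-style vtable that stores byte offsets.
   The names below are hypothetical; the real thunk above is built
   directly as RTL and ends in a sibling call to FUNCTION.  */
#if 0
static void *
example_thunk_adjust (void *this_ptr, long delta, long vcall_offset)
{
  char *p = (char *) this_ptr + delta;		/* apply constant DELTA */

  if (vcall_offset != 0)
    {
      /* Load the vtable pointer from the adjusted object, then add in
	 the offset stored VCALL_OFFSET bytes into the vtable.  */
      char *vtable = *(char **) p;
      p += *(long *) (vtable + vcall_offset);
    }

  return p;					/* then jump to FUNCTION */
}
#endif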
9ebbca7d
GK
16958\f
16959/* A quick summary of the various types of 'constant-pool tables'
16960 under PowerPC:
16961
f676971a 16962 Target Flags Name One table per
9ebbca7d
GK
16963 AIX (none) AIX TOC object file
16964 AIX -mfull-toc AIX TOC object file
16965 AIX -mminimal-toc AIX minimal TOC translation unit
16966 SVR4/EABI (none) SVR4 SDATA object file
16967 SVR4/EABI -fpic SVR4 pic object file
16968 SVR4/EABI -fPIC SVR4 PIC translation unit
16969 SVR4/EABI -mrelocatable EABI TOC function
16970 SVR4/EABI -maix AIX TOC object file
f676971a 16971 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16972 AIX minimal TOC translation unit
16973
16974 Name Reg. Set by entries contains:
16975 made by addrs? fp? sum?
16976
16977 AIX TOC 2 crt0 as Y option option
16978 AIX minimal TOC 30 prolog gcc Y Y option
16979 SVR4 SDATA 13 crt0 gcc N Y N
16980 SVR4 pic 30 prolog ld Y not yet N
16981 SVR4 PIC 30 prolog gcc Y option option
16982 EABI TOC 30 prolog gcc Y option option
16983
16984*/
16985
9ebbca7d
GK
16986/* Hash functions for the hash table. */
16987
16988static unsigned
a2369ed3 16989rs6000_hash_constant (rtx k)
9ebbca7d 16990{
46b33600
RH
16991 enum rtx_code code = GET_CODE (k);
16992 enum machine_mode mode = GET_MODE (k);
16993 unsigned result = (code << 3) ^ mode;
16994 const char *format;
16995 int flen, fidx;
f676971a 16996
46b33600
RH
16997 format = GET_RTX_FORMAT (code);
16998 flen = strlen (format);
16999 fidx = 0;
9ebbca7d 17000
46b33600
RH
17001 switch (code)
17002 {
17003 case LABEL_REF:
17004 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17005
17006 case CONST_DOUBLE:
17007 if (mode != VOIDmode)
17008 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17009 flen = 2;
17010 break;
17011
17012 case CODE_LABEL:
17013 fidx = 3;
17014 break;
17015
17016 default:
17017 break;
17018 }
9ebbca7d
GK
17019
17020 for (; fidx < flen; fidx++)
17021 switch (format[fidx])
17022 {
17023 case 's':
17024 {
17025 unsigned i, len;
17026 const char *str = XSTR (k, fidx);
17027 len = strlen (str);
17028 result = result * 613 + len;
17029 for (i = 0; i < len; i++)
17030 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17031 break;
17032 }
9ebbca7d
GK
17033 case 'u':
17034 case 'e':
17035 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17036 break;
17037 case 'i':
17038 case 'n':
17039 result = result * 613 + (unsigned) XINT (k, fidx);
17040 break;
17041 case 'w':
17042 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17043 result = result * 613 + (unsigned) XWINT (k, fidx);
17044 else
17045 {
17046 size_t i;
9390387d 17047 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17048 result = result * 613 + (unsigned) (XWINT (k, fidx)
17049 >> CHAR_BIT * i);
17050 }
17051 break;
09501938
DE
17052 case '0':
17053 break;
9ebbca7d 17054 default:
37409796 17055 gcc_unreachable ();
9ebbca7d 17056 }
46b33600 17057
9ebbca7d
GK
17058 return result;
17059}
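/* Editor's addition (illustration only, fenced off from compilation): the
   's' case above folds a string into the hash with the multiplier 613; a
   self-contained sketch of the same fold applied to a plain C string.  */
#if 0
static unsigned
example_string_fold (const char *str)
{
  unsigned result = 0, len = 0;
  const char *p;

  for (p = str; *p; p++)		/* mix in the length first */
    len++;
  result = result * 613 + len;
  for (p = str; *p; p++)		/* then each character */
    result = result * 613 + (unsigned) *p;
  return result;
}
#endif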
17060
17061static unsigned
a2369ed3 17062toc_hash_function (const void *hash_entry)
9ebbca7d 17063{
f676971a 17064 const struct toc_hash_struct *thc =
a9098fd0
GK
17065 (const struct toc_hash_struct *) hash_entry;
17066 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17067}
17068
17069/* Compare H1 and H2 for equivalence. */
17070
17071static int
a2369ed3 17072toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17073{
17074 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17075 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17076
a9098fd0
GK
17077 if (((const struct toc_hash_struct *) h1)->key_mode
17078 != ((const struct toc_hash_struct *) h2)->key_mode)
17079 return 0;
17080
5692c7bc 17081 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17082}
17083
28e510bd
MM
17084/* These are the names given by the C++ front-end to vtables, and
17085 vtable-like objects. Ideally, this logic should not be here;
17086 instead, there should be some programmatic way of inquiring as
17087 to whether or not an object is a vtable. */
17088
17089#define VTABLE_NAME_P(NAME) \
9390387d 17090 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17091 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17092 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17093 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17094 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
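/* Editor's note (added for clarity, not in the original file): these are
   C++ mangling prefixes; "_vt." comes from the old g++ v2 scheme, while
   "_ZTV", "_ZTT", "_ZTI" and "_ZTC" are the Itanium-ABI prefixes for
   vtables, VTTs, typeinfo objects and construction vtables, e.g.
   "_ZTV4Base" names the vtable of class Base.  */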
28e510bd
MM
17095
17096void
a2369ed3 17097rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17098{
17099 /* Currently C++ toc references to vtables can be emitted before it
17100 is decided whether the vtable is public or private. If this is
17101 the case, then the linker will eventually complain that there is
f676971a 17102 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17103 we emit the TOC reference to reference the symbol and not the
17104 section. */
17105 const char *name = XSTR (x, 0);
54ee9799 17106
f676971a 17107 if (VTABLE_NAME_P (name))
54ee9799
DE
17108 {
17109 RS6000_OUTPUT_BASENAME (file, name);
17110 }
17111 else
17112 assemble_name (file, name);
28e510bd
MM
17113}
17114
a4f6c312
SS
17115/* Output a TOC entry. We derive the entry name from what is being
17116 written. */
9878760c
RK
17117
17118void
a2369ed3 17119output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17120{
17121 char buf[256];
3cce094d 17122 const char *name = buf;
ec940faa 17123 const char *real_name;
9878760c 17124 rtx base = x;
16fdeb48 17125 HOST_WIDE_INT offset = 0;
9878760c 17126
37409796 17127 gcc_assert (!TARGET_NO_TOC);
4697a36c 17128
9ebbca7d
GK
17129 /* When the linker won't eliminate them, don't output duplicate
17130 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17131 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17132 CODE_LABELs. */
17133 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17134 {
17135 struct toc_hash_struct *h;
17136 void * * found;
f676971a 17137
17211ab5 17138 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17139 time because GGC is not initialized at that point. */
17211ab5 17140 if (toc_hash_table == NULL)
f676971a 17141 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17142 toc_hash_eq, NULL);
17143
9ebbca7d
GK
17144 h = ggc_alloc (sizeof (*h));
17145 h->key = x;
a9098fd0 17146 h->key_mode = mode;
9ebbca7d 17147 h->labelno = labelno;
f676971a 17148
9ebbca7d
GK
17149 found = htab_find_slot (toc_hash_table, h, 1);
17150 if (*found == NULL)
17151 *found = h;
f676971a 17152 else /* This is indeed a duplicate.
9ebbca7d
GK
17153 Set this label equal to that label. */
17154 {
17155 fputs ("\t.set ", file);
17156 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17157 fprintf (file, "%d,", labelno);
17158 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17159 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17160 found)->labelno));
17161 return;
17162 }
17163 }
17164
17165 /* If we're going to put a double constant in the TOC, make sure it's
17166 aligned properly when strict alignment is on. */
ff1720ed
RK
17167 if (GET_CODE (x) == CONST_DOUBLE
17168 && STRICT_ALIGNMENT
a9098fd0 17169 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17170 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17171 ASM_OUTPUT_ALIGN (file, 3);
17172 }
17173
4977bab6 17174 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17175
37c37a57
RK
17176 /* Handle FP constants specially. Note that if we have a minimal
17177 TOC, things we put here aren't actually in the TOC, so we can allow
17178 FP constants. */
00b79d54
BE
17179 if (GET_CODE (x) == CONST_DOUBLE &&
17180 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17181 {
17182 REAL_VALUE_TYPE rv;
17183 long k[4];
17184
17185 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17186 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17187 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17188 else
17189 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17190
17191 if (TARGET_64BIT)
17192 {
17193 if (TARGET_MINIMAL_TOC)
17194 fputs (DOUBLE_INT_ASM_OP, file);
17195 else
17196 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17197 k[0] & 0xffffffff, k[1] & 0xffffffff,
17198 k[2] & 0xffffffff, k[3] & 0xffffffff);
17199 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17200 k[0] & 0xffffffff, k[1] & 0xffffffff,
17201 k[2] & 0xffffffff, k[3] & 0xffffffff);
17202 return;
17203 }
17204 else
17205 {
17206 if (TARGET_MINIMAL_TOC)
17207 fputs ("\t.long ", file);
17208 else
17209 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17210 k[0] & 0xffffffff, k[1] & 0xffffffff,
17211 k[2] & 0xffffffff, k[3] & 0xffffffff);
17212 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17213 k[0] & 0xffffffff, k[1] & 0xffffffff,
17214 k[2] & 0xffffffff, k[3] & 0xffffffff);
17215 return;
17216 }
17217 }
00b79d54
BE
17218 else if (GET_CODE (x) == CONST_DOUBLE &&
17219 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17220 {
042259f2
DE
17221 REAL_VALUE_TYPE rv;
17222 long k[2];
0adc764e 17223
042259f2 17224 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17225
17226 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17227 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17228 else
17229 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17230
13ded975
DE
17231 if (TARGET_64BIT)
17232 {
17233 if (TARGET_MINIMAL_TOC)
2bfcf297 17234 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17235 else
2f0552b6
AM
17236 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17237 k[0] & 0xffffffff, k[1] & 0xffffffff);
17238 fprintf (file, "0x%lx%08lx\n",
17239 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17240 return;
17241 }
1875cc88 17242 else
13ded975
DE
17243 {
17244 if (TARGET_MINIMAL_TOC)
2bfcf297 17245 fputs ("\t.long ", file);
13ded975 17246 else
2f0552b6
AM
17247 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17248 k[0] & 0xffffffff, k[1] & 0xffffffff);
17249 fprintf (file, "0x%lx,0x%lx\n",
17250 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17251 return;
17252 }
9878760c 17253 }
00b79d54
BE
17254 else if (GET_CODE (x) == CONST_DOUBLE &&
17255 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17256 {
042259f2
DE
17257 REAL_VALUE_TYPE rv;
17258 long l;
9878760c 17259
042259f2 17260 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17261 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17262 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17263 else
17264 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17265
31bfaa0b
DE
17266 if (TARGET_64BIT)
17267 {
17268 if (TARGET_MINIMAL_TOC)
2bfcf297 17269 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17270 else
2f0552b6
AM
17271 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17272 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17273 return;
17274 }
042259f2 17275 else
31bfaa0b
DE
17276 {
17277 if (TARGET_MINIMAL_TOC)
2bfcf297 17278 fputs ("\t.long ", file);
31bfaa0b 17279 else
2f0552b6
AM
17280 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17281 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17282 return;
17283 }
042259f2 17284 }
f176e826 17285 else if (GET_MODE (x) == VOIDmode
a9098fd0 17286 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17287 {
e2c953b6 17288 unsigned HOST_WIDE_INT low;
042259f2
DE
17289 HOST_WIDE_INT high;
17290
17291 if (GET_CODE (x) == CONST_DOUBLE)
17292 {
17293 low = CONST_DOUBLE_LOW (x);
17294 high = CONST_DOUBLE_HIGH (x);
17295 }
17296 else
17297#if HOST_BITS_PER_WIDE_INT == 32
17298 {
17299 low = INTVAL (x);
0858c623 17300 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17301 }
17302#else
17303 {
c4ad648e
AM
17304 low = INTVAL (x) & 0xffffffff;
17305 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17306 }
17307#endif
9878760c 17308
a9098fd0
GK
17309 /* TOC entries are always Pmode-sized, but since this
17310 is a bigendian machine then if we're putting smaller
17311 integer constants in the TOC we have to pad them.
17312 (This is still a win over putting the constants in
17313 a separate constant pool, because then we'd have
02a4ec28
FS
17314 to have both a TOC entry _and_ the actual constant.)
17315
17316 For a 32-bit target, CONST_INT values are loaded and shifted
17317 entirely within `low' and can be stored in one TOC entry. */
17318
37409796
NS
17319 /* It would be easy to make this work, but it doesn't now. */
17320 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17321
17322 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17323 {
17324#if HOST_BITS_PER_WIDE_INT == 32
17325 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17326 POINTER_SIZE, &low, &high, 0);
17327#else
17328 low |= high << 32;
17329 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17330 high = (HOST_WIDE_INT) low >> 32;
17331 low &= 0xffffffff;
17332#endif
17333 }
a9098fd0 17334
13ded975
DE
17335 if (TARGET_64BIT)
17336 {
17337 if (TARGET_MINIMAL_TOC)
2bfcf297 17338 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17339 else
2f0552b6
AM
17340 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17341 (long) high & 0xffffffff, (long) low & 0xffffffff);
17342 fprintf (file, "0x%lx%08lx\n",
17343 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17344 return;
17345 }
1875cc88 17346 else
13ded975 17347 {
02a4ec28
FS
17348 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17349 {
17350 if (TARGET_MINIMAL_TOC)
2bfcf297 17351 fputs ("\t.long ", file);
02a4ec28 17352 else
2bfcf297 17353 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17354 (long) high & 0xffffffff, (long) low & 0xffffffff);
17355 fprintf (file, "0x%lx,0x%lx\n",
17356 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17357 }
13ded975 17358 else
02a4ec28
FS
17359 {
17360 if (TARGET_MINIMAL_TOC)
2bfcf297 17361 fputs ("\t.long ", file);
02a4ec28 17362 else
2f0552b6
AM
17363 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17364 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17365 }
13ded975
DE
17366 return;
17367 }
9878760c
RK
17368 }
17369
17370 if (GET_CODE (x) == CONST)
17371 {
37409796 17372 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17373
9878760c
RK
17374 base = XEXP (XEXP (x, 0), 0);
17375 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17376 }
f676971a 17377
37409796
NS
17378 switch (GET_CODE (base))
17379 {
17380 case SYMBOL_REF:
17381 name = XSTR (base, 0);
17382 break;
17383
17384 case LABEL_REF:
17385 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17386 CODE_LABEL_NUMBER (XEXP (base, 0)));
17387 break;
17388
17389 case CODE_LABEL:
17390 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17391 break;
17392
17393 default:
17394 gcc_unreachable ();
17395 }
9878760c 17396
772c5265 17397 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17398 if (TARGET_MINIMAL_TOC)
2bfcf297 17399 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17400 else
17401 {
b6c9286a 17402 fprintf (file, "\t.tc %s", real_name);
9878760c 17403
1875cc88 17404 if (offset < 0)
16fdeb48 17405 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17406 else if (offset)
16fdeb48 17407 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17408
19d2d16f 17409 fputs ("[TC],", file);
1875cc88 17410 }
581bc4de
MM
17411
17412 /* Currently C++ toc references to vtables can be emitted before it
17413 is decided whether the vtable is public or private. If this is
17414 the case, then the linker will eventually complain that there is
17415 a TOC reference to an unknown section. Thus, for vtables only,
17416 we emit the TOC reference to reference the symbol and not the
17417 section. */
28e510bd 17418 if (VTABLE_NAME_P (name))
581bc4de 17419 {
54ee9799 17420 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17421 if (offset < 0)
16fdeb48 17422 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17423 else if (offset > 0)
16fdeb48 17424 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17425 }
17426 else
17427 output_addr_const (file, x);
19d2d16f 17428 putc ('\n', file);
9878760c
RK
17429}
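/* Editor's note (illustration only, not in the original file): on a
   64-bit target without -mminimal-toc the double constant 1.0 would come
   out roughly as
	.tc FD_3ff00000_0[TC],0x3ff0000000000000
   while a plain symbol "foo" produces something like
	.tc foo[TC],foo
   The exact spelling depends on the mode, the offset and the target
   flags handled above.  */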
17430\f
17431/* Output an assembler pseudo-op to write an ASCII string of N characters
17432 starting at P to FILE.
17433
17434 On the RS/6000, we have to do this using the .byte operation and
17435 write out special characters outside the quoted string.
17436 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17437 so we must artificially break them up early. */
9878760c
RK
17438
17439void
a2369ed3 17440output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17441{
17442 char c;
17443 int i, count_string;
d330fd93
KG
17444 const char *for_string = "\t.byte \"";
17445 const char *for_decimal = "\t.byte ";
17446 const char *to_close = NULL;
9878760c
RK
17447
17448 count_string = 0;
17449 for (i = 0; i < n; i++)
17450 {
17451 c = *p++;
17452 if (c >= ' ' && c < 0177)
17453 {
17454 if (for_string)
17455 fputs (for_string, file);
17456 putc (c, file);
17457
17458 /* Write two quotes to get one. */
17459 if (c == '"')
17460 {
17461 putc (c, file);
17462 ++count_string;
17463 }
17464
17465 for_string = NULL;
17466 for_decimal = "\"\n\t.byte ";
17467 to_close = "\"\n";
17468 ++count_string;
17469
17470 if (count_string >= 512)
17471 {
17472 fputs (to_close, file);
17473
17474 for_string = "\t.byte \"";
17475 for_decimal = "\t.byte ";
17476 to_close = NULL;
17477 count_string = 0;
17478 }
17479 }
17480 else
17481 {
17482 if (for_decimal)
17483 fputs (for_decimal, file);
17484 fprintf (file, "%d", c);
17485
17486 for_string = "\n\t.byte \"";
17487 for_decimal = ", ";
17488 to_close = "\n";
17489 count_string = 0;
17490 }
17491 }
17492
17493 /* Now close the string if we have written one. Then end the line. */
17494 if (to_close)
9ebbca7d 17495 fputs (to_close, file);
9878760c
RK
17496}
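/* Editor's note (illustration only, not in the original file): for the
   three-character input "Hi\n" the loop above emits

	.byte "Hi"
	.byte 10

   i.e. printable runs are quoted and every other byte is written as a
   decimal .byte value.  */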
17497\f
17498/* Generate a unique section name for FILENAME for a section type
17499 represented by SECTION_DESC. Output goes into BUF.
17500
17501 SECTION_DESC can be any string, as long as it is different for each
17502 possible section type.
17503
17504 We name the section in the same manner as xlc. The name begins with an
17505 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17506 names) with the last period replaced by the string SECTION_DESC. If
17507 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17508 the name. */
9878760c
RK
17509
17510void
f676971a 17511rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17512 const char *section_desc)
9878760c 17513{
9ebbca7d 17514 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17515 char *p;
17516 int len;
9878760c
RK
17517
17518 after_last_slash = filename;
17519 for (q = filename; *q; q++)
11e5fe42
RK
17520 {
17521 if (*q == '/')
17522 after_last_slash = q + 1;
17523 else if (*q == '.')
17524 last_period = q;
17525 }
9878760c 17526
11e5fe42 17527 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17528 *buf = (char *) xmalloc (len);
9878760c
RK
17529
17530 p = *buf;
17531 *p++ = '_';
17532
17533 for (q = after_last_slash; *q; q++)
17534 {
11e5fe42 17535 if (q == last_period)
c4ad648e 17536 {
9878760c
RK
17537 strcpy (p, section_desc);
17538 p += strlen (section_desc);
e3981aab 17539 break;
c4ad648e 17540 }
9878760c 17541
e9a780ec 17542 else if (ISALNUM (*q))
c4ad648e 17543 *p++ = *q;
9878760c
RK
17544 }
17545
11e5fe42 17546 if (last_period == 0)
9878760c
RK
17547 strcpy (p, section_desc);
17548 else
17549 *p = '\0';
17550}
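/* Editor's addition (illustrative usage, fenced off from compilation):
   the call below is hypothetical and only shows the shape of the result.  */
#if 0
  {
    char *name;

    rs6000_gen_section_name (&name, "src/foo.c", "bss");
    /* name now holds "_foobss": the directory part is stripped, the
       alphanumeric characters are kept, and the final period is replaced
       by the descriptor; a filename with no period simply gets the
       descriptor appended.  */
    free (name);
  }
#endif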
e165f3f0 17551\f
a4f6c312 17552/* Emit profile function. */
411707f4 17553
411707f4 17554void
a2369ed3 17555output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17556{
858081ad
AH
17557 /* Non-standard profiling for kernels, which just saves LR then calls
17558 _mcount without worrying about arg saves. The idea is to change
17559 the function prologue as little as possible as it isn't easy to
17560 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17561 if (TARGET_PROFILE_KERNEL)
17562 return;
17563
8480e480
CC
17564 if (DEFAULT_ABI == ABI_AIX)
17565 {
9739c90c
JJ
17566#ifndef NO_PROFILE_COUNTERS
17567# define NO_PROFILE_COUNTERS 0
17568#endif
f676971a 17569 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17570 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17571 else
17572 {
17573 char buf[30];
17574 const char *label_name;
17575 rtx fun;
411707f4 17576
9739c90c
JJ
17577 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17578 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17579 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17580
9739c90c
JJ
17581 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17582 fun, Pmode);
17583 }
8480e480 17584 }
ee890fe2
SS
17585 else if (DEFAULT_ABI == ABI_DARWIN)
17586 {
d5fa86ba 17587 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17588 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17589
17590 /* Be conservative and always set this, at least for now. */
17591 current_function_uses_pic_offset_table = 1;
17592
17593#if TARGET_MACHO
17594 /* For PIC code, set up a stub and collect the caller's address
17595 from r0, which is where the prologue puts it. */
11abc112
MM
17596 if (MACHOPIC_INDIRECT
17597 && current_function_uses_pic_offset_table)
17598 caller_addr_regno = 0;
ee890fe2
SS
17599#endif
17600 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17601 0, VOIDmode, 1,
17602 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17603 }
411707f4
CC
17604}
17605
a4f6c312 17606/* Write function profiler code. */
e165f3f0
RK
17607
17608void
a2369ed3 17609output_function_profiler (FILE *file, int labelno)
e165f3f0 17610{
3daf36a4 17611 char buf[100];
e165f3f0 17612
38c1f2d7 17613 switch (DEFAULT_ABI)
3daf36a4 17614 {
38c1f2d7 17615 default:
37409796 17616 gcc_unreachable ();
38c1f2d7
MM
17617
17618 case ABI_V4:
09eeeacb
AM
17619 if (!TARGET_32BIT)
17620 {
d4ee4d25 17621 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17622 return;
17623 }
ffcfcb5f 17624 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17625 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17626 if (NO_PROFILE_COUNTERS)
17627 {
17628 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17629 reg_names[0], reg_names[1]);
17630 }
17631 else if (TARGET_SECURE_PLT && flag_pic)
17632 {
17633 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17634 reg_names[0], reg_names[1]);
17635 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17636 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17637 reg_names[12], reg_names[12]);
17638 assemble_name (file, buf);
17639 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17640 assemble_name (file, buf);
17641 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17642 }
17643 else if (flag_pic == 1)
38c1f2d7 17644 {
dfdfa60f 17645 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17646 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17647 reg_names[0], reg_names[1]);
17167fd8 17648 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17649 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17650 assemble_name (file, buf);
17167fd8 17651 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17652 }
9ebbca7d 17653 else if (flag_pic > 1)
38c1f2d7 17654 {
71625f3d
AM
17655 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17656 reg_names[0], reg_names[1]);
9ebbca7d 17657 /* Now, we need to get the address of the label. */
71625f3d 17658 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17659 assemble_name (file, buf);
9ebbca7d
GK
17660 fputs ("-.\n1:", file);
17661 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17662 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17663 reg_names[0], reg_names[11]);
17664 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17665 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17666 }
38c1f2d7
MM
17667 else
17668 {
17167fd8 17669 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17670 assemble_name (file, buf);
dfdfa60f 17671 fputs ("@ha\n", file);
71625f3d
AM
17672 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17673 reg_names[0], reg_names[1]);
a260abc9 17674 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17675 assemble_name (file, buf);
17167fd8 17676 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17677 }
17678
50d440bc 17679 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17680 fprintf (file, "\tbl %s%s\n",
17681 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17682 break;
17683
17684 case ABI_AIX:
ee890fe2 17685 case ABI_DARWIN:
ffcfcb5f
AM
17686 if (!TARGET_PROFILE_KERNEL)
17687 {
a3c9585f 17688 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17689 }
17690 else
17691 {
37409796 17692 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17693
17694 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17695 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17696
6de9cd9a 17697 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17698 {
17699 asm_fprintf (file, "\tstd %s,24(%s)\n",
17700 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17701 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17702 asm_fprintf (file, "\tld %s,24(%s)\n",
17703 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17704 }
17705 else
17706 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17707 }
38c1f2d7
MM
17708 break;
17709 }
e165f3f0 17710}
a251ffd0 17711
b54cf83a 17712\f
44cd321e
PS
17713
17714/* The following variable value is the last issued insn. */
17715
17716static rtx last_scheduled_insn;
17717
17718/* The following variable helps to balance issuing of load and
17719 store instructions */
17720
17721static int load_store_pendulum;
17722
b54cf83a
DE
17723/* Power4 load update and store update instructions are cracked into a
17724 load or store and an integer insn which are executed in the same cycle.
17725 Branches have their own dispatch slot which does not count against the
17726 GCC issue rate, but it changes the program flow so there are no other
17727 instructions to issue in this cycle. */
17728
17729static int
f676971a
EC
17730rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17731 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17732 rtx insn, int more)
b54cf83a 17733{
44cd321e 17734 last_scheduled_insn = insn;
b54cf83a
DE
17735 if (GET_CODE (PATTERN (insn)) == USE
17736 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17737 {
17738 cached_can_issue_more = more;
17739 return cached_can_issue_more;
17740 }
17741
17742 if (insn_terminates_group_p (insn, current_group))
17743 {
17744 cached_can_issue_more = 0;
17745 return cached_can_issue_more;
17746 }
b54cf83a 17747
d296e02e
AP
17748 /* If no reservation, but reach here */
17749 if (recog_memoized (insn) < 0)
17750 return more;
17751
ec507f2d 17752 if (rs6000_sched_groups)
b54cf83a 17753 {
cbe26ab8 17754 if (is_microcoded_insn (insn))
44cd321e 17755 cached_can_issue_more = 0;
cbe26ab8 17756 else if (is_cracked_insn (insn))
44cd321e
PS
17757 cached_can_issue_more = more > 2 ? more - 2 : 0;
17758 else
17759 cached_can_issue_more = more - 1;
17760
17761 return cached_can_issue_more;
b54cf83a 17762 }
165b263e 17763
d296e02e
AP
17764 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17765 return 0;
17766
44cd321e
PS
17767 cached_can_issue_more = more - 1;
17768 return cached_can_issue_more;
b54cf83a
DE
17769}
17770
a251ffd0
TG
17771/* Adjust the cost of a scheduling dependency. Return the new cost of
17772 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17773
c237e94a 17774static int
0a4f0294 17775rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17776{
44cd321e 17777 enum attr_type attr_type;
a251ffd0 17778
44cd321e 17779 if (! recog_memoized (insn))
a251ffd0
TG
17780 return 0;
17781
44cd321e 17782 switch (REG_NOTE_KIND (link))
a251ffd0 17783 {
44cd321e
PS
17784 case REG_DEP_TRUE:
17785 {
17786 /* Data dependency; DEP_INSN writes a register that INSN reads
17787 some cycles later. */
17788
17789 /* Separate a load from a narrower, dependent store. */
17790 if (rs6000_sched_groups
17791 && GET_CODE (PATTERN (insn)) == SET
17792 && GET_CODE (PATTERN (dep_insn)) == SET
17793 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17794 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17795 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17796 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17797 return cost + 14;
17798
17799 attr_type = get_attr_type (insn);
17800
17801 switch (attr_type)
17802 {
17803 case TYPE_JMPREG:
17804 /* Tell the first scheduling pass about the latency between
17805 a mtctr and bctr (and mtlr and br/blr). The first
17806 scheduling pass will not know about this latency since
17807 the mtctr instruction, which has the latency associated
17808 to it, will be generated by reload. */
17809 return TARGET_POWER ? 5 : 4;
17810 case TYPE_BRANCH:
17811 /* Leave some extra cycles between a compare and its
17812 dependent branch, to inhibit expensive mispredicts. */
17813 if ((rs6000_cpu_attr == CPU_PPC603
17814 || rs6000_cpu_attr == CPU_PPC604
17815 || rs6000_cpu_attr == CPU_PPC604E
17816 || rs6000_cpu_attr == CPU_PPC620
17817 || rs6000_cpu_attr == CPU_PPC630
17818 || rs6000_cpu_attr == CPU_PPC750
17819 || rs6000_cpu_attr == CPU_PPC7400
17820 || rs6000_cpu_attr == CPU_PPC7450
17821 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17822 || rs6000_cpu_attr == CPU_POWER5
17823 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17824 && recog_memoized (dep_insn)
17825 && (INSN_CODE (dep_insn) >= 0))
982afe02 17826
44cd321e
PS
17827 switch (get_attr_type (dep_insn))
17828 {
17829 case TYPE_CMP:
17830 case TYPE_COMPARE:
17831 case TYPE_DELAYED_COMPARE:
17832 case TYPE_IMUL_COMPARE:
17833 case TYPE_LMUL_COMPARE:
17834 case TYPE_FPCOMPARE:
17835 case TYPE_CR_LOGICAL:
17836 case TYPE_DELAYED_CR:
17837 return cost + 2;
17838 default:
17839 break;
17840 }
17841 break;
17842
17843 case TYPE_STORE:
17844 case TYPE_STORE_U:
17845 case TYPE_STORE_UX:
17846 case TYPE_FPSTORE:
17847 case TYPE_FPSTORE_U:
17848 case TYPE_FPSTORE_UX:
17849 if ((rs6000_cpu == PROCESSOR_POWER6)
17850 && recog_memoized (dep_insn)
17851 && (INSN_CODE (dep_insn) >= 0))
17852 {
17853
17854 if (GET_CODE (PATTERN (insn)) != SET)
17855 /* If this happens, we have to extend this to schedule
17856 optimally. Return default for now. */
17857 return cost;
17858
17859 /* Adjust the cost for the case where the value written
17860 by a fixed point operation is used as the address
17861 gen value on a store. */
17862 switch (get_attr_type (dep_insn))
17863 {
17864 case TYPE_LOAD:
17865 case TYPE_LOAD_U:
17866 case TYPE_LOAD_UX:
17867 case TYPE_CNTLZ:
17868 {
17869 if (! store_data_bypass_p (dep_insn, insn))
17870 return 4;
17871 break;
17872 }
17873 case TYPE_LOAD_EXT:
17874 case TYPE_LOAD_EXT_U:
17875 case TYPE_LOAD_EXT_UX:
17876 case TYPE_VAR_SHIFT_ROTATE:
17877 case TYPE_VAR_DELAYED_COMPARE:
17878 {
17879 if (! store_data_bypass_p (dep_insn, insn))
17880 return 6;
17881 break;
17882 }
17883 case TYPE_INTEGER:
17884 case TYPE_COMPARE:
17885 case TYPE_FAST_COMPARE:
17886 case TYPE_EXTS:
17887 case TYPE_SHIFT:
17888 case TYPE_INSERT_WORD:
17889 case TYPE_INSERT_DWORD:
17890 case TYPE_FPLOAD_U:
17891 case TYPE_FPLOAD_UX:
17892 case TYPE_STORE_U:
17893 case TYPE_STORE_UX:
17894 case TYPE_FPSTORE_U:
17895 case TYPE_FPSTORE_UX:
17896 {
17897 if (! store_data_bypass_p (dep_insn, insn))
17898 return 3;
17899 break;
17900 }
17901 case TYPE_IMUL:
17902 case TYPE_IMUL2:
17903 case TYPE_IMUL3:
17904 case TYPE_LMUL:
17905 case TYPE_IMUL_COMPARE:
17906 case TYPE_LMUL_COMPARE:
17907 {
17908 if (! store_data_bypass_p (dep_insn, insn))
17909 return 17;
17910 break;
17911 }
17912 case TYPE_IDIV:
17913 {
17914 if (! store_data_bypass_p (dep_insn, insn))
17915 return 45;
17916 break;
17917 }
17918 case TYPE_LDIV:
17919 {
17920 if (! store_data_bypass_p (dep_insn, insn))
17921 return 57;
17922 break;
17923 }
17924 default:
17925 break;
17926 }
17927 }
17928 break;
17929
17930 case TYPE_LOAD:
17931 case TYPE_LOAD_U:
17932 case TYPE_LOAD_UX:
17933 case TYPE_LOAD_EXT:
17934 case TYPE_LOAD_EXT_U:
17935 case TYPE_LOAD_EXT_UX:
17936 if ((rs6000_cpu == PROCESSOR_POWER6)
17937 && recog_memoized (dep_insn)
17938 && (INSN_CODE (dep_insn) >= 0))
17939 {
17940
17941 /* Adjust the cost for the case where the value written
17942 by a fixed point instruction is used within the address
17943 gen portion of a subsequent load(u)(x) */
17944 switch (get_attr_type (dep_insn))
17945 {
17946 case TYPE_LOAD:
17947 case TYPE_LOAD_U:
17948 case TYPE_LOAD_UX:
17949 case TYPE_CNTLZ:
17950 {
17951 if (set_to_load_agen (dep_insn, insn))
17952 return 4;
17953 break;
17954 }
17955 case TYPE_LOAD_EXT:
17956 case TYPE_LOAD_EXT_U:
17957 case TYPE_LOAD_EXT_UX:
17958 case TYPE_VAR_SHIFT_ROTATE:
17959 case TYPE_VAR_DELAYED_COMPARE:
17960 {
17961 if (set_to_load_agen (dep_insn, insn))
17962 return 6;
17963 break;
17964 }
17965 case TYPE_INTEGER:
17966 case TYPE_COMPARE:
17967 case TYPE_FAST_COMPARE:
17968 case TYPE_EXTS:
17969 case TYPE_SHIFT:
17970 case TYPE_INSERT_WORD:
17971 case TYPE_INSERT_DWORD:
17972 case TYPE_FPLOAD_U:
17973 case TYPE_FPLOAD_UX:
17974 case TYPE_STORE_U:
17975 case TYPE_STORE_UX:
17976 case TYPE_FPSTORE_U:
17977 case TYPE_FPSTORE_UX:
17978 {
17979 if (set_to_load_agen (dep_insn, insn))
17980 return 3;
17981 break;
17982 }
17983 case TYPE_IMUL:
17984 case TYPE_IMUL2:
17985 case TYPE_IMUL3:
17986 case TYPE_LMUL:
17987 case TYPE_IMUL_COMPARE:
17988 case TYPE_LMUL_COMPARE:
17989 {
17990 if (set_to_load_agen (dep_insn, insn))
17991 return 17;
17992 break;
17993 }
17994 case TYPE_IDIV:
17995 {
17996 if (set_to_load_agen (dep_insn, insn))
17997 return 45;
17998 break;
17999 }
18000 case TYPE_LDIV:
18001 {
18002 if (set_to_load_agen (dep_insn, insn))
18003 return 57;
18004 break;
18005 }
18006 default:
18007 break;
18008 }
18009 }
18010 break;
18011
18012 case TYPE_FPLOAD:
18013 if ((rs6000_cpu == PROCESSOR_POWER6)
18014 && recog_memoized (dep_insn)
18015 && (INSN_CODE (dep_insn) >= 0)
18016 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18017 return 2;
18018
18019 default:
18020 break;
18021 }
c9dbf840 18022
a251ffd0 18023 /* Fall out to return default cost. */
44cd321e
PS
18024 }
18025 break;
18026
18027 case REG_DEP_OUTPUT:
18028 /* Output dependency; DEP_INSN writes a register that INSN writes some
18029 cycles later. */
18030 if ((rs6000_cpu == PROCESSOR_POWER6)
18031 && recog_memoized (dep_insn)
18032 && (INSN_CODE (dep_insn) >= 0))
18033 {
18034 attr_type = get_attr_type (insn);
18035
18036 switch (attr_type)
18037 {
18038 case TYPE_FP:
18039 if (get_attr_type (dep_insn) == TYPE_FP)
18040 return 1;
18041 break;
18042 case TYPE_FPLOAD:
18043 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18044 return 2;
18045 break;
18046 default:
18047 break;
18048 }
18049 }
18050 case REG_DEP_ANTI:
18051 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18052 cycles later. */
18053 return 0;
18054
18055 default:
18056 gcc_unreachable ();
a251ffd0
TG
18057 }
18058
18059 return cost;
18060}
b6c9286a 18061
cbe26ab8 18062/* The function returns true if INSN is microcoded.
839a4992 18063 Return false otherwise. */
cbe26ab8
DN
18064
18065static bool
18066is_microcoded_insn (rtx insn)
18067{
18068 if (!insn || !INSN_P (insn)
18069 || GET_CODE (PATTERN (insn)) == USE
18070 || GET_CODE (PATTERN (insn)) == CLOBBER)
18071 return false;
18072
d296e02e
AP
18073 if (rs6000_cpu_attr == CPU_CELL)
18074 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18075
ec507f2d 18076 if (rs6000_sched_groups)
cbe26ab8
DN
18077 {
18078 enum attr_type type = get_attr_type (insn);
18079 if (type == TYPE_LOAD_EXT_U
18080 || type == TYPE_LOAD_EXT_UX
18081 || type == TYPE_LOAD_UX
18082 || type == TYPE_STORE_UX
18083 || type == TYPE_MFCR)
c4ad648e 18084 return true;
cbe26ab8
DN
18085 }
18086
18087 return false;
18088}
18089
cbe26ab8
DN
18090/* The function returns true if INSN is cracked into 2 instructions
18091 by the processor (and therefore occupies 2 issue slots). */
18092
18093static bool
18094is_cracked_insn (rtx insn)
18095{
18096 if (!insn || !INSN_P (insn)
18097 || GET_CODE (PATTERN (insn)) == USE
18098 || GET_CODE (PATTERN (insn)) == CLOBBER)
18099 return false;
18100
ec507f2d 18101 if (rs6000_sched_groups)
cbe26ab8
DN
18102 {
18103 enum attr_type type = get_attr_type (insn);
18104 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18105 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18106 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18107 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18108 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18109 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18110 || type == TYPE_IDIV || type == TYPE_LDIV
18111 || type == TYPE_INSERT_WORD)
18112 return true;
cbe26ab8
DN
18113 }
18114
18115 return false;
18116}
18117
18118/* The function returns true if INSN can be issued only from
a3c9585f 18119 the branch slot. */
cbe26ab8
DN
18120
18121static bool
18122is_branch_slot_insn (rtx insn)
18123{
18124 if (!insn || !INSN_P (insn)
18125 || GET_CODE (PATTERN (insn)) == USE
18126 || GET_CODE (PATTERN (insn)) == CLOBBER)
18127 return false;
18128
ec507f2d 18129 if (rs6000_sched_groups)
cbe26ab8
DN
18130 {
18131 enum attr_type type = get_attr_type (insn);
18132 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18133 return true;
cbe26ab8
DN
18134 return false;
18135 }
18136
18137 return false;
18138}
79ae11c4 18139
44cd321e
PS
 18140/* The function returns true if out_insn sets a value that is
 18141 used in the address generation computation of in_insn. */
18142static bool
18143set_to_load_agen (rtx out_insn, rtx in_insn)
18144{
18145 rtx out_set, in_set;
18146
18147 /* For performance reasons, only handle the simple case where
18148 both loads are a single_set. */
18149 out_set = single_set (out_insn);
18150 if (out_set)
18151 {
18152 in_set = single_set (in_insn);
18153 if (in_set)
18154 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18155 }
18156
18157 return false;
18158}
18159
 18160/* Return true if the target storage location of insn1 is adjacent
 18161 to the target storage location of insn2. */
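/* For example, two DImode stores whose addresses are (plus r1 16) and
   (plus r1 24) share a base register and their offsets differ by the
   access size (8 bytes), so they count as adjacent.  */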
18163
18164static bool
18165adjacent_mem_locations (rtx insn1, rtx insn2)
18166{
18167
e3a0e200
PB
18168 rtx a = get_store_dest (PATTERN (insn1));
18169 rtx b = get_store_dest (PATTERN (insn2));
18170
44cd321e
PS
18171 if ((GET_CODE (XEXP (a, 0)) == REG
18172 || (GET_CODE (XEXP (a, 0)) == PLUS
18173 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18174 && (GET_CODE (XEXP (b, 0)) == REG
18175 || (GET_CODE (XEXP (b, 0)) == PLUS
18176 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18177 {
f98e8938 18178 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18179 rtx reg0, reg1;
44cd321e
PS
18180
18181 if (GET_CODE (XEXP (a, 0)) == PLUS)
18182 {
18183 reg0 = XEXP (XEXP (a, 0), 0);
18184 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18185 }
18186 else
18187 reg0 = XEXP (a, 0);
18188
18189 if (GET_CODE (XEXP (b, 0)) == PLUS)
18190 {
18191 reg1 = XEXP (XEXP (b, 0), 0);
18192 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18193 }
18194 else
18195 reg1 = XEXP (b, 0);
18196
18197 val_diff = val1 - val0;
18198
18199 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18200 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18201 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18202 }
18203
18204 return false;
18205}
18206
a4f6c312 18207/* Update the integer scheduling priority INSN_PRIORITY (INSN).
79ae11c4
DN
 18208 Increase the priority to execute INSN earlier, reduce the priority
 18209 to execute INSN later. This function implements the
a4f6c312
SS
 18210 TARGET_SCHED_ADJUST_PRIORITY hook and returns the (possibly
 18211 adjusted) priority. */
bef84347 18212
c237e94a 18213static int
a2369ed3 18214rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18215{
a4f6c312
SS
18216 /* On machines (like the 750) which have asymmetric integer units,
18217 where one integer unit can do multiply and divides and the other
18218 can't, reduce the priority of multiply/divide so it is scheduled
18219 before other integer operations. */
bef84347
VM
18220
18221#if 0
2c3c49de 18222 if (! INSN_P (insn))
bef84347
VM
18223 return priority;
18224
18225 if (GET_CODE (PATTERN (insn)) == USE)
18226 return priority;
18227
18228 switch (rs6000_cpu_attr) {
18229 case CPU_PPC750:
18230 switch (get_attr_type (insn))
18231 {
18232 default:
18233 break;
18234
18235 case TYPE_IMUL:
18236 case TYPE_IDIV:
3cb999d8
DE
18237 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18238 priority, priority);
bef84347
VM
18239 if (priority >= 0 && priority < 0x01000000)
18240 priority >>= 3;
18241 break;
18242 }
18243 }
18244#endif
18245
44cd321e 18246 if (insn_must_be_first_in_group (insn)
79ae11c4 18247 && reload_completed
f676971a 18248 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18249 && rs6000_sched_restricted_insns_priority)
18250 {
18251
c4ad648e
AM
18252 /* Prioritize insns that can be dispatched only in the first
18253 dispatch slot. */
79ae11c4 18254 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18255 /* Attach highest priority to insn. This means that in
18256 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18257 precede 'priority' (critical path) considerations. */
f676971a 18258 return current_sched_info->sched_max_insns_priority;
79ae11c4 18259 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18260 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18261 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18262 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18263 return (priority + 1);
18264 }
79ae11c4 18265
44cd321e
PS
18266 if (rs6000_cpu == PROCESSOR_POWER6
18267 && ((load_store_pendulum == -2 && is_load_insn (insn))
18268 || (load_store_pendulum == 2 && is_store_insn (insn))))
18269 /* Attach highest priority to insn if the scheduler has just issued two
18270 stores and this instruction is a load, or two loads and this instruction
18271 is a store. Power6 wants loads and stores scheduled alternately
18272 when possible */
18273 return current_sched_info->sched_max_insns_priority;
18274
bef84347
VM
18275 return priority;
18276}
18277
d296e02e
AP
18278/* Return true if the instruction is nonpipelined on the Cell. */
18279static bool
18280is_nonpipeline_insn (rtx insn)
18281{
18282 enum attr_type type;
18283 if (!insn || !INSN_P (insn)
18284 || GET_CODE (PATTERN (insn)) == USE
18285 || GET_CODE (PATTERN (insn)) == CLOBBER)
18286 return false;
18287
18288 type = get_attr_type (insn);
18289 if (type == TYPE_IMUL
18290 || type == TYPE_IMUL2
18291 || type == TYPE_IMUL3
18292 || type == TYPE_LMUL
18293 || type == TYPE_IDIV
18294 || type == TYPE_LDIV
18295 || type == TYPE_SDIV
18296 || type == TYPE_DDIV
18297 || type == TYPE_SSQRT
18298 || type == TYPE_DSQRT
18299 || type == TYPE_MFCR
18300 || type == TYPE_MFCRF
18301 || type == TYPE_MFJMPR)
18302 {
18303 return true;
18304 }
18305 return false;
18306}
18307
18308
a4f6c312
SS
18309/* Return how many instructions the machine can issue per cycle. */
18310
c237e94a 18311static int
863d938c 18312rs6000_issue_rate (void)
b6c9286a 18313{
3317bab1
DE
18314 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18315 if (!reload_completed)
18316 return 1;
18317
b6c9286a 18318 switch (rs6000_cpu_attr) {
3cb999d8
DE
18319 case CPU_RIOS1: /* ? */
18320 case CPU_RS64A:
18321 case CPU_PPC601: /* ? */
ed947a96 18322 case CPU_PPC7450:
3cb999d8 18323 return 3;
b54cf83a 18324 case CPU_PPC440:
b6c9286a 18325 case CPU_PPC603:
bef84347 18326 case CPU_PPC750:
ed947a96 18327 case CPU_PPC7400:
be12c2b0 18328 case CPU_PPC8540:
d296e02e 18329 case CPU_CELL:
f676971a 18330 return 2;
3cb999d8 18331 case CPU_RIOS2:
b6c9286a 18332 case CPU_PPC604:
19684119 18333 case CPU_PPC604E:
b6c9286a 18334 case CPU_PPC620:
3cb999d8 18335 case CPU_PPC630:
b6c9286a 18336 return 4;
cbe26ab8 18337 case CPU_POWER4:
ec507f2d 18338 case CPU_POWER5:
44cd321e 18339 case CPU_POWER6:
cbe26ab8 18340 return 5;
b6c9286a
MM
18341 default:
18342 return 1;
18343 }
18344}
18345
be12c2b0
VM
18346/* Return how many instructions to look ahead for better insn
18347 scheduling. */
18348
18349static int
863d938c 18350rs6000_use_sched_lookahead (void)
be12c2b0
VM
18351{
18352 if (rs6000_cpu_attr == CPU_PPC8540)
18353 return 4;
d296e02e
AP
18354 if (rs6000_cpu_attr == CPU_CELL)
18355 return (reload_completed ? 8 : 0);
be12c2b0
VM
18356 return 0;
18357}
18358
d296e02e
AP
18359/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
18360static int
18361rs6000_use_sched_lookahead_guard (rtx insn)
18362{
18363 if (rs6000_cpu_attr != CPU_CELL)
18364 return 1;
18365
18366 if (insn == NULL_RTX || !INSN_P (insn))
18367 abort ();
982afe02 18368
d296e02e
AP
18369 if (!reload_completed
18370 || is_nonpipeline_insn (insn)
18371 || is_microcoded_insn (insn))
18372 return 0;
18373
18374 return 1;
18375}
18376
569fa502
DN
 18377/* Determine if PAT refers to memory. */
18378
18379static bool
18380is_mem_ref (rtx pat)
18381{
18382 const char * fmt;
18383 int i, j;
18384 bool ret = false;
18385
18386 if (GET_CODE (pat) == MEM)
18387 return true;
18388
18389 /* Recursively process the pattern. */
18390 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18391
18392 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18393 {
18394 if (fmt[i] == 'e')
18395 ret |= is_mem_ref (XEXP (pat, i));
18396 else if (fmt[i] == 'E')
18397 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18398 ret |= is_mem_ref (XVECEXP (pat, i, j));
18399 }
18400
18401 return ret;
18402}
18403
18404/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18405
569fa502
DN
18406static bool
18407is_load_insn1 (rtx pat)
18408{
18409 if (!pat || pat == NULL_RTX)
18410 return false;
18411
18412 if (GET_CODE (pat) == SET)
18413 return is_mem_ref (SET_SRC (pat));
18414
18415 if (GET_CODE (pat) == PARALLEL)
18416 {
18417 int i;
18418
18419 for (i = 0; i < XVECLEN (pat, 0); i++)
18420 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18421 return true;
18422 }
18423
18424 return false;
18425}
18426
18427/* Determine if INSN loads from memory. */
18428
18429static bool
18430is_load_insn (rtx insn)
18431{
18432 if (!insn || !INSN_P (insn))
18433 return false;
18434
18435 if (GET_CODE (insn) == CALL_INSN)
18436 return false;
18437
18438 return is_load_insn1 (PATTERN (insn));
18439}
18440
18441/* Determine if PAT is a PATTERN of a store insn. */
18442
18443static bool
18444is_store_insn1 (rtx pat)
18445{
18446 if (!pat || pat == NULL_RTX)
18447 return false;
18448
18449 if (GET_CODE (pat) == SET)
18450 return is_mem_ref (SET_DEST (pat));
18451
18452 if (GET_CODE (pat) == PARALLEL)
18453 {
18454 int i;
18455
18456 for (i = 0; i < XVECLEN (pat, 0); i++)
18457 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18458 return true;
18459 }
18460
18461 return false;
18462}
18463
18464/* Determine if INSN stores to memory. */
18465
18466static bool
18467is_store_insn (rtx insn)
18468{
18469 if (!insn || !INSN_P (insn))
18470 return false;
18471
18472 return is_store_insn1 (PATTERN (insn));
18473}
18474
e3a0e200
PB
18475/* Return the dest of a store insn. */
18476
18477static rtx
18478get_store_dest (rtx pat)
18479{
18480 gcc_assert (is_store_insn1 (pat));
18481
18482 if (GET_CODE (pat) == SET)
18483 return SET_DEST (pat);
18484 else if (GET_CODE (pat) == PARALLEL)
18485 {
18486 int i;
18487
18488 for (i = 0; i < XVECLEN (pat, 0); i++)
18489 {
18490 rtx inner_pat = XVECEXP (pat, 0, i);
18491 if (GET_CODE (inner_pat) == SET
18492 && is_mem_ref (SET_DEST (inner_pat)))
18493 return inner_pat;
18494 }
18495 }
18496 /* We shouldn't get here, because we should have either a simple
18497 store insn or a store with update which are covered above. */
18498 gcc_unreachable();
18499}
18500
569fa502
DN
18501/* Returns whether the dependence between INSN and NEXT is considered
18502 costly by the given target. */
18503
18504static bool
b198261f 18505rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18506{
b198261f
MK
18507 rtx insn;
18508 rtx next;
18509
aabcd309 18510 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18511 allow all dependent insns in the same group.
569fa502
DN
18512 This is the most aggressive option. */
18513 if (rs6000_sched_costly_dep == no_dep_costly)
18514 return false;
18515
f676971a 18516 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18517 do not allow dependent instructions in the same group.
18518 This is the most conservative option. */
18519 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18520 return true;
569fa502 18521
b198261f
MK
18522 insn = DEP_PRO (dep);
18523 next = DEP_CON (dep);
18524
f676971a
EC
18525 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18526 && is_load_insn (next)
569fa502
DN
18527 && is_store_insn (insn))
18528 /* Prevent load after store in the same group. */
18529 return true;
18530
18531 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18532 && is_load_insn (next)
569fa502 18533 && is_store_insn (insn)
e2f6ff94 18534 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18535 /* Prevent load after store in the same group if it is a true
18536 dependence. */
569fa502 18537 return true;
f676971a
EC
18538
18539 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18540 and will not be scheduled in the same group. */
18541 if (rs6000_sched_costly_dep <= max_dep_latency
18542 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18543 return true;
18544
18545 return false;
18546}
18547
f676971a 18548/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18549 skipping any "non-active" insns - insns that will not actually occupy
18550 an issue slot. Return NULL_RTX if such an insn is not found. */
18551
18552static rtx
18553get_next_active_insn (rtx insn, rtx tail)
18554{
f489aff8 18555 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18556 return NULL_RTX;
18557
f489aff8 18558 while (1)
cbe26ab8 18559 {
f489aff8
AM
18560 insn = NEXT_INSN (insn);
18561 if (insn == NULL_RTX || insn == tail)
18562 return NULL_RTX;
cbe26ab8 18563
f489aff8
AM
18564 if (CALL_P (insn)
18565 || JUMP_P (insn)
18566 || (NONJUMP_INSN_P (insn)
18567 && GET_CODE (PATTERN (insn)) != USE
18568 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18569 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18570 break;
18571 }
18572 return insn;
cbe26ab8
DN
18573}
18574
44cd321e
PS
18575/* We are about to begin issuing insns for this clock cycle. */
18576
18577static int
18578rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18579 rtx *ready ATTRIBUTE_UNUSED,
18580 int *pn_ready ATTRIBUTE_UNUSED,
18581 int clock_var ATTRIBUTE_UNUSED)
18582{
d296e02e
AP
18583 int n_ready = *pn_ready;
18584
44cd321e
PS
18585 if (sched_verbose)
18586 fprintf (dump, "// rs6000_sched_reorder :\n");
18587
d296e02e
AP
 18588 /* Reorder the ready list, if the last ready insn (the next one
 18589 to be issued) is a nonpipelined insn. */
18590 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18591 {
18592 if (is_nonpipeline_insn (ready[n_ready - 1])
18593 && (recog_memoized (ready[n_ready - 2]) > 0))
 18594 /* Simply swap the two insns at the head of the ready list. */
18595 {
18596 rtx tmp = ready[n_ready - 1];
18597 ready[n_ready - 1] = ready[n_ready - 2];
18598 ready[n_ready - 2] = tmp;
18599 }
18600 }
18601
44cd321e
PS
18602 if (rs6000_cpu == PROCESSOR_POWER6)
18603 load_store_pendulum = 0;
18604
18605 return rs6000_issue_rate ();
18606}
18607
18608/* Like rs6000_sched_reorder, but called after issuing each insn. */
18609
18610static int
18611rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18612 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18613{
18614 if (sched_verbose)
18615 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18616
18617 /* For Power6, we need to handle some special cases to try and keep the
18618 store queue from overflowing and triggering expensive flushes.
18619
18620 This code monitors how load and store instructions are being issued
18621 and skews the ready list one way or the other to increase the likelihood
18622 that a desired instruction is issued at the proper time.
18623
18624 A couple of things are done. First, we maintain a "load_store_pendulum"
18625 to track the current state of load/store issue.
18626
18627 - If the pendulum is at zero, then no loads or stores have been
18628 issued in the current cycle so we do nothing.
18629
18630 - If the pendulum is 1, then a single load has been issued in this
18631 cycle and we attempt to locate another load in the ready list to
18632 issue with it.
18633
2f8e468b 18634 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18635 issued in this cycle, so we increase the priority of the first load
 18636 in the ready list to increase its likelihood of being chosen first
18637 in the next cycle.
18638
18639 - If the pendulum is -1, then a single store has been issued in this
18640 cycle and we attempt to locate another store in the ready list to
18641 issue with it, preferring a store to an adjacent memory location to
18642 facilitate store pairing in the store queue.
18643
18644 - If the pendulum is 2, then two loads have already been
18645 issued in this cycle, so we increase the priority of the first store
 18646 in the ready list to increase its likelihood of being chosen first
18647 in the next cycle.
18648
18649 - If the pendulum < -2 or > 2, then do nothing.
18650
18651 Note: This code covers the most common scenarios. There exist non
18652 load/store instructions which make use of the LSU and which
18653 would need to be accounted for to strictly model the behavior
18654 of the machine. Those instructions are currently unaccounted
18655 for to help minimize compile time overhead of this code.
18656 */
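  /* In short (matching the description above):
       -2  two stores issued   -> bump the priority of the first ready load
       -1  one store issued    -> try to pair a second (ideally adjacent) store
        0  nothing issued yet  -> no action
        1  one load issued     -> try to pair a second load
        2  two loads issued    -> bump the priority of the first ready store  */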
18657 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18658 {
18659 int pos;
18660 int i;
18661 rtx tmp;
18662
18663 if (is_store_insn (last_scheduled_insn))
18664 /* Issuing a store, swing the load_store_pendulum to the left */
18665 load_store_pendulum--;
18666 else if (is_load_insn (last_scheduled_insn))
18667 /* Issuing a load, swing the load_store_pendulum to the right */
18668 load_store_pendulum++;
18669 else
18670 return cached_can_issue_more;
18671
18672 /* If the pendulum is balanced, or there is only one instruction on
18673 the ready list, then all is well, so return. */
18674 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18675 return cached_can_issue_more;
18676
18677 if (load_store_pendulum == 1)
18678 {
18679 /* A load has been issued in this cycle. Scan the ready list
18680 for another load to issue with it */
18681 pos = *pn_ready-1;
18682
18683 while (pos >= 0)
18684 {
18685 if (is_load_insn (ready[pos]))
18686 {
18687 /* Found a load. Move it to the head of the ready list,
 18688 and adjust its priority so that it is more likely to
18689 stay there */
18690 tmp = ready[pos];
18691 for (i=pos; i<*pn_ready-1; i++)
18692 ready[i] = ready[i + 1];
18693 ready[*pn_ready-1] = tmp;
 18694 if (INSN_PRIORITY_KNOWN (tmp))
18695 INSN_PRIORITY (tmp)++;
18696 break;
18697 }
18698 pos--;
18699 }
18700 }
18701 else if (load_store_pendulum == -2)
18702 {
18703 /* Two stores have been issued in this cycle. Increase the
18704 priority of the first load in the ready list to favor it for
18705 issuing in the next cycle. */
18706 pos = *pn_ready-1;
18707
18708 while (pos >= 0)
18709 {
18710 if (is_load_insn (ready[pos])
18711 && INSN_PRIORITY_KNOWN (ready[pos]))
18712 {
18713 INSN_PRIORITY (ready[pos])++;
18714
18715 /* Adjust the pendulum to account for the fact that a load
18716 was found and increased in priority. This is to prevent
18717 increasing the priority of multiple loads */
18718 load_store_pendulum--;
18719
18720 break;
18721 }
18722 pos--;
18723 }
18724 }
18725 else if (load_store_pendulum == -1)
18726 {
18727 /* A store has been issued in this cycle. Scan the ready list for
18728 another store to issue with it, preferring a store to an adjacent
18729 memory location */
18730 int first_store_pos = -1;
18731
18732 pos = *pn_ready-1;
18733
18734 while (pos >= 0)
18735 {
18736 if (is_store_insn (ready[pos]))
18737 {
18738 /* Maintain the index of the first store found on the
18739 list */
18740 if (first_store_pos == -1)
18741 first_store_pos = pos;
18742
18743 if (is_store_insn (last_scheduled_insn)
18744 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18745 {
18746 /* Found an adjacent store. Move it to the head of the
 18747 ready list, and adjust its priority so that it is
18748 more likely to stay there */
18749 tmp = ready[pos];
18750 for (i=pos; i<*pn_ready-1; i++)
18751 ready[i] = ready[i + 1];
18752 ready[*pn_ready-1] = tmp;
 18753 if (INSN_PRIORITY_KNOWN (tmp))
18754 INSN_PRIORITY (tmp)++;
18755 first_store_pos = -1;
18756
18757 break;
 18758 }
18759 }
18760 pos--;
18761 }
18762
18763 if (first_store_pos >= 0)
18764 {
18765 /* An adjacent store wasn't found, but a non-adjacent store was,
18766 so move the non-adjacent store to the front of the ready
18767 list, and adjust its priority so that it is more likely to
18768 stay there. */
18769 tmp = ready[first_store_pos];
18770 for (i=first_store_pos; i<*pn_ready-1; i++)
18771 ready[i] = ready[i + 1];
18772 ready[*pn_ready-1] = tmp;
 18773 if (INSN_PRIORITY_KNOWN (tmp))
18774 INSN_PRIORITY (tmp)++;
18775 }
18776 }
18777 else if (load_store_pendulum == 2)
18778 {
18779 /* Two loads have been issued in this cycle. Increase the priority
18780 of the first store in the ready list to favor it for issuing in
18781 the next cycle. */
18782 pos = *pn_ready-1;
18783
18784 while (pos >= 0)
18785 {
18786 if (is_store_insn (ready[pos])
18787 && INSN_PRIORITY_KNOWN (ready[pos]))
18788 {
18789 INSN_PRIORITY (ready[pos])++;
18790
18791 /* Adjust the pendulum to account for the fact that a store
18792 was found and increased in priority. This is to prevent
18793 increasing the priority of multiple stores */
18794 load_store_pendulum++;
18795
18796 break;
18797 }
18798 pos--;
18799 }
18800 }
18801 }
18802
18803 return cached_can_issue_more;
18804}
18805
839a4992 18806/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18807 of group WHICH_GROUP.
18808
18809 If WHICH_GROUP == current_group, this function will return true if INSN
 18810 causes the termination of the current group (i.e., the dispatch group to
18811 which INSN belongs). This means that INSN will be the last insn in the
18812 group it belongs to.
18813
18814 If WHICH_GROUP == previous_group, this function will return true if INSN
 18815 causes the termination of the previous group (i.e., the dispatch group that
 18816 precedes the group to which INSN belongs). This means that INSN will be
 18817 the first insn in the group it belongs to. */
18818
18819static bool
18820insn_terminates_group_p (rtx insn, enum group_termination which_group)
18821{
44cd321e 18822 bool first, last;
cbe26ab8
DN
18823
18824 if (! insn)
18825 return false;
569fa502 18826
44cd321e
PS
18827 first = insn_must_be_first_in_group (insn);
18828 last = insn_must_be_last_in_group (insn);
cbe26ab8 18829
44cd321e 18830 if (first && last)
cbe26ab8
DN
18831 return true;
18832
18833 if (which_group == current_group)
44cd321e 18834 return last;
cbe26ab8 18835 else if (which_group == previous_group)
44cd321e
PS
18836 return first;
18837
18838 return false;
18839}
18840
18841
18842static bool
18843insn_must_be_first_in_group (rtx insn)
18844{
18845 enum attr_type type;
18846
18847 if (!insn
18848 || insn == NULL_RTX
18849 || GET_CODE (insn) == NOTE
18850 || GET_CODE (PATTERN (insn)) == USE
18851 || GET_CODE (PATTERN (insn)) == CLOBBER)
18852 return false;
18853
18854 switch (rs6000_cpu)
cbe26ab8 18855 {
44cd321e
PS
18856 case PROCESSOR_POWER5:
18857 if (is_cracked_insn (insn))
18858 return true;
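      /* Fall through: the POWER4 checks below also apply to POWER5.  */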
18859 case PROCESSOR_POWER4:
18860 if (is_microcoded_insn (insn))
18861 return true;
18862
18863 if (!rs6000_sched_groups)
18864 return false;
18865
18866 type = get_attr_type (insn);
18867
18868 switch (type)
18869 {
18870 case TYPE_MFCR:
18871 case TYPE_MFCRF:
18872 case TYPE_MTCR:
18873 case TYPE_DELAYED_CR:
18874 case TYPE_CR_LOGICAL:
18875 case TYPE_MTJMPR:
18876 case TYPE_MFJMPR:
18877 case TYPE_IDIV:
18878 case TYPE_LDIV:
18879 case TYPE_LOAD_L:
18880 case TYPE_STORE_C:
18881 case TYPE_ISYNC:
18882 case TYPE_SYNC:
18883 return true;
18884 default:
18885 break;
18886 }
18887 break;
18888 case PROCESSOR_POWER6:
18889 type = get_attr_type (insn);
18890
18891 switch (type)
18892 {
18893 case TYPE_INSERT_DWORD:
18894 case TYPE_EXTS:
18895 case TYPE_CNTLZ:
18896 case TYPE_SHIFT:
18897 case TYPE_VAR_SHIFT_ROTATE:
18898 case TYPE_TRAP:
18899 case TYPE_IMUL:
18900 case TYPE_IMUL2:
18901 case TYPE_IMUL3:
18902 case TYPE_LMUL:
18903 case TYPE_IDIV:
18904 case TYPE_INSERT_WORD:
18905 case TYPE_DELAYED_COMPARE:
18906 case TYPE_IMUL_COMPARE:
18907 case TYPE_LMUL_COMPARE:
18908 case TYPE_FPCOMPARE:
18909 case TYPE_MFCR:
18910 case TYPE_MTCR:
18911 case TYPE_MFJMPR:
18912 case TYPE_MTJMPR:
18913 case TYPE_ISYNC:
18914 case TYPE_SYNC:
18915 case TYPE_LOAD_L:
18916 case TYPE_STORE_C:
18917 case TYPE_LOAD_U:
18918 case TYPE_LOAD_UX:
18919 case TYPE_LOAD_EXT_UX:
18920 case TYPE_STORE_U:
18921 case TYPE_STORE_UX:
18922 case TYPE_FPLOAD_U:
18923 case TYPE_FPLOAD_UX:
18924 case TYPE_FPSTORE_U:
18925 case TYPE_FPSTORE_UX:
18926 return true;
18927 default:
18928 break;
18929 }
18930 break;
18931 default:
18932 break;
18933 }
18934
18935 return false;
18936}
18937
18938static bool
18939insn_must_be_last_in_group (rtx insn)
18940{
18941 enum attr_type type;
18942
18943 if (!insn
18944 || insn == NULL_RTX
18945 || GET_CODE (insn) == NOTE
18946 || GET_CODE (PATTERN (insn)) == USE
18947 || GET_CODE (PATTERN (insn)) == CLOBBER)
18948 return false;
18949
18950 switch (rs6000_cpu) {
18951 case PROCESSOR_POWER4:
18952 case PROCESSOR_POWER5:
18953 if (is_microcoded_insn (insn))
18954 return true;
18955
18956 if (is_branch_slot_insn (insn))
18957 return true;
18958
18959 break;
18960 case PROCESSOR_POWER6:
18961 type = get_attr_type (insn);
18962
18963 switch (type)
18964 {
18965 case TYPE_EXTS:
18966 case TYPE_CNTLZ:
18967 case TYPE_SHIFT:
18968 case TYPE_VAR_SHIFT_ROTATE:
18969 case TYPE_TRAP:
18970 case TYPE_IMUL:
18971 case TYPE_IMUL2:
18972 case TYPE_IMUL3:
18973 case TYPE_LMUL:
18974 case TYPE_IDIV:
18975 case TYPE_DELAYED_COMPARE:
18976 case TYPE_IMUL_COMPARE:
18977 case TYPE_LMUL_COMPARE:
18978 case TYPE_FPCOMPARE:
18979 case TYPE_MFCR:
18980 case TYPE_MTCR:
18981 case TYPE_MFJMPR:
18982 case TYPE_MTJMPR:
18983 case TYPE_ISYNC:
18984 case TYPE_SYNC:
18985 case TYPE_LOAD_L:
18986 case TYPE_STORE_C:
18987 return true;
18988 default:
18989 break;
cbe26ab8 18990 }
44cd321e
PS
18991 break;
18992 default:
18993 break;
18994 }
cbe26ab8
DN
18995
18996 return false;
18997}
18998
839a4992 18999/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19000 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19001
19002static bool
19003is_costly_group (rtx *group_insns, rtx next_insn)
19004{
19005 int i;
cbe26ab8
DN
19006 int issue_rate = rs6000_issue_rate ();
19007
19008 for (i = 0; i < issue_rate; i++)
19009 {
e2f6ff94
MK
19010 sd_iterator_def sd_it;
19011 dep_t dep;
cbe26ab8 19012 rtx insn = group_insns[i];
b198261f 19013
cbe26ab8 19014 if (!insn)
c4ad648e 19015 continue;
b198261f 19016
e2f6ff94 19017 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19018 {
b198261f
MK
19019 rtx next = DEP_CON (dep);
19020
19021 if (next == next_insn
19022 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19023 return true;
c4ad648e 19024 }
cbe26ab8
DN
19025 }
19026
19027 return false;
19028}
19029
f676971a 19030/* Utility of the function redefine_groups.
cbe26ab8
DN
19031 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19032 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19033 to keep it "far" (in a separate group) from GROUP_INSNS, following
19034 one of the following schemes, depending on the value of the flag
19035 -minsert_sched_nops = X:
19036 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19037 in order to force NEXT_INSN into a separate group.
f676971a
EC
19038 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19039 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19040 insertion (has a group just ended, how many vacant issue slots remain in the
19041 last group, and how many dispatch groups were encountered so far). */
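/* Note that in scheme (2) exactly X nops are emitted whenever the
   dependence is costly, even if fewer nops would already have pushed
   NEXT_INSN into a new group.  */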
19042
f676971a 19043static int
c4ad648e
AM
19044force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19045 rtx next_insn, bool *group_end, int can_issue_more,
19046 int *group_count)
cbe26ab8
DN
19047{
19048 rtx nop;
19049 bool force;
19050 int issue_rate = rs6000_issue_rate ();
19051 bool end = *group_end;
19052 int i;
19053
19054 if (next_insn == NULL_RTX)
19055 return can_issue_more;
19056
19057 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19058 return can_issue_more;
19059
19060 force = is_costly_group (group_insns, next_insn);
19061 if (!force)
19062 return can_issue_more;
19063
19064 if (sched_verbose > 6)
19065 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19066 *group_count ,can_issue_more);
cbe26ab8
DN
19067
19068 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19069 {
19070 if (*group_end)
c4ad648e 19071 can_issue_more = 0;
cbe26ab8
DN
19072
19073 /* Since only a branch can be issued in the last issue_slot, it is
19074 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19075 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19076 in this case the last nop will start a new group and the branch
19077 will be forced to the new group. */
cbe26ab8 19078 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19079 can_issue_more--;
cbe26ab8
DN
19080
19081 while (can_issue_more > 0)
c4ad648e 19082 {
9390387d 19083 nop = gen_nop ();
c4ad648e
AM
19084 emit_insn_before (nop, next_insn);
19085 can_issue_more--;
19086 }
cbe26ab8
DN
19087
19088 *group_end = true;
19089 return 0;
f676971a 19090 }
cbe26ab8
DN
19091
19092 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19093 {
19094 int n_nops = rs6000_sched_insert_nops;
19095
f676971a 19096 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19097 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19098 if (can_issue_more == 0)
c4ad648e 19099 can_issue_more = issue_rate;
cbe26ab8
DN
19100 can_issue_more--;
19101 if (can_issue_more == 0)
c4ad648e
AM
19102 {
19103 can_issue_more = issue_rate - 1;
19104 (*group_count)++;
19105 end = true;
19106 for (i = 0; i < issue_rate; i++)
19107 {
19108 group_insns[i] = 0;
19109 }
19110 }
cbe26ab8
DN
19111
19112 while (n_nops > 0)
c4ad648e
AM
19113 {
19114 nop = gen_nop ();
19115 emit_insn_before (nop, next_insn);
19116 if (can_issue_more == issue_rate - 1) /* new group begins */
19117 end = false;
19118 can_issue_more--;
19119 if (can_issue_more == 0)
19120 {
19121 can_issue_more = issue_rate - 1;
19122 (*group_count)++;
19123 end = true;
19124 for (i = 0; i < issue_rate; i++)
19125 {
19126 group_insns[i] = 0;
19127 }
19128 }
19129 n_nops--;
19130 }
cbe26ab8
DN
19131
19132 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19133 can_issue_more++;
cbe26ab8 19134
c4ad648e
AM
19135 /* Is next_insn going to start a new group? */
19136 *group_end
19137 = (end
cbe26ab8
DN
19138 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19139 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19140 || (can_issue_more < issue_rate &&
c4ad648e 19141 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19142 if (*group_end && end)
c4ad648e 19143 (*group_count)--;
cbe26ab8
DN
19144
19145 if (sched_verbose > 6)
c4ad648e
AM
19146 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19147 *group_count, can_issue_more);
f676971a
EC
19148 return can_issue_more;
19149 }
cbe26ab8
DN
19150
19151 return can_issue_more;
19152}
19153
19154/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19155 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19156 form in practice. It tries to achieve this synchronization by forcing the
19157 estimated processor grouping on the compiler (as opposed to the function
 19158 'pad_groups' which tries to force the scheduler's grouping on the processor).
19159
19160 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19161 examines the (estimated) dispatch groups that will be formed by the processor
19162 dispatcher. It marks these group boundaries to reflect the estimated
19163 processor grouping, overriding the grouping that the scheduler had marked.
19164 Depending on the value of the flag '-minsert-sched-nops' this function can
19165 force certain insns into separate groups or force a certain distance between
19166 them by inserting nops, for example, if there exists a "costly dependence"
19167 between the insns.
19168
19169 The function estimates the group boundaries that the processor will form as
0fa2e4df 19170 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19171 each insn. A subsequent insn will start a new group if one of the following
19172 4 cases applies:
19173 - no more vacant issue slots remain in the current dispatch group.
19174 - only the last issue slot, which is the branch slot, is vacant, but the next
19175 insn is not a branch.
 19176 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
 19177 which means that a cracked insn (which occupies two issue slots) can't be
 19178 issued in this group.
f676971a 19179 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19180 start a new group. */
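/* Within this file an insn given TImode marks the start of a dispatch
   group: redefine_groups overrides the scheduler's marking via the
   PUT_MODE calls below, while pad_groups (selected instead when
   -minsert-sched-nops requests group padding) reads that marking to
   locate the group boundaries.  */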
19181
19182static int
19183redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19184{
19185 rtx insn, next_insn;
19186 int issue_rate;
19187 int can_issue_more;
19188 int slot, i;
19189 bool group_end;
19190 int group_count = 0;
19191 rtx *group_insns;
19192
19193 /* Initialize. */
19194 issue_rate = rs6000_issue_rate ();
19195 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19196 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19197 {
19198 group_insns[i] = 0;
19199 }
19200 can_issue_more = issue_rate;
19201 slot = 0;
19202 insn = get_next_active_insn (prev_head_insn, tail);
19203 group_end = false;
19204
19205 while (insn != NULL_RTX)
19206 {
19207 slot = (issue_rate - can_issue_more);
19208 group_insns[slot] = insn;
19209 can_issue_more =
c4ad648e 19210 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19211 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19212 can_issue_more = 0;
cbe26ab8
DN
19213
19214 next_insn = get_next_active_insn (insn, tail);
19215 if (next_insn == NULL_RTX)
c4ad648e 19216 return group_count + 1;
cbe26ab8 19217
c4ad648e
AM
19218 /* Is next_insn going to start a new group? */
19219 group_end
19220 = (can_issue_more == 0
19221 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19222 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19223 || (can_issue_more < issue_rate &&
19224 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19225
f676971a 19226 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19227 next_insn, &group_end, can_issue_more,
19228 &group_count);
cbe26ab8
DN
19229
19230 if (group_end)
c4ad648e
AM
19231 {
19232 group_count++;
19233 can_issue_more = 0;
19234 for (i = 0; i < issue_rate; i++)
19235 {
19236 group_insns[i] = 0;
19237 }
19238 }
cbe26ab8
DN
19239
19240 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19241 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19242 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19243 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19244
19245 insn = next_insn;
19246 if (can_issue_more == 0)
c4ad648e
AM
19247 can_issue_more = issue_rate;
19248 } /* while */
cbe26ab8
DN
19249
19250 return group_count;
19251}
19252
19253/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19254 dispatch group boundaries that the scheduler had marked. Pad with nops
19255 any dispatch groups which have vacant issue slots, in order to force the
19256 scheduler's grouping on the processor dispatcher. The function
19257 returns the number of dispatch groups found. */
19258
19259static int
19260pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19261{
19262 rtx insn, next_insn;
19263 rtx nop;
19264 int issue_rate;
19265 int can_issue_more;
19266 int group_end;
19267 int group_count = 0;
19268
19269 /* Initialize issue_rate. */
19270 issue_rate = rs6000_issue_rate ();
19271 can_issue_more = issue_rate;
19272
19273 insn = get_next_active_insn (prev_head_insn, tail);
19274 next_insn = get_next_active_insn (insn, tail);
19275
19276 while (insn != NULL_RTX)
19277 {
19278 can_issue_more =
19279 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19280
19281 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19282
19283 if (next_insn == NULL_RTX)
c4ad648e 19284 break;
cbe26ab8
DN
19285
19286 if (group_end)
c4ad648e
AM
19287 {
19288 /* If the scheduler had marked group termination at this location
 19289 (between insn and next_insn), and neither insn nor next_insn will
19290 force group termination, pad the group with nops to force group
19291 termination. */
19292 if (can_issue_more
19293 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19294 && !insn_terminates_group_p (insn, current_group)
19295 && !insn_terminates_group_p (next_insn, previous_group))
19296 {
9390387d 19297 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19298 can_issue_more--;
19299
19300 while (can_issue_more)
19301 {
19302 nop = gen_nop ();
19303 emit_insn_before (nop, next_insn);
19304 can_issue_more--;
19305 }
19306 }
19307
19308 can_issue_more = issue_rate;
19309 group_count++;
19310 }
cbe26ab8
DN
19311
19312 insn = next_insn;
19313 next_insn = get_next_active_insn (insn, tail);
19314 }
19315
19316 return group_count;
19317}
19318
44cd321e
PS
19319/* We're beginning a new block. Initialize data structures as necessary. */
19320
19321static void
19322rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19323 int sched_verbose ATTRIBUTE_UNUSED,
19324 int max_ready ATTRIBUTE_UNUSED)
982afe02 19325{
44cd321e
PS
19326 last_scheduled_insn = NULL_RTX;
19327 load_store_pendulum = 0;
19328}
19329
cbe26ab8
DN
19330/* The following function is called at the end of scheduling BB.
 19331 After reload, it inserts nops to enforce insn group bundling. */
19332
19333static void
38f391a5 19334rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19335{
19336 int n_groups;
19337
19338 if (sched_verbose)
19339 fprintf (dump, "=== Finishing schedule.\n");
19340
ec507f2d 19341 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19342 {
19343 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19344 return;
cbe26ab8
DN
19345
19346 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19347 n_groups = pad_groups (dump, sched_verbose,
19348 current_sched_info->prev_head,
19349 current_sched_info->next_tail);
cbe26ab8 19350 else
c4ad648e
AM
19351 n_groups = redefine_groups (dump, sched_verbose,
19352 current_sched_info->prev_head,
19353 current_sched_info->next_tail);
cbe26ab8
DN
19354
19355 if (sched_verbose >= 6)
19356 {
19357 fprintf (dump, "ngroups = %d\n", n_groups);
19358 print_rtl (dump, current_sched_info->prev_head);
19359 fprintf (dump, "Done finish_sched\n");
19360 }
19361 }
19362}
b6c9286a 19363\f
b6c9286a
MM
19364/* Length in units of the trampoline for entering a nested function. */
19365
19366int
863d938c 19367rs6000_trampoline_size (void)
b6c9286a
MM
19368{
19369 int ret = 0;
19370
19371 switch (DEFAULT_ABI)
19372 {
19373 default:
37409796 19374 gcc_unreachable ();
b6c9286a
MM
19375
19376 case ABI_AIX:
8f802bfb 19377 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19378 break;
19379
4dabc42d 19380 case ABI_DARWIN:
b6c9286a 19381 case ABI_V4:
03a7e1a5 19382 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19383 break;
b6c9286a
MM
19384 }
19385
19386 return ret;
19387}
19388
19389/* Emit RTL insns to initialize the variable parts of a trampoline.
19390 FNADDR is an RTX for the address of the function's pure code.
19391 CXT is an RTX for the static chain value for the function. */
19392
19393void
a2369ed3 19394rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19395{
8bd04c56 19396 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19397 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19398
19399 switch (DEFAULT_ABI)
19400 {
19401 default:
37409796 19402 gcc_unreachable ();
b6c9286a 19403
8bd04c56 19404/* Macros to shorten the code expansions below. */
9613eaff 19405#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19406#define MEM_PLUS(addr,offset) \
9613eaff 19407 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19408
b6c9286a
MM
 19409 /* Under AIX, just build the 3-word function descriptor. */
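      /* The three descriptor words are, in order, the code address, the
	 TOC pointer and the static chain, each REGSIZE bytes wide.  */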
19410 case ABI_AIX:
8bd04c56 19411 {
9613eaff
SH
19412 rtx fn_reg = gen_reg_rtx (Pmode);
19413 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19414 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19415 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19416 emit_move_insn (MEM_DEREF (addr), fn_reg);
19417 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19418 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19419 }
b6c9286a
MM
19420 break;
19421
4dabc42d
TC
19422 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19423 case ABI_DARWIN:
b6c9286a 19424 case ABI_V4:
9613eaff 19425 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19426 FALSE, VOIDmode, 4,
9613eaff 19427 addr, Pmode,
eaf1bcf1 19428 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19429 fnaddr, Pmode,
19430 ctx_reg, Pmode);
b6c9286a 19431 break;
b6c9286a
MM
19432 }
19433
19434 return;
19435}
7509c759
MM
19436
19437\f
91d231cb 19438/* Table of valid machine attributes. */
a4f6c312 19439
91d231cb 19440const struct attribute_spec rs6000_attribute_table[] =
7509c759 19441{
91d231cb 19442 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19443 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19444 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19445 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19446 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19447 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19448#ifdef SUBTARGET_ATTRIBUTE_TABLE
19449 SUBTARGET_ATTRIBUTE_TABLE,
19450#endif
a5c76ee6 19451 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19452};
7509c759 19453
8bb418a3
ZL
19454/* Handle the "altivec" attribute. The attribute may have
19455 arguments as follows:
f676971a 19456
8bb418a3
ZL
19457 __attribute__((altivec(vector__)))
19458 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19459 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19460
19461 and may appear more than once (e.g., 'vector bool char') in a
19462 given declaration. */
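/* For example, a declaration written with the AltiVec keywords as
     vector unsigned int v;
   typically reaches this handler as
     __attribute__((altivec(vector__))) unsigned int v;
   and is rewritten below to the unsigned V4SI vector type.  */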
19463
19464static tree
f90ac3f0
UP
19465rs6000_handle_altivec_attribute (tree *node,
19466 tree name ATTRIBUTE_UNUSED,
19467 tree args,
8bb418a3
ZL
19468 int flags ATTRIBUTE_UNUSED,
19469 bool *no_add_attrs)
19470{
19471 tree type = *node, result = NULL_TREE;
19472 enum machine_mode mode;
19473 int unsigned_p;
19474 char altivec_type
19475 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19476 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19477 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19478 : '?');
8bb418a3
ZL
19479
19480 while (POINTER_TYPE_P (type)
19481 || TREE_CODE (type) == FUNCTION_TYPE
19482 || TREE_CODE (type) == METHOD_TYPE
19483 || TREE_CODE (type) == ARRAY_TYPE)
19484 type = TREE_TYPE (type);
19485
19486 mode = TYPE_MODE (type);
19487
f90ac3f0
UP
19488 /* Check for invalid AltiVec type qualifiers. */
19489 if (type == long_unsigned_type_node || type == long_integer_type_node)
19490 {
19491 if (TARGET_64BIT)
19492 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19493 else if (rs6000_warn_altivec_long)
d4ee4d25 19494 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19495 }
19496 else if (type == long_long_unsigned_type_node
19497 || type == long_long_integer_type_node)
19498 error ("use of %<long long%> in AltiVec types is invalid");
19499 else if (type == double_type_node)
19500 error ("use of %<double%> in AltiVec types is invalid");
19501 else if (type == long_double_type_node)
19502 error ("use of %<long double%> in AltiVec types is invalid");
19503 else if (type == boolean_type_node)
19504 error ("use of boolean types in AltiVec types is invalid");
19505 else if (TREE_CODE (type) == COMPLEX_TYPE)
19506 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19507 else if (DECIMAL_FLOAT_MODE_P (mode))
19508 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19509
19510 switch (altivec_type)
19511 {
19512 case 'v':
8df83eae 19513 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19514 switch (mode)
19515 {
c4ad648e
AM
19516 case SImode:
19517 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19518 break;
19519 case HImode:
19520 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19521 break;
19522 case QImode:
19523 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19524 break;
19525 case SFmode: result = V4SF_type_node; break;
19526 /* If the user says 'vector int bool', we may be handed the 'bool'
19527 attribute _before_ the 'vector' attribute, and so select the
19528 proper type in the 'b' case below. */
19529 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19530 result = type;
19531 default: break;
8bb418a3
ZL
19532 }
19533 break;
19534 case 'b':
19535 switch (mode)
19536 {
c4ad648e
AM
19537 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19538 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19539 case QImode: case V16QImode: result = bool_V16QI_type_node;
19540 default: break;
8bb418a3
ZL
19541 }
19542 break;
19543 case 'p':
19544 switch (mode)
19545 {
c4ad648e
AM
19546 case V8HImode: result = pixel_V8HI_type_node;
19547 default: break;
8bb418a3
ZL
19548 }
19549 default: break;
19550 }
19551
7958a2a6
FJ
19552 if (result && result != type && TYPE_READONLY (type))
19553 result = build_qualified_type (result, TYPE_QUAL_CONST);
19554
8bb418a3
ZL
19555 *no_add_attrs = true; /* No need to hang on to the attribute. */
19556
f90ac3f0 19557 if (result)
8bb418a3
ZL
19558 *node = reconstruct_complex_type (*node, result);
19559
19560 return NULL_TREE;
19561}
19562
f18eca82
ZL
19563/* AltiVec defines four built-in scalar types that serve as vector
19564 elements; we must teach the compiler how to mangle them. */
19565
19566static const char *
3101faab 19567rs6000_mangle_type (const_tree type)
f18eca82 19568{
608063c3
JB
19569 type = TYPE_MAIN_VARIANT (type);
19570
19571 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19572 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19573 return NULL;
19574
f18eca82
ZL
19575 if (type == bool_char_type_node) return "U6__boolc";
19576 if (type == bool_short_type_node) return "U6__bools";
19577 if (type == pixel_type_node) return "u7__pixel";
19578 if (type == bool_int_type_node) return "U6__booli";
19579
337bde91
DE
19580 /* Mangle IBM extended float long double as `g' (__float128) on
19581 powerpc*-linux where long-double-64 previously was the default. */
19582 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19583 && TARGET_ELF
19584 && TARGET_LONG_DOUBLE_128
19585 && !TARGET_IEEEQUAD)
19586 return "g";
19587
f18eca82
ZL
19588 /* For all other types, use normal C++ mangling. */
19589 return NULL;
19590}
19591
a5c76ee6
ZW
19592/* Handle a "longcall" or "shortcall" attribute; arguments as in
19593 struct attribute_spec.handler. */
a4f6c312 19594
91d231cb 19595static tree
f676971a
EC
19596rs6000_handle_longcall_attribute (tree *node, tree name,
19597 tree args ATTRIBUTE_UNUSED,
19598 int flags ATTRIBUTE_UNUSED,
a2369ed3 19599 bool *no_add_attrs)
91d231cb
JM
19600{
19601 if (TREE_CODE (*node) != FUNCTION_TYPE
19602 && TREE_CODE (*node) != FIELD_DECL
19603 && TREE_CODE (*node) != TYPE_DECL)
19604 {
5c498b10 19605 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19606 IDENTIFIER_POINTER (name));
19607 *no_add_attrs = true;
19608 }
6a4cee5f 19609
91d231cb 19610 return NULL_TREE;
7509c759
MM
19611}
19612
a5c76ee6
ZW
19613/* Set longcall attributes on all functions declared when
19614 rs6000_default_long_calls is true. */
19615static void
a2369ed3 19616rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19617{
19618 if (rs6000_default_long_calls
19619 && (TREE_CODE (type) == FUNCTION_TYPE
19620 || TREE_CODE (type) == METHOD_TYPE))
19621 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19622 NULL_TREE,
19623 TYPE_ATTRIBUTES (type));
16d6f994
EC
19624
19625#if TARGET_MACHO
19626 darwin_set_default_type_attributes (type);
19627#endif
a5c76ee6
ZW
19628}
19629
3cb999d8
DE
19630/* Return a reference suitable for calling a function with the
19631 longcall attribute. */
a4f6c312 19632
9390387d 19633rtx
a2369ed3 19634rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19635{
d330fd93 19636 const char *call_name;
6a4cee5f
MM
19637 tree node;
19638
19639 if (GET_CODE (call_ref) != SYMBOL_REF)
19640 return call_ref;
19641
19642 /* System V adds '.' to the internal name, so skip them. */
19643 call_name = XSTR (call_ref, 0);
19644 if (*call_name == '.')
19645 {
19646 while (*call_name == '.')
19647 call_name++;
19648
19649 node = get_identifier (call_name);
39403d82 19650 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19651 }
19652
19653 return force_reg (Pmode, call_ref);
19654}
7509c759 19655\f
77ccdfed
EC
19656#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19657#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19658#endif
19659
19660/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19661 struct attribute_spec.handler. */
19662static tree
19663rs6000_handle_struct_attribute (tree *node, tree name,
19664 tree args ATTRIBUTE_UNUSED,
19665 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19666{
19667 tree *type = NULL;
19668 if (DECL_P (*node))
19669 {
19670 if (TREE_CODE (*node) == TYPE_DECL)
19671 type = &TREE_TYPE (*node);
19672 }
19673 else
19674 type = node;
19675
19676 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19677 || TREE_CODE (*type) == UNION_TYPE)))
19678 {
19679 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19680 *no_add_attrs = true;
19681 }
19682
19683 else if ((is_attribute_p ("ms_struct", name)
19684 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19685 || ((is_attribute_p ("gcc_struct", name)
19686 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19687 {
19688 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19689 IDENTIFIER_POINTER (name));
19690 *no_add_attrs = true;
19691 }
19692
19693 return NULL_TREE;
19694}
19695
19696static bool
3101faab 19697rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19698{
19699 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19700 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19701 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19702}
19703\f
b64a1b53
RH
19704#ifdef USING_ELFOS_H
19705
d6b5193b 19706/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19707
d6b5193b
RS
19708static void
19709rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19710{
19711 if (DEFAULT_ABI == ABI_AIX
19712 && TARGET_MINIMAL_TOC
19713 && !TARGET_RELOCATABLE)
19714 {
19715 if (!toc_initialized)
19716 {
19717 toc_initialized = 1;
19718 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19719 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19720 fprintf (asm_out_file, "\t.tc ");
19721 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19722 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19723 fprintf (asm_out_file, "\n");
19724
19725 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19726 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19727 fprintf (asm_out_file, " = .+32768\n");
19728 }
19729 else
19730 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19731 }
19732 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19733 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19734 else
19735 {
19736 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19737 if (!toc_initialized)
19738 {
19739 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19740 fprintf (asm_out_file, " = .+32768\n");
19741 toc_initialized = 1;
19742 }
19743 }
19744}
19745
19746/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19747
b64a1b53 19748static void
d6b5193b
RS
19749rs6000_elf_asm_init_sections (void)
19750{
19751 toc_section
19752 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19753
19754 sdata2_section
19755 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19756 SDATA2_SECTION_ASM_OP);
19757}
19758
19759/* Implement TARGET_SELECT_RTX_SECTION. */
19760
19761static section *
f676971a 19762rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19763 unsigned HOST_WIDE_INT align)
7509c759 19764{
a9098fd0 19765 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19766 return toc_section;
7509c759 19767 else
d6b5193b 19768 return default_elf_select_rtx_section (mode, x, align);
7509c759 19769}
d9407988 19770\f
d1908feb
JJ
19771/* For a SYMBOL_REF, set generic flags and then perform some
19772 target-specific processing.
19773
d1908feb
JJ
19774 When the AIX ABI is requested on a non-AIX system, replace the
19775 function name with the real name (with a leading .) rather than the
19776 function descriptor name. This saves a lot of overriding code to
19777 read the prefixes. */
d9407988 19778
fb49053f 19779static void
a2369ed3 19780rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19781{
d1908feb 19782 default_encode_section_info (decl, rtl, first);
b2003250 19783
d1908feb
JJ
19784 if (first
19785 && TREE_CODE (decl) == FUNCTION_DECL
19786 && !TARGET_AIX
19787 && DEFAULT_ABI == ABI_AIX)
d9407988 19788 {
c6a2438a 19789 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19790 size_t len = strlen (XSTR (sym_ref, 0));
19791 char *str = alloca (len + 2);
19792 str[0] = '.';
19793 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19794 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19795 }
d9407988
MM
19796}
19797
21d9bb3f
PB
19798static inline bool
19799compare_section_name (const char *section, const char *template)
19800{
19801 int len;
19802
19803 len = strlen (template);
19804 return (strncmp (section, template, len) == 0
19805 && (section[len] == 0 || section[len] == '.'));
19806}
19807
c1b7d95a 19808bool
3101faab 19809rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19810{
19811 if (rs6000_sdata == SDATA_NONE)
19812 return false;
19813
7482ad25
AF
19814 /* We want to merge strings, so we never consider them small data. */
19815 if (TREE_CODE (decl) == STRING_CST)
19816 return false;
19817
19818 /* Functions are never in the small data area. */
19819 if (TREE_CODE (decl) == FUNCTION_DECL)
19820 return false;
19821
0e5dbd9b
DE
19822 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19823 {
19824 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
19825 if (compare_section_name (section, ".sdata")
19826 || compare_section_name (section, ".sdata2")
19827 || compare_section_name (section, ".gnu.linkonce.s")
19828 || compare_section_name (section, ".sbss")
19829 || compare_section_name (section, ".sbss2")
19830 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
19831 || strcmp (section, ".PPC.EMB.sdata0") == 0
19832 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19833 return true;
19834 }
19835 else
19836 {
19837 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19838
19839 if (size > 0
307b599c 19840 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19841 /* If it's not public, and we're not going to reference it there,
19842 there's no need to put it in the small data section. */
0e5dbd9b
DE
19843 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19844 return true;
19845 }
19846
19847 return false;
19848}
19849
b91da81f 19850#endif /* USING_ELFOS_H */
aacd3885
RS
19851\f
19852/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19853
aacd3885 19854static bool
3101faab 19855rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
19856{
19857 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19858}
a6c2a102 19859\f
000034eb 19860/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19861 ADDR can be effectively incremented by incrementing REG.
19862
19863 r0 is special and we must not select it as an address
19864 register by this routine since our caller will try to
19865 increment the returned register via an "la" instruction. */
000034eb 19866
9390387d 19867rtx
a2369ed3 19868find_addr_reg (rtx addr)
000034eb
DE
19869{
19870 while (GET_CODE (addr) == PLUS)
19871 {
02441cd6
JL
19872 if (GET_CODE (XEXP (addr, 0)) == REG
19873 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19874 addr = XEXP (addr, 0);
02441cd6
JL
19875 else if (GET_CODE (XEXP (addr, 1)) == REG
19876 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19877 addr = XEXP (addr, 1);
19878 else if (CONSTANT_P (XEXP (addr, 0)))
19879 addr = XEXP (addr, 1);
19880 else if (CONSTANT_P (XEXP (addr, 1)))
19881 addr = XEXP (addr, 0);
19882 else
37409796 19883 gcc_unreachable ();
000034eb 19884 }
37409796
NS
19885 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19886 return addr;
000034eb
DE
19887}
19888
a6c2a102 19889void
a2369ed3 19890rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19891{
19892 fatal_insn ("bad address", op);
19893}
c8023011 19894
ee890fe2
SS
19895#if TARGET_MACHO
19896
efdba735 19897static tree branch_island_list = 0;
ee890fe2 19898
efdba735
SH
19899/* Remember to generate a branch island for far calls to the given
19900 function. */
ee890fe2 19901
f676971a 19902static void
c4ad648e
AM
19903add_compiler_branch_island (tree label_name, tree function_name,
19904 int line_number)
ee890fe2 19905{
efdba735 19906 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19907 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19908 TREE_CHAIN (branch_island) = branch_island_list;
19909 branch_island_list = branch_island;
ee890fe2
SS
19910}
19911
efdba735
SH
19912#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19913#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19914#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19915 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19916
efdba735
SH
19917/* Generate far-jump branch islands for everything on the
19918 branch_island_list. Invoked immediately after the last instruction
19919 of the epilogue has been emitted; the branch-islands must be
19920 appended to, and contiguous with, the function body. Mach-O stubs
19921 are generated in machopic_output_stub(). */
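/* For reference, the non-PIC island emitted below is roughly

	LABEL:	lis r12,hi16(NAME)
		ori r12,r12,lo16(NAME)
		mtctr r12
		bctr

   while the PIC variant first recovers a base address with mflr/bcl/mflr
   and forms the target address with addis/addi relative to a local label
   before the mtctr/bctr.  */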
ee890fe2 19922
efdba735
SH
19923static void
19924macho_branch_islands (void)
19925{
19926 char tmp_buf[512];
19927 tree branch_island;
19928
19929 for (branch_island = branch_island_list;
19930 branch_island;
19931 branch_island = TREE_CHAIN (branch_island))
19932 {
19933 const char *label =
19934 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19935 const char *name =
11abc112 19936 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19937 char name_buf[512];
19938 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19939 if (name[0] == '*' || name[0] == '&')
19940 strcpy (name_buf, name+1);
19941 else
19942 {
19943 name_buf[0] = '_';
19944 strcpy (name_buf+1, name);
19945 }
19946 strcpy (tmp_buf, "\n");
19947 strcat (tmp_buf, label);
ee890fe2 19948#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19949 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19950 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19951#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19952 if (flag_pic)
19953 {
19954 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19955 strcat (tmp_buf, label);
19956 strcat (tmp_buf, "_pic\n");
19957 strcat (tmp_buf, label);
19958 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19959
efdba735
SH
19960 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19961 strcat (tmp_buf, name_buf);
19962 strcat (tmp_buf, " - ");
19963 strcat (tmp_buf, label);
19964 strcat (tmp_buf, "_pic)\n");
f676971a 19965
efdba735 19966 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19967
efdba735
SH
19968 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19969 strcat (tmp_buf, name_buf);
19970 strcat (tmp_buf, " - ");
19971 strcat (tmp_buf, label);
19972 strcat (tmp_buf, "_pic)\n");
f676971a 19973
efdba735
SH
19974 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19975 }
19976 else
19977 {
19978 strcat (tmp_buf, ":\nlis r12,hi16(");
19979 strcat (tmp_buf, name_buf);
19980 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19981 strcat (tmp_buf, name_buf);
19982 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19983 }
19984 output_asm_insn (tmp_buf, 0);
ee890fe2 19985#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19986 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19987 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19988#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19989 }
ee890fe2 19990
efdba735 19991 branch_island_list = 0;
ee890fe2
SS
19992}
19993
19994/* NO_PREVIOUS_DEF checks the linked list to see whether the function
19995 name is already there.  */
19996
efdba735 19997static int
a2369ed3 19998no_previous_def (tree function_name)
ee890fe2 19999{
efdba735
SH
20000 tree branch_island;
20001 for (branch_island = branch_island_list;
20002 branch_island;
20003 branch_island = TREE_CHAIN (branch_island))
20004 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20005 return 0;
20006 return 1;
20007}
20008
20009/* GET_PREV_LABEL gets the label name from the previous definition of
20010 the function. */
20011
efdba735 20012static tree
a2369ed3 20013get_prev_label (tree function_name)
ee890fe2 20014{
efdba735
SH
20015 tree branch_island;
20016 for (branch_island = branch_island_list;
20017 branch_island;
20018 branch_island = TREE_CHAIN (branch_island))
20019 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20020 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20021 return 0;
20022}
20023
75b1b789
MS
20024#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20025#define DARWIN_LINKER_GENERATES_ISLANDS 0
20026#endif
20027
20028/* KEXTs still need branch islands. */
20029#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20030 || flag_mkernel || flag_apple_kext)
20031
ee890fe2 20032/* INSN is either a function call or a millicode call. It may have an
f676971a 20033 unconditional jump in its delay slot.
ee890fe2
SS
20034
20035 CALL_DEST is the routine we are calling. */
20036
20037char *
c4ad648e
AM
20038output_call (rtx insn, rtx *operands, int dest_operand_number,
20039 int cookie_operand_number)
ee890fe2
SS
20040{
20041 static char buf[256];
75b1b789
MS
20042 if (DARWIN_GENERATE_ISLANDS
20043 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20044 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20045 {
20046 tree labelname;
efdba735 20047 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20048
ee890fe2
SS
20049 if (no_previous_def (funname))
20050 {
ee890fe2
SS
20051 rtx label_rtx = gen_label_rtx ();
20052 char *label_buf, temp_buf[256];
20053 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20054 CODE_LABEL_NUMBER (label_rtx));
20055 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20056 labelname = get_identifier (label_buf);
a38e7aa5 20057 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20058 }
20059 else
20060 labelname = get_prev_label (funname);
20061
efdba735
SH
20062 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20063 instruction will reach 'foo', otherwise link as 'bl L42'".
20064 "L42" should be a 'branch island', that will do a far jump to
20065 'foo'. Branch islands are generated in
20066 macho_branch_islands(). */
ee890fe2 20067 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20068 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20069 }
20070 else
efdba735
SH
20071 sprintf (buf, "bl %%z%d", dest_operand_number);
20072 return buf;
ee890fe2
SS
20073}
20074
ee890fe2
SS
20075/* Generate PIC and indirect symbol stubs. */
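/* For the non-PIC case the stub emitted below is essentially

	STUB:	.indirect_symbol SYMBOL
		lis r11,ha16(LAZY_PTR)
		lwzu r12,lo16(LAZY_PTR)(r11)
		mtctr r12
		bctr

   (ldu replaces lwzu for 64-bit), followed by the lazy pointer itself,
   which initially points at dyld_stub_binding_helper.  The flag_pic == 2
   variant instead computes the lazy pointer's address relative to a
   local label obtained with mflr/bcl/mflr.  */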
20076
20077void
a2369ed3 20078machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20079{
20080 unsigned int length;
a4f6c312
SS
20081 char *symbol_name, *lazy_ptr_name;
20082 char *local_label_0;
ee890fe2
SS
20083 static int label = 0;
20084
df56a27f 20085 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20086 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20087
ee890fe2 20088
ee890fe2
SS
20089 length = strlen (symb);
20090 symbol_name = alloca (length + 32);
20091 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20092
20093 lazy_ptr_name = alloca (length + 32);
20094 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20095
ee890fe2 20096 if (flag_pic == 2)
56c779bc 20097 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20098 else
56c779bc 20099 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20100
20101 if (flag_pic == 2)
20102 {
d974312d
DJ
20103 fprintf (file, "\t.align 5\n");
20104
20105 fprintf (file, "%s:\n", stub);
20106 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20107
876455fa 20108 label++;
89da1f32 20109 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20110 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20111
ee890fe2
SS
20112 fprintf (file, "\tmflr r0\n");
20113 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20114 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20115 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20116 lazy_ptr_name, local_label_0);
20117 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20118 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20119 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20120 lazy_ptr_name, local_label_0);
20121 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20122 fprintf (file, "\tbctr\n");
20123 }
20124 else
d974312d
DJ
20125 {
20126 fprintf (file, "\t.align 4\n");
20127
20128 fprintf (file, "%s:\n", stub);
20129 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20130
20131 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20132 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20133 (TARGET_64BIT ? "ldu" : "lwzu"),
20134 lazy_ptr_name);
d974312d
DJ
20135 fprintf (file, "\tmtctr r12\n");
20136 fprintf (file, "\tbctr\n");
20137 }
f676971a 20138
56c779bc 20139 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20140 fprintf (file, "%s:\n", lazy_ptr_name);
20141 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20142 fprintf (file, "%sdyld_stub_binding_helper\n",
20143 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20144}
20145
20146/* Legitimize PIC addresses. If the address is already
20147 position-independent, we return ORIG. Newly generated
20148 position-independent addresses go into a reg. This is REG if non
20149 zero, otherwise we allocate register(s) as necessary. */
20150
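/* True iff X is a CONST_INT that fits in a signed 16-bit immediate.  */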
4fbbe694 20151#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20152
20153rtx
f676971a 20154rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20155 rtx reg)
ee890fe2
SS
20156{
20157 rtx base, offset;
20158
20159 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20160 reg = gen_reg_rtx (Pmode);
20161
20162 if (GET_CODE (orig) == CONST)
20163 {
37409796
NS
20164 rtx reg_temp;
20165
ee890fe2
SS
20166 if (GET_CODE (XEXP (orig, 0)) == PLUS
20167 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20168 return orig;
20169
37409796 20170 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20171
37409796
NS
20172 /* Use a different reg for the intermediate value, as
20173 it will be marked UNCHANGING. */
b3a13419 20174 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20175 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20176 Pmode, reg_temp);
20177 offset =
20178 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20179 Pmode, reg);
bb8df8a6 20180
ee890fe2
SS
20181 if (GET_CODE (offset) == CONST_INT)
20182 {
20183 if (SMALL_INT (offset))
ed8908e7 20184 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20185 else if (! reload_in_progress && ! reload_completed)
20186 offset = force_reg (Pmode, offset);
20187 else
c859cda6
DJ
20188 {
20189 rtx mem = force_const_mem (Pmode, orig);
20190 return machopic_legitimize_pic_address (mem, Pmode, reg);
20191 }
ee890fe2 20192 }
f1c25d3b 20193 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20194 }
20195
20196 /* Fall back on generic machopic code. */
20197 return machopic_legitimize_pic_address (orig, mode, reg);
20198}
20199
c4e18b1c
GK
20200/* Output a .machine directive for the Darwin assembler, and call
20201 the generic start_file routine. */
20202
20203static void
20204rs6000_darwin_file_start (void)
20205{
94ff898d 20206 static const struct
c4e18b1c
GK
20207 {
20208 const char *arg;
20209 const char *name;
20210 int if_set;
20211 } mapping[] = {
55dbfb48 20212 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20213 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20214 { "power4", "ppc970", 0 },
20215 { "G5", "ppc970", 0 },
20216 { "7450", "ppc7450", 0 },
20217 { "7400", "ppc7400", MASK_ALTIVEC },
20218 { "G4", "ppc7400", 0 },
20219 { "750", "ppc750", 0 },
20220 { "740", "ppc750", 0 },
20221 { "G3", "ppc750", 0 },
20222 { "604e", "ppc604e", 0 },
20223 { "604", "ppc604", 0 },
20224 { "603e", "ppc603", 0 },
20225 { "603", "ppc603", 0 },
20226 { "601", "ppc601", 0 },
20227 { NULL, "ppc", 0 } };
20228 const char *cpu_id = "";
20229 size_t i;
94ff898d 20230
9390387d 20231 rs6000_file_start ();
192d0f89 20232 darwin_file_start ();
c4e18b1c
GK
20233
20234 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20235 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20236 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20237 && rs6000_select[i].string[0] != '\0')
20238 cpu_id = rs6000_select[i].string;
20239
20240 /* Look through the mapping array. Pick the first name that either
20241 matches the argument, has a bit set in IF_SET that is also set
20242 in the target flags, or has a NULL name. */
20243
20244 i = 0;
20245 while (mapping[i].arg != NULL
20246 && strcmp (mapping[i].arg, cpu_id) != 0
20247 && (mapping[i].if_set & target_flags) == 0)
20248 i++;
20249
20250 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20251}
20252
ee890fe2 20253#endif /* TARGET_MACHO */
7c262518
RH
20254
20255#if TARGET_ELF
9b580a0b
RH
20256static int
20257rs6000_elf_reloc_rw_mask (void)
7c262518 20258{
9b580a0b
RH
20259 if (flag_pic)
20260 return 3;
20261 else if (DEFAULT_ABI == ABI_AIX)
20262 return 2;
20263 else
20264 return 0;
7c262518 20265}
d9f6800d
RH
20266
20267/* Record an element in the table of global constructors. SYMBOL is
20268 a SYMBOL_REF of the function to be called; PRIORITY is a number
20269 between 0 and MAX_INIT_PRIORITY.
20270
20271 This differs from default_named_section_asm_out_constructor in
20272 that we have special handling for -mrelocatable. */
20273
20274static void
a2369ed3 20275rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20276{
20277 const char *section = ".ctors";
20278 char buf[16];
20279
20280 if (priority != DEFAULT_INIT_PRIORITY)
20281 {
20282 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20283 /* Invert the numbering so the linker puts us in the proper
20284 order; constructors are run from right to left, and the
20285 linker sorts in increasing order. */
20286 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20287 section = buf;
20288 }
20289
d6b5193b 20290 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20291 assemble_align (POINTER_SIZE);
d9f6800d
RH
20292
20293 if (TARGET_RELOCATABLE)
20294 {
20295 fputs ("\t.long (", asm_out_file);
20296 output_addr_const (asm_out_file, symbol);
20297 fputs (")@fixup\n", asm_out_file);
20298 }
20299 else
c8af3574 20300 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20301}
20302
20303static void
a2369ed3 20304rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20305{
20306 const char *section = ".dtors";
20307 char buf[16];
20308
20309 if (priority != DEFAULT_INIT_PRIORITY)
20310 {
20311 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20312 /* Invert the numbering so the linker puts us in the proper
20313 order; constructors are run from right to left, and the
20314 linker sorts in increasing order. */
20315 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20316 section = buf;
20317 }
20318
d6b5193b 20319 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20320 assemble_align (POINTER_SIZE);
d9f6800d
RH
20321
20322 if (TARGET_RELOCATABLE)
20323 {
20324 fputs ("\t.long (", asm_out_file);
20325 output_addr_const (asm_out_file, symbol);
20326 fputs (")@fixup\n", asm_out_file);
20327 }
20328 else
c8af3574 20329 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20330}
9739c90c
JJ
20331
20332void
a2369ed3 20333rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20334{
20335 if (TARGET_64BIT)
20336 {
20337 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20338 ASM_OUTPUT_LABEL (file, name);
20339 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20340 rs6000_output_function_entry (file, name);
20341 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20342 if (DOT_SYMBOLS)
9739c90c 20343 {
85b776df 20344 fputs ("\t.size\t", file);
9739c90c 20345 assemble_name (file, name);
85b776df
AM
20346 fputs (",24\n\t.type\t.", file);
20347 assemble_name (file, name);
20348 fputs (",@function\n", file);
20349 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20350 {
20351 fputs ("\t.globl\t.", file);
20352 assemble_name (file, name);
20353 putc ('\n', file);
20354 }
9739c90c 20355 }
85b776df
AM
20356 else
20357 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20358 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20359 rs6000_output_function_entry (file, name);
20360 fputs (":\n", file);
9739c90c
JJ
20361 return;
20362 }
20363
20364 if (TARGET_RELOCATABLE
7f970b70 20365 && !TARGET_SECURE_PLT
9739c90c 20366 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20367 && uses_TOC ())
9739c90c
JJ
20368 {
20369 char buf[256];
20370
20371 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20372
20373 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20374 fprintf (file, "\t.long ");
20375 assemble_name (file, buf);
20376 putc ('-', file);
20377 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20378 assemble_name (file, buf);
20379 putc ('\n', file);
20380 }
20381
20382 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20383 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20384
20385 if (DEFAULT_ABI == ABI_AIX)
20386 {
20387 const char *desc_name, *orig_name;
20388
20389 orig_name = (*targetm.strip_name_encoding) (name);
20390 desc_name = orig_name;
20391 while (*desc_name == '.')
20392 desc_name++;
20393
20394 if (TREE_PUBLIC (decl))
20395 fprintf (file, "\t.globl %s\n", desc_name);
20396
20397 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20398 fprintf (file, "%s:\n", desc_name);
20399 fprintf (file, "\t.long %s\n", orig_name);
20400 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20401 if (DEFAULT_ABI == ABI_AIX)
20402 fputs ("\t.long 0\n", file);
20403 fprintf (file, "\t.previous\n");
20404 }
20405 ASM_OUTPUT_LABEL (file, name);
20406}
1334b570
AM
20407
20408static void
20409rs6000_elf_end_indicate_exec_stack (void)
20410{
20411 if (TARGET_32BIT)
20412 file_end_indicate_exec_stack ();
20413}
7c262518
RH
20414#endif
20415
cbaaba19 20416#if TARGET_XCOFF
0d5817b2
DE
20417static void
20418rs6000_xcoff_asm_output_anchor (rtx symbol)
20419{
20420 char buffer[100];
20421
20422 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20423 SYMBOL_REF_BLOCK_OFFSET (symbol));
20424 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20425}
20426
7c262518 20427static void
a2369ed3 20428rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20429{
20430 fputs (GLOBAL_ASM_OP, stream);
20431 RS6000_OUTPUT_BASENAME (stream, name);
20432 putc ('\n', stream);
20433}
20434
d6b5193b
RS
20435/* A get_unnamed_decl callback, used for read-only sections. PTR
20436 points to the section string variable. */
20437
20438static void
20439rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20440{
890f9edf
OH
20441 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20442 *(const char *const *) directive,
20443 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20444}
20445
20446/* Likewise for read-write sections. */
20447
20448static void
20449rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20450{
890f9edf
OH
20451 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20452 *(const char *const *) directive,
20453 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20454}
20455
20456/* A get_unnamed_section callback, used for switching to toc_section. */
20457
20458static void
20459rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20460{
20461 if (TARGET_MINIMAL_TOC)
20462 {
20463 /* toc_section is always selected at least once from
20464 rs6000_xcoff_file_start, so this is guaranteed to
20465 always be defined once and only once in each file. */
20466 if (!toc_initialized)
20467 {
20468 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20469 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20470 toc_initialized = 1;
20471 }
20472 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20473 (TARGET_32BIT ? "" : ",3"));
20474 }
20475 else
20476 fputs ("\t.toc\n", asm_out_file);
20477}
20478
20479/* Implement TARGET_ASM_INIT_SECTIONS. */
20480
20481static void
20482rs6000_xcoff_asm_init_sections (void)
20483{
20484 read_only_data_section
20485 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20486 &xcoff_read_only_section_name);
20487
20488 private_data_section
20489 = get_unnamed_section (SECTION_WRITE,
20490 rs6000_xcoff_output_readwrite_section_asm_op,
20491 &xcoff_private_data_section_name);
20492
20493 read_only_private_data_section
20494 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20495 &xcoff_private_data_section_name);
20496
20497 toc_section
20498 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20499
20500 readonly_data_section = read_only_data_section;
20501 exception_section = data_section;
20502}
20503
9b580a0b
RH
20504static int
20505rs6000_xcoff_reloc_rw_mask (void)
20506{
20507 return 3;
20508}
20509
b275d088 20510static void
c18a5b6c
MM
20511rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20512 tree decl ATTRIBUTE_UNUSED)
7c262518 20513{
0e5dbd9b
DE
20514 int smclass;
20515 static const char * const suffix[3] = { "PR", "RO", "RW" };
20516
20517 if (flags & SECTION_CODE)
20518 smclass = 0;
20519 else if (flags & SECTION_WRITE)
20520 smclass = 2;
20521 else
20522 smclass = 1;
20523
5b5198f7 20524 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20525 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20526 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20527}
ae46c4e0 20528
d6b5193b 20529static section *
f676971a 20530rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20531 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20532{
9b580a0b 20533 if (decl_readonly_section (decl, reloc))
ae46c4e0 20534 {
0e5dbd9b 20535 if (TREE_PUBLIC (decl))
d6b5193b 20536 return read_only_data_section;
ae46c4e0 20537 else
d6b5193b 20538 return read_only_private_data_section;
ae46c4e0
RH
20539 }
20540 else
20541 {
0e5dbd9b 20542 if (TREE_PUBLIC (decl))
d6b5193b 20543 return data_section;
ae46c4e0 20544 else
d6b5193b 20545 return private_data_section;
ae46c4e0
RH
20546 }
20547}
20548
20549static void
a2369ed3 20550rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20551{
20552 const char *name;
ae46c4e0 20553
5b5198f7
DE
20554 /* Use select_section for private and uninitialized data. */
20555 if (!TREE_PUBLIC (decl)
20556 || DECL_COMMON (decl)
0e5dbd9b
DE
20557 || DECL_INITIAL (decl) == NULL_TREE
20558 || DECL_INITIAL (decl) == error_mark_node
20559 || (flag_zero_initialized_in_bss
20560 && initializer_zerop (DECL_INITIAL (decl))))
20561 return;
20562
20563 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20564 name = (*targetm.strip_name_encoding) (name);
20565 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20566}
b64a1b53 20567
fb49053f
RH
20568/* Select section for constant in constant pool.
20569
20570 On RS/6000, all constants are in the private read-only data area.
20571 However, if this is being placed in the TOC it must be output as a
20572 toc entry. */
20573
d6b5193b 20574static section *
f676971a 20575rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20576 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20577{
20578 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20579 return toc_section;
b64a1b53 20580 else
d6b5193b 20581 return read_only_private_data_section;
b64a1b53 20582}
772c5265
RH
20583
20584/* Remove any trailing [DS] or the like from the symbol name. */
20585
20586static const char *
a2369ed3 20587rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20588{
20589 size_t len;
20590 if (*name == '*')
20591 name++;
20592 len = strlen (name);
20593 if (name[len - 1] == ']')
20594 return ggc_alloc_string (name, len - 4);
20595 else
20596 return name;
20597}
20598
5add3202
DE
20599/* Section attributes. AIX is always PIC. */
20600
20601static unsigned int
a2369ed3 20602rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20603{
5b5198f7 20604 unsigned int align;
9b580a0b 20605 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20606
20607 /* Align to at least UNIT size. */
20608 if (flags & SECTION_CODE)
20609 align = MIN_UNITS_PER_WORD;
20610 else
20611 /* Increase alignment of large objects if not already stricter. */
20612 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20613 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20614 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20615
20616 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20617}
a5fe455b 20618
1bc7c5b6
ZW
20619/* Output at beginning of assembler file.
20620
20621 Initialize the section names for the RS/6000 at this point.
20622
20623 Specify filename, including full path, to assembler.
20624
20625 We want to go into the TOC section so at least one .toc will be emitted.
20626 Also, in order to output proper .bs/.es pairs, we need at least one static
20627 [RW] section emitted.
20628
20629 Finally, declare mcount when profiling to make the assembler happy. */
20630
20631static void
863d938c 20632rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20633{
20634 rs6000_gen_section_name (&xcoff_bss_section_name,
20635 main_input_filename, ".bss_");
20636 rs6000_gen_section_name (&xcoff_private_data_section_name,
20637 main_input_filename, ".rw_");
20638 rs6000_gen_section_name (&xcoff_read_only_section_name,
20639 main_input_filename, ".ro_");
20640
20641 fputs ("\t.file\t", asm_out_file);
20642 output_quoted_string (asm_out_file, main_input_filename);
20643 fputc ('\n', asm_out_file);
1bc7c5b6 20644 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20645 switch_to_section (private_data_section);
20646 switch_to_section (text_section);
1bc7c5b6
ZW
20647 if (profile_flag)
20648 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20649 rs6000_file_start ();
20650}
20651
a5fe455b
ZW
20652/* Output at end of assembler file.
20653 On the RS/6000, referencing data should automatically pull in text. */
20654
20655static void
863d938c 20656rs6000_xcoff_file_end (void)
a5fe455b 20657{
d6b5193b 20658 switch_to_section (text_section);
a5fe455b 20659 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20660 switch_to_section (data_section);
a5fe455b
ZW
20661 fputs (TARGET_32BIT
20662 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20663 asm_out_file);
20664}
f1384257 20665#endif /* TARGET_XCOFF */
0e5dbd9b 20666
3c50106f
RH
20667/* Compute a (partial) cost for rtx X. Return true if the complete
20668 cost has been computed, and false if subexpressions should be
20669 scanned. In either case, *TOTAL contains the cost result. */
20670
20671static bool
1494c534 20672rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20673{
f0517163
RS
20674 enum machine_mode mode = GET_MODE (x);
20675
3c50106f
RH
20676 switch (code)
20677 {
30a555d9 20678 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20679 case CONST_INT:
066cd967
DE
20680 if (((outer_code == SET
20681 || outer_code == PLUS
20682 || outer_code == MINUS)
279bb624
DE
20683 && (satisfies_constraint_I (x)
20684 || satisfies_constraint_L (x)))
066cd967 20685 || (outer_code == AND
279bb624
DE
20686 && (satisfies_constraint_K (x)
20687 || (mode == SImode
20688 ? satisfies_constraint_L (x)
20689 : satisfies_constraint_J (x))
1990cd79
AM
20690 || mask_operand (x, mode)
20691 || (mode == DImode
20692 && mask64_operand (x, DImode))))
22e54023 20693 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20694 && (satisfies_constraint_K (x)
20695 || (mode == SImode
20696 ? satisfies_constraint_L (x)
20697 : satisfies_constraint_J (x))))
066cd967
DE
20698 || outer_code == ASHIFT
20699 || outer_code == ASHIFTRT
20700 || outer_code == LSHIFTRT
20701 || outer_code == ROTATE
20702 || outer_code == ROTATERT
d5861a7a 20703 || outer_code == ZERO_EXTRACT
066cd967 20704 || (outer_code == MULT
279bb624 20705 && satisfies_constraint_I (x))
22e54023
DE
20706 || ((outer_code == DIV || outer_code == UDIV
20707 || outer_code == MOD || outer_code == UMOD)
20708 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20709 || (outer_code == COMPARE
279bb624
DE
20710 && (satisfies_constraint_I (x)
20711 || satisfies_constraint_K (x)))
22e54023 20712 || (outer_code == EQ
279bb624
DE
20713 && (satisfies_constraint_I (x)
20714 || satisfies_constraint_K (x)
20715 || (mode == SImode
20716 ? satisfies_constraint_L (x)
20717 : satisfies_constraint_J (x))))
22e54023 20718 || (outer_code == GTU
279bb624 20719 && satisfies_constraint_I (x))
22e54023 20720 || (outer_code == LTU
279bb624 20721 && satisfies_constraint_P (x)))
066cd967
DE
20722 {
20723 *total = 0;
20724 return true;
20725 }
20726 else if ((outer_code == PLUS
4ae234b0 20727 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20728 || (outer_code == MINUS
4ae234b0 20729 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20730 || ((outer_code == SET
20731 || outer_code == IOR
20732 || outer_code == XOR)
20733 && (INTVAL (x)
20734 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20735 {
20736 *total = COSTS_N_INSNS (1);
20737 return true;
20738 }
20739 /* FALLTHRU */
20740
20741 case CONST_DOUBLE:
f6fe3a22 20742 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20743 {
f6fe3a22
DE
20744 if ((outer_code == IOR || outer_code == XOR)
20745 && CONST_DOUBLE_HIGH (x) == 0
20746 && (CONST_DOUBLE_LOW (x)
20747 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20748 {
20749 *total = 0;
20750 return true;
20751 }
20752 else if ((outer_code == AND && and64_2_operand (x, DImode))
20753 || ((outer_code == SET
20754 || outer_code == IOR
20755 || outer_code == XOR)
20756 && CONST_DOUBLE_HIGH (x) == 0))
20757 {
20758 *total = COSTS_N_INSNS (1);
20759 return true;
20760 }
066cd967
DE
20761 }
20762 /* FALLTHRU */
20763
3c50106f 20764 case CONST:
066cd967 20765 case HIGH:
3c50106f 20766 case SYMBOL_REF:
066cd967
DE
20767 case MEM:
20768 /* When optimizing for size, MEM should be slightly more expensive
20769 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 20770 L1 cache latency is about two instructions. */
066cd967 20771 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20772 return true;
20773
30a555d9
DE
20774 case LABEL_REF:
20775 *total = 0;
20776 return true;
20777
3c50106f 20778 case PLUS:
f0517163 20779 if (mode == DFmode)
066cd967
DE
20780 {
20781 if (GET_CODE (XEXP (x, 0)) == MULT)
20782 {
20783 /* FNMA accounted in outer NEG. */
20784 if (outer_code == NEG)
20785 *total = rs6000_cost->dmul - rs6000_cost->fp;
20786 else
20787 *total = rs6000_cost->dmul;
20788 }
20789 else
20790 *total = rs6000_cost->fp;
20791 }
f0517163 20792 else if (mode == SFmode)
066cd967
DE
20793 {
20794 /* FNMA accounted in outer NEG. */
20795 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20796 *total = 0;
20797 else
20798 *total = rs6000_cost->fp;
20799 }
f0517163 20800 else
066cd967
DE
20801 *total = COSTS_N_INSNS (1);
20802 return false;
3c50106f 20803
52190329 20804 case MINUS:
f0517163 20805 if (mode == DFmode)
066cd967 20806 {
762c919f
JM
20807 if (GET_CODE (XEXP (x, 0)) == MULT
20808 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20809 {
20810 /* FNMA accounted in outer NEG. */
20811 if (outer_code == NEG)
762c919f 20812 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20813 else
20814 *total = rs6000_cost->dmul;
20815 }
20816 else
20817 *total = rs6000_cost->fp;
20818 }
f0517163 20819 else if (mode == SFmode)
066cd967
DE
20820 {
20821 /* FNMA accounted in outer NEG. */
20822 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20823 *total = 0;
20824 else
20825 *total = rs6000_cost->fp;
20826 }
f0517163 20827 else
c4ad648e 20828 *total = COSTS_N_INSNS (1);
066cd967 20829 return false;
3c50106f
RH
20830
20831 case MULT:
c9dbf840 20832 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20833 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20834 {
8b897cfa
RS
20835 if (INTVAL (XEXP (x, 1)) >= -256
20836 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20837 *total = rs6000_cost->mulsi_const9;
8b897cfa 20838 else
06a67bdd 20839 *total = rs6000_cost->mulsi_const;
3c50106f 20840 }
066cd967
DE
20841 /* FMA accounted in outer PLUS/MINUS. */
20842 else if ((mode == DFmode || mode == SFmode)
20843 && (outer_code == PLUS || outer_code == MINUS))
20844 *total = 0;
f0517163 20845 else if (mode == DFmode)
06a67bdd 20846 *total = rs6000_cost->dmul;
f0517163 20847 else if (mode == SFmode)
06a67bdd 20848 *total = rs6000_cost->fp;
f0517163 20849 else if (mode == DImode)
06a67bdd 20850 *total = rs6000_cost->muldi;
8b897cfa 20851 else
06a67bdd 20852 *total = rs6000_cost->mulsi;
066cd967 20853 return false;
3c50106f
RH
20854
20855 case DIV:
20856 case MOD:
f0517163
RS
20857 if (FLOAT_MODE_P (mode))
20858 {
06a67bdd
RS
20859 *total = mode == DFmode ? rs6000_cost->ddiv
20860 : rs6000_cost->sdiv;
066cd967 20861 return false;
f0517163 20862 }
5efb1046 20863 /* FALLTHRU */
3c50106f
RH
20864
20865 case UDIV:
20866 case UMOD:
627b6fe2
DJ
20867 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20868 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20869 {
20870 if (code == DIV || code == MOD)
20871 /* Shift, addze */
20872 *total = COSTS_N_INSNS (2);
20873 else
20874 /* Shift */
20875 *total = COSTS_N_INSNS (1);
20876 }
c4ad648e 20877 else
627b6fe2
DJ
20878 {
20879 if (GET_MODE (XEXP (x, 1)) == DImode)
20880 *total = rs6000_cost->divdi;
20881 else
20882 *total = rs6000_cost->divsi;
20883 }
20884 /* Add in shift and subtract for MOD. */
20885 if (code == MOD || code == UMOD)
20886 *total += COSTS_N_INSNS (2);
066cd967 20887 return false;
3c50106f 20888
32f56aad 20889 case CTZ:
3c50106f
RH
20890 case FFS:
20891 *total = COSTS_N_INSNS (4);
066cd967 20892 return false;
3c50106f 20893
32f56aad
DE
20894 case POPCOUNT:
20895 *total = COSTS_N_INSNS (6);
20896 return false;
20897
06a67bdd 20898 case NOT:
066cd967
DE
20899 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20900 {
20901 *total = 0;
20902 return false;
20903 }
20904 /* FALLTHRU */
20905
20906 case AND:
32f56aad 20907 case CLZ:
066cd967
DE
20908 case IOR:
20909 case XOR:
d5861a7a
DE
20910 case ZERO_EXTRACT:
20911 *total = COSTS_N_INSNS (1);
20912 return false;
20913
066cd967
DE
20914 case ASHIFT:
20915 case ASHIFTRT:
20916 case LSHIFTRT:
20917 case ROTATE:
20918 case ROTATERT:
d5861a7a 20919 /* Handle mul_highpart. */
066cd967
DE
20920 if (outer_code == TRUNCATE
20921 && GET_CODE (XEXP (x, 0)) == MULT)
20922 {
20923 if (mode == DImode)
20924 *total = rs6000_cost->muldi;
20925 else
20926 *total = rs6000_cost->mulsi;
20927 return true;
20928 }
d5861a7a
DE
20929 else if (outer_code == AND)
20930 *total = 0;
20931 else
20932 *total = COSTS_N_INSNS (1);
20933 return false;
20934
20935 case SIGN_EXTEND:
20936 case ZERO_EXTEND:
20937 if (GET_CODE (XEXP (x, 0)) == MEM)
20938 *total = 0;
20939 else
20940 *total = COSTS_N_INSNS (1);
066cd967 20941 return false;
06a67bdd 20942
066cd967
DE
20943 case COMPARE:
20944 case NEG:
20945 case ABS:
20946 if (!FLOAT_MODE_P (mode))
20947 {
20948 *total = COSTS_N_INSNS (1);
20949 return false;
20950 }
20951 /* FALLTHRU */
20952
20953 case FLOAT:
20954 case UNSIGNED_FLOAT:
20955 case FIX:
20956 case UNSIGNED_FIX:
06a67bdd
RS
20957 case FLOAT_TRUNCATE:
20958 *total = rs6000_cost->fp;
066cd967 20959 return false;
06a67bdd 20960
a2af5043
DJ
20961 case FLOAT_EXTEND:
20962 if (mode == DFmode)
20963 *total = 0;
20964 else
20965 *total = rs6000_cost->fp;
20966 return false;
20967
06a67bdd
RS
20968 case UNSPEC:
20969 switch (XINT (x, 1))
20970 {
20971 case UNSPEC_FRSP:
20972 *total = rs6000_cost->fp;
20973 return true;
20974
20975 default:
20976 break;
20977 }
20978 break;
20979
20980 case CALL:
20981 case IF_THEN_ELSE:
20982 if (optimize_size)
20983 {
20984 *total = COSTS_N_INSNS (1);
20985 return true;
20986 }
066cd967
DE
20987 else if (FLOAT_MODE_P (mode)
20988 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20989 {
20990 *total = rs6000_cost->fp;
20991 return false;
20992 }
06a67bdd
RS
20993 break;
20994
c0600ecd
DE
20995 case EQ:
20996 case GTU:
20997 case LTU:
22e54023
DE
20998 /* Carry bit requires mode == Pmode.
20999 NEG or PLUS already counted so only add one. */
21000 if (mode == Pmode
21001 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21002 {
22e54023
DE
21003 *total = COSTS_N_INSNS (1);
21004 return true;
21005 }
21006 if (outer_code == SET)
21007 {
21008 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21009 {
22e54023 21010 *total = COSTS_N_INSNS (2);
c0600ecd 21011 return true;
c0600ecd 21012 }
22e54023
DE
21013 else if (mode == Pmode)
21014 {
21015 *total = COSTS_N_INSNS (3);
21016 return false;
21017 }
21018 }
21019 /* FALLTHRU */
21020
21021 case GT:
21022 case LT:
21023 case UNORDERED:
21024 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21025 {
21026 *total = COSTS_N_INSNS (2);
21027 return true;
c0600ecd 21028 }
22e54023
DE
21029 /* CC COMPARE. */
21030 if (outer_code == COMPARE)
21031 {
21032 *total = 0;
21033 return true;
21034 }
21035 break;
c0600ecd 21036
3c50106f 21037 default:
06a67bdd 21038 break;
3c50106f 21039 }
06a67bdd
RS
21040
21041 return false;
3c50106f
RH
21042}
21043
34bb030a
DE
21044/* A C expression returning the cost of moving data of mode MODE from a
21045 register of class FROM to one of class TO.  */
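/* Roughly: any move involving GENERAL_REGS costs 2 per GPR, except that
   FPR/AltiVec <-> GPR transfers are priced as a round trip through
   memory and the CR_REGS case costs 4; a move within a class costs 2
   (4 for TFmode/TDmode), and every other cross-class move is costed as
   going via GENERAL_REGS.  */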
21046
21047int
f676971a 21048rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21049 enum reg_class from, enum reg_class to)
34bb030a
DE
21050{
21051 /* Moves from/to GENERAL_REGS. */
21052 if (reg_classes_intersect_p (to, GENERAL_REGS)
21053 || reg_classes_intersect_p (from, GENERAL_REGS))
21054 {
21055 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21056 from = to;
21057
21058 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21059 return (rs6000_memory_move_cost (mode, from, 0)
21060 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21061
c4ad648e
AM
21062 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21063 shift. */
34bb030a
DE
21064 else if (from == CR_REGS)
21065 return 4;
21066
21067 else
c4ad648e 21068 /* A move will cost one instruction per GPR moved. */
c8b622ff 21069 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21070 }
21071
c4ad648e 21072 /* Moving between two similar registers is just one instruction. */
34bb030a 21073 else if (reg_classes_intersect_p (to, from))
7393f7f8 21074 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21075
c4ad648e 21076 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21077 else
f676971a 21078 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21079 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21080}
21081
21082/* A C expression returning the cost of moving data of MODE from a register to
21083 or from memory. */
21084
21085int
f676971a 21086rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21087 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21088{
21089 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21090 return 4 * hard_regno_nregs[0][mode];
34bb030a 21091 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21092 return 4 * hard_regno_nregs[32][mode];
34bb030a 21093 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21094 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21095 else
21096 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21097}
21098
9c78b944
DE
21099/* Returns a code for a target-specific builtin that implements
21100 the reciprocal of the function, or NULL_TREE if not available.  */
21101
21102static tree
21103rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21104 bool sqrt ATTRIBUTE_UNUSED)
21105{
21106 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21107 && flag_finite_math_only && !flag_trapping_math
21108 && flag_unsafe_math_optimizations))
21109 return NULL_TREE;
21110
21111 if (md_fn)
21112 return NULL_TREE;
21113 else
21114 switch (fn)
21115 {
21116 case BUILT_IN_SQRTF:
21117 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21118
21119 default:
21120 return NULL_TREE;
21121 }
21122}
21123
ef765ea9
DE
21124/* Newton-Raphson approximation of single-precision floating point divide n/d.
21125 Assumes no trapping math and finite arguments. */
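/* Scheme used below: starting from the hardware estimate x0 ~= 1/d,
   form e0 = 1 - d*x0 and refine the reciprocal as
	y1 = x0*(1 + e0 + e0*e0),
   then compute the quotient with one correction step:
	u0 = n*y1,  v0 = n - d*u0,  dst = u0 + v0*y1.  */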
21126
21127void
9c78b944 21128rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21129{
21130 rtx x0, e0, e1, y1, u0, v0, one;
21131
21132 x0 = gen_reg_rtx (SFmode);
21133 e0 = gen_reg_rtx (SFmode);
21134 e1 = gen_reg_rtx (SFmode);
21135 y1 = gen_reg_rtx (SFmode);
21136 u0 = gen_reg_rtx (SFmode);
21137 v0 = gen_reg_rtx (SFmode);
21138 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21139
21140 /* x0 = 1./d estimate */
21141 emit_insn (gen_rtx_SET (VOIDmode, x0,
21142 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21143 UNSPEC_FRES)));
21144 /* e0 = 1. - d * x0 */
21145 emit_insn (gen_rtx_SET (VOIDmode, e0,
21146 gen_rtx_MINUS (SFmode, one,
21147 gen_rtx_MULT (SFmode, d, x0))));
21148 /* e1 = e0 + e0 * e0 */
21149 emit_insn (gen_rtx_SET (VOIDmode, e1,
21150 gen_rtx_PLUS (SFmode,
21151 gen_rtx_MULT (SFmode, e0, e0), e0)));
21152 /* y1 = x0 + e1 * x0 */
21153 emit_insn (gen_rtx_SET (VOIDmode, y1,
21154 gen_rtx_PLUS (SFmode,
21155 gen_rtx_MULT (SFmode, e1, x0), x0)));
21156 /* u0 = n * y1 */
21157 emit_insn (gen_rtx_SET (VOIDmode, u0,
21158 gen_rtx_MULT (SFmode, n, y1)));
21159 /* v0 = n - d * u0 */
21160 emit_insn (gen_rtx_SET (VOIDmode, v0,
21161 gen_rtx_MINUS (SFmode, n,
21162 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21163 /* dst = u0 + v0 * y1 */
21164 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21165 gen_rtx_PLUS (SFmode,
21166 gen_rtx_MULT (SFmode, v0, y1), u0)));
21167}
21168
21169/* Newton-Raphson approximation of double-precision floating point divide n/d.
21170 Assumes no trapping math and finite arguments. */
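/* Same scheme as rs6000_emit_swdivsf, but with more refinement because
   the reciprocal must be good to double precision.  With e0 = 1 - d*x0
   the code below computes
	y1 = x0*(1 + e0),  y2 = y1*(1 + e0^2),  y3 = y2*(1 + e0^4),
   and then the corrected quotient
	u0 = n*y3,  v0 = n - d*u0,  dst = u0 + v0*y3.  */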
21171
21172void
9c78b944 21173rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21174{
21175 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21176
21177 x0 = gen_reg_rtx (DFmode);
21178 e0 = gen_reg_rtx (DFmode);
21179 e1 = gen_reg_rtx (DFmode);
21180 e2 = gen_reg_rtx (DFmode);
21181 y1 = gen_reg_rtx (DFmode);
21182 y2 = gen_reg_rtx (DFmode);
21183 y3 = gen_reg_rtx (DFmode);
21184 u0 = gen_reg_rtx (DFmode);
21185 v0 = gen_reg_rtx (DFmode);
21186 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21187
21188 /* x0 = 1./d estimate */
21189 emit_insn (gen_rtx_SET (VOIDmode, x0,
21190 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21191 UNSPEC_FRES)));
21192 /* e0 = 1. - d * x0 */
21193 emit_insn (gen_rtx_SET (VOIDmode, e0,
21194 gen_rtx_MINUS (DFmode, one,
21195 gen_rtx_MULT (DFmode, d, x0))));
21196 /* y1 = x0 + e0 * x0 */
21197 emit_insn (gen_rtx_SET (VOIDmode, y1,
21198 gen_rtx_PLUS (DFmode,
21199 gen_rtx_MULT (DFmode, e0, x0), x0)));
21200 /* e1 = e0 * e0 */
21201 emit_insn (gen_rtx_SET (VOIDmode, e1,
21202 gen_rtx_MULT (DFmode, e0, e0)));
21203 /* y2 = y1 + e1 * y1 */
21204 emit_insn (gen_rtx_SET (VOIDmode, y2,
21205 gen_rtx_PLUS (DFmode,
21206 gen_rtx_MULT (DFmode, e1, y1), y1)));
21207 /* e2 = e1 * e1 */
21208 emit_insn (gen_rtx_SET (VOIDmode, e2,
21209 gen_rtx_MULT (DFmode, e1, e1)));
21210 /* y3 = y2 + e2 * y2 */
21211 emit_insn (gen_rtx_SET (VOIDmode, y3,
21212 gen_rtx_PLUS (DFmode,
21213 gen_rtx_MULT (DFmode, e2, y2), y2)));
21214 /* u0 = n * y3 */
21215 emit_insn (gen_rtx_SET (VOIDmode, u0,
21216 gen_rtx_MULT (DFmode, n, y3)));
21217 /* v0 = n - d * u0 */
21218 emit_insn (gen_rtx_SET (VOIDmode, v0,
21219 gen_rtx_MINUS (DFmode, n,
21220 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21221 /* dst = u0 + v0 * y3 */
21222 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21223 gen_rtx_PLUS (DFmode,
21224 gen_rtx_MULT (DFmode, v0, y3), u0)));
21225}
21226
565ef4ba 21227
9c78b944
DE
21228/* Newton-Raphson approximation of single-precision floating point rsqrt.
21229 Assumes no trapping math and finite arguments. */
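/* Scheme used below: after branching away for inputs where src*src == src
   (see the check at the top of the function body), refine the hardware
   rsqrt estimate with three Newton-Raphson steps of the form
	x_{i+1} = x_i * (1.5 - (src/2) * x_i * x_i),
   where src/2 is formed as 1.5*src - src so that the 1.5 constant can
   be reused.  */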
21230
21231void
21232rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21233{
21234 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21235 half, one, halfthree, c1, cond, label;
21236
21237 x0 = gen_reg_rtx (SFmode);
21238 x1 = gen_reg_rtx (SFmode);
21239 x2 = gen_reg_rtx (SFmode);
21240 y1 = gen_reg_rtx (SFmode);
21241 u0 = gen_reg_rtx (SFmode);
21242 u1 = gen_reg_rtx (SFmode);
21243 u2 = gen_reg_rtx (SFmode);
21244 v0 = gen_reg_rtx (SFmode);
21245 v1 = gen_reg_rtx (SFmode);
21246 v2 = gen_reg_rtx (SFmode);
21247 t0 = gen_reg_rtx (SFmode);
21248 halfthree = gen_reg_rtx (SFmode);
21249 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21250 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21251
21252 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21253 emit_insn (gen_rtx_SET (VOIDmode, t0,
21254 gen_rtx_MULT (SFmode, src, src)));
21255
21256 emit_insn (gen_rtx_SET (VOIDmode, cond,
21257 gen_rtx_COMPARE (CCFPmode, t0, src)));
21258 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21259 emit_unlikely_jump (c1, label);
21260
21261 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21262 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21263
21264 /* halfthree = 1.5 = 1.0 + 0.5 */
21265 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21266 gen_rtx_PLUS (SFmode, one, half)));
21267
21268 /* x0 = rsqrt estimate */
21269 emit_insn (gen_rtx_SET (VOIDmode, x0,
21270 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21271 UNSPEC_RSQRT)));
21272
21273 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21274 emit_insn (gen_rtx_SET (VOIDmode, y1,
21275 gen_rtx_MINUS (SFmode,
21276 gen_rtx_MULT (SFmode, src, halfthree),
21277 src)));
21278
21279 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21280 emit_insn (gen_rtx_SET (VOIDmode, u0,
21281 gen_rtx_MULT (SFmode, x0, x0)));
21282 emit_insn (gen_rtx_SET (VOIDmode, v0,
21283 gen_rtx_MINUS (SFmode,
21284 halfthree,
21285 gen_rtx_MULT (SFmode, y1, u0))));
21286 emit_insn (gen_rtx_SET (VOIDmode, x1,
21287 gen_rtx_MULT (SFmode, x0, v0)));
21288
21289 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21290 emit_insn (gen_rtx_SET (VOIDmode, u1,
21291 gen_rtx_MULT (SFmode, x1, x1)));
21292 emit_insn (gen_rtx_SET (VOIDmode, v1,
21293 gen_rtx_MINUS (SFmode,
21294 halfthree,
21295 gen_rtx_MULT (SFmode, y1, u1))));
21296 emit_insn (gen_rtx_SET (VOIDmode, x2,
21297 gen_rtx_MULT (SFmode, x1, v1)));
21298
21299 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21300 emit_insn (gen_rtx_SET (VOIDmode, u2,
21301 gen_rtx_MULT (SFmode, x2, x2)));
21302 emit_insn (gen_rtx_SET (VOIDmode, v2,
21303 gen_rtx_MINUS (SFmode,
21304 halfthree,
21305 gen_rtx_MULT (SFmode, y1, u2))));
21306 emit_insn (gen_rtx_SET (VOIDmode, dst,
21307 gen_rtx_MULT (SFmode, x2, v2)));
21308
21309 emit_label (XEXP (label, 0));
21310}
21311
565ef4ba
RS
21312/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21313 target, and SRC is the argument operand. */
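/* The expansion below relies on popcntb, which counts the set bits in
   each byte of the operand separately.  Multiplying that result by
   0x01010101 (or the 64-bit equivalent) sums the per-byte counts into
   the most significant byte, which the final right shift by 24 (or 56)
   bits extracts.  */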
21314
21315void
21316rs6000_emit_popcount (rtx dst, rtx src)
21317{
21318 enum machine_mode mode = GET_MODE (dst);
21319 rtx tmp1, tmp2;
21320
21321 tmp1 = gen_reg_rtx (mode);
21322
21323 if (mode == SImode)
21324 {
21325 emit_insn (gen_popcntbsi2 (tmp1, src));
21326 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21327 NULL_RTX, 0);
21328 tmp2 = force_reg (SImode, tmp2);
21329 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21330 }
21331 else
21332 {
21333 emit_insn (gen_popcntbdi2 (tmp1, src));
21334 tmp2 = expand_mult (DImode, tmp1,
21335 GEN_INT ((HOST_WIDE_INT)
21336 0x01010101 << 32 | 0x01010101),
21337 NULL_RTX, 0);
21338 tmp2 = force_reg (DImode, tmp2);
21339 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21340 }
21341}
21342
21343
21344/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21345 target, and SRC is the argument operand. */
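/* The parity of the operand is the low bit of the sum of the per-byte
   counts produced by popcntb.  Since only that bit matters, the counts
   may equally be combined with xor; the code below therefore either
   reuses rs6000_emit_popcount (when the multiply is cheap) or folds the
   popcntb result together with shift/xor pairs before masking with 1.  */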
21346
21347void
21348rs6000_emit_parity (rtx dst, rtx src)
21349{
21350 enum machine_mode mode = GET_MODE (dst);
21351 rtx tmp;
21352
21353 tmp = gen_reg_rtx (mode);
21354 if (mode == SImode)
21355 {
21356 /* Is mult+shift >= shift+xor+shift+xor? */
21357 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21358 {
21359 rtx tmp1, tmp2, tmp3, tmp4;
21360
21361 tmp1 = gen_reg_rtx (SImode);
21362 emit_insn (gen_popcntbsi2 (tmp1, src));
21363
21364 tmp2 = gen_reg_rtx (SImode);
21365 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21366 tmp3 = gen_reg_rtx (SImode);
21367 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21368
21369 tmp4 = gen_reg_rtx (SImode);
21370 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21371 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21372 }
21373 else
21374 rs6000_emit_popcount (tmp, src);
21375 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21376 }
21377 else
21378 {
21379 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21380 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21381 {
21382 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21383
21384 tmp1 = gen_reg_rtx (DImode);
21385 emit_insn (gen_popcntbdi2 (tmp1, src));
21386
21387 tmp2 = gen_reg_rtx (DImode);
21388 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21389 tmp3 = gen_reg_rtx (DImode);
21390 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21391
21392 tmp4 = gen_reg_rtx (DImode);
21393 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21394 tmp5 = gen_reg_rtx (DImode);
21395 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21396
21397 tmp6 = gen_reg_rtx (DImode);
21398 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21399 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21400 }
21401 else
21402 rs6000_emit_popcount (tmp, src);
21403 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21404 }
21405}
21406
ded9bf77
AH
21407/* Return an RTX representing where to find the function value of a
21408 function returning MODE. */
21409static rtx
21410rs6000_complex_function_value (enum machine_mode mode)
21411{
21412 unsigned int regno;
21413 rtx r1, r2;
21414 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21415 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21416
18f63bfa
AH
21417 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21418 regno = FP_ARG_RETURN;
354ed18f
AH
21419 else
21420 {
18f63bfa 21421 regno = GP_ARG_RETURN;
ded9bf77 21422
18f63bfa
AH
21423 /* 32-bit is OK since it'll go in r3/r4. */
21424 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21425 return gen_rtx_REG (mode, regno);
21426 }
21427
18f63bfa
AH
21428 if (inner_bytes >= 8)
21429 return gen_rtx_REG (mode, regno);
21430
ded9bf77
AH
21431 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21432 const0_rtx);
21433 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21434 GEN_INT (inner_bytes));
ded9bf77
AH
21435 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21436}
21437
a6ebc39a
AH
21438/* Define how to find the value returned by a function.
21439 VALTYPE is the data type of the value (as a tree).
21440 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21441 otherwise, FUNC is 0.
21442
21443 On the SPE, both FPs and vectors are returned in r3.
21444
21445 On RS/6000 an integer value is in r3 and a floating-point value is in
21446 fp1, unless -msoft-float. */
21447
21448rtx
586de218 21449rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21450{
21451 enum machine_mode mode;
2a8fa26c 21452 unsigned int regno;
a6ebc39a 21453
594a51fe
SS
21454 /* Special handling for structs in darwin64. */
21455 if (rs6000_darwin64_abi
21456 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21457 && TREE_CODE (valtype) == RECORD_TYPE
21458 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21459 {
21460 CUMULATIVE_ARGS valcum;
21461 rtx valret;
21462
0b5383eb 21463 valcum.words = 0;
594a51fe
SS
21464 valcum.fregno = FP_ARG_MIN_REG;
21465 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21466 /* Do a trial code generation as if this were going to be passed as
21467 an argument; if any part goes in memory, we return NULL. */
21468 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21469 if (valret)
21470 return valret;
21471 /* Otherwise fall through to standard ABI rules. */
21472 }
21473
0e67400a
FJ
21474 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21475 {
21476 /* Long long return values need to be split in the -mpowerpc64, 32-bit ABI. */
21477 return gen_rtx_PARALLEL (DImode,
21478 gen_rtvec (2,
21479 gen_rtx_EXPR_LIST (VOIDmode,
21480 gen_rtx_REG (SImode, GP_ARG_RETURN),
21481 const0_rtx),
21482 gen_rtx_EXPR_LIST (VOIDmode,
21483 gen_rtx_REG (SImode,
21484 GP_ARG_RETURN + 1),
21485 GEN_INT (4))));
21486 }
0f086e42
FJ
21487 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21488 {
21489 return gen_rtx_PARALLEL (DCmode,
21490 gen_rtvec (4,
21491 gen_rtx_EXPR_LIST (VOIDmode,
21492 gen_rtx_REG (SImode, GP_ARG_RETURN),
21493 const0_rtx),
21494 gen_rtx_EXPR_LIST (VOIDmode,
21495 gen_rtx_REG (SImode,
21496 GP_ARG_RETURN + 1),
21497 GEN_INT (4)),
21498 gen_rtx_EXPR_LIST (VOIDmode,
21499 gen_rtx_REG (SImode,
21500 GP_ARG_RETURN + 2),
21501 GEN_INT (8)),
21502 gen_rtx_EXPR_LIST (VOIDmode,
21503 gen_rtx_REG (SImode,
21504 GP_ARG_RETURN + 3),
21505 GEN_INT (12))));
21506 }
602ea4d3 21507
7348aa7f
FXC
21508 mode = TYPE_MODE (valtype);
21509 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21510 || POINTER_TYPE_P (valtype))
b78d48dd 21511 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21512
00b79d54 21513 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21514 {
21515 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21516 {
21517 switch (mode)
21518 {
21519 default:
21520 gcc_unreachable ();
21521 case SDmode:
21522 regno = GP_ARG_RETURN;
21523 break;
21524 case DDmode:
21525 regno = FP_ARG_RETURN;
21526 break;
21527 case TDmode:
21528 /* Use f2:f3 specified by the ABI. */
21529 regno = FP_ARG_RETURN + 1;
21530 break;
21531 }
21532 }
21533 else
21534 regno = GP_ARG_RETURN;
21535 }
00b79d54 21536 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21537 regno = FP_ARG_RETURN;
ded9bf77 21538 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21539 && targetm.calls.split_complex_arg)
ded9bf77 21540 return rs6000_complex_function_value (mode);
44688022 21541 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21542 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21543 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21544 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21545 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21546 && (mode == DFmode || mode == DDmode || mode == DCmode
21547 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21548 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21549 else
21550 regno = GP_ARG_RETURN;
21551
21552 return gen_rtx_REG (mode, regno);
21553}
21554
ded9bf77
AH
21555/* Define how to find the value returned by a library function
21556 assuming the value has mode MODE. */
21557rtx
21558rs6000_libcall_value (enum machine_mode mode)
21559{
21560 unsigned int regno;
21561
2e6c9641
FJ
21562 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21563 {
21564 /* Long long return values need to be split in the -mpowerpc64, 32-bit ABI. */
21565 return gen_rtx_PARALLEL (DImode,
21566 gen_rtvec (2,
21567 gen_rtx_EXPR_LIST (VOIDmode,
21568 gen_rtx_REG (SImode, GP_ARG_RETURN),
21569 const0_rtx),
21570 gen_rtx_EXPR_LIST (VOIDmode,
21571 gen_rtx_REG (SImode,
21572 GP_ARG_RETURN + 1),
21573 GEN_INT (4))));
21574 }
21575
00b79d54 21576 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21577 {
21578 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21579 {
21580 switch (mode)
21581 {
21582 default:
21583 gcc_unreachable ();
21584 case SDmode:
21585 regno = GP_ARG_RETURN;
21586 break;
21587 case DDmode:
21588 regno = FP_ARG_RETURN;
21589 break;
21590 case TDmode:
21591 /* Use f2:f3 specified by the ABI. */
21592 regno = FP_ARG_RETURN + 1;
21593 break;
21594 }
21595 }
21596 else
21597 regno = GP_ARG_RETURN;
21598 }
00b79d54 21599 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21600 && TARGET_HARD_FLOAT && TARGET_FPRS)
21601 regno = FP_ARG_RETURN;
44688022
AM
21602 else if (ALTIVEC_VECTOR_MODE (mode)
21603 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21604 regno = ALTIVEC_ARG_RETURN;
42ba5130 21605 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21606 return rs6000_complex_function_value (mode);
18f63bfa 21607 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4d4447b5
PB
21608 && (mode == DFmode || mode == DDmode || mode == DCmode
21609 || mode == TFmode || mode == TDmode || mode == TCmode))
18f63bfa 21610 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21611 else
21612 regno = GP_ARG_RETURN;
21613
21614 return gen_rtx_REG (mode, regno);
21615}
21616
d1d0c603
JJ
21617/* Define the offset between two registers, FROM, the register to be
21618   eliminated, and TO, its replacement, at the start of a routine. */
21619HOST_WIDE_INT
21620rs6000_initial_elimination_offset (int from, int to)
21621{
21622 rs6000_stack_t *info = rs6000_stack_info ();
21623 HOST_WIDE_INT offset;
21624
7d5175e1 21625 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21626 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21627 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21628 {
21629 offset = info->push_p ? 0 : -info->total_size;
21630 if (FRAME_GROWS_DOWNWARD)
5b667039 21631 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21632 }
21633 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21634 offset = FRAME_GROWS_DOWNWARD
5b667039 21635 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21636 : 0;
21637 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21638 offset = info->total_size;
21639 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21640 offset = info->push_p ? info->total_size : 0;
21641 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21642 offset = 0;
21643 else
37409796 21644 gcc_unreachable ();
d1d0c603
JJ
21645
21646 return offset;
21647}
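
/* Worked example (editorial, with made-up numbers): suppose
   rs6000_stack_info reports total_size == 64, push_p == 1 and
   fixed_size + vars_size + parm_size == 48.  Eliminating
   ARG_POINTER_REGNUM to STACK_POINTER_REGNUM then yields 64 (the
   argument pointer sits a full frame above the new stack pointer),
   HARD_FRAME_POINTER_REGNUM to STACK_POINTER_REGNUM yields 0 (the two
   coincide once the frame has been pushed), and, with
   FRAME_GROWS_DOWNWARD, FRAME_POINTER_REGNUM to
   HARD_FRAME_POINTER_REGNUM yields 48.  */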
21648
58646b77 21649/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21650
c8e4f0e9 21651static bool
3101faab 21652rs6000_is_opaque_type (const_tree type)
62e1dfcf 21653{
58646b77 21654 return (type == opaque_V2SI_type_node
2abe3e28 21655 || type == opaque_V2SF_type_node
58646b77
PB
21656 || type == opaque_p_V2SI_type_node
21657 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21658}
21659
96714395 21660static rtx
a2369ed3 21661rs6000_dwarf_register_span (rtx reg)
96714395
AH
21662{
21663 unsigned regno;
21664
4d4cbc0e
AH
21665 if (TARGET_SPE
21666 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
21667 || (TARGET_E500_DOUBLE
21668 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
21669 ;
21670 else
96714395
AH
21671 return NULL_RTX;
21672
21673 regno = REGNO (reg);
21674
21675 /* The duality of the SPE register size wreaks all kinds of havoc.
21676 This is a way of distinguishing r0 in 32 bits from r0 in
21677 64 bits. */
21678 return
21679 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21680 BYTES_BIG_ENDIAN
21681 ? gen_rtvec (2,
21682 gen_rtx_REG (SImode, regno + 1200),
21683 gen_rtx_REG (SImode, regno))
21684 : gen_rtvec (2,
21685 gen_rtx_REG (SImode, regno),
21686 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21687}
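
/* For example (editorial sketch): on a big-endian e500 double target,
   the span returned above for a DFmode value in r5 is

     (parallel [(reg:SI 1205) (reg:SI 5)])

   i.e. the high-part number 1200 + 5 followed by the ordinary GPR, so
   the unwinder sees both 32-bit halves of the 64-bit register.  */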
21688
37ea0b7e
JM
21689/* Fill in sizes for SPE register high parts in the table used by the unwinder. */
21690
21691static void
21692rs6000_init_dwarf_reg_sizes_extra (tree address)
21693{
21694 if (TARGET_SPE)
21695 {
21696 int i;
21697 enum machine_mode mode = TYPE_MODE (char_type_node);
21698 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21699 rtx mem = gen_rtx_MEM (BLKmode, addr);
21700 rtx value = gen_int_mode (4, mode);
21701
21702 for (i = 1201; i < 1232; i++)
21703 {
21704 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21705 HOST_WIDE_INT offset
21706 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21707
21708 emit_move_insn (adjust_address (mem, mode, offset), value);
21709 }
21710 }
21711}
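
/* In effect (editorial note): for each SPE high-part column (internal
   register numbers 1201 through 1231 above) the loop stores the value 4
   into the unwinder's register-size table, recording that each saved
   high part is four bytes wide; all other registers keep their default
   sizes.  */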
21712
93c9d1ba
AM
21713/* Map internal gcc register numbers to DWARF2 register numbers. */
21714
21715unsigned int
21716rs6000_dbx_register_number (unsigned int regno)
21717{
21718 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21719 return regno;
21720 if (regno == MQ_REGNO)
21721 return 100;
1de43f85 21722 if (regno == LR_REGNO)
93c9d1ba 21723 return 108;
1de43f85 21724 if (regno == CTR_REGNO)
93c9d1ba
AM
21725 return 109;
21726 if (CR_REGNO_P (regno))
21727 return regno - CR0_REGNO + 86;
21728 if (regno == XER_REGNO)
21729 return 101;
21730 if (ALTIVEC_REGNO_P (regno))
21731 return regno - FIRST_ALTIVEC_REGNO + 1124;
21732 if (regno == VRSAVE_REGNO)
21733 return 356;
21734 if (regno == VSCR_REGNO)
21735 return 67;
21736 if (regno == SPE_ACC_REGNO)
21737 return 99;
21738 if (regno == SPEFSCR_REGNO)
21739 return 612;
21740 /* SPE high reg number. We get these values of regno from
21741 rs6000_dwarf_register_span. */
37409796
NS
21742 gcc_assert (regno >= 1200 && regno < 1232);
21743 return regno;
93c9d1ba
AM
21744}
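
/* Editorial sketch, not part of GCC: a self-contained restatement of the
   condition-register part of the mapping above.  The helper is
   illustrative only and is not referenced; just the "86 + field"
   relationship is taken from rs6000_dbx_register_number (CR0 maps to
   DWARF column 86, so CR2 maps to 88, the link register to 108 and the
   count register to 109).  */

static unsigned int
example_dwarf_column_for_cr_field (unsigned int cr_field)
{
  /* CR fields 0..7 occupy consecutive DWARF columns starting at 86.  */
  return 86 + cr_field;
}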
21745
93f90be6 21746/* Target hook for eh_return_filter_mode. */
f676971a 21747static enum machine_mode
93f90be6
FJ
21748rs6000_eh_return_filter_mode (void)
21749{
21750 return TARGET_32BIT ? SImode : word_mode;
21751}
21752
00b79d54
BE
21753/* Target hook for scalar_mode_supported_p. */
21754static bool
21755rs6000_scalar_mode_supported_p (enum machine_mode mode)
21756{
21757 if (DECIMAL_FLOAT_MODE_P (mode))
21758 return true;
21759 else
21760 return default_scalar_mode_supported_p (mode);
21761}
21762
f676971a
EC
21763/* Target hook for vector_mode_supported_p. */
21764static bool
21765rs6000_vector_mode_supported_p (enum machine_mode mode)
21766{
21767
96038623
DE
21768 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21769 return true;
21770
f676971a
EC
21771 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21772 return true;
21773
21774 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21775 return true;
21776
21777 else
21778 return false;
21779}
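
/* For instance (editorial note): with -maltivec the 128-bit AltiVec
   modes such as V4SImode and V4SFmode are reported as supported, an SPE
   target reports its 64-bit vector modes instead, and everything else
   falls through to the final "return false".  */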
21780
bb8df8a6
EC
21781/* Target hook for invalid_arg_for_unprototyped_fn. */
21782static const char *
3101faab 21783invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21784{
21785 return (!rs6000_darwin64_abi
21786 && typelist == 0
21787 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21788 && (funcdecl == NULL_TREE
21789 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21790 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21791 ? N_("AltiVec argument passed to unprototyped function")
21792 : NULL;
21793}
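
/* Example of the case diagnosed above (editorial sketch; the names are
   made up and the snippet is not part of GCC).  With AltiVec enabled,
   code such as

     extern void example_callee ();
     void example_caller (__vector signed int v) { example_callee (v); }

   passes a vector to a function declared without a prototype and draws
   the "AltiVec argument passed to unprototyped function" warning;
   declaring "extern void example_callee (__vector signed int);" avoids
   it.  */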
21794
3aebbe5f
JJ
21795/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
21796   setup by calling the hidden function __stack_chk_fail_local instead of
21797   calling __stack_chk_fail directly.  Otherwise it is better to call
21798   __stack_chk_fail directly. */
21799
21800static tree
21801rs6000_stack_protect_fail (void)
21802{
21803 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21804 ? default_hidden_stack_protect_fail ()
21805 : default_external_stack_protect_fail ();
21806}
21807
17211ab5 21808#include "gt-rs6000.h"