]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rs6000/rs6000.c
Add missing entry.
[thirdparty/gcc.git] / gcc / config / rs6000 / rs6000.c
CommitLineData
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
4d3e6fae 56#include "intl.h"
59d6560b 57#include "params.h"
279bb624 58#include "tm-constrs.h"
1bc7c5b6
ZW
59#if TARGET_XCOFF
60#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61#endif
93a27b7b
ZW
62#if TARGET_MACHO
63#include "gstab.h" /* for N_SLINE */
64#endif
9b30bae2 65
7509c759
MM
66#ifndef TARGET_NO_PROTOTYPE
67#define TARGET_NO_PROTOTYPE 0
68#endif
69
9878760c
RK
70#define min(A,B) ((A) < (B) ? (A) : (B))
71#define max(A,B) ((A) > (B) ? (A) : (B))
72
d1d0c603
JJ
73/* Structure used to define the rs6000 stack */
74typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
81 int push_p; /* true if we need to allocate stack space */
82 int calls_p; /* true if the function makes any calls */
c4ad648e 83 int world_save_p; /* true if we're saving *everything*:
d62294f5 84 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
85 enum rs6000_abi abi; /* which ABI to use */
86 int gp_save_offset; /* offset to save GP regs from initial SP */
87 int fp_save_offset; /* offset to save FP regs from initial SP */
88 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
89 int lr_save_offset; /* offset to save LR from initial SP */
90 int cr_save_offset; /* offset to save CR from initial SP */
91 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
92 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
96 HOST_WIDE_INT vars_size; /* variable save area size */
97 int parm_size; /* outgoing parameter size */
98 int save_size; /* save area size */
99 int fixed_size; /* fixed size of stack frame */
100 int gp_size; /* size of saved GP registers */
101 int fp_size; /* size of saved FP registers */
102 int altivec_size; /* size of saved AltiVec registers */
103 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
106 not in save_size */
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
d1d0c603
JJ
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
111} rs6000_stack_t;
112
5b667039
JJ
113/* A C structure for machine-specific, per-function data.
114 This is added to the cfun structure. */
115typedef struct machine_function GTY(())
116{
117 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
118 int ra_needs_full_frame;
119 /* Some local-dynamic symbol. */
120 const char *some_ld_name;
121 /* Whether the instruction chain has been scanned already. */
122 int insn_chain_scanned_p;
123 /* Flags if __builtin_return_address (0) was used. */
124 int ra_need_lr;
125 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
126 varargs save area. */
127 HOST_WIDE_INT varargs_save_offset;
128} machine_function;
129
5248c961
RK
130/* Target cpu type */
131
132enum processor_type rs6000_cpu;
8e3f41e7
MM
133struct rs6000_cpu_select rs6000_select[3] =
134{
815cdc52
MM
135 /* switch name, tune arch */
136 { (const char *)0, "--with-cpu=", 1, 1 },
137 { (const char *)0, "-mcpu=", 1, 1 },
138 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 139};
5248c961 140
d296e02e
AP
141static GTY(()) bool rs6000_cell_dont_microcode;
142
ec507f2d
DE
143/* Always emit branch hint bits. */
144static GTY(()) bool rs6000_always_hint;
145
146/* Schedule instructions for group formation. */
147static GTY(()) bool rs6000_sched_groups;
148
44cd321e
PS
149/* Align branch targets. */
150static GTY(()) bool rs6000_align_branch_targets;
151
569fa502
DN
152/* Support for -msched-costly-dep option. */
153const char *rs6000_sched_costly_dep_str;
154enum rs6000_dependence_cost rs6000_sched_costly_dep;
155
cbe26ab8
DN
156/* Support for -minsert-sched-nops option. */
157const char *rs6000_sched_insert_nops_str;
158enum rs6000_nop_insertion rs6000_sched_insert_nops;
159
7ccf35ed 160/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 161static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 162
602ea4d3 163/* Size of long double. */
6fa3f289
ZW
164int rs6000_long_double_type_size;
165
602ea4d3
JJ
166/* IEEE quad extended precision long double. */
167int rs6000_ieeequad;
168
169/* Whether -mabi=altivec has appeared. */
6fa3f289
ZW
170int rs6000_altivec_abi;
171
a3170dc6
AH
172/* Nonzero if we want SPE ABI extensions. */
173int rs6000_spe_abi;
174
5da702b1
AH
175/* Nonzero if floating point operations are done in the GPRs. */
176int rs6000_float_gprs = 0;
177
594a51fe
SS
178/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
179int rs6000_darwin64_abi;
180
a0ab749a 181/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 182static GTY(()) int common_mode_defined;
c81bebd7 183
9878760c
RK
184/* Save information from a "cmpxx" operation until the branch or scc is
185 emitted. */
9878760c
RK
186rtx rs6000_compare_op0, rs6000_compare_op1;
187int rs6000_compare_fp_p;
874a0744 188
874a0744
MM
189/* Label number of label created for -mrelocatable, to call to so we can
190 get the address of the GOT section */
191int rs6000_pic_labelno;
c81bebd7 192
b91da81f 193#ifdef USING_ELFOS_H
c81bebd7 194/* Which abi to adhere to */
9739c90c 195const char *rs6000_abi_name;
d9407988
MM
196
197/* Semantics of the small data area */
198enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
199
200/* Which small data model to use */
815cdc52 201const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
202
203/* Counter for labels which are to be placed in .fixup. */
204int fixuplabelno = 0;
874a0744 205#endif
4697a36c 206
c4501e62
JJ
207/* Bit size of immediate TLS offsets and string from which it is decoded. */
208int rs6000_tls_size = 32;
209const char *rs6000_tls_size_string;
210
b6c9286a
MM
211/* ABI enumeration available for subtarget to use. */
212enum rs6000_abi rs6000_current_abi;
213
85b776df
AM
214/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
215int dot_symbols;
216
38c1f2d7 217/* Debug flags */
815cdc52 218const char *rs6000_debug_name;
38c1f2d7
MM
219int rs6000_debug_stack; /* debug stack applications */
220int rs6000_debug_arg; /* debug argument handling */
221
aabcd309 222/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
223bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
224
58646b77
PB
225/* Built in types. */
226
227tree rs6000_builtin_types[RS6000_BTI_MAX];
228tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 229
57ac7be9
AM
230const char *rs6000_traceback_name;
231static enum {
232 traceback_default = 0,
233 traceback_none,
234 traceback_part,
235 traceback_full
236} rs6000_traceback;
237
38c1f2d7
MM
238/* Flag to say the TOC is initialized */
239int toc_initialized;
9ebbca7d 240char toc_label_name[10];
38c1f2d7 241
44cd321e
PS
242/* Cached value of rs6000_variable_issue. This is cached in
243 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
244static short cached_can_issue_more;
245
d6b5193b
RS
246static GTY(()) section *read_only_data_section;
247static GTY(()) section *private_data_section;
248static GTY(()) section *read_only_private_data_section;
249static GTY(()) section *sdata2_section;
250static GTY(()) section *toc_section;
251
a3c9585f
KH
252/* Control alignment for fields within structures. */
253/* String from -malign-XXXXX. */
025d9908
KH
254int rs6000_alignment_flags;
255
78f5898b
AH
256/* True for any options that were explicitly set. */
257struct {
df01da37 258 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 259 bool alignment; /* True if -malign- was used. */
d3603e8c 260 bool abi; /* True if -mabi=spe/nospe was used. */
78f5898b
AH
261 bool spe; /* True if -mspe= was used. */
262 bool float_gprs; /* True if -mfloat-gprs= was used. */
263 bool isel; /* True if -misel was used. */
264 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 265 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
78f5898b
AH
266} rs6000_explicit_options;
267
a3170dc6
AH
268struct builtin_description
269{
270 /* mask is not const because we're going to alter it below. This
271 nonsense will go away when we rewrite the -march infrastructure
272 to give us more target flag bits. */
273 unsigned int mask;
274 const enum insn_code icode;
275 const char *const name;
276 const enum rs6000_builtins code;
277};
8b897cfa
RS
278\f
279/* Target cpu costs. */
280
281struct processor_costs {
c4ad648e 282 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
283 const int mulsi_const; /* cost of SImode multiplication by constant. */
284 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
285 const int muldi; /* cost of DImode multiplication. */
286 const int divsi; /* cost of SImode division. */
287 const int divdi; /* cost of DImode division. */
288 const int fp; /* cost of simple SFmode and DFmode insns. */
289 const int dmul; /* cost of DFmode multiplication (and fmadd). */
290 const int sdiv; /* cost of SFmode division (fdivs). */
291 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
292 const int cache_line_size; /* cache line size in bytes. */
293 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
294 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
295 const int simultaneous_prefetches; /* number of parallel prefetch
296 operations. */
8b897cfa
RS
297};
298
299const struct processor_costs *rs6000_cost;
300
301/* Processor costs (relative to an add) */
302
303/* Instruction size costs on 32bit processors. */
304static const
305struct processor_costs size32_cost = {
06a67bdd
RS
306 COSTS_N_INSNS (1), /* mulsi */
307 COSTS_N_INSNS (1), /* mulsi_const */
308 COSTS_N_INSNS (1), /* mulsi_const9 */
309 COSTS_N_INSNS (1), /* muldi */
310 COSTS_N_INSNS (1), /* divsi */
311 COSTS_N_INSNS (1), /* divdi */
312 COSTS_N_INSNS (1), /* fp */
313 COSTS_N_INSNS (1), /* dmul */
314 COSTS_N_INSNS (1), /* sdiv */
315 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
316 32,
317 0,
318 0,
5f732aba 319 0,
8b897cfa
RS
320};
321
322/* Instruction size costs on 64bit processors. */
323static const
324struct processor_costs size64_cost = {
06a67bdd
RS
325 COSTS_N_INSNS (1), /* mulsi */
326 COSTS_N_INSNS (1), /* mulsi_const */
327 COSTS_N_INSNS (1), /* mulsi_const9 */
328 COSTS_N_INSNS (1), /* muldi */
329 COSTS_N_INSNS (1), /* divsi */
330 COSTS_N_INSNS (1), /* divdi */
331 COSTS_N_INSNS (1), /* fp */
332 COSTS_N_INSNS (1), /* dmul */
333 COSTS_N_INSNS (1), /* sdiv */
334 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
335 128,
336 0,
337 0,
5f732aba 338 0,
8b897cfa
RS
339};
340
341/* Instruction costs on RIOS1 processors. */
342static const
343struct processor_costs rios1_cost = {
06a67bdd
RS
344 COSTS_N_INSNS (5), /* mulsi */
345 COSTS_N_INSNS (4), /* mulsi_const */
346 COSTS_N_INSNS (3), /* mulsi_const9 */
347 COSTS_N_INSNS (5), /* muldi */
348 COSTS_N_INSNS (19), /* divsi */
349 COSTS_N_INSNS (19), /* divdi */
350 COSTS_N_INSNS (2), /* fp */
351 COSTS_N_INSNS (2), /* dmul */
352 COSTS_N_INSNS (19), /* sdiv */
353 COSTS_N_INSNS (19), /* ddiv */
5f732aba
DE
354 128,
355 64, /* l1 cache */
356 512, /* l2 cache */
0b11da67 357 0, /* streams */
8b897cfa
RS
358};
359
360/* Instruction costs on RIOS2 processors. */
361static const
362struct processor_costs rios2_cost = {
06a67bdd
RS
363 COSTS_N_INSNS (2), /* mulsi */
364 COSTS_N_INSNS (2), /* mulsi_const */
365 COSTS_N_INSNS (2), /* mulsi_const9 */
366 COSTS_N_INSNS (2), /* muldi */
367 COSTS_N_INSNS (13), /* divsi */
368 COSTS_N_INSNS (13), /* divdi */
369 COSTS_N_INSNS (2), /* fp */
370 COSTS_N_INSNS (2), /* dmul */
371 COSTS_N_INSNS (17), /* sdiv */
372 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
373 256,
374 256, /* l1 cache */
375 1024, /* l2 cache */
0b11da67 376 0, /* streams */
8b897cfa
RS
377};
378
379/* Instruction costs on RS64A processors. */
380static const
381struct processor_costs rs64a_cost = {
06a67bdd
RS
382 COSTS_N_INSNS (20), /* mulsi */
383 COSTS_N_INSNS (12), /* mulsi_const */
384 COSTS_N_INSNS (8), /* mulsi_const9 */
385 COSTS_N_INSNS (34), /* muldi */
386 COSTS_N_INSNS (65), /* divsi */
387 COSTS_N_INSNS (67), /* divdi */
388 COSTS_N_INSNS (4), /* fp */
389 COSTS_N_INSNS (4), /* dmul */
390 COSTS_N_INSNS (31), /* sdiv */
391 COSTS_N_INSNS (31), /* ddiv */
0b11da67 392 128,
5f732aba
DE
393 128, /* l1 cache */
394 2048, /* l2 cache */
0b11da67 395 1, /* streams */
8b897cfa
RS
396};
397
398/* Instruction costs on MPCCORE processors. */
399static const
400struct processor_costs mpccore_cost = {
06a67bdd
RS
401 COSTS_N_INSNS (2), /* mulsi */
402 COSTS_N_INSNS (2), /* mulsi_const */
403 COSTS_N_INSNS (2), /* mulsi_const9 */
404 COSTS_N_INSNS (2), /* muldi */
405 COSTS_N_INSNS (6), /* divsi */
406 COSTS_N_INSNS (6), /* divdi */
407 COSTS_N_INSNS (4), /* fp */
408 COSTS_N_INSNS (5), /* dmul */
409 COSTS_N_INSNS (10), /* sdiv */
410 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
411 32,
412 4, /* l1 cache */
413 16, /* l2 cache */
0b11da67 414 1, /* streams */
8b897cfa
RS
415};
416
417/* Instruction costs on PPC403 processors. */
418static const
419struct processor_costs ppc403_cost = {
06a67bdd
RS
420 COSTS_N_INSNS (4), /* mulsi */
421 COSTS_N_INSNS (4), /* mulsi_const */
422 COSTS_N_INSNS (4), /* mulsi_const9 */
423 COSTS_N_INSNS (4), /* muldi */
424 COSTS_N_INSNS (33), /* divsi */
425 COSTS_N_INSNS (33), /* divdi */
426 COSTS_N_INSNS (11), /* fp */
427 COSTS_N_INSNS (11), /* dmul */
428 COSTS_N_INSNS (11), /* sdiv */
429 COSTS_N_INSNS (11), /* ddiv */
0b11da67 430 32,
5f732aba
DE
431 4, /* l1 cache */
432 16, /* l2 cache */
0b11da67 433 1, /* streams */
8b897cfa
RS
434};
435
436/* Instruction costs on PPC405 processors. */
437static const
438struct processor_costs ppc405_cost = {
06a67bdd
RS
439 COSTS_N_INSNS (5), /* mulsi */
440 COSTS_N_INSNS (4), /* mulsi_const */
441 COSTS_N_INSNS (3), /* mulsi_const9 */
442 COSTS_N_INSNS (5), /* muldi */
443 COSTS_N_INSNS (35), /* divsi */
444 COSTS_N_INSNS (35), /* divdi */
445 COSTS_N_INSNS (11), /* fp */
446 COSTS_N_INSNS (11), /* dmul */
447 COSTS_N_INSNS (11), /* sdiv */
448 COSTS_N_INSNS (11), /* ddiv */
0b11da67 449 32,
5f732aba
DE
450 16, /* l1 cache */
451 128, /* l2 cache */
0b11da67 452 1, /* streams */
8b897cfa
RS
453};
454
455/* Instruction costs on PPC440 processors. */
456static const
457struct processor_costs ppc440_cost = {
06a67bdd
RS
458 COSTS_N_INSNS (3), /* mulsi */
459 COSTS_N_INSNS (2), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (3), /* muldi */
462 COSTS_N_INSNS (34), /* divsi */
463 COSTS_N_INSNS (34), /* divdi */
464 COSTS_N_INSNS (5), /* fp */
465 COSTS_N_INSNS (5), /* dmul */
466 COSTS_N_INSNS (19), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
0b11da67 468 32,
5f732aba
DE
469 32, /* l1 cache */
470 256, /* l2 cache */
0b11da67 471 1, /* streams */
8b897cfa
RS
472};
473
474/* Instruction costs on PPC601 processors. */
475static const
476struct processor_costs ppc601_cost = {
06a67bdd
RS
477 COSTS_N_INSNS (5), /* mulsi */
478 COSTS_N_INSNS (5), /* mulsi_const */
479 COSTS_N_INSNS (5), /* mulsi_const9 */
480 COSTS_N_INSNS (5), /* muldi */
481 COSTS_N_INSNS (36), /* divsi */
482 COSTS_N_INSNS (36), /* divdi */
483 COSTS_N_INSNS (4), /* fp */
484 COSTS_N_INSNS (5), /* dmul */
485 COSTS_N_INSNS (17), /* sdiv */
486 COSTS_N_INSNS (31), /* ddiv */
0b11da67 487 32,
5f732aba
DE
488 32, /* l1 cache */
489 256, /* l2 cache */
0b11da67 490 1, /* streams */
8b897cfa
RS
491};
492
493/* Instruction costs on PPC603 processors. */
494static const
495struct processor_costs ppc603_cost = {
06a67bdd
RS
496 COSTS_N_INSNS (5), /* mulsi */
497 COSTS_N_INSNS (3), /* mulsi_const */
498 COSTS_N_INSNS (2), /* mulsi_const9 */
499 COSTS_N_INSNS (5), /* muldi */
500 COSTS_N_INSNS (37), /* divsi */
501 COSTS_N_INSNS (37), /* divdi */
502 COSTS_N_INSNS (3), /* fp */
503 COSTS_N_INSNS (4), /* dmul */
504 COSTS_N_INSNS (18), /* sdiv */
505 COSTS_N_INSNS (33), /* ddiv */
0b11da67 506 32,
5f732aba
DE
507 8, /* l1 cache */
508 64, /* l2 cache */
0b11da67 509 1, /* streams */
8b897cfa
RS
510};
511
512/* Instruction costs on PPC604 processors. */
513static const
514struct processor_costs ppc604_cost = {
06a67bdd
RS
515 COSTS_N_INSNS (4), /* mulsi */
516 COSTS_N_INSNS (4), /* mulsi_const */
517 COSTS_N_INSNS (4), /* mulsi_const9 */
518 COSTS_N_INSNS (4), /* muldi */
519 COSTS_N_INSNS (20), /* divsi */
520 COSTS_N_INSNS (20), /* divdi */
521 COSTS_N_INSNS (3), /* fp */
522 COSTS_N_INSNS (3), /* dmul */
523 COSTS_N_INSNS (18), /* sdiv */
524 COSTS_N_INSNS (32), /* ddiv */
0b11da67 525 32,
5f732aba
DE
526 16, /* l1 cache */
527 512, /* l2 cache */
0b11da67 528 1, /* streams */
8b897cfa
RS
529};
530
531/* Instruction costs on PPC604e processors. */
532static const
533struct processor_costs ppc604e_cost = {
06a67bdd
RS
534 COSTS_N_INSNS (2), /* mulsi */
535 COSTS_N_INSNS (2), /* mulsi_const */
536 COSTS_N_INSNS (2), /* mulsi_const9 */
537 COSTS_N_INSNS (2), /* muldi */
538 COSTS_N_INSNS (20), /* divsi */
539 COSTS_N_INSNS (20), /* divdi */
540 COSTS_N_INSNS (3), /* fp */
541 COSTS_N_INSNS (3), /* dmul */
542 COSTS_N_INSNS (18), /* sdiv */
543 COSTS_N_INSNS (32), /* ddiv */
0b11da67 544 32,
5f732aba
DE
545 32, /* l1 cache */
546 1024, /* l2 cache */
0b11da67 547 1, /* streams */
8b897cfa
RS
548};
549
f0517163 550/* Instruction costs on PPC620 processors. */
8b897cfa
RS
551static const
552struct processor_costs ppc620_cost = {
06a67bdd
RS
553 COSTS_N_INSNS (5), /* mulsi */
554 COSTS_N_INSNS (4), /* mulsi_const */
555 COSTS_N_INSNS (3), /* mulsi_const9 */
556 COSTS_N_INSNS (7), /* muldi */
557 COSTS_N_INSNS (21), /* divsi */
558 COSTS_N_INSNS (37), /* divdi */
559 COSTS_N_INSNS (3), /* fp */
560 COSTS_N_INSNS (3), /* dmul */
561 COSTS_N_INSNS (18), /* sdiv */
562 COSTS_N_INSNS (32), /* ddiv */
0b11da67 563 128,
5f732aba
DE
564 32, /* l1 cache */
565 1024, /* l2 cache */
0b11da67 566 1, /* streams */
f0517163
RS
567};
568
569/* Instruction costs on PPC630 processors. */
570static const
571struct processor_costs ppc630_cost = {
06a67bdd
RS
572 COSTS_N_INSNS (5), /* mulsi */
573 COSTS_N_INSNS (4), /* mulsi_const */
574 COSTS_N_INSNS (3), /* mulsi_const9 */
575 COSTS_N_INSNS (7), /* muldi */
576 COSTS_N_INSNS (21), /* divsi */
577 COSTS_N_INSNS (37), /* divdi */
578 COSTS_N_INSNS (3), /* fp */
579 COSTS_N_INSNS (3), /* dmul */
580 COSTS_N_INSNS (17), /* sdiv */
581 COSTS_N_INSNS (21), /* ddiv */
0b11da67 582 128,
5f732aba
DE
583 64, /* l1 cache */
584 1024, /* l2 cache */
0b11da67 585 1, /* streams */
8b897cfa
RS
586};
587
d296e02e
AP
588/* Instruction costs on Cell processor. */
589/* COSTS_N_INSNS (1) ~ one add. */
590static const
591struct processor_costs ppccell_cost = {
592 COSTS_N_INSNS (9/2)+2, /* mulsi */
593 COSTS_N_INSNS (6/2), /* mulsi_const */
594 COSTS_N_INSNS (6/2), /* mulsi_const9 */
595 COSTS_N_INSNS (15/2)+2, /* muldi */
596 COSTS_N_INSNS (38/2), /* divsi */
597 COSTS_N_INSNS (70/2), /* divdi */
598 COSTS_N_INSNS (10/2), /* fp */
599 COSTS_N_INSNS (10/2), /* dmul */
600 COSTS_N_INSNS (74/2), /* sdiv */
601 COSTS_N_INSNS (74/2), /* ddiv */
0b11da67 602 128,
5f732aba
DE
603 32, /* l1 cache */
604 512, /* l2 cache */
605 6, /* streams */
d296e02e
AP
606};
607
8b897cfa
RS
608/* Instruction costs on PPC750 and PPC7400 processors. */
609static const
610struct processor_costs ppc750_cost = {
06a67bdd
RS
611 COSTS_N_INSNS (5), /* mulsi */
612 COSTS_N_INSNS (3), /* mulsi_const */
613 COSTS_N_INSNS (2), /* mulsi_const9 */
614 COSTS_N_INSNS (5), /* muldi */
615 COSTS_N_INSNS (17), /* divsi */
616 COSTS_N_INSNS (17), /* divdi */
617 COSTS_N_INSNS (3), /* fp */
618 COSTS_N_INSNS (3), /* dmul */
619 COSTS_N_INSNS (17), /* sdiv */
620 COSTS_N_INSNS (31), /* ddiv */
0b11da67 621 32,
5f732aba
DE
622 32, /* l1 cache */
623 512, /* l2 cache */
0b11da67 624 1, /* streams */
8b897cfa
RS
625};
626
627/* Instruction costs on PPC7450 processors. */
628static const
629struct processor_costs ppc7450_cost = {
06a67bdd
RS
630 COSTS_N_INSNS (4), /* mulsi */
631 COSTS_N_INSNS (3), /* mulsi_const */
632 COSTS_N_INSNS (3), /* mulsi_const9 */
633 COSTS_N_INSNS (4), /* muldi */
634 COSTS_N_INSNS (23), /* divsi */
635 COSTS_N_INSNS (23), /* divdi */
636 COSTS_N_INSNS (5), /* fp */
637 COSTS_N_INSNS (5), /* dmul */
638 COSTS_N_INSNS (21), /* sdiv */
639 COSTS_N_INSNS (35), /* ddiv */
0b11da67 640 32,
5f732aba
DE
641 32, /* l1 cache */
642 1024, /* l2 cache */
0b11da67 643 1, /* streams */
8b897cfa 644};
a3170dc6 645
8b897cfa
RS
646/* Instruction costs on PPC8540 processors. */
647static const
648struct processor_costs ppc8540_cost = {
06a67bdd
RS
649 COSTS_N_INSNS (4), /* mulsi */
650 COSTS_N_INSNS (4), /* mulsi_const */
651 COSTS_N_INSNS (4), /* mulsi_const9 */
652 COSTS_N_INSNS (4), /* muldi */
653 COSTS_N_INSNS (19), /* divsi */
654 COSTS_N_INSNS (19), /* divdi */
655 COSTS_N_INSNS (4), /* fp */
656 COSTS_N_INSNS (4), /* dmul */
657 COSTS_N_INSNS (29), /* sdiv */
658 COSTS_N_INSNS (29), /* ddiv */
0b11da67 659 32,
5f732aba
DE
660 32, /* l1 cache */
661 256, /* l2 cache */
0b11da67 662 1, /* prefetch streams /*/
8b897cfa
RS
663};
664
665/* Instruction costs on POWER4 and POWER5 processors. */
666static const
667struct processor_costs power4_cost = {
06a67bdd
RS
668 COSTS_N_INSNS (3), /* mulsi */
669 COSTS_N_INSNS (2), /* mulsi_const */
670 COSTS_N_INSNS (2), /* mulsi_const9 */
671 COSTS_N_INSNS (4), /* muldi */
672 COSTS_N_INSNS (18), /* divsi */
673 COSTS_N_INSNS (34), /* divdi */
674 COSTS_N_INSNS (3), /* fp */
675 COSTS_N_INSNS (3), /* dmul */
676 COSTS_N_INSNS (17), /* sdiv */
677 COSTS_N_INSNS (17), /* ddiv */
0b11da67 678 128,
5f732aba
DE
679 32, /* l1 cache */
680 1024, /* l2 cache */
0b11da67 681 8, /* prefetch streams /*/
8b897cfa
RS
682};
683
44cd321e
PS
684/* Instruction costs on POWER6 processors. */
685static const
686struct processor_costs power6_cost = {
687 COSTS_N_INSNS (8), /* mulsi */
688 COSTS_N_INSNS (8), /* mulsi_const */
689 COSTS_N_INSNS (8), /* mulsi_const9 */
690 COSTS_N_INSNS (8), /* muldi */
691 COSTS_N_INSNS (22), /* divsi */
692 COSTS_N_INSNS (28), /* divdi */
693 COSTS_N_INSNS (3), /* fp */
694 COSTS_N_INSNS (3), /* dmul */
695 COSTS_N_INSNS (13), /* sdiv */
696 COSTS_N_INSNS (16), /* ddiv */
0b11da67 697 128,
5f732aba
DE
698 64, /* l1 cache */
699 2048, /* l2 cache */
0b11da67 700 16, /* prefetch streams */
44cd321e
PS
701};
702
8b897cfa 703\f
a2369ed3 704static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 705static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 706static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
707static void rs6000_emit_stack_tie (void);
708static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
709static rtx spe_synthesize_frame_save (rtx);
710static bool spe_func_has_64bit_regs_p (void);
b20a9cca 711static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 712 int, HOST_WIDE_INT);
a2369ed3
DJ
713static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
714static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
715static unsigned rs6000_hash_constant (rtx);
716static unsigned toc_hash_function (const void *);
717static int toc_hash_eq (const void *, const void *);
718static int constant_pool_expr_1 (rtx, int *, int *);
719static bool constant_pool_expr_p (rtx);
d04b6e6e 720static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
721static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
722static struct machine_function * rs6000_init_machine_status (void);
723static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 724static bool no_global_regs_above (int);
5add3202 725#ifdef HAVE_GAS_HIDDEN
a2369ed3 726static void rs6000_assemble_visibility (tree, int);
5add3202 727#endif
a2369ed3
DJ
728static int rs6000_ra_ever_killed (void);
729static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 730static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 731static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 732static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 733static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 734static const char *rs6000_mangle_type (const_tree);
b86fe7b4 735extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 736static void rs6000_set_default_type_attributes (tree);
52ff33d0 737static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
738static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
739static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
740static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
741 tree);
a2369ed3 742static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 743static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 744static void rs6000_file_start (void);
7c262518 745#if TARGET_ELF
9b580a0b 746static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
747static void rs6000_elf_asm_out_constructor (rtx, int);
748static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 749static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 750static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
751static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
752 unsigned HOST_WIDE_INT);
a56d7372 753static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 754 ATTRIBUTE_UNUSED;
7c262518 755#endif
3101faab 756static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
cbaaba19 757#if TARGET_XCOFF
0d5817b2 758static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 759static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 760static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 761static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 762static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 763static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 764 unsigned HOST_WIDE_INT);
d6b5193b
RS
765static void rs6000_xcoff_unique_section (tree, int);
766static section *rs6000_xcoff_select_rtx_section
767 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
768static const char * rs6000_xcoff_strip_name_encoding (const char *);
769static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
770static void rs6000_xcoff_file_start (void);
771static void rs6000_xcoff_file_end (void);
f1384257 772#endif
a2369ed3
DJ
773static int rs6000_variable_issue (FILE *, int, rtx, int);
774static bool rs6000_rtx_costs (rtx, int, int, int *);
775static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 776static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 777static bool is_microcoded_insn (rtx);
d296e02e 778static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
779static bool is_cracked_insn (rtx);
780static bool is_branch_slot_insn (rtx);
44cd321e 781static bool is_load_insn (rtx);
e3a0e200 782static rtx get_store_dest (rtx pat);
44cd321e
PS
783static bool is_store_insn (rtx);
784static bool set_to_load_agen (rtx,rtx);
982afe02 785static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
786static int rs6000_adjust_priority (rtx, int);
787static int rs6000_issue_rate (void);
b198261f 788static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
789static rtx get_next_active_insn (rtx, rtx);
790static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
791static bool insn_must_be_first_in_group (rtx);
792static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
793static bool is_costly_group (rtx *, rtx);
794static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
795static int redefine_groups (FILE *, int, rtx, rtx);
796static int pad_groups (FILE *, int, rtx, rtx);
797static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
798static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
799static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 800static int rs6000_use_sched_lookahead (void);
d296e02e 801static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 802static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 803static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
804static tree rs6000_builtin_mul_widen_even (tree);
805static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 806static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 807
58646b77 808static void def_builtin (int, const char *, tree, int);
3101faab 809static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
810static void rs6000_init_builtins (void);
811static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
812static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
813static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
814static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
815static void altivec_init_builtins (void);
816static void rs6000_common_init_builtins (void);
c15c90bb 817static void rs6000_init_libfuncs (void);
a2369ed3 818
96038623
DE
819static void paired_init_builtins (void);
820static rtx paired_expand_builtin (tree, rtx, bool *);
821static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
822static rtx paired_expand_stv_builtin (enum insn_code, tree);
823static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
824
b20a9cca
AM
825static void enable_mask_for_builtins (struct builtin_description *, int,
826 enum rs6000_builtins,
827 enum rs6000_builtins);
7c62e993 828static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
829static void spe_init_builtins (void);
830static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 831static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
832static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
833static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
834static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
835static rs6000_stack_t *rs6000_stack_info (void);
836static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
837
838static rtx altivec_expand_builtin (tree, rtx, bool *);
839static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
840static rtx altivec_expand_st_builtin (tree, rtx, bool *);
841static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
842static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 843static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 844 const char *, tree, rtx);
b4a62fa0 845static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 846static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
847static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
848static rtx altivec_expand_vec_set_builtin (tree);
849static rtx altivec_expand_vec_ext_builtin (tree, rtx);
850static int get_element_number (tree, tree);
78f5898b 851static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 852static void rs6000_parse_tls_size_option (void);
5da702b1 853static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
854static int first_altivec_reg_to_save (void);
855static unsigned int compute_vrsave_mask (void);
9390387d 856static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
857static void is_altivec_return_reg (rtx, void *);
858static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
859int easy_vector_constant (rtx, enum machine_mode);
3101faab 860static bool rs6000_is_opaque_type (const_tree);
a2369ed3 861static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 862static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 863static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 864static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
865static rtx rs6000_tls_get_addr (void);
866static rtx rs6000_got_sym (void);
9390387d 867static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
868static const char *rs6000_get_some_local_dynamic_name (void);
869static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 870static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 871static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 872 enum machine_mode, tree);
0b5383eb
DJ
873static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
874 HOST_WIDE_INT);
875static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
876 tree, HOST_WIDE_INT);
877static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
878 HOST_WIDE_INT,
879 rtx[], int *);
880static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
881 const_tree, HOST_WIDE_INT,
882 rtx[], int *);
883static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 884static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 885static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
886static void setup_incoming_varargs (CUMULATIVE_ARGS *,
887 enum machine_mode, tree,
888 int *, int);
8cd5a4e0 889static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 890 const_tree, bool);
78a52f11
RH
891static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
892 tree, bool);
3101faab 893static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
894#if TARGET_MACHO
895static void macho_branch_islands (void);
efdba735
SH
896static int no_previous_def (tree function_name);
897static tree get_prev_label (tree function_name);
c4e18b1c 898static void rs6000_darwin_file_start (void);
efdba735
SH
899#endif
900
c35d187f 901static tree rs6000_build_builtin_va_list (void);
23a60a04 902static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 903static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 904static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 905static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 906static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 907 enum machine_mode);
94ff898d 908static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
909 enum machine_mode);
910static int get_vsel_insn (enum machine_mode);
911static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 912static tree rs6000_stack_protect_fail (void);
21213b4c
DP
913
914const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
915static enum machine_mode rs6000_eh_return_filter_mode (void);
916
17211ab5
GK
917/* Hash table stuff for keeping track of TOC entries. */
918
919struct toc_hash_struct GTY(())
920{
921 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
922 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
923 rtx key;
924 enum machine_mode key_mode;
925 int labelno;
926};
927
928static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
929\f
930/* Default register names. */
931char rs6000_reg_names[][8] =
932{
802a0058
MM
933 "0", "1", "2", "3", "4", "5", "6", "7",
934 "8", "9", "10", "11", "12", "13", "14", "15",
935 "16", "17", "18", "19", "20", "21", "22", "23",
936 "24", "25", "26", "27", "28", "29", "30", "31",
937 "0", "1", "2", "3", "4", "5", "6", "7",
938 "8", "9", "10", "11", "12", "13", "14", "15",
939 "16", "17", "18", "19", "20", "21", "22", "23",
940 "24", "25", "26", "27", "28", "29", "30", "31",
941 "mq", "lr", "ctr","ap",
942 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
943 "xer",
944 /* AltiVec registers. */
0cd5e3a1
AH
945 "0", "1", "2", "3", "4", "5", "6", "7",
946 "8", "9", "10", "11", "12", "13", "14", "15",
947 "16", "17", "18", "19", "20", "21", "22", "23",
948 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
949 "vrsave", "vscr",
950 /* SPE registers. */
7d5175e1
JJ
951 "spe_acc", "spefscr",
952 /* Soft frame pointer. */
953 "sfp"
c81bebd7
MM
954};
955
956#ifdef TARGET_REGNAMES
8b60264b 957static const char alt_reg_names[][8] =
c81bebd7 958{
802a0058
MM
959 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
960 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
961 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
962 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
963 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
964 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
965 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
966 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
967 "mq", "lr", "ctr", "ap",
968 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 969 "xer",
59a4c851 970 /* AltiVec registers. */
0ac081f6 971 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
972 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
973 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
974 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
975 "vrsave", "vscr",
976 /* SPE registers. */
7d5175e1
JJ
977 "spe_acc", "spefscr",
978 /* Soft frame pointer. */
979 "sfp"
c81bebd7
MM
980};
981#endif
9878760c 982\f
daf11973
MM
983#ifndef MASK_STRICT_ALIGN
984#define MASK_STRICT_ALIGN 0
985#endif
ffcfcb5f
AM
986#ifndef TARGET_PROFILE_KERNEL
987#define TARGET_PROFILE_KERNEL 0
988#endif
3961e8fe
RH
989
990/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
991#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
992\f
993/* Initialize the GCC target structure. */
91d231cb
JM
994#undef TARGET_ATTRIBUTE_TABLE
995#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
996#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
997#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 998
301d03af
RS
999#undef TARGET_ASM_ALIGNED_DI_OP
1000#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1001
1002/* Default unaligned ops are only provided for ELF. Find the ops needed
1003 for non-ELF systems. */
1004#ifndef OBJECT_FORMAT_ELF
cbaaba19 1005#if TARGET_XCOFF
ae6c1efd 1006/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1007 64-bit targets. */
1008#undef TARGET_ASM_UNALIGNED_HI_OP
1009#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1010#undef TARGET_ASM_UNALIGNED_SI_OP
1011#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1012#undef TARGET_ASM_UNALIGNED_DI_OP
1013#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1014#else
1015/* For Darwin. */
1016#undef TARGET_ASM_UNALIGNED_HI_OP
1017#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1018#undef TARGET_ASM_UNALIGNED_SI_OP
1019#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1020#undef TARGET_ASM_UNALIGNED_DI_OP
1021#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1022#undef TARGET_ASM_ALIGNED_DI_OP
1023#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1024#endif
1025#endif
1026
1027/* This hook deals with fixups for relocatable code and DI-mode objects
1028 in 64-bit code. */
1029#undef TARGET_ASM_INTEGER
1030#define TARGET_ASM_INTEGER rs6000_assemble_integer
1031
93638d7a
AM
1032#ifdef HAVE_GAS_HIDDEN
1033#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1034#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1035#endif
1036
c4501e62
JJ
1037#undef TARGET_HAVE_TLS
1038#define TARGET_HAVE_TLS HAVE_AS_TLS
1039
1040#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1041#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1042
08c148a8
NB
1043#undef TARGET_ASM_FUNCTION_PROLOGUE
1044#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1045#undef TARGET_ASM_FUNCTION_EPILOGUE
1046#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1047
b54cf83a
DE
1048#undef TARGET_SCHED_VARIABLE_ISSUE
1049#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1050
c237e94a
ZW
1051#undef TARGET_SCHED_ISSUE_RATE
1052#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1053#undef TARGET_SCHED_ADJUST_COST
1054#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1055#undef TARGET_SCHED_ADJUST_PRIORITY
1056#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1057#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1058#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1059#undef TARGET_SCHED_INIT
1060#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1061#undef TARGET_SCHED_FINISH
1062#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1063#undef TARGET_SCHED_REORDER
1064#define TARGET_SCHED_REORDER rs6000_sched_reorder
1065#undef TARGET_SCHED_REORDER2
1066#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1067
be12c2b0
VM
1068#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1069#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1070
d296e02e
AP
1071#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1072#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1073
7ccf35ed
DN
1074#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1075#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1076#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1077#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1078#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1079#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1080#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1081#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1082
5b900a4c
DN
1083#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1084#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1085
0ac081f6
AH
1086#undef TARGET_INIT_BUILTINS
1087#define TARGET_INIT_BUILTINS rs6000_init_builtins
1088
1089#undef TARGET_EXPAND_BUILTIN
1090#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1091
608063c3
JB
1092#undef TARGET_MANGLE_TYPE
1093#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1094
c15c90bb
ZW
1095#undef TARGET_INIT_LIBFUNCS
1096#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1097
f1384257 1098#if TARGET_MACHO
0e5dbd9b 1099#undef TARGET_BINDS_LOCAL_P
31920d83 1100#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1101#endif
0e5dbd9b 1102
77ccdfed
EC
1103#undef TARGET_MS_BITFIELD_LAYOUT_P
1104#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1105
3961e8fe
RH
1106#undef TARGET_ASM_OUTPUT_MI_THUNK
1107#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1108
3961e8fe 1109#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1110#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1111
4977bab6
ZW
1112#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1113#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1114
2e3f0db6
DJ
1115#undef TARGET_INVALID_WITHIN_DOLOOP
1116#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1117
3c50106f
RH
1118#undef TARGET_RTX_COSTS
1119#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1120#undef TARGET_ADDRESS_COST
1121#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1122
c8e4f0e9 1123#undef TARGET_VECTOR_OPAQUE_P
58646b77 1124#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1125
96714395
AH
1126#undef TARGET_DWARF_REGISTER_SPAN
1127#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1128
37ea0b7e
JM
1129#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1130#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1131
c6e8c921
GK
1132/* On rs6000, function arguments are promoted, as are function return
1133 values. */
1134#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1135#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1136#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1137#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1138
c6e8c921
GK
1139#undef TARGET_RETURN_IN_MEMORY
1140#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1141
1142#undef TARGET_SETUP_INCOMING_VARARGS
1143#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1144
1145/* Always strict argument naming on rs6000. */
1146#undef TARGET_STRICT_ARGUMENT_NAMING
1147#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1148#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1149#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1150#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1151#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1152#undef TARGET_MUST_PASS_IN_STACK
1153#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1154#undef TARGET_PASS_BY_REFERENCE
1155#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1156#undef TARGET_ARG_PARTIAL_BYTES
1157#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1158
c35d187f
RH
1159#undef TARGET_BUILD_BUILTIN_VA_LIST
1160#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1161
cd3ce9b4
JM
1162#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1163#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1164
93f90be6
FJ
1165#undef TARGET_EH_RETURN_FILTER_MODE
1166#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1167
00b79d54
BE
1168#undef TARGET_SCALAR_MODE_SUPPORTED_P
1169#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1170
f676971a
EC
1171#undef TARGET_VECTOR_MODE_SUPPORTED_P
1172#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1173
4d3e6fae
FJ
1174#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1175#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1176
78f5898b
AH
1177#undef TARGET_HANDLE_OPTION
1178#define TARGET_HANDLE_OPTION rs6000_handle_option
1179
1180#undef TARGET_DEFAULT_TARGET_FLAGS
1181#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1182 (TARGET_DEFAULT)
78f5898b 1183
3aebbe5f
JJ
1184#undef TARGET_STACK_PROTECT_FAIL
1185#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1186
445cf5eb
JM
1187/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1188 The PowerPC architecture requires only weak consistency among
1189 processors--that is, memory accesses between processors need not be
1190 sequentially consistent and memory accesses among processors can occur
1191 in any order. The ability to order memory accesses weakly provides
1192 opportunities for more efficient use of the system bus. Unless a
1193 dependency exists, the 604e allows read operations to precede store
1194 operations. */
1195#undef TARGET_RELAXED_ORDERING
1196#define TARGET_RELAXED_ORDERING true
1197
fdbe66f2
EB
1198#ifdef HAVE_AS_TLS
1199#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1200#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1201#endif
1202
aacd3885
RS
1203/* Use a 32-bit anchor range. This leads to sequences like:
1204
1205 addis tmp,anchor,high
1206 add dest,tmp,low
1207
1208 where tmp itself acts as an anchor, and can be shared between
1209 accesses to the same 64k page. */
1210#undef TARGET_MIN_ANCHOR_OFFSET
1211#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1212#undef TARGET_MAX_ANCHOR_OFFSET
1213#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1214#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1215#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1216
9c78b944
DE
1217#undef TARGET_BUILTIN_RECIPROCAL
1218#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1219
f6897b10 1220struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1221\f
0d1fbc8c
AH
1222
1223/* Value is 1 if hard register REGNO can hold a value of machine-mode
1224 MODE. */
1225static int
1226rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1227{
1228 /* The GPRs can hold any mode, but values bigger than one register
1229 cannot go past R31. */
1230 if (INT_REGNO_P (regno))
1231 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1232
a5a97921 1233 /* The float registers can only hold floating modes and DImode.
7393f7f8 1234 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1235 if (FP_REGNO_P (regno))
1236 return
96038623 1237 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1238 && (mode != TDmode || (regno % 2) == 0)
7393f7f8 1239 && mode != SDmode
0d1fbc8c
AH
1240 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1241 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1242 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1243 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1244 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1245
1246 /* The CR register can only hold CC modes. */
1247 if (CR_REGNO_P (regno))
1248 return GET_MODE_CLASS (mode) == MODE_CC;
1249
1250 if (XER_REGNO_P (regno))
1251 return mode == PSImode;
1252
1253 /* AltiVec only in AldyVec registers. */
1254 if (ALTIVEC_REGNO_P (regno))
1255 return ALTIVEC_VECTOR_MODE (mode);
1256
1257 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1258 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1259 return 1;
1260
1261 /* We cannot put TImode anywhere except general register and it must be
1262 able to fit within the register set. */
1263
1264 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1265}
1266
1267/* Initialize rs6000_hard_regno_mode_ok_p table. */
1268static void
1269rs6000_init_hard_regno_mode_ok (void)
1270{
1271 int r, m;
1272
1273 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1274 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1275 if (rs6000_hard_regno_mode_ok (r, m))
1276 rs6000_hard_regno_mode_ok_p[m][r] = true;
1277}
1278
e4cad568
GK
1279#if TARGET_MACHO
1280/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1281
1282static void
1283darwin_rs6000_override_options (void)
1284{
1285 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1286 off. */
1287 rs6000_altivec_abi = 1;
1288 TARGET_ALTIVEC_VRSAVE = 1;
1289 if (DEFAULT_ABI == ABI_DARWIN)
1290 {
1291 if (MACHO_DYNAMIC_NO_PIC_P)
1292 {
1293 if (flag_pic)
1294 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1295 flag_pic = 0;
1296 }
1297 else if (flag_pic == 1)
1298 {
1299 flag_pic = 2;
1300 }
1301 }
1302 if (TARGET_64BIT && ! TARGET_POWERPC64)
1303 {
1304 target_flags |= MASK_POWERPC64;
1305 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1306 }
1307 if (flag_mkernel)
1308 {
1309 rs6000_default_long_calls = 1;
1310 target_flags |= MASK_SOFT_FLOAT;
1311 }
1312
1313 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1314 Altivec. */
1315 if (!flag_mkernel && !flag_apple_kext
1316 && TARGET_64BIT
1317 && ! (target_flags_explicit & MASK_ALTIVEC))
1318 target_flags |= MASK_ALTIVEC;
1319
1320 /* Unless the user (not the configurer) has explicitly overridden
1321 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1322 G4 unless targetting the kernel. */
1323 if (!flag_mkernel
1324 && !flag_apple_kext
1325 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1326 && ! (target_flags_explicit & MASK_ALTIVEC)
1327 && ! rs6000_select[1].string)
1328 {
1329 target_flags |= MASK_ALTIVEC;
1330 }
1331}
1332#endif
1333
c1e55850
GK
1334/* If not otherwise specified by a target, make 'long double' equivalent to
1335 'double'. */
1336
1337#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1338#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1339#endif
1340
5248c961
RK
1341/* Override command line options. Mostly we process the processor
1342 type and sometimes adjust other TARGET_ options. */
1343
1344void
d779d0dc 1345rs6000_override_options (const char *default_cpu)
5248c961 1346{
c4d38ccb 1347 size_t i, j;
8e3f41e7 1348 struct rs6000_cpu_select *ptr;
66188a7e 1349 int set_masks;
5248c961 1350
66188a7e 1351 /* Simplifications for entries below. */
85638c0d 1352
66188a7e
GK
1353 enum {
1354 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1355 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1356 };
85638c0d 1357
66188a7e
GK
1358 /* This table occasionally claims that a processor does not support
1359 a particular feature even though it does, but the feature is slower
1360 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1361 complete description of the processor's support.
66188a7e
GK
1362
1363 Please keep this list in order, and don't forget to update the
1364 documentation in invoke.texi when adding a new processor or
1365 flag. */
5248c961
RK
1366 static struct ptt
1367 {
8b60264b
KG
1368 const char *const name; /* Canonical processor name. */
1369 const enum processor_type processor; /* Processor type enum value. */
1370 const int target_enable; /* Target flags to enable. */
8b60264b 1371 } const processor_target_table[]
66188a7e 1372 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1373 {"403", PROCESSOR_PPC403,
66188a7e 1374 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1375 {"405", PROCESSOR_PPC405,
716019c0
JM
1376 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1377 {"405fp", PROCESSOR_PPC405,
1378 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1379 {"440", PROCESSOR_PPC440,
716019c0
JM
1380 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1381 {"440fp", PROCESSOR_PPC440,
1382 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1383 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1384 {"601", PROCESSOR_PPC601,
66188a7e
GK
1385 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1386 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1387 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1388 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1389 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1390 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1391 {"620", PROCESSOR_PPC620,
1392 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1393 {"630", PROCESSOR_PPC630,
1394 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1395 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1396 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1397 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1398 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1399 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1400 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1401 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1402 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1403 /* 8548 has a dummy entry for now. */
a45bce6e 1404 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1405 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1406 {"970", PROCESSOR_POWER4,
66188a7e 1407 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1408 {"cell", PROCESSOR_CELL,
1409 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1410 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1411 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1412 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1413 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1414 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1415 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1416 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1417 {"power2", PROCESSOR_POWER,
1418 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1419 {"power3", PROCESSOR_PPC630,
1420 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1421 {"power4", PROCESSOR_POWER4,
fc091c8e 1422 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1423 {"power5", PROCESSOR_POWER5,
432218ba
DE
1424 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1425 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1426 {"power5+", PROCESSOR_POWER5,
1427 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1428 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1429 {"power6", PROCESSOR_POWER6,
e118597e 1430 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1431 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1432 {"power6x", PROCESSOR_POWER6,
1433 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1434 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1435 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1436 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1437 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1438 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1439 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1440 {"rios2", PROCESSOR_RIOS2,
1441 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1442 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1443 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1444 {"rs64", PROCESSOR_RS64A,
1445 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1446 };
5248c961 1447
ca7558fc 1448 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1449
66188a7e
GK
1450 /* Some OSs don't support saving the high part of 64-bit registers on
1451 context switch. Other OSs don't support saving Altivec registers.
1452 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1453 settings; if the user wants either, the user must explicitly specify
1454 them and we won't interfere with the user's specification. */
1455
1456 enum {
1457 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1458 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1459 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1460 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1461 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1462 };
0d1fbc8c
AH
1463
1464 rs6000_init_hard_regno_mode_ok ();
1465
c4ad648e 1466 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1467#ifdef OS_MISSING_POWERPC64
1468 if (OS_MISSING_POWERPC64)
1469 set_masks &= ~MASK_POWERPC64;
1470#endif
1471#ifdef OS_MISSING_ALTIVEC
1472 if (OS_MISSING_ALTIVEC)
1473 set_masks &= ~MASK_ALTIVEC;
1474#endif
1475
768875a8
AM
1476 /* Don't override by the processor default if given explicitly. */
1477 set_masks &= ~target_flags_explicit;
957211c3 1478
a4f6c312 1479 /* Identify the processor type. */
8e3f41e7 1480 rs6000_select[0].string = default_cpu;
3cb999d8 1481 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1482
b6a1cbae 1483 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1484 {
8e3f41e7
MM
1485 ptr = &rs6000_select[i];
1486 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1487 {
8e3f41e7
MM
1488 for (j = 0; j < ptt_size; j++)
1489 if (! strcmp (ptr->string, processor_target_table[j].name))
1490 {
1491 if (ptr->set_tune_p)
1492 rs6000_cpu = processor_target_table[j].processor;
1493
1494 if (ptr->set_arch_p)
1495 {
66188a7e
GK
1496 target_flags &= ~set_masks;
1497 target_flags |= (processor_target_table[j].target_enable
1498 & set_masks);
8e3f41e7
MM
1499 }
1500 break;
1501 }
1502
4406229e 1503 if (j == ptt_size)
8e3f41e7 1504 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1505 }
1506 }
8a61d227 1507
993f19a8 1508 if (TARGET_E500)
a3170dc6
AH
1509 rs6000_isel = 1;
1510
dff9f1b6
DE
1511 /* If we are optimizing big endian systems for space, use the load/store
1512 multiple and string instructions. */
ef792183 1513 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1514 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1515
a4f6c312
SS
 1516  /* Don't allow -mmultiple or -mstring on little endian systems
 1517     unless the cpu is a 750, because the hardware doesn't support the
 1518     instructions used in little endian mode, and they cause an alignment
 1519     trap.  The 750 does not cause an alignment trap (except when the
 1520     target is unaligned).  */
bef84347 1521
b21fb038 1522 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1523 {
1524 if (TARGET_MULTIPLE)
1525 {
1526 target_flags &= ~MASK_MULTIPLE;
b21fb038 1527 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1528 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1529 }
1530
1531 if (TARGET_STRING)
1532 {
1533 target_flags &= ~MASK_STRING;
b21fb038 1534 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1535 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1536 }
1537 }
3933e0e1 1538
38c1f2d7
MM
1539 /* Set debug flags */
1540 if (rs6000_debug_name)
1541 {
bfc79d3b 1542 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1543 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1544 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1545 rs6000_debug_stack = 1;
bfc79d3b 1546 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1547 rs6000_debug_arg = 1;
1548 else
c725bd79 1549 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1550 }
1551
57ac7be9
AM
1552 if (rs6000_traceback_name)
1553 {
1554 if (! strncmp (rs6000_traceback_name, "full", 4))
1555 rs6000_traceback = traceback_full;
1556 else if (! strncmp (rs6000_traceback_name, "part", 4))
1557 rs6000_traceback = traceback_part;
1558 else if (! strncmp (rs6000_traceback_name, "no", 2))
1559 rs6000_traceback = traceback_none;
1560 else
9e637a26 1561 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1562 rs6000_traceback_name);
1563 }
1564
78f5898b
AH
1565 if (!rs6000_explicit_options.long_double)
1566 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1567
602ea4d3 1568#ifndef POWERPC_LINUX
d3603e8c 1569 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1570 rs6000_ieeequad = 1;
1571#endif
1572
0db747be
DE
1573 /* Enable Altivec ABI for AIX -maltivec. */
1574 if (TARGET_XCOFF && TARGET_ALTIVEC)
1575 rs6000_altivec_abi = 1;
1576
1577 /* Set Altivec ABI as default for PowerPC64 Linux. */
6d0ef01e
HP
1578 if (TARGET_ELF && TARGET_64BIT)
1579 {
1580 rs6000_altivec_abi = 1;
78f5898b 1581 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1582 }
1583
594a51fe
SS
1584 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1585 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1586 {
1587 rs6000_darwin64_abi = 1;
9c7956fd 1588#if TARGET_MACHO
6ac49599 1589 darwin_one_byte_bool = 1;
9c7956fd 1590#endif
d9168963
SS
1591 /* Default to natural alignment, for better performance. */
1592 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1593 }
1594
194c524a
DE
1595 /* Place FP constants in the constant pool instead of TOC
 1596     if section anchors are enabled.  */
1597 if (flag_section_anchors)
1598 TARGET_NO_FP_IN_TOC = 1;
1599
c4501e62
JJ
1600 /* Handle -mtls-size option. */
1601 rs6000_parse_tls_size_option ();
1602
a7ae18e2
AH
1603#ifdef SUBTARGET_OVERRIDE_OPTIONS
1604 SUBTARGET_OVERRIDE_OPTIONS;
1605#endif
1606#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1607 SUBSUBTARGET_OVERRIDE_OPTIONS;
1608#endif
4d4cbc0e
AH
1609#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1610 SUB3TARGET_OVERRIDE_OPTIONS;
1611#endif
a7ae18e2 1612
5da702b1
AH
1613 if (TARGET_E500)
1614 {
1615 /* The e500 does not have string instructions, and we set
1616 MASK_STRING above when optimizing for size. */
1617 if ((target_flags & MASK_STRING) != 0)
1618 target_flags = target_flags & ~MASK_STRING;
1619 }
1620 else if (rs6000_select[1].string != NULL)
1621 {
1622 /* For the powerpc-eabispe configuration, we set all these by
1623 default, so let's unset them if we manually set another
1624 CPU that is not the E500. */
78f5898b 1625 if (!rs6000_explicit_options.abi)
5da702b1 1626 rs6000_spe_abi = 0;
78f5898b 1627 if (!rs6000_explicit_options.spe)
5da702b1 1628 rs6000_spe = 0;
78f5898b 1629 if (!rs6000_explicit_options.float_gprs)
5da702b1 1630 rs6000_float_gprs = 0;
78f5898b 1631 if (!rs6000_explicit_options.isel)
5da702b1
AH
1632 rs6000_isel = 0;
1633 }
b5044283 1634
eca0d5e8
JM
1635 /* Detect invalid option combinations with E500. */
1636 CHECK_E500_OPTIONS;
1637
ec507f2d 1638 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1639 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1640 && rs6000_cpu != PROCESSOR_POWER6
1641 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1642 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1643 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1644 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1645 || rs6000_cpu == PROCESSOR_POWER5
1646 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1647
ec507f2d
DE
1648 rs6000_sched_restricted_insns_priority
1649 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1650
569fa502 1651 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1652 rs6000_sched_costly_dep
1653 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1654
569fa502
DN
1655 if (rs6000_sched_costly_dep_str)
1656 {
f676971a 1657 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1658 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1659 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1660 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1661 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1662 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1663 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1664 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1665 else
c4ad648e 1666 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1667 }
1668
1669 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1670 rs6000_sched_insert_nops
1671 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1672
cbe26ab8
DN
1673 if (rs6000_sched_insert_nops_str)
1674 {
1675 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1676 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1677 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1678 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1679 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1680 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1681 else
c4ad648e 1682 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1683 }
1684
c81bebd7 1685#ifdef TARGET_REGNAMES
a4f6c312
SS
1686 /* If the user desires alternate register names, copy in the
1687 alternate names now. */
c81bebd7 1688 if (TARGET_REGNAMES)
4e135bdd 1689 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1690#endif
1691
df01da37 1692 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1693 If -maix-struct-return or -msvr4-struct-return was explicitly
1694 used, don't override with the ABI default. */
df01da37
DE
1695 if (!rs6000_explicit_options.aix_struct_ret)
1696 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1697
602ea4d3 1698 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1699 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1700
f676971a 1701 if (TARGET_TOC)
9ebbca7d 1702 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1703
301d03af
RS
1704 /* We can only guarantee the availability of DI pseudo-ops when
1705 assembling for 64-bit targets. */
ae6c1efd 1706 if (!TARGET_64BIT)
301d03af
RS
1707 {
1708 targetm.asm_out.aligned_op.di = NULL;
1709 targetm.asm_out.unaligned_op.di = NULL;
1710 }
1711
1494c534
DE
1712 /* Set branch target alignment, if not optimizing for size. */
1713 if (!optimize_size)
1714 {
d296e02e
AP
 1715      /* Cell wants to be aligned to 8 bytes for dual issue.  */
1716 if (rs6000_cpu == PROCESSOR_CELL)
1717 {
1718 if (align_functions <= 0)
1719 align_functions = 8;
1720 if (align_jumps <= 0)
1721 align_jumps = 8;
1722 if (align_loops <= 0)
1723 align_loops = 8;
1724 }
44cd321e 1725 if (rs6000_align_branch_targets)
1494c534
DE
1726 {
1727 if (align_functions <= 0)
1728 align_functions = 16;
1729 if (align_jumps <= 0)
1730 align_jumps = 16;
1731 if (align_loops <= 0)
1732 align_loops = 16;
1733 }
1734 if (align_jumps_max_skip <= 0)
1735 align_jumps_max_skip = 15;
1736 if (align_loops_max_skip <= 0)
1737 align_loops_max_skip = 15;
1738 }
2792d578 1739
71f123ca
FS
1740 /* Arrange to save and restore machine status around nested functions. */
1741 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1742
1743 /* We should always be splitting complex arguments, but we can't break
1744 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1745 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1746 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1747
1748 /* Initialize rs6000_cost with the appropriate target costs. */
1749 if (optimize_size)
1750 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1751 else
1752 switch (rs6000_cpu)
1753 {
1754 case PROCESSOR_RIOS1:
1755 rs6000_cost = &rios1_cost;
1756 break;
1757
1758 case PROCESSOR_RIOS2:
1759 rs6000_cost = &rios2_cost;
1760 break;
1761
1762 case PROCESSOR_RS64A:
1763 rs6000_cost = &rs64a_cost;
1764 break;
1765
1766 case PROCESSOR_MPCCORE:
1767 rs6000_cost = &mpccore_cost;
1768 break;
1769
1770 case PROCESSOR_PPC403:
1771 rs6000_cost = &ppc403_cost;
1772 break;
1773
1774 case PROCESSOR_PPC405:
1775 rs6000_cost = &ppc405_cost;
1776 break;
1777
1778 case PROCESSOR_PPC440:
1779 rs6000_cost = &ppc440_cost;
1780 break;
1781
1782 case PROCESSOR_PPC601:
1783 rs6000_cost = &ppc601_cost;
1784 break;
1785
1786 case PROCESSOR_PPC603:
1787 rs6000_cost = &ppc603_cost;
1788 break;
1789
1790 case PROCESSOR_PPC604:
1791 rs6000_cost = &ppc604_cost;
1792 break;
1793
1794 case PROCESSOR_PPC604e:
1795 rs6000_cost = &ppc604e_cost;
1796 break;
1797
1798 case PROCESSOR_PPC620:
8b897cfa
RS
1799 rs6000_cost = &ppc620_cost;
1800 break;
1801
f0517163
RS
1802 case PROCESSOR_PPC630:
1803 rs6000_cost = &ppc630_cost;
1804 break;
1805
982afe02 1806 case PROCESSOR_CELL:
d296e02e
AP
1807 rs6000_cost = &ppccell_cost;
1808 break;
1809
8b897cfa
RS
1810 case PROCESSOR_PPC750:
1811 case PROCESSOR_PPC7400:
1812 rs6000_cost = &ppc750_cost;
1813 break;
1814
1815 case PROCESSOR_PPC7450:
1816 rs6000_cost = &ppc7450_cost;
1817 break;
1818
1819 case PROCESSOR_PPC8540:
1820 rs6000_cost = &ppc8540_cost;
1821 break;
1822
1823 case PROCESSOR_POWER4:
1824 case PROCESSOR_POWER5:
1825 rs6000_cost = &power4_cost;
1826 break;
1827
44cd321e
PS
1828 case PROCESSOR_POWER6:
1829 rs6000_cost = &power6_cost;
1830 break;
1831
8b897cfa 1832 default:
37409796 1833 gcc_unreachable ();
8b897cfa 1834 }
0b11da67
DE
1835
1836 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1837 set_param_value ("simultaneous-prefetches",
1838 rs6000_cost->simultaneous_prefetches);
1839 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1840 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1841 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1842 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1843 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1844 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
5248c961 1845}
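/* For illustration (hypothetical command line, not from this file): with
   "-mcpu=power5 -mno-mfcrf", the power5 table entry above would normally
   enable POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT | MASK_MFCRF
   | MASK_POPCNTB, but MASK_MFCRF is already recorded in
   target_flags_explicit, so it is stripped from set_masks and the user's
   -mno-mfcrf setting is left untouched.  */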
5accd822 1846
7ccf35ed
DN
1847/* Implement targetm.vectorize.builtin_mask_for_load. */
1848static tree
1849rs6000_builtin_mask_for_load (void)
1850{
1851 if (TARGET_ALTIVEC)
1852 return altivec_builtin_mask_for_load;
1853 else
1854 return 0;
1855}
1856
f57d17f1
TM
1857/* Implement targetm.vectorize.builtin_conversion. */
1858static tree
1859rs6000_builtin_conversion (enum tree_code code, tree type)
1860{
1861 if (!TARGET_ALTIVEC)
1862 return NULL_TREE;
982afe02 1863
f57d17f1
TM
1864 switch (code)
1865 {
1866 case FLOAT_EXPR:
1867 switch (TYPE_MODE (type))
1868 {
1869 case V4SImode:
982afe02 1870 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1871 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1872 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1873 default:
1874 return NULL_TREE;
1875 }
1876 default:
1877 return NULL_TREE;
1878 }
1879}
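/* For illustration, a sketch of the kind of loop this hook serves
   (hypothetical user code, not from this file):

     int   a[256];
     float b[256];
     for (i = 0; i < 256; i++)
       b[i] = (float) a[i];

   When vectorized with V4SImode sources, the FLOAT_EXPR case above hands
   back the vcfsx builtin (vcfux for unsigned int), so each group of four
   conversions becomes one AltiVec convert instruction.  */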
1880
89d67cca
DN
1881/* Implement targetm.vectorize.builtin_mul_widen_even. */
1882static tree
1883rs6000_builtin_mul_widen_even (tree type)
1884{
1885 if (!TARGET_ALTIVEC)
1886 return NULL_TREE;
1887
1888 switch (TYPE_MODE (type))
1889 {
1890 case V8HImode:
982afe02 1891 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1892 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1893 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1894
1895 case V16QImode:
1896 return TYPE_UNSIGNED (type) ?
1897 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1898 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1899 default:
1900 return NULL_TREE;
1901 }
1902}
1903
1904/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1905static tree
1906rs6000_builtin_mul_widen_odd (tree type)
1907{
1908 if (!TARGET_ALTIVEC)
1909 return NULL_TREE;
1910
1911 switch (TYPE_MODE (type))
1912 {
1913 case V8HImode:
1914 return TYPE_UNSIGNED (type) ?
1915 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1916 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1917
1918 case V16QImode:
1919 return TYPE_UNSIGNED (type) ?
1920 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1921 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1922 default:
1923 return NULL_TREE;
1924 }
1925}
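/* For illustration, a sketch of a widening multiply loop (hypothetical user
   code, not from this file):

     short a[256], b[256];
     int   c[256];
     for (i = 0; i < 256; i++)
       c[i] = a[i] * b[i];

   AltiVec has no single V8HI -> V4SI multiply, so the vectorizer asks for
   the even and odd halves separately; for V8HImode the two hooks above
   return vmulesh and vmulosh (vmuleuh/vmulouh for unsigned), and the
   partial results are interleaved to form the V4SImode products.  */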
1926
5b900a4c
DN
1927
 1928/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1929   after applying N iterations.  This routine does not determine how many
 1930   iterations are required to reach the desired alignment.  */
1931
1932static bool
3101faab 1933rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1934{
1935 if (is_packed)
1936 return false;
1937
1938 if (TARGET_32BIT)
1939 {
1940 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1941 return true;
1942
1943 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1944 return true;
1945
1946 return false;
1947 }
1948 else
1949 {
1950 if (TARGET_MACHO)
1951 return false;
1952
1953 /* Assuming that all other types are naturally aligned. CHECKME! */
1954 return true;
1955 }
1956}
1957
5da702b1
AH
1958/* Handle generic options of the form -mfoo=yes/no.
1959 NAME is the option name.
1960 VALUE is the option value.
 1961   FLAG is a pointer to the flag in which to store 1 or 0, depending on
 1962   whether the option value is 'yes' or 'no' respectively.  */
993f19a8 1963static void
5da702b1 1964rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1965{
5da702b1 1966 if (value == 0)
993f19a8 1967 return;
5da702b1
AH
1968 else if (!strcmp (value, "yes"))
1969 *flag = 1;
1970 else if (!strcmp (value, "no"))
1971 *flag = 0;
08b57fb3 1972 else
5da702b1 1973 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1974}
1975
c4501e62
JJ
1976/* Validate and record the size specified with the -mtls-size option. */
1977
1978static void
863d938c 1979rs6000_parse_tls_size_option (void)
c4501e62
JJ
1980{
1981 if (rs6000_tls_size_string == 0)
1982 return;
1983 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1984 rs6000_tls_size = 16;
1985 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1986 rs6000_tls_size = 32;
1987 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1988 rs6000_tls_size = 64;
1989 else
9e637a26 1990 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1991}
1992
5accd822 1993void
a2369ed3 1994optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1995{
2e3f0db6
DJ
1996 if (DEFAULT_ABI == ABI_DARWIN)
1997 /* The Darwin libraries never set errno, so we might as well
1998 avoid calling them when that's the only reason we would. */
1999 flag_errno_math = 0;
59d6560b
DE
2000
2001 /* Double growth factor to counter reduced min jump length. */
2002 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2003
2004 /* Enable section anchors by default.
2005 Skip section anchors for Objective C and Objective C++
 2006     until the front ends are fixed.  */
23f99493 2007 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2008 flag_section_anchors = 1;
5accd822 2009}
78f5898b
AH
2010
2011/* Implement TARGET_HANDLE_OPTION. */
2012
2013static bool
2014rs6000_handle_option (size_t code, const char *arg, int value)
2015{
2016 switch (code)
2017 {
2018 case OPT_mno_power:
2019 target_flags &= ~(MASK_POWER | MASK_POWER2
2020 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2021 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2022 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2023 break;
2024 case OPT_mno_powerpc:
2025 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2026 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2027 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2028 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2029 break;
2030 case OPT_mfull_toc:
d2894ab5
DE
2031 target_flags &= ~MASK_MINIMAL_TOC;
2032 TARGET_NO_FP_IN_TOC = 0;
2033 TARGET_NO_SUM_IN_TOC = 0;
2034 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2035#ifdef TARGET_USES_SYSV4_OPT
 2036      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
2037 just the same as -mminimal-toc. */
2038 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2039 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2040#endif
2041 break;
2042
2043#ifdef TARGET_USES_SYSV4_OPT
2044 case OPT_mtoc:
2045 /* Make -mtoc behave like -mminimal-toc. */
2046 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2047 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2048 break;
2049#endif
2050
2051#ifdef TARGET_USES_AIX64_OPT
2052 case OPT_maix64:
2053#else
2054 case OPT_m64:
2055#endif
2c9c9afd
AM
2056 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2057 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2058 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2059 break;
2060
2061#ifdef TARGET_USES_AIX64_OPT
2062 case OPT_maix32:
2063#else
2064 case OPT_m32:
2065#endif
2066 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2067 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2068 break;
2069
2070 case OPT_minsert_sched_nops_:
2071 rs6000_sched_insert_nops_str = arg;
2072 break;
2073
2074 case OPT_mminimal_toc:
2075 if (value == 1)
2076 {
d2894ab5
DE
2077 TARGET_NO_FP_IN_TOC = 0;
2078 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2079 }
2080 break;
2081
2082 case OPT_mpower:
2083 if (value == 1)
c2dba4ab
AH
2084 {
2085 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2086 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2087 }
78f5898b
AH
2088 break;
2089
2090 case OPT_mpower2:
2091 if (value == 1)
c2dba4ab
AH
2092 {
2093 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2094 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2095 }
78f5898b
AH
2096 break;
2097
2098 case OPT_mpowerpc_gpopt:
2099 case OPT_mpowerpc_gfxopt:
2100 if (value == 1)
c2dba4ab
AH
2101 {
2102 target_flags |= MASK_POWERPC;
2103 target_flags_explicit |= MASK_POWERPC;
2104 }
78f5898b
AH
2105 break;
2106
df01da37
DE
2107 case OPT_maix_struct_return:
2108 case OPT_msvr4_struct_return:
2109 rs6000_explicit_options.aix_struct_ret = true;
2110 break;
2111
78f5898b
AH
2112 case OPT_mvrsave_:
2113 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2114 break;
78f5898b
AH
2115
2116 case OPT_misel_:
2117 rs6000_explicit_options.isel = true;
2118 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2119 break;
2120
2121 case OPT_mspe_:
2122 rs6000_explicit_options.spe = true;
2123 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2124 break;
2125
2126 case OPT_mdebug_:
2127 rs6000_debug_name = arg;
2128 break;
2129
2130#ifdef TARGET_USES_SYSV4_OPT
2131 case OPT_mcall_:
2132 rs6000_abi_name = arg;
2133 break;
2134
2135 case OPT_msdata_:
2136 rs6000_sdata_name = arg;
2137 break;
2138
2139 case OPT_mtls_size_:
2140 rs6000_tls_size_string = arg;
2141 break;
2142
2143 case OPT_mrelocatable:
2144 if (value == 1)
c2dba4ab 2145 {
e0bf274f
AM
2146 target_flags |= MASK_MINIMAL_TOC;
2147 target_flags_explicit |= MASK_MINIMAL_TOC;
2148 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2149 }
78f5898b
AH
2150 break;
2151
2152 case OPT_mrelocatable_lib:
2153 if (value == 1)
c2dba4ab 2154 {
e0bf274f
AM
2155 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2156 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2157 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2158 }
78f5898b 2159 else
c2dba4ab
AH
2160 {
2161 target_flags &= ~MASK_RELOCATABLE;
2162 target_flags_explicit |= MASK_RELOCATABLE;
2163 }
78f5898b
AH
2164 break;
2165#endif
2166
2167 case OPT_mabi_:
78f5898b
AH
2168 if (!strcmp (arg, "altivec"))
2169 {
d3603e8c 2170 rs6000_explicit_options.abi = true;
78f5898b
AH
2171 rs6000_altivec_abi = 1;
2172 rs6000_spe_abi = 0;
2173 }
2174 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2175 {
2176 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2177 the default for rs6000_spe_abi to be chosen later. */
2178 rs6000_altivec_abi = 0;
2179 }
78f5898b
AH
2180 else if (! strcmp (arg, "spe"))
2181 {
d3603e8c 2182 rs6000_explicit_options.abi = true;
78f5898b
AH
2183 rs6000_spe_abi = 1;
2184 rs6000_altivec_abi = 0;
2185 if (!TARGET_SPE_ABI)
2186 error ("not configured for ABI: '%s'", arg);
2187 }
2188 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2189 {
2190 rs6000_explicit_options.abi = true;
2191 rs6000_spe_abi = 0;
2192 }
78f5898b
AH
2193
2194 /* These are here for testing during development only, do not
2195 document in the manual please. */
2196 else if (! strcmp (arg, "d64"))
2197 {
2198 rs6000_darwin64_abi = 1;
2199 warning (0, "Using darwin64 ABI");
2200 }
2201 else if (! strcmp (arg, "d32"))
2202 {
2203 rs6000_darwin64_abi = 0;
2204 warning (0, "Using old darwin ABI");
2205 }
2206
602ea4d3
JJ
2207 else if (! strcmp (arg, "ibmlongdouble"))
2208 {
d3603e8c 2209 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2210 rs6000_ieeequad = 0;
2211 warning (0, "Using IBM extended precision long double");
2212 }
2213 else if (! strcmp (arg, "ieeelongdouble"))
2214 {
d3603e8c 2215 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2216 rs6000_ieeequad = 1;
2217 warning (0, "Using IEEE extended precision long double");
2218 }
2219
78f5898b
AH
2220 else
2221 {
2222 error ("unknown ABI specified: '%s'", arg);
2223 return false;
2224 }
2225 break;
2226
2227 case OPT_mcpu_:
2228 rs6000_select[1].string = arg;
2229 break;
2230
2231 case OPT_mtune_:
2232 rs6000_select[2].string = arg;
2233 break;
2234
2235 case OPT_mtraceback_:
2236 rs6000_traceback_name = arg;
2237 break;
2238
2239 case OPT_mfloat_gprs_:
2240 rs6000_explicit_options.float_gprs = true;
2241 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2242 rs6000_float_gprs = 1;
2243 else if (! strcmp (arg, "double"))
2244 rs6000_float_gprs = 2;
2245 else if (! strcmp (arg, "no"))
2246 rs6000_float_gprs = 0;
2247 else
2248 {
2249 error ("invalid option for -mfloat-gprs: '%s'", arg);
2250 return false;
2251 }
2252 break;
2253
2254 case OPT_mlong_double_:
2255 rs6000_explicit_options.long_double = true;
2256 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2257 if (value != 64 && value != 128)
2258 {
2259 error ("Unknown switch -mlong-double-%s", arg);
2260 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2261 return false;
2262 }
2263 else
2264 rs6000_long_double_type_size = value;
2265 break;
2266
2267 case OPT_msched_costly_dep_:
2268 rs6000_sched_costly_dep_str = arg;
2269 break;
2270
2271 case OPT_malign_:
2272 rs6000_explicit_options.alignment = true;
2273 if (! strcmp (arg, "power"))
2274 {
2275 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2276 some C library functions, so warn about it. The flag may be
2277 useful for performance studies from time to time though, so
2278 don't disable it entirely. */
2279 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2280 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2281 " it is incompatible with the installed C and C++ libraries");
2282 rs6000_alignment_flags = MASK_ALIGN_POWER;
2283 }
2284 else if (! strcmp (arg, "natural"))
2285 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2286 else
2287 {
2288 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2289 return false;
2290 }
2291 break;
2292 }
2293 return true;
2294}
3cfa4909
MM
2295\f
2296/* Do anything needed at the start of the asm file. */
2297
1bc7c5b6 2298static void
863d938c 2299rs6000_file_start (void)
3cfa4909 2300{
c4d38ccb 2301 size_t i;
3cfa4909 2302 char buffer[80];
d330fd93 2303 const char *start = buffer;
3cfa4909 2304 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2305 const char *default_cpu = TARGET_CPU_DEFAULT;
2306 FILE *file = asm_out_file;
2307
2308 default_file_start ();
2309
2310#ifdef TARGET_BI_ARCH
2311 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2312 default_cpu = 0;
2313#endif
3cfa4909
MM
2314
2315 if (flag_verbose_asm)
2316 {
2317 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2318 rs6000_select[0].string = default_cpu;
2319
b6a1cbae 2320 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2321 {
2322 ptr = &rs6000_select[i];
2323 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2324 {
2325 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2326 start = "";
2327 }
2328 }
2329
9c6b4ed9 2330 if (PPC405_ERRATUM77)
b0bfee6e 2331 {
9c6b4ed9 2332 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2333 start = "";
2334 }
b0bfee6e 2335
b91da81f 2336#ifdef USING_ELFOS_H
3cfa4909
MM
2337 switch (rs6000_sdata)
2338 {
2339 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2340 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2341 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2342 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2343 }
2344
2345 if (rs6000_sdata && g_switch_value)
2346 {
307b599c
MK
2347 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2348 g_switch_value);
3cfa4909
MM
2349 start = "";
2350 }
2351#endif
2352
2353 if (*start == '\0')
949ea356 2354 putc ('\n', file);
3cfa4909 2355 }
b723e82f 2356
e51917ae
JM
2357#ifdef HAVE_AS_GNU_ATTRIBUTE
2358 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2359 {
2360 fprintf (file, "\t.gnu_attribute 4, %d\n",
2361 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2362 fprintf (file, "\t.gnu_attribute 8, %d\n",
2363 (TARGET_ALTIVEC_ABI ? 2
2364 : TARGET_SPE_ABI ? 3
2365 : 1));
2366 }
e51917ae
JM
2367#endif
2368
b723e82f
JJ
2369 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2370 {
d6b5193b
RS
2371 switch_to_section (toc_section);
2372 switch_to_section (text_section);
b723e82f 2373 }
3cfa4909 2374}
c4e18b1c 2375
5248c961 2376\f
a0ab749a 2377/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2378
2379int
863d938c 2380direct_return (void)
9878760c 2381{
4697a36c
MM
2382 if (reload_completed)
2383 {
2384 rs6000_stack_t *info = rs6000_stack_info ();
2385
2386 if (info->first_gp_reg_save == 32
2387 && info->first_fp_reg_save == 64
00b960c7 2388 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2389 && ! info->lr_save_p
2390 && ! info->cr_save_p
00b960c7 2391 && info->vrsave_mask == 0
c81fc13e 2392 && ! info->push_p)
4697a36c
MM
2393 return 1;
2394 }
2395
2396 return 0;
9878760c
RK
2397}
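/* For illustration (hypothetical example, not from this file): a leaf
   function such as

     int add (int a, int b) { return a + b; }

   typically saves no GPRs, FPRs or AltiVec registers, leaves LR, CR and
   VRSAVE alone and needs no stack frame, so direct_return returns 1 and
   the function can end with a bare blr.  */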
2398
4e74d8ec
MM
2399/* Return the number of instructions it takes to form a constant in an
2400 integer register. */
2401
48d72335 2402int
a2369ed3 2403num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2404{
2405 /* signed constant loadable with {cal|addi} */
547b216d 2406 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2407 return 1;
2408
4e74d8ec 2409 /* constant loadable with {cau|addis} */
547b216d
DE
2410 else if ((value & 0xffff) == 0
2411 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2412 return 1;
2413
5f59ecb7 2414#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2415 else if (TARGET_POWERPC64)
4e74d8ec 2416 {
a65c591c
DE
2417 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2418 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2419
a65c591c 2420 if (high == 0 || high == -1)
4e74d8ec
MM
2421 return 2;
2422
a65c591c 2423 high >>= 1;
4e74d8ec 2424
a65c591c 2425 if (low == 0)
4e74d8ec 2426 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2427 else
2428 return (num_insns_constant_wide (high)
e396202a 2429 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2430 }
2431#endif
2432
2433 else
2434 return 2;
2435}
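/* For illustration: values in [-32768, 32767] satisfy the addi test above
   and cost one instruction, a value such as 0x12340000 (low 16 bits zero)
   satisfies the addis test and also costs one, while 0x12345678 fails both
   and costs two, roughly

     lis  r9,0x1234     # addis: set the high 16 bits
     ori  r9,r9,0x5678  # fill in the low 16 bits  */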
2436
2437int
a2369ed3 2438num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2439{
37409796 2440 HOST_WIDE_INT low, high;
bb8df8a6 2441
37409796 2442 switch (GET_CODE (op))
0d30d435 2443 {
37409796 2444 case CONST_INT:
0d30d435 2445#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2446 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2447 && mask64_operand (op, mode))
c4ad648e 2448 return 2;
0d30d435
DE
2449 else
2450#endif
2451 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2452
37409796
NS
2453 case CONST_DOUBLE:
2454 if (mode == SFmode)
2455 {
2456 long l;
2457 REAL_VALUE_TYPE rv;
bb8df8a6 2458
37409796
NS
2459 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2460 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2461 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2462 }
a260abc9 2463
37409796
NS
2464 if (mode == VOIDmode || mode == DImode)
2465 {
2466 high = CONST_DOUBLE_HIGH (op);
2467 low = CONST_DOUBLE_LOW (op);
2468 }
2469 else
2470 {
2471 long l[2];
2472 REAL_VALUE_TYPE rv;
bb8df8a6 2473
37409796 2474 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2475 if (DECIMAL_FLOAT_MODE_P (mode))
2476 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2477 else
2478 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2479 high = l[WORDS_BIG_ENDIAN == 0];
2480 low = l[WORDS_BIG_ENDIAN != 0];
2481 }
47ad8c61 2482
37409796
NS
2483 if (TARGET_32BIT)
2484 return (num_insns_constant_wide (low)
2485 + num_insns_constant_wide (high));
2486 else
2487 {
2488 if ((high == 0 && low >= 0)
2489 || (high == -1 && low < 0))
2490 return num_insns_constant_wide (low);
bb8df8a6 2491
1990cd79 2492 else if (mask64_operand (op, mode))
37409796 2493 return 2;
bb8df8a6 2494
37409796
NS
2495 else if (low == 0)
2496 return num_insns_constant_wide (high) + 1;
bb8df8a6 2497
37409796
NS
2498 else
2499 return (num_insns_constant_wide (high)
2500 + num_insns_constant_wide (low) + 1);
2501 }
bb8df8a6 2502
37409796
NS
2503 default:
2504 gcc_unreachable ();
4e74d8ec 2505 }
4e74d8ec
MM
2506}
2507
0972012c
RS
2508/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2509 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2510 corresponding element of the vector, but for V4SFmode and V2SFmode,
2511 the corresponding "float" is interpreted as an SImode integer. */
2512
2513static HOST_WIDE_INT
2514const_vector_elt_as_int (rtx op, unsigned int elt)
2515{
2516 rtx tmp = CONST_VECTOR_ELT (op, elt);
2517 if (GET_MODE (op) == V4SFmode
2518 || GET_MODE (op) == V2SFmode)
2519 tmp = gen_lowpart (SImode, tmp);
2520 return INTVAL (tmp);
2521}
452a7d36 2522
77ccdfed 2523/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2524 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2525 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2526 all items are set to the same value and contain COPIES replicas of the
2527 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2528 operand and the others are set to the value of the operand's msb. */
2529
2530static bool
2531vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2532{
66180ff3
PB
2533 enum machine_mode mode = GET_MODE (op);
2534 enum machine_mode inner = GET_MODE_INNER (mode);
2535
2536 unsigned i;
2537 unsigned nunits = GET_MODE_NUNITS (mode);
2538 unsigned bitsize = GET_MODE_BITSIZE (inner);
2539 unsigned mask = GET_MODE_MASK (inner);
2540
0972012c 2541 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2542 HOST_WIDE_INT splat_val = val;
2543 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2544
2545 /* Construct the value to be splatted, if possible. If not, return 0. */
2546 for (i = 2; i <= copies; i *= 2)
452a7d36 2547 {
66180ff3
PB
2548 HOST_WIDE_INT small_val;
2549 bitsize /= 2;
2550 small_val = splat_val >> bitsize;
2551 mask >>= bitsize;
2552 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2553 return false;
2554 splat_val = small_val;
2555 }
c4ad648e 2556
66180ff3
PB
2557 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2558 if (EASY_VECTOR_15 (splat_val))
2559 ;
2560
2561 /* Also check if we can splat, and then add the result to itself. Do so if
 2562     the value is positive, or if the splat instruction is using OP's mode;
2563 for splat_val < 0, the splat and the add should use the same mode. */
2564 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2565 && (splat_val >= 0 || (step == 1 && copies == 1)))
2566 ;
2567
2568 else
2569 return false;
2570
2571 /* Check if VAL is present in every STEP-th element, and the
2572 other elements are filled with its most significant bit. */
2573 for (i = 0; i < nunits - 1; ++i)
2574 {
2575 HOST_WIDE_INT desired_val;
2576 if (((i + 1) & (step - 1)) == 0)
2577 desired_val = val;
2578 else
2579 desired_val = msb_val;
2580
0972012c 2581 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2582 return false;
452a7d36 2583 }
66180ff3
PB
2584
2585 return true;
452a7d36
HP
2586}
2587
69ef87e2 2588
77ccdfed 2589/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2590 with a vspltisb, vspltish or vspltisw. */
2591
2592bool
2593easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2594{
66180ff3 2595 unsigned step, copies;
d744e06e 2596
66180ff3
PB
2597 if (mode == VOIDmode)
2598 mode = GET_MODE (op);
2599 else if (mode != GET_MODE (op))
2600 return false;
d744e06e 2601
66180ff3
PB
2602 /* Start with a vspltisw. */
2603 step = GET_MODE_NUNITS (mode) / 4;
2604 copies = 1;
2605
2606 if (vspltis_constant (op, step, copies))
2607 return true;
2608
2609 /* Then try with a vspltish. */
2610 if (step == 1)
2611 copies <<= 1;
2612 else
2613 step >>= 1;
2614
2615 if (vspltis_constant (op, step, copies))
2616 return true;
2617
2618 /* And finally a vspltisb. */
2619 if (step == 1)
2620 copies <<= 1;
2621 else
2622 step >>= 1;
2623
2624 if (vspltis_constant (op, step, copies))
2625 return true;
2626
2627 return false;
d744e06e
AH
2628}
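/* Two illustrative cases of the search above (examples, not from this
   file):

   - the V8HImode constant { 0, 5, 0, 5, 0, 5, 0, 5 } matches the first
     try (step 2, copies 1): every second halfword is 5 and the rest hold
     its sign bits, so it is a single "vspltisw %0,5";

   - a V4SImode constant with every word equal to 0x05050505 only matches
     the third try (copies 4): each word is four replicas of the byte 5,
     so it is a single "vspltisb %0,5".  */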
2629
66180ff3
PB
2630/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2631 result is OP. Abort if it is not possible. */
d744e06e 2632
f676971a 2633rtx
66180ff3 2634gen_easy_altivec_constant (rtx op)
452a7d36 2635{
66180ff3
PB
2636 enum machine_mode mode = GET_MODE (op);
2637 int nunits = GET_MODE_NUNITS (mode);
2638 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2639 unsigned step = nunits / 4;
2640 unsigned copies = 1;
2641
2642 /* Start with a vspltisw. */
2643 if (vspltis_constant (op, step, copies))
2644 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2645
2646 /* Then try with a vspltish. */
2647 if (step == 1)
2648 copies <<= 1;
2649 else
2650 step >>= 1;
2651
2652 if (vspltis_constant (op, step, copies))
2653 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2654
2655 /* And finally a vspltisb. */
2656 if (step == 1)
2657 copies <<= 1;
2658 else
2659 step >>= 1;
2660
2661 if (vspltis_constant (op, step, copies))
2662 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2663
2664 gcc_unreachable ();
d744e06e
AH
2665}
2666
2667const char *
a2369ed3 2668output_vec_const_move (rtx *operands)
d744e06e
AH
2669{
2670 int cst, cst2;
2671 enum machine_mode mode;
2672 rtx dest, vec;
2673
2674 dest = operands[0];
2675 vec = operands[1];
d744e06e 2676 mode = GET_MODE (dest);
69ef87e2 2677
d744e06e
AH
2678 if (TARGET_ALTIVEC)
2679 {
66180ff3 2680 rtx splat_vec;
d744e06e
AH
2681 if (zero_constant (vec, mode))
2682 return "vxor %0,%0,%0";
37409796 2683
66180ff3
PB
2684 splat_vec = gen_easy_altivec_constant (vec);
2685 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2686 operands[1] = XEXP (splat_vec, 0);
2687 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2688 return "#";
bb8df8a6 2689
66180ff3 2690 switch (GET_MODE (splat_vec))
98ef3137 2691 {
37409796 2692 case V4SImode:
66180ff3 2693 return "vspltisw %0,%1";
c4ad648e 2694
37409796 2695 case V8HImode:
66180ff3 2696 return "vspltish %0,%1";
c4ad648e 2697
37409796 2698 case V16QImode:
66180ff3 2699 return "vspltisb %0,%1";
bb8df8a6 2700
37409796
NS
2701 default:
2702 gcc_unreachable ();
98ef3137 2703 }
69ef87e2
AH
2704 }
2705
37409796 2706 gcc_assert (TARGET_SPE);
bb8df8a6 2707
37409796
NS
2708 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2709 pattern of V1DI, V4HI, and V2SF.
2710
2711 FIXME: We should probably return # and add post reload
2712 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2713 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2714 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2715 operands[1] = CONST_VECTOR_ELT (vec, 0);
2716 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2717 if (cst == cst2)
2718 return "li %0,%1\n\tevmergelo %0,%0,%0";
2719 else
2720 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2721}
2722
f5027409
RE
 2723/* Initialize the paired-float vector TARGET to VALS.  */
2724
2725void
2726paired_expand_vector_init (rtx target, rtx vals)
2727{
2728 enum machine_mode mode = GET_MODE (target);
2729 int n_elts = GET_MODE_NUNITS (mode);
2730 int n_var = 0;
2731 rtx x, new, tmp, constant_op, op1, op2;
2732 int i;
2733
2734 for (i = 0; i < n_elts; ++i)
2735 {
2736 x = XVECEXP (vals, 0, i);
2737 if (!CONSTANT_P (x))
2738 ++n_var;
2739 }
2740 if (n_var == 0)
2741 {
2742 /* Load from constant pool. */
2743 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2744 return;
2745 }
2746
2747 if (n_var == 2)
2748 {
2749 /* The vector is initialized only with non-constants. */
2750 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2751 XVECEXP (vals, 0, 1));
2752
2753 emit_move_insn (target, new);
2754 return;
2755 }
2756
2757 /* One field is non-constant and the other one is a constant. Load the
 2758     constant from the constant pool and use the ps_merge instruction to
2759 construct the whole vector. */
2760 op1 = XVECEXP (vals, 0, 0);
2761 op2 = XVECEXP (vals, 0, 1);
2762
2763 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2764
2765 tmp = gen_reg_rtx (GET_MODE (constant_op));
2766 emit_move_insn (tmp, constant_op);
2767
2768 if (CONSTANT_P (op1))
2769 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2770 else
2771 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2772
2773 emit_move_insn (target, new);
2774}
2775
e2e95f45
RE
2776void
2777paired_expand_vector_move (rtx operands[])
2778{
2779 rtx op0 = operands[0], op1 = operands[1];
2780
2781 emit_move_insn (op0, op1);
2782}
2783
2784/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2785 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
 2786   operands for the relation operation RCODE.  This is a recursive
2787 function. */
2788
2789static void
2790paired_emit_vector_compare (enum rtx_code rcode,
2791 rtx dest, rtx op0, rtx op1,
2792 rtx cc_op0, rtx cc_op1)
2793{
2794 rtx tmp = gen_reg_rtx (V2SFmode);
2795 rtx tmp1, max, min, equal_zero;
2796
2797 gcc_assert (TARGET_PAIRED_FLOAT);
2798 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2799
2800 switch (rcode)
2801 {
2802 case LT:
2803 case LTU:
2804 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2805 return;
2806 case GE:
2807 case GEU:
2808 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2809 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2810 return;
2811 case LE:
2812 case LEU:
2813 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2814 return;
2815 case GT:
2816 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2817 return;
2818 case EQ:
2819 tmp1 = gen_reg_rtx (V2SFmode);
2820 max = gen_reg_rtx (V2SFmode);
2821 min = gen_reg_rtx (V2SFmode);
2822 equal_zero = gen_reg_rtx (V2SFmode);
2823
2824 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2825 emit_insn (gen_selv2sf4
2826 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2827 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2828 emit_insn (gen_selv2sf4
2829 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2830 emit_insn (gen_subv2sf3 (tmp1, min, max));
2831 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2832 return;
2833 case NE:
2834 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2835 return;
2836 case UNLE:
2837 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2838 return;
2839 case UNLT:
2840 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2841 return;
2842 case UNGE:
2843 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2844 return;
2845 case UNGT:
2846 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2847 return;
2848 default:
2849 gcc_unreachable ();
2850 }
2851
2852 return;
2853}
2854
2855/* Emit vector conditional expression.
2856 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2857 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2858
2859int
2860paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2861 rtx cond, rtx cc_op0, rtx cc_op1)
2862{
2863 enum rtx_code rcode = GET_CODE (cond);
2864
2865 if (!TARGET_PAIRED_FLOAT)
2866 return 0;
2867
2868 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2869
2870 return 1;
2871}
2872
7a4eca66
DE
2873/* Initialize vector TARGET to VALS. */
2874
2875void
2876rs6000_expand_vector_init (rtx target, rtx vals)
2877{
2878 enum machine_mode mode = GET_MODE (target);
2879 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2880 int n_elts = GET_MODE_NUNITS (mode);
2881 int n_var = 0, one_var = -1;
2882 bool all_same = true, all_const_zero = true;
2883 rtx x, mem;
2884 int i;
2885
2886 for (i = 0; i < n_elts; ++i)
2887 {
2888 x = XVECEXP (vals, 0, i);
2889 if (!CONSTANT_P (x))
2890 ++n_var, one_var = i;
2891 else if (x != CONST0_RTX (inner_mode))
2892 all_const_zero = false;
2893
2894 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2895 all_same = false;
2896 }
2897
2898 if (n_var == 0)
2899 {
2900 if (mode != V4SFmode && all_const_zero)
2901 {
2902 /* Zero register. */
2903 emit_insn (gen_rtx_SET (VOIDmode, target,
2904 gen_rtx_XOR (mode, target, target)));
2905 return;
2906 }
66180ff3 2907 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2908 {
2909 /* Splat immediate. */
66180ff3 2910 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2911 return;
2912 }
2913 else if (all_same)
2914 ; /* Splat vector element. */
2915 else
2916 {
2917 /* Load from constant pool. */
2918 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2919 return;
2920 }
2921 }
2922
2923 /* Store value to stack temp. Load vector element. Splat. */
2924 if (all_same)
2925 {
2926 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2927 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2928 XVECEXP (vals, 0, 0));
2929 x = gen_rtx_UNSPEC (VOIDmode,
2930 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2931 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2932 gen_rtvec (2,
2933 gen_rtx_SET (VOIDmode,
2934 target, mem),
2935 x)));
2936 x = gen_rtx_VEC_SELECT (inner_mode, target,
2937 gen_rtx_PARALLEL (VOIDmode,
2938 gen_rtvec (1, const0_rtx)));
2939 emit_insn (gen_rtx_SET (VOIDmode, target,
2940 gen_rtx_VEC_DUPLICATE (mode, x)));
2941 return;
2942 }
2943
2944 /* One field is non-constant. Load constant then overwrite
2945 varying field. */
2946 if (n_var == 1)
2947 {
2948 rtx copy = copy_rtx (vals);
2949
57b51d4d 2950 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2951 varying element. */
2952 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2953 rs6000_expand_vector_init (target, copy);
2954
2955 /* Insert variable. */
2956 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2957 return;
2958 }
2959
2960 /* Construct the vector in memory one field at a time
2961 and load the whole vector. */
2962 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2963 for (i = 0; i < n_elts; i++)
2964 emit_move_insn (adjust_address_nv (mem, inner_mode,
2965 i * GET_MODE_SIZE (inner_mode)),
2966 XVECEXP (vals, 0, i));
2967 emit_move_insn (target, mem);
2968}
2969
2970/* Set field ELT of TARGET to VAL. */
2971
2972void
2973rs6000_expand_vector_set (rtx target, rtx val, int elt)
2974{
2975 enum machine_mode mode = GET_MODE (target);
2976 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2977 rtx reg = gen_reg_rtx (mode);
2978 rtx mask, mem, x;
2979 int width = GET_MODE_SIZE (inner_mode);
2980 int i;
2981
2982 /* Load single variable value. */
2983 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2984 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2985 x = gen_rtx_UNSPEC (VOIDmode,
2986 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2987 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2988 gen_rtvec (2,
2989 gen_rtx_SET (VOIDmode,
2990 reg, mem),
2991 x)));
2992
2993 /* Linear sequence. */
2994 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2995 for (i = 0; i < 16; ++i)
2996 XVECEXP (mask, 0, i) = GEN_INT (i);
2997
2998 /* Set permute mask to insert element into target. */
2999 for (i = 0; i < width; ++i)
3000 XVECEXP (mask, 0, elt*width + i)
3001 = GEN_INT (i + 0x10);
3002 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3003 x = gen_rtx_UNSPEC (mode,
3004 gen_rtvec (3, target, reg,
3005 force_reg (V16QImode, x)),
3006 UNSPEC_VPERM);
3007 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3008}
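/* For illustration: setting element 1 of a V4SFmode vector builds the
   byte permute mask

     { 0,1,2,3, 16,17,18,19, 8,9,10,11, 12,13,14,15 }

   where indices below 16 keep the corresponding bytes of TARGET and
   indices 16..19 (0x10 + i) pick the bytes of the freshly loaded value in
   REG, so a single vperm performs the insertion.  */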
3009
3010/* Extract field ELT from VEC into TARGET. */
3011
3012void
3013rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3014{
3015 enum machine_mode mode = GET_MODE (vec);
3016 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3017 rtx mem, x;
3018
3019 /* Allocate mode-sized buffer. */
3020 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3021
3022 /* Add offset to field within buffer matching vector element. */
3023 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3024
3025 /* Store single field into mode-sized buffer. */
3026 x = gen_rtx_UNSPEC (VOIDmode,
3027 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3028 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3029 gen_rtvec (2,
3030 gen_rtx_SET (VOIDmode,
3031 mem, vec),
3032 x)));
3033 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3034}
3035
0ba1b2ff
AM
3036/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3037 implement ANDing by the mask IN. */
3038void
a2369ed3 3039build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3040{
3041#if HOST_BITS_PER_WIDE_INT >= 64
3042 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3043 int shift;
3044
37409796 3045 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3046
3047 c = INTVAL (in);
3048 if (c & 1)
3049 {
3050 /* Assume c initially something like 0x00fff000000fffff. The idea
3051 is to rotate the word so that the middle ^^^^^^ group of zeros
3052 is at the MS end and can be cleared with an rldicl mask. We then
3053 rotate back and clear off the MS ^^ group of zeros with a
3054 second rldicl. */
3055 c = ~c; /* c == 0xff000ffffff00000 */
3056 lsb = c & -c; /* lsb == 0x0000000000100000 */
3057 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3058 c = ~c; /* c == 0x00fff000000fffff */
3059 c &= -lsb; /* c == 0x00fff00000000000 */
3060 lsb = c & -c; /* lsb == 0x0000100000000000 */
3061 c = ~c; /* c == 0xff000fffffffffff */
3062 c &= -lsb; /* c == 0xff00000000000000 */
3063 shift = 0;
3064 while ((lsb >>= 1) != 0)
3065 shift++; /* shift == 44 on exit from loop */
3066 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3067 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3068 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3069 }
3070 else
0ba1b2ff
AM
3071 {
3072 /* Assume c initially something like 0xff000f0000000000. The idea
3073 is to rotate the word so that the ^^^ middle group of zeros
3074 is at the LS end and can be cleared with an rldicr mask. We then
3075 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3076 a second rldicr. */
3077 lsb = c & -c; /* lsb == 0x0000010000000000 */
3078 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3079 c = ~c; /* c == 0x00fff0ffffffffff */
3080 c &= -lsb; /* c == 0x00fff00000000000 */
3081 lsb = c & -c; /* lsb == 0x0000100000000000 */
3082 c = ~c; /* c == 0xff000fffffffffff */
3083 c &= -lsb; /* c == 0xff00000000000000 */
3084 shift = 0;
3085 while ((lsb >>= 1) != 0)
3086 shift++; /* shift == 44 on exit from loop */
3087 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3088 m1 >>= shift; /* m1 == 0x0000000000000fff */
3089 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3090 }
3091
3092 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3093 masks will be all 1's. We are guaranteed more than one transition. */
3094 out[0] = GEN_INT (64 - shift);
3095 out[1] = GEN_INT (m1);
3096 out[2] = GEN_INT (shift);
3097 out[3] = GEN_INT (m2);
3098#else
045572c7
GK
3099 (void)in;
3100 (void)out;
37409796 3101 gcc_unreachable ();
0ba1b2ff 3102#endif
a260abc9
DE
3103}
3104
54b695e7 3105/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3106
3107bool
54b695e7
AH
3108invalid_e500_subreg (rtx op, enum machine_mode mode)
3109{
61c76239
JM
3110 if (TARGET_E500_DOUBLE)
3111 {
17caeff2
JM
3112 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3113 subreg:TI and reg:TF. */
61c76239 3114 if (GET_CODE (op) == SUBREG
17caeff2 3115 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3116 && REG_P (SUBREG_REG (op))
17caeff2
JM
3117 && (GET_MODE (SUBREG_REG (op)) == DFmode
3118 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3119 return true;
3120
17caeff2
JM
3121 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3122 reg:TI. */
61c76239 3123 if (GET_CODE (op) == SUBREG
17caeff2 3124 && (mode == DFmode || mode == TFmode)
61c76239 3125 && REG_P (SUBREG_REG (op))
17caeff2
JM
3126 && (GET_MODE (SUBREG_REG (op)) == DImode
3127 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3128 return true;
3129 }
54b695e7 3130
61c76239
JM
3131 if (TARGET_SPE
3132 && GET_CODE (op) == SUBREG
3133 && mode == SImode
54b695e7 3134 && REG_P (SUBREG_REG (op))
14502dad 3135 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3136 return true;
3137
3138 return false;
3139}
3140
58182de3 3141/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3142 field is an FP double while the FP fields remain word aligned. */
3143
19d66194 3144unsigned int
fa5b0972
AM
3145rs6000_special_round_type_align (tree type, unsigned int computed,
3146 unsigned int specified)
95727fb8 3147{
fa5b0972 3148 unsigned int align = MAX (computed, specified);
95727fb8 3149 tree field = TYPE_FIELDS (type);
95727fb8 3150
bb8df8a6 3151 /* Skip all non-field decls.  */
85962ac8 3152 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3153 field = TREE_CHAIN (field);
3154
fa5b0972
AM
3155 if (field != NULL && field != type)
3156 {
3157 type = TREE_TYPE (field);
3158 while (TREE_CODE (type) == ARRAY_TYPE)
3159 type = TREE_TYPE (type);
3160
3161 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3162 align = MAX (align, 64);
3163 }
95727fb8 3164
fa5b0972 3165 return align;
95727fb8
AP
3166}
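/* Illustrative example (hypothetical types, assuming an AIX target):
   under the rule above, a record whose first field is a double is
   rounded up to doubleword alignment, while a record that merely
   contains a double in a later field keeps word alignment for the
   record itself.  */
#if 0
struct starts_with_double { double d; int i; };  /* record alignment 8 */
struct double_not_first { int i; double d; };    /* record alignment 4 */
#endif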
3167
58182de3
GK
3168/* Darwin increases record alignment to the natural alignment of
3169 the first field. */
3170
3171unsigned int
3172darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3173 unsigned int specified)
3174{
3175 unsigned int align = MAX (computed, specified);
3176
3177 if (TYPE_PACKED (type))
3178 return align;
3179
3180 /* Find the first field, looking down into aggregates. */
3181 do {
3182 tree field = TYPE_FIELDS (type);
 3183 /* Skip all non-field decls.  */
3184 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3185 field = TREE_CHAIN (field);
3186 if (! field)
3187 break;
3188 type = TREE_TYPE (field);
3189 while (TREE_CODE (type) == ARRAY_TYPE)
3190 type = TREE_TYPE (type);
3191 } while (AGGREGATE_TYPE_P (type));
3192
3193 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3194 align = MAX (align, TYPE_ALIGN (type));
3195
3196 return align;
3197}
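/* Illustrative example (hypothetical types, assuming a Darwin target):
   the record alignment follows the natural alignment of the first
   scalar found by looking through leading aggregates, so both records
   below are doubleword aligned.  */
#if 0
struct first_is_double { double d; char c; };
struct first_is_nested { struct { double d; } inner; char c; };
#endif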
3198
a4f6c312 3199/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3200
3201int
f676971a 3202small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3203 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3204{
38c1f2d7 3205#if TARGET_ELF
5f59ecb7 3206 rtx sym_ref;
7509c759 3207
d9407988 3208 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3209 return 0;
a54d04b7 3210
f607bc57 3211 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3212 return 0;
3213
88228c4b
MM
3214 if (GET_CODE (op) == SYMBOL_REF)
3215 sym_ref = op;
3216
3217 else if (GET_CODE (op) != CONST
3218 || GET_CODE (XEXP (op, 0)) != PLUS
3219 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3220 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3221 return 0;
3222
88228c4b 3223 else
dbf55e53
MM
3224 {
3225 rtx sum = XEXP (op, 0);
3226 HOST_WIDE_INT summand;
3227
3228 /* We have to be careful here, because it is the referenced address
c4ad648e 3229 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3230 summand = INTVAL (XEXP (sum, 1));
307b599c 3231 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3232 return 0;
dbf55e53
MM
3233
3234 sym_ref = XEXP (sum, 0);
3235 }
88228c4b 3236
20bfcd69 3237 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3238#else
3239 return 0;
3240#endif
7509c759 3241}
46c07df8 3242
3a1f863f 3243/* Return true if either operand is a general purpose register. */
46c07df8 3244
3a1f863f
DE
3245bool
3246gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3247{
3a1f863f
DE
3248 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3249 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3250}
3251
9ebbca7d 3252\f
4d588c14
RH
3253/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3254
f676971a
EC
3255static int
3256constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3257{
9390387d 3258 switch (GET_CODE (op))
9ebbca7d
GK
3259 {
3260 case SYMBOL_REF:
c4501e62
JJ
3261 if (RS6000_SYMBOL_REF_TLS_P (op))
3262 return 0;
3263 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3264 {
3265 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3266 {
3267 *have_sym = 1;
3268 return 1;
3269 }
3270 else
3271 return 0;
3272 }
3273 else if (! strcmp (XSTR (op, 0), toc_label_name))
3274 {
3275 *have_toc = 1;
3276 return 1;
3277 }
3278 else
3279 return 0;
9ebbca7d
GK
3280 case PLUS:
3281 case MINUS:
c1f11548
DE
3282 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3283 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3284 case CONST:
a4f6c312 3285 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3286 case CONST_INT:
a4f6c312 3287 return 1;
9ebbca7d 3288 default:
a4f6c312 3289 return 0;
9ebbca7d
GK
3290 }
3291}
3292
4d588c14 3293static bool
a2369ed3 3294constant_pool_expr_p (rtx op)
9ebbca7d
GK
3295{
3296 int have_sym = 0;
3297 int have_toc = 0;
3298 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3299}
3300
48d72335 3301bool
a2369ed3 3302toc_relative_expr_p (rtx op)
9ebbca7d 3303{
4d588c14
RH
3304 int have_sym = 0;
3305 int have_toc = 0;
3306 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3307}
3308
4d588c14 3309bool
a2369ed3 3310legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3311{
3312 return (TARGET_TOC
3313 && GET_CODE (x) == PLUS
3314 && GET_CODE (XEXP (x, 0)) == REG
3315 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3316 && constant_pool_expr_p (XEXP (x, 1)));
3317}
3318
d04b6e6e
EB
3319static bool
3320legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3321{
3322 return (DEFAULT_ABI == ABI_V4
3323 && !flag_pic && !TARGET_TOC
3324 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3325 && small_data_operand (x, mode));
3326}
3327
60cdabab
DE
 3328/* SPE offset addressing is limited to 5 bits' worth of doublewords.  */
3329#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
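/* For instance (illustrative): offsets 0, 8, 16, ..., 248 satisfy
   SPE_CONST_OFFSET_OK, while 4 (not doubleword aligned) and 256
   (beyond five bits of doublewords) do not.  */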
3330
76d2b81d
DJ
3331bool
3332rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3333{
3334 unsigned HOST_WIDE_INT offset, extra;
3335
3336 if (GET_CODE (x) != PLUS)
3337 return false;
3338 if (GET_CODE (XEXP (x, 0)) != REG)
3339 return false;
3340 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3341 return false;
60cdabab
DE
3342 if (legitimate_constant_pool_address_p (x))
3343 return true;
4d588c14
RH
3344 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3345 return false;
3346
3347 offset = INTVAL (XEXP (x, 1));
3348 extra = 0;
3349 switch (mode)
3350 {
3351 case V16QImode:
3352 case V8HImode:
3353 case V4SFmode:
3354 case V4SImode:
7a4eca66
DE
3355 /* AltiVec vector modes. Only reg+reg addressing is valid and
3356 constant offset zero should not occur due to canonicalization.
3357 Allow any offset when not strict before reload. */
3358 return !strict;
4d588c14
RH
3359
3360 case V4HImode:
3361 case V2SImode:
3362 case V1DImode:
3363 case V2SFmode:
d42a3bae
RE
3364 /* Paired vector modes. Only reg+reg addressing is valid and
3365 constant offset zero should not occur due to canonicalization.
3366 Allow any offset when not strict before reload. */
3367 if (TARGET_PAIRED_FLOAT)
3368 return !strict;
4d588c14
RH
3369 /* SPE vector modes. */
3370 return SPE_CONST_OFFSET_OK (offset);
3371
3372 case DFmode:
7393f7f8 3373 case DDmode:
4d4cbc0e
AH
3374 if (TARGET_E500_DOUBLE)
3375 return SPE_CONST_OFFSET_OK (offset);
3376
4d588c14 3377 case DImode:
54b695e7
AH
3378 /* On e500v2, we may have:
3379
3380 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3381
3382 Which gets addressed with evldd instructions. */
3383 if (TARGET_E500_DOUBLE)
3384 return SPE_CONST_OFFSET_OK (offset);
3385
7393f7f8 3386 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3387 extra = 4;
3388 else if (offset & 3)
3389 return false;
3390 break;
3391
3392 case TFmode:
17caeff2
JM
3393 if (TARGET_E500_DOUBLE)
3394 return (SPE_CONST_OFFSET_OK (offset)
3395 && SPE_CONST_OFFSET_OK (offset + 8));
3396
4d588c14 3397 case TImode:
7393f7f8
BE
3398 case TDmode:
3399 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3400 extra = 12;
3401 else if (offset & 3)
3402 return false;
3403 else
3404 extra = 8;
3405 break;
3406
3407 default:
3408 break;
3409 }
3410
b1917422
AM
3411 offset += 0x8000;
3412 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3413}
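/* Illustrative reading of the final check above: both OFFSET and
   OFFSET + EXTRA must fit in a signed 16-bit displacement.  For
   example, DImode in 32-bit mode uses EXTRA = 4, so an offset of
   32764 is rejected because the second word would need the
   out-of-range displacement 32768.  */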
3414
6fb5fa3c 3415bool
a2369ed3 3416legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3417{
3418 rtx op0, op1;
3419
3420 if (GET_CODE (x) != PLUS)
3421 return false;
850e8d3d 3422
4d588c14
RH
3423 op0 = XEXP (x, 0);
3424 op1 = XEXP (x, 1);
3425
bf00cc0f 3426 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3427 replaced with proper base and index regs. */
3428 if (!strict
3429 && reload_in_progress
3430 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3431 && REG_P (op1))
3432 return true;
3433
3434 return (REG_P (op0) && REG_P (op1)
3435 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3436 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3437 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3438 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3439}
3440
48d72335 3441inline bool
a2369ed3 3442legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3443{
3444 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3445}
3446
48d72335 3447bool
4c81e946
FJ
3448macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3449{
c4ad648e 3450 if (!TARGET_MACHO || !flag_pic
9390387d 3451 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3452 return false;
3453 x = XEXP (x, 0);
4c81e946
FJ
3454
3455 if (GET_CODE (x) != LO_SUM)
3456 return false;
3457 if (GET_CODE (XEXP (x, 0)) != REG)
3458 return false;
3459 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3460 return false;
3461 x = XEXP (x, 1);
3462
3463 return CONSTANT_P (x);
3464}
3465
4d588c14 3466static bool
a2369ed3 3467legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3468{
3469 if (GET_CODE (x) != LO_SUM)
3470 return false;
3471 if (GET_CODE (XEXP (x, 0)) != REG)
3472 return false;
3473 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3474 return false;
54b695e7 3475 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3476 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3477 || mode == DImode))
f82f556d 3478 return false;
4d588c14
RH
3479 x = XEXP (x, 1);
3480
8622e235 3481 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3482 {
a29077da 3483 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3484 return false;
3485 if (TARGET_TOC)
3486 return false;
3487 if (GET_MODE_NUNITS (mode) != 1)
3488 return false;
5e5f01b9 3489 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3490 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3491 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3492 return false;
3493
3494 return CONSTANT_P (x);
3495 }
3496
3497 return false;
3498}
3499
3500
9ebbca7d
GK
3501/* Try machine-dependent ways of modifying an illegitimate address
3502 to be legitimate. If we find one, return the new, valid address.
3503 This is used from only one place: `memory_address' in explow.c.
3504
a4f6c312
SS
3505 OLDX is the address as it was before break_out_memory_refs was
3506 called. In some cases it is useful to look at this to decide what
3507 needs to be done.
9ebbca7d 3508
a4f6c312 3509 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3510
a4f6c312
SS
3511 It is always safe for this function to do nothing. It exists to
3512 recognize opportunities to optimize the output.
9ebbca7d
GK
3513
3514 On RS/6000, first check for the sum of a register with a constant
3515 integer that is out of range. If so, generate code to add the
3516 constant with the low-order 16 bits masked to the register and force
3517 this result into another register (this can be done with `cau').
3518 Then generate an address of REG+(CONST&0xffff), allowing for the
3519 possibility of bit 16 being a one.
3520
3521 Then check for the sum of a register and something not constant, try to
3522 load the other things into a register and return the sum. */
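/* Worked example (illustrative): for (plus (reg) (const_int 0x12348))
   the constant does not fit a signed 16-bit displacement, so the code
   below forces (plus (reg) (const_int 0x10000)) into a new register
   (an addis) and returns (plus (new_reg) (const_int 0x2348)).  When the
   low 16 bits are 0x8000 or more they act as a negative displacement,
   and the high part is rounded up by 0x10000 to compensate.  */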
4d588c14 3523
9ebbca7d 3524rtx
a2369ed3
DJ
3525rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3526 enum machine_mode mode)
0ac081f6 3527{
c4501e62
JJ
3528 if (GET_CODE (x) == SYMBOL_REF)
3529 {
3530 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3531 if (model != 0)
3532 return rs6000_legitimize_tls_address (x, model);
3533 }
3534
f676971a 3535 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3536 && GET_CODE (XEXP (x, 0)) == REG
3537 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3538 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3539 && !(SPE_VECTOR_MODE (mode)
3540 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3541 || mode == DImode))))
f676971a 3542 {
9ebbca7d
GK
3543 HOST_WIDE_INT high_int, low_int;
3544 rtx sum;
a65c591c
DE
3545 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3546 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3547 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3548 GEN_INT (high_int)), 0);
3549 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3550 }
f676971a 3551 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3552 && GET_CODE (XEXP (x, 0)) == REG
3553 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3554 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3555 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3556 || TARGET_POWERPC64
7393f7f8
BE
3557 || (((mode != DImode && mode != DFmode && mode != DDmode)
3558 || TARGET_E500_DOUBLE)
3559 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3560 && (TARGET_POWERPC64 || mode != DImode)
3561 && mode != TImode)
3562 {
3563 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3564 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3565 }
0ac081f6
AH
3566 else if (ALTIVEC_VECTOR_MODE (mode))
3567 {
3568 rtx reg;
3569
3570 /* Make sure both operands are registers. */
3571 if (GET_CODE (x) == PLUS)
9f85ed45 3572 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3573 force_reg (Pmode, XEXP (x, 1)));
3574
3575 reg = force_reg (Pmode, x);
3576 return reg;
3577 }
4d4cbc0e 3578 else if (SPE_VECTOR_MODE (mode)
17caeff2 3579 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3580 || mode == DDmode || mode == TDmode
54b695e7 3581 || mode == DImode)))
a3170dc6 3582 {
54b695e7
AH
3583 if (mode == DImode)
3584 return NULL_RTX;
a3170dc6
AH
3585 /* We accept [reg + reg] and [reg + OFFSET]. */
3586
3587 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3588 {
3589 rtx op1 = XEXP (x, 0);
3590 rtx op2 = XEXP (x, 1);
a3170dc6 3591
c4ad648e 3592 op1 = force_reg (Pmode, op1);
a3170dc6 3593
c4ad648e
AM
3594 if (GET_CODE (op2) != REG
3595 && (GET_CODE (op2) != CONST_INT
3596 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3597 op2 = force_reg (Pmode, op2);
a3170dc6 3598
c4ad648e
AM
3599 return gen_rtx_PLUS (Pmode, op1, op2);
3600 }
a3170dc6
AH
3601
3602 return force_reg (Pmode, x);
3603 }
f1384257
AM
3604 else if (TARGET_ELF
3605 && TARGET_32BIT
3606 && TARGET_NO_TOC
3607 && ! flag_pic
9ebbca7d 3608 && GET_CODE (x) != CONST_INT
f676971a 3609 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3610 && CONSTANT_P (x)
6ac7bf2c
GK
3611 && GET_MODE_NUNITS (mode) == 1
3612 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3613 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3614 {
3615 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3616 emit_insn (gen_elf_high (reg, x));
3617 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3618 }
ee890fe2
SS
3619 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3620 && ! flag_pic
ab82a49f
AP
3621#if TARGET_MACHO
3622 && ! MACHO_DYNAMIC_NO_PIC_P
3623#endif
ee890fe2 3624 && GET_CODE (x) != CONST_INT
f676971a 3625 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3626 && CONSTANT_P (x)
f82f556d 3627 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3628 && mode != DImode
ee890fe2
SS
3629 && mode != TImode)
3630 {
3631 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3632 emit_insn (gen_macho_high (reg, x));
3633 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3634 }
f676971a 3635 else if (TARGET_TOC
4d588c14 3636 && constant_pool_expr_p (x)
a9098fd0 3637 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3638 {
3639 return create_TOC_reference (x);
3640 }
3641 else
3642 return NULL_RTX;
3643}
258bfae2 3644
fdbe66f2 3645/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3646 We need to emit DTP-relative relocations. */
3647
fdbe66f2 3648static void
c973d557
JJ
3649rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3650{
3651 switch (size)
3652 {
3653 case 4:
3654 fputs ("\t.long\t", file);
3655 break;
3656 case 8:
3657 fputs (DOUBLE_INT_ASM_OP, file);
3658 break;
3659 default:
37409796 3660 gcc_unreachable ();
c973d557
JJ
3661 }
3662 output_addr_const (file, x);
3663 fputs ("@dtprel+0x8000", file);
3664}
3665
c4501e62
JJ
3666/* Construct the SYMBOL_REF for the tls_get_addr function. */
3667
3668static GTY(()) rtx rs6000_tls_symbol;
3669static rtx
863d938c 3670rs6000_tls_get_addr (void)
c4501e62
JJ
3671{
3672 if (!rs6000_tls_symbol)
3673 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3674
3675 return rs6000_tls_symbol;
3676}
3677
3678/* Construct the SYMBOL_REF for TLS GOT references. */
3679
3680static GTY(()) rtx rs6000_got_symbol;
3681static rtx
863d938c 3682rs6000_got_sym (void)
c4501e62
JJ
3683{
3684 if (!rs6000_got_symbol)
3685 {
3686 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3687 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3688 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3689 }
c4501e62
JJ
3690
3691 return rs6000_got_symbol;
3692}
3693
3694/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3695 this (thread-local) address. */
3696
3697static rtx
a2369ed3 3698rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3699{
3700 rtx dest, insn;
3701
3702 dest = gen_reg_rtx (Pmode);
3703 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3704 {
3705 rtx tlsreg;
3706
3707 if (TARGET_64BIT)
3708 {
3709 tlsreg = gen_rtx_REG (Pmode, 13);
3710 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3711 }
3712 else
3713 {
3714 tlsreg = gen_rtx_REG (Pmode, 2);
3715 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3716 }
3717 emit_insn (insn);
3718 }
3719 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3720 {
3721 rtx tlsreg, tmp;
3722
3723 tmp = gen_reg_rtx (Pmode);
3724 if (TARGET_64BIT)
3725 {
3726 tlsreg = gen_rtx_REG (Pmode, 13);
3727 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3728 }
3729 else
3730 {
3731 tlsreg = gen_rtx_REG (Pmode, 2);
3732 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3733 }
3734 emit_insn (insn);
3735 if (TARGET_64BIT)
3736 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3737 else
3738 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3739 emit_insn (insn);
3740 }
3741 else
3742 {
3743 rtx r3, got, tga, tmp1, tmp2, eqv;
3744
4fed8f8f
AM
3745 /* We currently use relocations like @got@tlsgd for tls, which
3746 means the linker will handle allocation of tls entries, placing
3747 them in the .got section. So use a pointer to the .got section,
3748 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3749 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3750 if (TARGET_64BIT)
972f427b 3751 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3752 else
3753 {
3754 if (flag_pic == 1)
3755 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3756 else
3757 {
3758 rtx gsym = rs6000_got_sym ();
3759 got = gen_reg_rtx (Pmode);
3760 if (flag_pic == 0)
3761 rs6000_emit_move (got, gsym, Pmode);
3762 else
3763 {
e65a3857 3764 rtx tmp3, mem;
c4501e62
JJ
3765 rtx first, last;
3766
c4501e62
JJ
3767 tmp1 = gen_reg_rtx (Pmode);
3768 tmp2 = gen_reg_rtx (Pmode);
3769 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3770 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3771
e65a3857
DE
3772 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3773 emit_move_insn (tmp1,
1de43f85 3774 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3775 emit_move_insn (tmp2, mem);
3776 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3777 last = emit_move_insn (got, tmp3);
bd94cb6e 3778 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3779 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3780 }
3781 }
3782 }
3783
3784 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3785 {
3786 r3 = gen_rtx_REG (Pmode, 3);
3787 if (TARGET_64BIT)
3788 insn = gen_tls_gd_64 (r3, got, addr);
3789 else
3790 insn = gen_tls_gd_32 (r3, got, addr);
3791 start_sequence ();
3792 emit_insn (insn);
3793 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3794 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3795 insn = emit_call_insn (insn);
3796 CONST_OR_PURE_CALL_P (insn) = 1;
3797 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3798 insn = get_insns ();
3799 end_sequence ();
3800 emit_libcall_block (insn, dest, r3, addr);
3801 }
3802 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3803 {
3804 r3 = gen_rtx_REG (Pmode, 3);
3805 if (TARGET_64BIT)
3806 insn = gen_tls_ld_64 (r3, got);
3807 else
3808 insn = gen_tls_ld_32 (r3, got);
3809 start_sequence ();
3810 emit_insn (insn);
3811 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3812 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3813 insn = emit_call_insn (insn);
3814 CONST_OR_PURE_CALL_P (insn) = 1;
3815 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3816 insn = get_insns ();
3817 end_sequence ();
3818 tmp1 = gen_reg_rtx (Pmode);
3819 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3820 UNSPEC_TLSLD);
3821 emit_libcall_block (insn, tmp1, r3, eqv);
3822 if (rs6000_tls_size == 16)
3823 {
3824 if (TARGET_64BIT)
3825 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3826 else
3827 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3828 }
3829 else if (rs6000_tls_size == 32)
3830 {
3831 tmp2 = gen_reg_rtx (Pmode);
3832 if (TARGET_64BIT)
3833 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3834 else
3835 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3836 emit_insn (insn);
3837 if (TARGET_64BIT)
3838 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3839 else
3840 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3841 }
3842 else
3843 {
3844 tmp2 = gen_reg_rtx (Pmode);
3845 if (TARGET_64BIT)
3846 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3847 else
3848 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3849 emit_insn (insn);
3850 insn = gen_rtx_SET (Pmode, dest,
3851 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3852 }
3853 emit_insn (insn);
3854 }
3855 else
3856 {
a7b376ee 3857 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3858 tmp2 = gen_reg_rtx (Pmode);
3859 if (TARGET_64BIT)
3860 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3861 else
3862 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3863 emit_insn (insn);
3864 if (TARGET_64BIT)
3865 insn = gen_tls_tls_64 (dest, tmp2, addr);
3866 else
3867 insn = gen_tls_tls_32 (dest, tmp2, addr);
3868 emit_insn (insn);
3869 }
3870 }
3871
3872 return dest;
3873}
3874
c4501e62
JJ
3875/* Return 1 if X contains a thread-local symbol. */
3876
3877bool
a2369ed3 3878rs6000_tls_referenced_p (rtx x)
c4501e62 3879{
cd413cab
AP
3880 if (! TARGET_HAVE_TLS)
3881 return false;
3882
c4501e62
JJ
3883 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3884}
3885
3886/* Return 1 if *X is a thread-local symbol. This is the same as
3887 rs6000_tls_symbol_ref except for the type of the unused argument. */
3888
9390387d 3889static int
a2369ed3 3890rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3891{
3892 return RS6000_SYMBOL_REF_TLS_P (*x);
3893}
3894
24ea750e
DJ
3895/* The convention appears to be to define this wherever it is used.
3896 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3897 is now used here. */
3898#ifndef REG_MODE_OK_FOR_BASE_P
3899#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3900#endif
3901
3902/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3903 replace the input X, or the original X if no replacement is called for.
3904 The output parameter *WIN is 1 if the calling macro should goto WIN,
3905 0 if it should not.
3906
3907 For RS/6000, we wish to handle large displacements off a base
 3908 register by splitting the addend across an addis and the mem insn.
 3909 This cuts the number of extra insns needed from 3 to 1.
3910
3911 On Darwin, we use this to generate code for floating point constants.
3912 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3913 The Darwin code is inside #if TARGET_MACHO because only then is
3914 machopic_function_base_name() defined. */
3915rtx
f676971a 3916rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3917 int opnum, int type,
3918 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3919{
f676971a 3920 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3921 if (GET_CODE (x) == PLUS
3922 && GET_CODE (XEXP (x, 0)) == PLUS
3923 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3924 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3925 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3926 {
3927 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3928 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3929 opnum, (enum reload_type)type);
24ea750e
DJ
3930 *win = 1;
3931 return x;
3932 }
3deb2758 3933
24ea750e
DJ
3934#if TARGET_MACHO
3935 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3936 && GET_CODE (x) == LO_SUM
3937 && GET_CODE (XEXP (x, 0)) == PLUS
3938 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3939 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3940 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3941 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3942 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3943 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3944 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3945 {
3946 /* Result of previous invocation of this function on Darwin
6f317ef3 3947 floating point constant. */
24ea750e 3948 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3949 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3950 opnum, (enum reload_type)type);
24ea750e
DJ
3951 *win = 1;
3952 return x;
3953 }
3954#endif
4937d02d
DE
3955
3956 /* Force ld/std non-word aligned offset into base register by wrapping
3957 in offset 0. */
3958 if (GET_CODE (x) == PLUS
3959 && GET_CODE (XEXP (x, 0)) == REG
3960 && REGNO (XEXP (x, 0)) < 32
3961 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3962 && GET_CODE (XEXP (x, 1)) == CONST_INT
3963 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3964 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3965 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3966 && TARGET_POWERPC64)
3967 {
3968 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3969 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3970 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3971 opnum, (enum reload_type) type);
3972 *win = 1;
3973 return x;
3974 }
3975
24ea750e
DJ
3976 if (GET_CODE (x) == PLUS
3977 && GET_CODE (XEXP (x, 0)) == REG
3978 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3979 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3980 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3981 && !SPE_VECTOR_MODE (mode)
17caeff2 3982 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3983 || mode == DImode))
78c875e8 3984 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3985 {
3986 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3987 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3988 HOST_WIDE_INT high
c4ad648e 3989 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3990
3991 /* Check for 32-bit overflow. */
3992 if (high + low != val)
c4ad648e 3993 {
24ea750e
DJ
3994 *win = 0;
3995 return x;
3996 }
3997
3998 /* Reload the high part into a base reg; leave the low part
c4ad648e 3999 in the mem directly. */
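      /* For instance (illustrative): VAL = 0x12345678 splits into
	 HIGH = 0x12340000, which is reloaded into the base register,
	 and LOW = 0x5678, which stays as the displacement.  */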
24ea750e
DJ
4000
4001 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4002 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4003 GEN_INT (high)),
4004 GEN_INT (low));
24ea750e
DJ
4005
4006 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4007 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4008 opnum, (enum reload_type)type);
24ea750e
DJ
4009 *win = 1;
4010 return x;
4011 }
4937d02d 4012
24ea750e 4013 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4014 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4015 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4016#if TARGET_MACHO
4017 && DEFAULT_ABI == ABI_DARWIN
a29077da 4018 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4019#else
4020 && DEFAULT_ABI == ABI_V4
4021 && !flag_pic
4022#endif
7393f7f8 4023 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
4024 The same goes for DImode without 64-bit gprs and DFmode
4025 without fprs. */
0d8c1c97 4026 && mode != TFmode
7393f7f8 4027 && mode != TDmode
7b5d92b2
AM
4028 && (mode != DImode || TARGET_POWERPC64)
4029 && (mode != DFmode || TARGET_POWERPC64
4030 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4031 {
8308679f 4032#if TARGET_MACHO
a29077da
GK
4033 if (flag_pic)
4034 {
4035 rtx offset = gen_rtx_CONST (Pmode,
4036 gen_rtx_MINUS (Pmode, x,
11abc112 4037 machopic_function_base_sym ()));
a29077da
GK
4038 x = gen_rtx_LO_SUM (GET_MODE (x),
4039 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4040 gen_rtx_HIGH (Pmode, offset)), offset);
4041 }
4042 else
8308679f 4043#endif
a29077da 4044 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4045 gen_rtx_HIGH (Pmode, x), x);
a29077da 4046
24ea750e 4047 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4048 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4049 opnum, (enum reload_type)type);
24ea750e
DJ
4050 *win = 1;
4051 return x;
4052 }
4937d02d 4053
dec1f3aa
DE
4054 /* Reload an offset address wrapped by an AND that represents the
4055 masking of the lower bits. Strip the outer AND and let reload
4056 convert the offset address into an indirect address. */
4057 if (TARGET_ALTIVEC
4058 && ALTIVEC_VECTOR_MODE (mode)
4059 && GET_CODE (x) == AND
4060 && GET_CODE (XEXP (x, 0)) == PLUS
4061 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4062 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4063 && GET_CODE (XEXP (x, 1)) == CONST_INT
4064 && INTVAL (XEXP (x, 1)) == -16)
4065 {
4066 x = XEXP (x, 0);
4067 *win = 1;
4068 return x;
4069 }
4070
24ea750e 4071 if (TARGET_TOC
4d588c14 4072 && constant_pool_expr_p (x)
c1f11548 4073 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4074 {
194c524a 4075 x = create_TOC_reference (x);
24ea750e
DJ
4076 *win = 1;
4077 return x;
4078 }
4079 *win = 0;
4080 return x;
f676971a 4081}
24ea750e 4082
258bfae2
FS
4083/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4084 that is a valid memory address for an instruction.
4085 The MODE argument is the machine mode for the MEM expression
4086 that wants to use this address.
4087
 4088 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4089 refers to a constant pool entry of an address (or the sum of it
4090 plus a constant), a short (16-bit signed) constant plus a register,
4091 the sum of two registers, or a register indirect, possibly with an
5bdc5878 4092 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
4093 we must ensure that both words are addressable or PowerPC64 with offset
4094 word aligned.
4095
4096 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
4097 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4098 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4099 during assembly output. */
4100int
a2369ed3 4101rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4102{
850e8d3d
DN
4103 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4104 if (TARGET_ALTIVEC
4105 && ALTIVEC_VECTOR_MODE (mode)
4106 && GET_CODE (x) == AND
4107 && GET_CODE (XEXP (x, 1)) == CONST_INT
4108 && INTVAL (XEXP (x, 1)) == -16)
4109 x = XEXP (x, 0);
4110
c4501e62
JJ
4111 if (RS6000_SYMBOL_REF_TLS_P (x))
4112 return 0;
4d588c14 4113 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4114 return 1;
4115 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4116 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4117 && !SPE_VECTOR_MODE (mode)
429ec7dc 4118 && mode != TFmode
7393f7f8 4119 && mode != TDmode
54b695e7 4120 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
4121 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4122 || mode == DImode))
258bfae2 4123 && TARGET_UPDATE
4d588c14 4124 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4125 return 1;
d04b6e6e 4126 if (legitimate_small_data_p (mode, x))
258bfae2 4127 return 1;
4d588c14 4128 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4129 return 1;
4130 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4131 if (! reg_ok_strict
4132 && GET_CODE (x) == PLUS
4133 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4134 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4135 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4136 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4137 return 1;
76d2b81d 4138 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4139 return 1;
4140 if (mode != TImode
76d2b81d 4141 && mode != TFmode
7393f7f8 4142 && mode != TDmode
a3170dc6
AH
4143 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4144 || TARGET_POWERPC64
4d4cbc0e 4145 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 4146 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4147 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4148 return 1;
6fb5fa3c
DB
4149 if (GET_CODE (x) == PRE_MODIFY
4150 && mode != TImode
4151 && mode != TFmode
4152 && mode != TDmode
4153 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4154 || TARGET_POWERPC64
4155 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
4156 && (TARGET_POWERPC64 || mode != DImode)
4157 && !ALTIVEC_VECTOR_MODE (mode)
4158 && !SPE_VECTOR_MODE (mode)
4159 /* Restrict addressing for DI because of our SUBREG hackery. */
4160 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
4161 && TARGET_UPDATE
4162 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4163 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4164 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4165 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4166 return 1;
4d588c14 4167 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4168 return 1;
4169 return 0;
4170}
4d588c14
RH
4171
4172/* Go to LABEL if ADDR (a legitimate address expression)
4173 has an effect that depends on the machine mode it is used for.
4174
4175 On the RS/6000 this is true of all integral offsets (since AltiVec
4176 modes don't allow them) or is a pre-increment or decrement.
4177
4178 ??? Except that due to conceptual problems in offsettable_address_p
4179 we can't really report the problems of integral offsets. So leave
f676971a 4180 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4181 sub-words of a TFmode operand, which is what we had before. */
4182
4183bool
a2369ed3 4184rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4185{
4186 switch (GET_CODE (addr))
4187 {
4188 case PLUS:
4189 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4190 {
4191 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4192 return val + 12 + 0x8000 >= 0x10000;
4193 }
4194 break;
4195
4196 case LO_SUM:
4197 return true;
4198
6fb5fa3c
DB
4199 case PRE_INC:
4200 case PRE_DEC:
4201 case PRE_MODIFY:
4202 return TARGET_UPDATE;
4d588c14
RH
4203
4204 default:
4205 break;
4206 }
4207
4208 return false;
4209}
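/* Illustrative note on the PLUS case above (not from the original
   file): the constant 12 allows for the last word of a 16-byte
   (TFmode) operand, so an offset is flagged as mode-dependent as soon
   as OFFSET + 12 no longer fits the signed 16-bit displacement
   field.  */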
d8ecbcdb 4210
d04b6e6e
EB
4211/* More elaborate version of recog's offsettable_memref_p predicate
4212 that works around the ??? note of rs6000_mode_dependent_address.
4213 In particular it accepts
4214
4215 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4216
 4217 in 32-bit mode, which the recog predicate rejects.
4218
4219bool
4220rs6000_offsettable_memref_p (rtx op)
4221{
4222 if (!MEM_P (op))
4223 return false;
4224
4225 /* First mimic offsettable_memref_p. */
4226 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4227 return true;
4228
4229 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4230 the latter predicate knows nothing about the mode of the memory
4231 reference and, therefore, assumes that it is the largest supported
4232 mode (TFmode). As a consequence, legitimate offsettable memory
4233 references are rejected. rs6000_legitimate_offset_address_p contains
4234 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4235 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4236}
4237
d8ecbcdb
AH
4238/* Return number of consecutive hard regs needed starting at reg REGNO
4239 to hold something of mode MODE.
4240 This is ordinarily the length in words of a value of mode MODE
4241 but can be less for certain modes in special long registers.
4242
4243 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4244 scalar instructions. The upper 32 bits are only available to the
4245 SIMD instructions.
4246
4247 POWER and PowerPC GPRs hold 32 bits worth;
 4248 PowerPC64 GPRs and FPRs hold 64 bits worth.  */
4249
4250int
4251rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4252{
4253 if (FP_REGNO_P (regno))
4254 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4255
4256 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4257 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4258
4259 if (ALTIVEC_REGNO_P (regno))
4260 return
4261 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4262
8521c414
JM
4263 /* The value returned for SCmode in the E500 double case is 2 for
4264 ABI compatibility; storing an SCmode value in a single register
4265 would require function_arg and rs6000_spe_function_arg to handle
4266 SCmode so as to pass the value correctly in a pair of
4267 registers. */
4268 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4269 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4270
d8ecbcdb
AH
4271 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4272}
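/* Illustrative examples (assuming the usual unit sizes): on a 32-bit
   target DImode needs two GPRs but only one FPR, and V4SImode needs a
   single AltiVec register; with SPE, a V2SImode vector fits in one
   64-bit GPR.  */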
2aa4498c
AH
4273
4274/* Change register usage conditional on target flags. */
4275void
4276rs6000_conditional_register_usage (void)
4277{
4278 int i;
4279
4280 /* Set MQ register fixed (already call_used) if not POWER
4281 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4282 be allocated. */
4283 if (! TARGET_POWER)
4284 fixed_regs[64] = 1;
4285
7c9ac5c0 4286 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4287 if (TARGET_64BIT)
4288 fixed_regs[13] = call_used_regs[13]
4289 = call_really_used_regs[13] = 1;
4290
4291 /* Conditionally disable FPRs. */
4292 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4293 for (i = 32; i < 64; i++)
4294 fixed_regs[i] = call_used_regs[i]
c4ad648e 4295 = call_really_used_regs[i] = 1;
2aa4498c 4296
7c9ac5c0
PH
4297 /* The TOC register is not killed across calls in a way that is
4298 visible to the compiler. */
4299 if (DEFAULT_ABI == ABI_AIX)
4300 call_really_used_regs[2] = 0;
4301
2aa4498c
AH
4302 if (DEFAULT_ABI == ABI_V4
4303 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4304 && flag_pic == 2)
4305 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4306
4307 if (DEFAULT_ABI == ABI_V4
4308 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4309 && flag_pic == 1)
4310 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4311 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4312 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4313
4314 if (DEFAULT_ABI == ABI_DARWIN
4315 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4316 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4317 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4318 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4319
b4db40bf
JJ
4320 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4321 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4322 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4323
2aa4498c
AH
4324 if (TARGET_SPE)
4325 {
4326 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4327 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4328 registers in prologues and epilogues. We no longer use r14
4329 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4330 pool for link-compatibility with older versions of GCC. Once
4331 "old" code has died out, we can return r14 to the allocation
4332 pool. */
4333 fixed_regs[14]
4334 = call_used_regs[14]
4335 = call_really_used_regs[14] = 1;
2aa4498c
AH
4336 }
4337
0db747be 4338 if (!TARGET_ALTIVEC)
2aa4498c
AH
4339 {
4340 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4341 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4342 call_really_used_regs[VRSAVE_REGNO] = 1;
4343 }
4344
0db747be
DE
4345 if (TARGET_ALTIVEC)
4346 global_regs[VSCR_REGNO] = 1;
4347
2aa4498c 4348 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4349 {
4350 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4351 call_used_regs[i] = call_really_used_regs[i] = 1;
4352
4353 /* AIX reserves VR20:31 in non-extended ABI mode. */
4354 if (TARGET_XCOFF)
4355 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4356 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4357 }
2aa4498c 4358}
fb4d4348 4359\f
a4f6c312
SS
4360/* Try to output insns to set TARGET equal to the constant C if it can
4361 be done in less than N insns. Do all computations in MODE.
4362 Returns the place where the output has been placed if it can be
4363 done and the insns have been emitted. If it would take more than N
 4364 insns, zero is returned and no insns are emitted.  */
2bfcf297
DB
4365
4366rtx
f676971a 4367rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4368 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4369{
af8cb5c5 4370 rtx result, insn, set;
2bfcf297
DB
4371 HOST_WIDE_INT c0, c1;
4372
37409796 4373 switch (mode)
2bfcf297 4374 {
37409796
NS
4375 case QImode:
4376 case HImode:
2bfcf297 4377 if (dest == NULL)
c4ad648e 4378 dest = gen_reg_rtx (mode);
2bfcf297
DB
4379 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4380 return dest;
bb8df8a6 4381
37409796 4382 case SImode:
b3a13419 4383 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4384
d448860e 4385 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4386 GEN_INT (INTVAL (source)
4387 & (~ (HOST_WIDE_INT) 0xffff))));
4388 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4389 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4390 GEN_INT (INTVAL (source) & 0xffff))));
4391 result = dest;
37409796
NS
4392 break;
4393
4394 case DImode:
4395 switch (GET_CODE (source))
af8cb5c5 4396 {
37409796 4397 case CONST_INT:
af8cb5c5
DE
4398 c0 = INTVAL (source);
4399 c1 = -(c0 < 0);
37409796 4400 break;
bb8df8a6 4401
37409796 4402 case CONST_DOUBLE:
2bfcf297 4403#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4404 c0 = CONST_DOUBLE_LOW (source);
4405 c1 = -(c0 < 0);
2bfcf297 4406#else
af8cb5c5
DE
4407 c0 = CONST_DOUBLE_LOW (source);
4408 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4409#endif
37409796
NS
4410 break;
4411
4412 default:
4413 gcc_unreachable ();
af8cb5c5 4414 }
af8cb5c5
DE
4415
4416 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4417 break;
4418
4419 default:
4420 gcc_unreachable ();
2bfcf297 4421 }
2bfcf297 4422
af8cb5c5
DE
4423 insn = get_last_insn ();
4424 set = single_set (insn);
4425 if (! CONSTANT_P (SET_SRC (set)))
4426 set_unique_reg_note (insn, REG_EQUAL, source);
4427
4428 return result;
2bfcf297
DB
4429}
4430
4431/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 4432 fall back to a straightforward decomposition.  We do this to avoid
4433 exponential run times encountered when looking for longer sequences
4434 with rs6000_emit_set_const. */
4435static rtx
a2369ed3 4436rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4437{
4438 if (!TARGET_POWERPC64)
4439 {
4440 rtx operand1, operand2;
4441
4442 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4443 DImode);
d448860e 4444 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4445 DImode);
4446 emit_move_insn (operand1, GEN_INT (c1));
4447 emit_move_insn (operand2, GEN_INT (c2));
4448 }
4449 else
4450 {
bc06712d 4451 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4452
bc06712d 4453 ud1 = c1 & 0xffff;
f921c9c9 4454 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4455#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4456 c2 = c1 >> 32;
2bfcf297 4457#endif
bc06712d 4458 ud3 = c2 & 0xffff;
f921c9c9 4459 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4460
f676971a 4461 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4462 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4463 {
bc06712d 4464 if (ud1 & 0x8000)
b78d48dd 4465 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4466 else
4467 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4468 }
2bfcf297 4469
f676971a 4470 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4471 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4472 {
bc06712d 4473 if (ud2 & 0x8000)
f676971a 4474 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4475 - 0x80000000));
252b88f7 4476 else
bc06712d
TR
4477 emit_move_insn (dest, GEN_INT (ud2 << 16));
4478 if (ud1 != 0)
d448860e
JH
4479 emit_move_insn (copy_rtx (dest),
4480 gen_rtx_IOR (DImode, copy_rtx (dest),
4481 GEN_INT (ud1)));
252b88f7 4482 }
f676971a 4483 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4484 || (ud4 == 0 && ! (ud3 & 0x8000)))
4485 {
4486 if (ud3 & 0x8000)
f676971a 4487 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4488 - 0x80000000));
4489 else
4490 emit_move_insn (dest, GEN_INT (ud3 << 16));
4491
4492 if (ud2 != 0)
d448860e
JH
4493 emit_move_insn (copy_rtx (dest),
4494 gen_rtx_IOR (DImode, copy_rtx (dest),
4495 GEN_INT (ud2)));
4496 emit_move_insn (copy_rtx (dest),
4497 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4498 GEN_INT (16)));
bc06712d 4499 if (ud1 != 0)
d448860e
JH
4500 emit_move_insn (copy_rtx (dest),
4501 gen_rtx_IOR (DImode, copy_rtx (dest),
4502 GEN_INT (ud1)));
bc06712d 4503 }
f676971a 4504 else
bc06712d
TR
4505 {
4506 if (ud4 & 0x8000)
f676971a 4507 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4508 - 0x80000000));
4509 else
4510 emit_move_insn (dest, GEN_INT (ud4 << 16));
4511
4512 if (ud3 != 0)
d448860e
JH
4513 emit_move_insn (copy_rtx (dest),
4514 gen_rtx_IOR (DImode, copy_rtx (dest),
4515 GEN_INT (ud3)));
2bfcf297 4516
d448860e
JH
4517 emit_move_insn (copy_rtx (dest),
4518 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4519 GEN_INT (32)));
bc06712d 4520 if (ud2 != 0)
d448860e
JH
4521 emit_move_insn (copy_rtx (dest),
4522 gen_rtx_IOR (DImode, copy_rtx (dest),
4523 GEN_INT (ud2 << 16)));
bc06712d 4524 if (ud1 != 0)
d448860e
JH
4525 emit_move_insn (copy_rtx (dest),
4526 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4527 }
4528 }
2bfcf297
DB
4529 return dest;
4530}
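/* Illustrative sketch (an assumption about the generated assembly, not
   taken from this file): for a full 64-bit constant such as
   0x123456789abcdef0 the general case above corresponds roughly to

     lis   rD,0x1234        # ud4 << 16
     ori   rD,rD,0x5678     # OR in ud3
     sldi  rD,rD,32
     oris  rD,rD,0x9abc     # OR in ud2 << 16
     ori   rD,rD,0xdef0     # OR in ud1

   i.e. five instructions for a constant that needs all four halfwords.  */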
4531
76d2b81d 4532/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4533 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4534
4535static void
4536rs6000_eliminate_indexed_memrefs (rtx operands[2])
4537{
4538 if (GET_CODE (operands[0]) == MEM
4539 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4540 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4541 && ! reload_in_progress)
4542 operands[0]
4543 = replace_equiv_address (operands[0],
4544 copy_addr_to_reg (XEXP (operands[0], 0)));
4545
4546 if (GET_CODE (operands[1]) == MEM
4547 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4548 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4549 && ! reload_in_progress)
4550 operands[1]
4551 = replace_equiv_address (operands[1],
4552 copy_addr_to_reg (XEXP (operands[1], 0)));
4553}
4554
fb4d4348
GK
4555/* Emit a move from SOURCE to DEST in mode MODE. */
4556void
a2369ed3 4557rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4558{
4559 rtx operands[2];
4560 operands[0] = dest;
4561 operands[1] = source;
f676971a 4562
fb4d4348
GK
4563 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4564 if (GET_CODE (operands[1]) == CONST_DOUBLE
4565 && ! FLOAT_MODE_P (mode)
4566 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4567 {
4568 /* FIXME. This should never happen. */
4569 /* Since it seems that it does, do the safe thing and convert
4570 to a CONST_INT. */
2496c7bd 4571 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4572 }
37409796
NS
4573 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4574 || FLOAT_MODE_P (mode)
4575 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4576 || CONST_DOUBLE_LOW (operands[1]) < 0)
4577 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4578 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4579
c9e8cb32
DD
4580 /* Check if GCC is setting up a block move that will end up using FP
4581 registers as temporaries. We must make sure this is acceptable. */
4582 if (GET_CODE (operands[0]) == MEM
4583 && GET_CODE (operands[1]) == MEM
4584 && mode == DImode
41543739
GK
4585 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4586 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4587 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4588 ? 32 : MEM_ALIGN (operands[0])))
4589 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4590 ? 32
41543739
GK
4591 : MEM_ALIGN (operands[1]))))
4592 && ! MEM_VOLATILE_P (operands [0])
4593 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4594 {
41543739
GK
4595 emit_move_insn (adjust_address (operands[0], SImode, 0),
4596 adjust_address (operands[1], SImode, 0));
d448860e
JH
4597 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4598 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4599 return;
4600 }
630d42a0 4601
b3a13419 4602 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4603 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4604 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4605
a3170dc6
AH
4606 if (mode == SFmode && ! TARGET_POWERPC
4607 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4608 && GET_CODE (operands[0]) == MEM)
fb4d4348 4609 {
ffc14f31
GK
4610 int regnum;
4611
4612 if (reload_in_progress || reload_completed)
4613 regnum = true_regnum (operands[1]);
4614 else if (GET_CODE (operands[1]) == REG)
4615 regnum = REGNO (operands[1]);
4616 else
4617 regnum = -1;
f676971a 4618
fb4d4348
GK
4619 /* If operands[1] is a register, on POWER it may have
4620 double-precision data in it, so truncate it to single
4621 precision. */
4622 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4623 {
4624 rtx newreg;
b3a13419 4625 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4626 : gen_reg_rtx (mode));
fb4d4348
GK
4627 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4628 operands[1] = newreg;
4629 }
4630 }
4631
c4501e62
JJ
4632 /* Recognize the case where operand[1] is a reference to thread-local
4633 data and load its address to a register. */
84f52ebd 4634 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4635 {
84f52ebd
RH
4636 enum tls_model model;
4637 rtx tmp = operands[1];
4638 rtx addend = NULL;
4639
4640 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4641 {
4642 addend = XEXP (XEXP (tmp, 0), 1);
4643 tmp = XEXP (XEXP (tmp, 0), 0);
4644 }
4645
4646 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4647 model = SYMBOL_REF_TLS_MODEL (tmp);
4648 gcc_assert (model != 0);
4649
4650 tmp = rs6000_legitimize_tls_address (tmp, model);
4651 if (addend)
4652 {
4653 tmp = gen_rtx_PLUS (mode, tmp, addend);
4654 tmp = force_operand (tmp, operands[0]);
4655 }
4656 operands[1] = tmp;
c4501e62
JJ
4657 }
4658
8f4e6caf
RH
4659 /* Handle the case where reload calls us with an invalid address. */
4660 if (reload_in_progress && mode == Pmode
69ef87e2 4661 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4662 || ! nonimmediate_operand (operands[0], mode)))
4663 goto emit_set;
4664
a9baceb1
GK
4665 /* 128-bit constant floating-point values on Darwin should really be
4666 loaded as two parts. */
8521c414 4667 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4668 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4669 {
4670 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4671 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4672 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4673 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4674 simplify_gen_subreg (imode, operands[1], mode, 0),
4675 imode);
4676 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4677 GET_MODE_SIZE (imode)),
4678 simplify_gen_subreg (imode, operands[1], mode,
4679 GET_MODE_SIZE (imode)),
4680 imode);
a9baceb1
GK
4681 return;
4682 }
4683
fb4d4348
GK
4684 /* FIXME: In the long term, this switch statement should go away
4685 and be replaced by a sequence of tests based on things like
4686 mode == Pmode. */
4687 switch (mode)
4688 {
4689 case HImode:
4690 case QImode:
4691 if (CONSTANT_P (operands[1])
4692 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4693 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4694 break;
4695
06f4e019 4696 case TFmode:
7393f7f8 4697 case TDmode:
76d2b81d
DJ
4698 rs6000_eliminate_indexed_memrefs (operands);
4699 /* fall through */
4700
fb4d4348 4701 case DFmode:
7393f7f8 4702 case DDmode:
fb4d4348 4703 case SFmode:
f676971a 4704 if (CONSTANT_P (operands[1])
fb4d4348 4705 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4706 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4707 break;
f676971a 4708
0ac081f6
AH
4709 case V16QImode:
4710 case V8HImode:
4711 case V4SFmode:
4712 case V4SImode:
a3170dc6
AH
4713 case V4HImode:
4714 case V2SFmode:
4715 case V2SImode:
00a892b8 4716 case V1DImode:
69ef87e2 4717 if (CONSTANT_P (operands[1])
d744e06e 4718 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4719 operands[1] = force_const_mem (mode, operands[1]);
4720 break;
f676971a 4721
fb4d4348 4722 case SImode:
a9098fd0 4723 case DImode:
fb4d4348
GK
4724 /* Use default pattern for address of ELF small data */
4725 if (TARGET_ELF
a9098fd0 4726 && mode == Pmode
f607bc57 4727 && DEFAULT_ABI == ABI_V4
f676971a 4728 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4729 || GET_CODE (operands[1]) == CONST)
4730 && small_data_operand (operands[1], mode))
fb4d4348
GK
4731 {
4732 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4733 return;
4734 }
4735
f607bc57 4736 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4737 && mode == Pmode && mode == SImode
4738 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4739 {
4740 emit_insn (gen_movsi_got (operands[0], operands[1]));
4741 return;
4742 }
4743
ee890fe2 4744 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4745 && TARGET_NO_TOC
4746 && ! flag_pic
a9098fd0 4747 && mode == Pmode
fb4d4348
GK
4748 && CONSTANT_P (operands[1])
4749 && GET_CODE (operands[1]) != HIGH
4750 && GET_CODE (operands[1]) != CONST_INT)
4751 {
b3a13419
ILT
4752 rtx target = (!can_create_pseudo_p ()
4753 ? operands[0]
4754 : gen_reg_rtx (mode));
fb4d4348
GK
4755
4756 /* If this is a function address on -mcall-aixdesc,
4757 convert it to the address of the descriptor. */
4758 if (DEFAULT_ABI == ABI_AIX
4759 && GET_CODE (operands[1]) == SYMBOL_REF
4760 && XSTR (operands[1], 0)[0] == '.')
4761 {
4762 const char *name = XSTR (operands[1], 0);
4763 rtx new_ref;
4764 while (*name == '.')
4765 name++;
4766 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4767 CONSTANT_POOL_ADDRESS_P (new_ref)
4768 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4769 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4770 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4771 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4772 operands[1] = new_ref;
4773 }
7509c759 4774
ee890fe2
SS
4775 if (DEFAULT_ABI == ABI_DARWIN)
4776 {
ab82a49f
AP
4777#if TARGET_MACHO
4778 if (MACHO_DYNAMIC_NO_PIC_P)
4779 {
4780 /* Take care of any required data indirection. */
4781 operands[1] = rs6000_machopic_legitimize_pic_address (
4782 operands[1], mode, operands[0]);
4783 if (operands[0] != operands[1])
4784 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4785 operands[0], operands[1]));
ab82a49f
AP
4786 return;
4787 }
4788#endif
b8a55285
AP
4789 emit_insn (gen_macho_high (target, operands[1]));
4790 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4791 return;
4792 }
4793
fb4d4348
GK
4794 emit_insn (gen_elf_high (target, operands[1]));
4795 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4796 return;
4797 }
4798
a9098fd0
GK
4799 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4800 and we have put it in the TOC, we just need to make a TOC-relative
4801 reference to it. */
4802 if (TARGET_TOC
4803 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4804 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4805 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4806 get_pool_mode (operands[1])))
fb4d4348 4807 {
a9098fd0 4808 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4809 }
a9098fd0
GK
4810 else if (mode == Pmode
4811 && CONSTANT_P (operands[1])
38886f37
AO
4812 && ((GET_CODE (operands[1]) != CONST_INT
4813 && ! easy_fp_constant (operands[1], mode))
4814 || (GET_CODE (operands[1]) == CONST_INT
4815 && num_insns_constant (operands[1], mode) > 2)
4816 || (GET_CODE (operands[0]) == REG
4817 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4818 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4819 && ! legitimate_constant_pool_address_p (operands[1])
4820 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4821 {
4822 /* Emit a USE operation so that the constant isn't deleted if
4823 expensive optimizations are turned on because nobody
4824 references it. This should only be done for operands that
4825 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4826 This should not be done for operands that contain LABEL_REFs.
4827 For now, we just handle the obvious case. */
4828 if (GET_CODE (operands[1]) != LABEL_REF)
4829 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4830
c859cda6 4831#if TARGET_MACHO
ee890fe2 4832 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4833 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4834 {
ee890fe2
SS
4835 operands[1] =
4836 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4837 operands[0]);
4838 if (operands[0] != operands[1])
4839 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4840 return;
4841 }
c859cda6 4842#endif
ee890fe2 4843
fb4d4348
GK
4844 /* If we are to limit the number of things we put in the TOC and
4845 this is a symbol plus a constant we can add in one insn,
4846 just put the symbol in the TOC and add the constant. Don't do
4847 this if reload is in progress. */
4848 if (GET_CODE (operands[1]) == CONST
4849 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4850 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4851 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4852 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4853 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4854 && ! side_effects_p (operands[0]))
4855 {
a4f6c312
SS
4856 rtx sym =
4857 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4858 rtx other = XEXP (XEXP (operands[1], 0), 1);
4859
a9098fd0
GK
4860 sym = force_reg (mode, sym);
4861 if (mode == SImode)
4862 emit_insn (gen_addsi3 (operands[0], sym, other));
4863 else
4864 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4865 return;
4866 }
4867
a9098fd0 4868 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4869
f676971a 4870 if (TARGET_TOC
4d588c14 4871 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4872 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4873 get_pool_constant (XEXP (operands[1], 0)),
4874 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4875 {
ba4828e0 4876 operands[1]
542a8afa 4877 = gen_const_mem (mode,
c4ad648e 4878 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4879 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4880 }
fb4d4348
GK
4881 }
4882 break;
a9098fd0 4883
fb4d4348 4884 case TImode:
76d2b81d
DJ
4885 rs6000_eliminate_indexed_memrefs (operands);
4886
27dc0551
DE
4887 if (TARGET_POWER)
4888 {
4889 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4890 gen_rtvec (2,
4891 gen_rtx_SET (VOIDmode,
4892 operands[0], operands[1]),
4893 gen_rtx_CLOBBER (VOIDmode,
4894 gen_rtx_SCRATCH (SImode)))));
4895 return;
4896 }
fb4d4348
GK
4897 break;
4898
4899 default:
37409796 4900 gcc_unreachable ();
fb4d4348
GK
4901 }
4902
a9098fd0
GK
4903 /* Above, we may have called force_const_mem which may have returned
4904 an invalid address. If we can, fix this up; otherwise, reload will
4905 have to deal with it. */
8f4e6caf
RH
4906 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4907 operands[1] = validize_mem (operands[1]);
a9098fd0 4908
8f4e6caf 4909 emit_set:
fb4d4348
GK
4910 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4911}
4697a36c 4912\f
2858f73a
GK
4913/* Nonzero if we can use a floating-point register to pass this arg. */
4914#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4915 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4916 && (MODE) != SDmode \
2858f73a
GK
4917 && (CUM)->fregno <= FP_ARG_MAX_REG \
4918 && TARGET_HARD_FLOAT && TARGET_FPRS)
4919
4920/* Nonzero if we can use an AltiVec register to pass this arg. */
4921#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4922 (ALTIVEC_VECTOR_MODE (MODE) \
4923 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4924 && TARGET_ALTIVEC_ABI \
83953138 4925 && (NAMED))
2858f73a 4926
c6e8c921
GK
 4927/* Return a nonzero value to say that the function value should be
 4928   returned in memory, just as large structures always are.  TYPE will be
4929 the data type of the value, and FNTYPE will be the type of the
4930 function doing the returning, or @code{NULL} for libcalls.
4931
4932 The AIX ABI for the RS/6000 specifies that all structures are
4933 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4934 specifies that structures <= 8 bytes are returned in r3/r4, but a
4935 draft put them in memory, and GCC used to implement the draft
df01da37 4936 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4937 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4938 compatibility can change DRAFT_V4_STRUCT_RET to override the
4939 default, and -m switches get the final word. See
4940 rs6000_override_options for more details.
4941
4942 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4943 long double support is enabled. These values are returned in memory.
4944
4945 int_size_in_bytes returns -1 for variable size objects, which go in
4946 memory always. The cast to unsigned makes -1 > 8. */
4947
4948static bool
586de218 4949rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4950{
594a51fe
SS
4951 /* In the darwin64 abi, try to use registers for larger structs
4952 if possible. */
0b5383eb 4953 if (rs6000_darwin64_abi
594a51fe 4954 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4955 && int_size_in_bytes (type) > 0)
4956 {
4957 CUMULATIVE_ARGS valcum;
4958 rtx valret;
4959
4960 valcum.words = 0;
4961 valcum.fregno = FP_ARG_MIN_REG;
4962 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4963 /* Do a trial code generation as if this were going to be passed
4964 as an argument; if any part goes in memory, we return NULL. */
4965 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4966 if (valret)
4967 return false;
4968 /* Otherwise fall through to more conventional ABI rules. */
4969 }
594a51fe 4970
c6e8c921 4971 if (AGGREGATE_TYPE_P (type)
df01da37 4972 && (aix_struct_return
c6e8c921
GK
4973 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4974 return true;
b693336b 4975
bada2eb8
DE
4976 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4977 modes only exist for GCC vector types if -maltivec. */
4978 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4979 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4980 return false;
4981
b693336b
PB
4982 /* Return synthetic vectors in memory. */
4983 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4984 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4985 {
4986 static bool warned_for_return_big_vectors = false;
4987 if (!warned_for_return_big_vectors)
4988 {
d4ee4d25 4989 warning (0, "GCC vector returned by reference: "
b693336b
PB
4990 "non-standard ABI extension with no compatibility guarantee");
4991 warned_for_return_big_vectors = true;
4992 }
4993 return true;
4994 }
4995
602ea4d3 4996 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4997 return true;
ad630bef 4998
c6e8c921
GK
4999 return false;
5000}
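/* Concrete instances of the rules above: when aix_struct_return is in
   effect every aggregate is returned in memory; under the SVR4
   convention an 8-byte struct comes back in r3/r4, while a 12-byte
   struct (or a variable-sized one, whose -1 size becomes huge after
   the unsigned cast) is returned in memory.  */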
5001
4697a36c
MM
5002/* Initialize a variable CUM of type CUMULATIVE_ARGS
5003 for a call to a function whose data type is FNTYPE.
5004 For a library call, FNTYPE is 0.
5005
 5006   For incoming args we set the prototype argument count high enough
1c20ae99 5007   that we never return a PARALLEL.  */
4697a36c
MM
5008
5009void
f676971a 5010init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5011 rtx libname ATTRIBUTE_UNUSED, int incoming,
5012 int libcall, int n_named_args)
4697a36c
MM
5013{
5014 static CUMULATIVE_ARGS zero_cumulative;
5015
5016 *cum = zero_cumulative;
5017 cum->words = 0;
5018 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5019 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5020 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5021 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5022 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5023 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5024 cum->stdarg = fntype
5025 && (TYPE_ARG_TYPES (fntype) != 0
5026 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5027 != void_type_node));
4697a36c 5028
0f6937fe
AM
5029 cum->nargs_prototype = 0;
5030 if (incoming || cum->prototype)
5031 cum->nargs_prototype = n_named_args;
4697a36c 5032
a5c76ee6 5033 /* Check for a longcall attribute. */
3eb4e360
AM
5034 if ((!fntype && rs6000_default_long_calls)
5035 || (fntype
5036 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5037 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5038 cum->call_cookie |= CALL_LONG;
6a4cee5f 5039
4697a36c
MM
5040 if (TARGET_DEBUG_ARG)
5041 {
5042 fprintf (stderr, "\ninit_cumulative_args:");
5043 if (fntype)
5044 {
5045 tree ret_type = TREE_TYPE (fntype);
5046 fprintf (stderr, " ret code = %s,",
5047 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5048 }
5049
6a4cee5f
MM
5050 if (cum->call_cookie & CALL_LONG)
5051 fprintf (stderr, " longcall,");
5052
4697a36c
MM
5053 fprintf (stderr, " proto = %d, nargs = %d\n",
5054 cum->prototype, cum->nargs_prototype);
5055 }
f676971a 5056
c4ad648e
AM
5057 if (fntype
5058 && !TARGET_ALTIVEC
5059 && TARGET_ALTIVEC_ABI
5060 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5061 {
c85ce869 5062 error ("cannot return value in vector register because"
c4ad648e 5063 " altivec instructions are disabled, use -maltivec"
c85ce869 5064 " to enable them");
c4ad648e 5065 }
4697a36c
MM
5066}
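/* For example, a V.4 libcall starts out with call_cookie == CALL_LIBCALL,
   and a fntype carrying the "longcall" attribute (without "shortcall"),
   or rs6000_default_long_calls with no fntype, additionally ORs in
   CALL_LONG; everything else begins as CALL_NORMAL.  */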
5067\f
fe984136
RH
5068/* Return true if TYPE must be passed on the stack and not in registers. */
5069
5070static bool
586de218 5071rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5072{
5073 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5074 return must_pass_in_stack_var_size (mode, type);
5075 else
5076 return must_pass_in_stack_var_size_or_pad (mode, type);
5077}
5078
c229cba9
DE
5079/* If defined, a C expression which determines whether, and in which
5080 direction, to pad out an argument with extra space. The value
5081 should be of type `enum direction': either `upward' to pad above
5082 the argument, `downward' to pad below, or `none' to inhibit
5083 padding.
5084
5085 For the AIX ABI structs are always stored left shifted in their
5086 argument slot. */
5087
9ebbca7d 5088enum direction
586de218 5089function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5090{
6e985040
AM
5091#ifndef AGGREGATE_PADDING_FIXED
5092#define AGGREGATE_PADDING_FIXED 0
5093#endif
5094#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5095#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5096#endif
5097
5098 if (!AGGREGATE_PADDING_FIXED)
5099 {
5100 /* GCC used to pass structures of the same size as integer types as
5101 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5102      i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5103 passed padded downward, except that -mstrict-align further
5104 muddied the water in that multi-component structures of 2 and 4
5105 bytes in size were passed padded upward.
5106
5107 The following arranges for best compatibility with previous
5108 versions of gcc, but removes the -mstrict-align dependency. */
5109 if (BYTES_BIG_ENDIAN)
5110 {
5111 HOST_WIDE_INT size = 0;
5112
5113 if (mode == BLKmode)
5114 {
5115 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5116 size = int_size_in_bytes (type);
5117 }
5118 else
5119 size = GET_MODE_SIZE (mode);
5120
5121 if (size == 1 || size == 2 || size == 4)
5122 return downward;
5123 }
5124 return upward;
5125 }
5126
5127 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5128 {
5129 if (type != 0 && AGGREGATE_TYPE_P (type))
5130 return upward;
5131 }
c229cba9 5132
d3704c46
KH
5133 /* Fall back to the default. */
5134 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5135}
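/* For instance, with !AGGREGATE_PADDING_FIXED on a big-endian target a
   2-byte BLKmode struct is padded downward (it occupies the high end of
   its slot), whereas a 3-byte struct misses the 1/2/4 size test and is
   padded upward.  */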
5136
b6c9286a 5137/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5138 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5139 PARM_BOUNDARY is used for all arguments.
f676971a 5140
84e9ad15
AM
5141 V.4 wants long longs and doubles to be double word aligned. Just
5142 testing the mode size is a boneheaded way to do this as it means
5143 that other types such as complex int are also double word aligned.
5144 However, we're stuck with this because changing the ABI might break
5145 existing library interfaces.
5146
b693336b
PB
5147 Doubleword align SPE vectors.
5148 Quadword align Altivec vectors.
5149 Quadword align large synthetic vector types. */
b6c9286a
MM
5150
5151int
b693336b 5152function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5153{
84e9ad15
AM
5154 if (DEFAULT_ABI == ABI_V4
5155 && (GET_MODE_SIZE (mode) == 8
5156 || (TARGET_HARD_FLOAT
5157 && TARGET_FPRS
7393f7f8 5158 && (mode == TFmode || mode == TDmode))))
4ed78545 5159 return 64;
ad630bef
DE
5160 else if (SPE_VECTOR_MODE (mode)
5161 || (type && TREE_CODE (type) == VECTOR_TYPE
5162 && int_size_in_bytes (type) >= 8
5163 && int_size_in_bytes (type) < 16))
e1f83b4d 5164 return 64;
ad630bef
DE
5165 else if (ALTIVEC_VECTOR_MODE (mode)
5166 || (type && TREE_CODE (type) == VECTOR_TYPE
5167 && int_size_in_bytes (type) >= 16))
0ac081f6 5168 return 128;
0b5383eb
DJ
5169 else if (rs6000_darwin64_abi && mode == BLKmode
5170 && type && TYPE_ALIGN (type) > 64)
5171 return 128;
9ebbca7d 5172 else
b6c9286a 5173 return PARM_BOUNDARY;
b6c9286a 5174}
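/* Examples of the above: under the V.4 ABI an 8-byte DImode or DFmode
   argument is aligned to 64 bits; an AltiVec V4SImode argument, or any
   synthetic vector type of 16 bytes or more, gets 128-bit alignment;
   anything else falls back to PARM_BOUNDARY.  */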
c53bdcf5 5175
294bd182
AM
5176/* For a function parm of MODE and TYPE, return the starting word in
5177 the parameter area. NWORDS of the parameter area are already used. */
5178
5179static unsigned int
5180rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5181{
5182 unsigned int align;
5183 unsigned int parm_offset;
5184
5185 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5186 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5187 return nwords + (-(parm_offset + nwords) & align);
5188}
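/* Worked example (assuming the usual 32-bit PARM_BOUNDARY of 32 bits):
   a 16-byte-aligned vector argument has align = 128/32 - 1 = 3.  With no
   words used yet and the AIX parm_offset of 6, it is placed at word
   0 + (-(6 + 0) & 3) = 2, i.e. at 2 mod 4, matching the "2 mod 4"
   comments about vector arguments further down.  */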
5189
c53bdcf5
AM
5190/* Compute the size (in words) of a function argument. */
5191
5192static unsigned long
5193rs6000_arg_size (enum machine_mode mode, tree type)
5194{
5195 unsigned long size;
5196
5197 if (mode != BLKmode)
5198 size = GET_MODE_SIZE (mode);
5199 else
5200 size = int_size_in_bytes (type);
5201
5202 if (TARGET_32BIT)
5203 return (size + 3) >> 2;
5204 else
5205 return (size + 7) >> 3;
5206}
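/* For example, a 10-byte argument takes (10 + 3) >> 2 = 3 parameter words
   on a 32-bit target and (10 + 7) >> 3 = 2 words on a 64-bit target.  */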
b6c9286a 5207\f
0b5383eb 5208/* Use this to flush pending int fields. */
594a51fe
SS
5209
5210static void
0b5383eb
DJ
5211rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5212 HOST_WIDE_INT bitpos)
594a51fe 5213{
0b5383eb
DJ
5214 unsigned int startbit, endbit;
5215 int intregs, intoffset;
5216 enum machine_mode mode;
594a51fe 5217
0b5383eb
DJ
5218 if (cum->intoffset == -1)
5219 return;
594a51fe 5220
0b5383eb
DJ
5221 intoffset = cum->intoffset;
5222 cum->intoffset = -1;
5223
5224 if (intoffset % BITS_PER_WORD != 0)
5225 {
5226 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5227 MODE_INT, 0);
5228 if (mode == BLKmode)
594a51fe 5229 {
0b5383eb
DJ
5230 /* We couldn't find an appropriate mode, which happens,
5231 e.g., in packed structs when there are 3 bytes to load.
5232 Back intoffset back to the beginning of the word in this
5233 case. */
5234 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5235 }
594a51fe 5236 }
0b5383eb
DJ
5237
5238 startbit = intoffset & -BITS_PER_WORD;
5239 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5240 intregs = (endbit - startbit) / BITS_PER_WORD;
5241 cum->words += intregs;
5242}
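/* Worked example with 64-bit words: if pending int fields start at bit 32
   (cum->intoffset == 32) and BITPOS is 160, then startbit = 0,
   endbit = 192, and intregs = 3, so three word-sized GPR slots are
   charged to cum->words.  */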
5243
5244/* The darwin64 ABI calls for us to recurse down through structs,
 5245   looking for elements passed in registers.  Unfortunately, we also
 5246   have to track the int register count here because of misalignments
5247 in powerpc alignment mode. */
5248
5249static void
5250rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5251 tree type,
5252 HOST_WIDE_INT startbitpos)
5253{
5254 tree f;
5255
5256 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5257 if (TREE_CODE (f) == FIELD_DECL)
5258 {
5259 HOST_WIDE_INT bitpos = startbitpos;
5260 tree ftype = TREE_TYPE (f);
70fb00df
AP
5261 enum machine_mode mode;
5262 if (ftype == error_mark_node)
5263 continue;
5264 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5265
5266 if (DECL_SIZE (f) != 0
5267 && host_integerp (bit_position (f), 1))
5268 bitpos += int_bit_position (f);
5269
5270 /* ??? FIXME: else assume zero offset. */
5271
5272 if (TREE_CODE (ftype) == RECORD_TYPE)
5273 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5274 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5275 {
5276 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5277 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5278 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5279 }
5280 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5281 {
5282 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5283 cum->vregno++;
5284 cum->words += 2;
5285 }
5286 else if (cum->intoffset == -1)
5287 cum->intoffset = bitpos;
5288 }
594a51fe
SS
5289}
5290
4697a36c
MM
5291/* Update the data in CUM to advance over an argument
5292 of mode MODE and data type TYPE.
b2d04ecf
AM
5293 (TYPE is null for libcalls where that information may not be available.)
5294
5295 Note that for args passed by reference, function_arg will be called
5296 with MODE and TYPE set to that of the pointer to the arg, not the arg
5297 itself. */
4697a36c
MM
5298
5299void
f676971a 5300function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5301 tree type, int named, int depth)
4697a36c 5302{
0b5383eb
DJ
5303 int size;
5304
594a51fe
SS
5305 /* Only tick off an argument if we're not recursing. */
5306 if (depth == 0)
5307 cum->nargs_prototype--;
4697a36c 5308
ad630bef
DE
5309 if (TARGET_ALTIVEC_ABI
5310 && (ALTIVEC_VECTOR_MODE (mode)
5311 || (type && TREE_CODE (type) == VECTOR_TYPE
5312 && int_size_in_bytes (type) == 16)))
0ac081f6 5313 {
4ed78545
AM
5314 bool stack = false;
5315
2858f73a 5316 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5317 {
6d0ef01e
HP
5318 cum->vregno++;
5319 if (!TARGET_ALTIVEC)
c85ce869 5320 error ("cannot pass argument in vector register because"
6d0ef01e 5321 " altivec instructions are disabled, use -maltivec"
c85ce869 5322 " to enable them");
4ed78545
AM
5323
5324 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5325 even if it is going to be passed in a vector register.
4ed78545
AM
5326 Darwin does the same for variable-argument functions. */
5327 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5328 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5329 stack = true;
6d0ef01e 5330 }
4ed78545
AM
5331 else
5332 stack = true;
5333
5334 if (stack)
c4ad648e 5335 {
a594a19c 5336 int align;
f676971a 5337
2858f73a
GK
5338 /* Vector parameters must be 16-byte aligned. This places
5339 them at 2 mod 4 in terms of words in 32-bit mode, since
5340 the parameter save area starts at offset 24 from the
5341 stack. In 64-bit mode, they just have to start on an
5342 even word, since the parameter save area is 16-byte
5343 aligned. Space for GPRs is reserved even if the argument
5344 will be passed in memory. */
5345 if (TARGET_32BIT)
4ed78545 5346 align = (2 - cum->words) & 3;
2858f73a
GK
5347 else
5348 align = cum->words & 1;
c53bdcf5 5349 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5350
a594a19c
GK
5351 if (TARGET_DEBUG_ARG)
5352 {
f676971a 5353 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5354 cum->words, align);
5355 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5356 cum->nargs_prototype, cum->prototype,
2858f73a 5357 GET_MODE_NAME (mode));
a594a19c
GK
5358 }
5359 }
0ac081f6 5360 }
a4b0320c 5361 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5362 && !cum->stdarg
5363 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5364 cum->sysv_gregno++;
594a51fe
SS
5365
5366 else if (rs6000_darwin64_abi
5367 && mode == BLKmode
0b5383eb
DJ
5368 && TREE_CODE (type) == RECORD_TYPE
5369 && (size = int_size_in_bytes (type)) > 0)
5370 {
5371 /* Variable sized types have size == -1 and are
5372 treated as if consisting entirely of ints.
5373 Pad to 16 byte boundary if needed. */
5374 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5375 && (cum->words % 2) != 0)
5376 cum->words++;
5377 /* For varargs, we can just go up by the size of the struct. */
5378 if (!named)
5379 cum->words += (size + 7) / 8;
5380 else
5381 {
5382 /* It is tempting to say int register count just goes up by
5383 sizeof(type)/8, but this is wrong in a case such as
5384 { int; double; int; } [powerpc alignment]. We have to
5385 grovel through the fields for these too. */
5386 cum->intoffset = 0;
5387 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5388 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5389 size * BITS_PER_UNIT);
5390 }
5391 }
f607bc57 5392 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5393 {
a3170dc6 5394 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5395 && (mode == SFmode || mode == DFmode
7393f7f8 5396 || mode == DDmode || mode == TDmode
602ea4d3 5397 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5398 {
2d83f070
JJ
5399 /* _Decimal128 must use an even/odd register pair. This assumes
5400 that the register number is odd when fregno is odd. */
5401 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5402 cum->fregno++;
5403
5404 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5405 <= FP_ARG_V4_MAX_REG)
602ea4d3 5406 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5407 else
5408 {
602ea4d3 5409 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5410 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5411 cum->words += cum->words & 1;
c53bdcf5 5412 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5413 }
4697a36c 5414 }
4cc833b7
RH
5415 else
5416 {
b2d04ecf 5417 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5418 int gregno = cum->sysv_gregno;
5419
4ed78545
AM
5420 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5421 (r7,r8) or (r9,r10). As does any other 2 word item such
5422 as complex int due to a historical mistake. */
5423 if (n_words == 2)
5424 gregno += (1 - gregno) & 1;
4cc833b7 5425
4ed78545 5426 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5427 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5428 {
4ed78545
AM
5429 /* Long long and SPE vectors are aligned on the stack.
5430 So are other 2 word items such as complex int due to
5431 a historical mistake. */
4cc833b7
RH
5432 if (n_words == 2)
5433 cum->words += cum->words & 1;
5434 cum->words += n_words;
5435 }
4697a36c 5436
4cc833b7
RH
 5437	  /* Note: we keep accumulating gregno even after we have started
 5438	     spilling to the stack; this lets expand_builtin_saveregs see
 5439	     that spilling has begun.  */
5440 cum->sysv_gregno = gregno + n_words;
5441 }
4697a36c 5442
4cc833b7
RH
5443 if (TARGET_DEBUG_ARG)
5444 {
5445 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5446 cum->words, cum->fregno);
5447 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5448 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5449 fprintf (stderr, "mode = %4s, named = %d\n",
5450 GET_MODE_NAME (mode), named);
5451 }
4697a36c
MM
5452 }
5453 else
4cc833b7 5454 {
b2d04ecf 5455 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5456 int start_words = cum->words;
5457 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5458
294bd182 5459 cum->words = align_words + n_words;
4697a36c 5460
ebb109ad 5461 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5462 && mode != SDmode
a3170dc6 5463 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5464 {
5465 /* _Decimal128 must be passed in an even/odd float register pair.
5466 This assumes that the register number is odd when fregno is
5467 odd. */
5468 if (mode == TDmode && (cum->fregno % 2) == 1)
5469 cum->fregno++;
5470 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5471 }
4cc833b7
RH
5472
5473 if (TARGET_DEBUG_ARG)
5474 {
5475 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5476 cum->words, cum->fregno);
5477 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5478 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5479 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5480 named, align_words - start_words, depth);
4cc833b7
RH
5481 }
5482 }
4697a36c 5483}
a6c9bed4 5484
f82f556d
AH
5485static rtx
5486spe_build_register_parallel (enum machine_mode mode, int gregno)
5487{
17caeff2 5488 rtx r1, r3, r5, r7;
f82f556d 5489
37409796 5490 switch (mode)
f82f556d 5491 {
37409796 5492 case DFmode:
54b695e7
AH
5493 r1 = gen_rtx_REG (DImode, gregno);
5494 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5495 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5496
5497 case DCmode:
17caeff2 5498 case TFmode:
54b695e7
AH
5499 r1 = gen_rtx_REG (DImode, gregno);
5500 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5501 r3 = gen_rtx_REG (DImode, gregno + 2);
5502 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5503 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5504
17caeff2
JM
5505 case TCmode:
5506 r1 = gen_rtx_REG (DImode, gregno);
5507 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5508 r3 = gen_rtx_REG (DImode, gregno + 2);
5509 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5510 r5 = gen_rtx_REG (DImode, gregno + 4);
5511 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5512 r7 = gen_rtx_REG (DImode, gregno + 6);
5513 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5514 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5515
37409796
NS
5516 default:
5517 gcc_unreachable ();
f82f556d 5518 }
f82f556d 5519}
b78d48dd 5520
f82f556d 5521/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5522static rtx
f676971a 5523rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5524 tree type)
a6c9bed4 5525{
f82f556d
AH
5526 int gregno = cum->sysv_gregno;
5527
5528 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5529 are passed and returned in a pair of GPRs for ABI compatibility. */
17caeff2
JM
5530 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5531 || mode == TFmode || mode == TCmode))
f82f556d 5532 {
b5870bee
AH
5533 int n_words = rs6000_arg_size (mode, type);
5534
f82f556d 5535 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
5536 if (mode == DFmode)
5537 gregno += (1 - gregno) & 1;
f82f556d 5538
b5870bee
AH
5539 /* Multi-reg args are not split between registers and stack. */
5540 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5541 return NULL_RTX;
5542
5543 return spe_build_register_parallel (mode, gregno);
5544 }
a6c9bed4
AH
5545 if (cum->stdarg)
5546 {
c53bdcf5 5547 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5548
5549 /* SPE vectors are put in odd registers. */
5550 if (n_words == 2 && (gregno & 1) == 0)
5551 gregno += 1;
5552
5553 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5554 {
5555 rtx r1, r2;
5556 enum machine_mode m = SImode;
5557
5558 r1 = gen_rtx_REG (m, gregno);
5559 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5560 r2 = gen_rtx_REG (m, gregno + 1);
5561 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5562 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5563 }
5564 else
b78d48dd 5565 return NULL_RTX;
a6c9bed4
AH
5566 }
5567 else
5568 {
f82f556d
AH
5569 if (gregno <= GP_ARG_MAX_REG)
5570 return gen_rtx_REG (mode, gregno);
a6c9bed4 5571 else
b78d48dd 5572 return NULL_RTX;
a6c9bed4
AH
5573 }
5574}
5575
0b5383eb
DJ
5576/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5577 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5578
0b5383eb 5579static void
bb8df8a6 5580rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5581 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5582{
0b5383eb
DJ
5583 enum machine_mode mode;
5584 unsigned int regno;
5585 unsigned int startbit, endbit;
5586 int this_regno, intregs, intoffset;
5587 rtx reg;
594a51fe 5588
0b5383eb
DJ
5589 if (cum->intoffset == -1)
5590 return;
5591
5592 intoffset = cum->intoffset;
5593 cum->intoffset = -1;
5594
5595 /* If this is the trailing part of a word, try to only load that
5596 much into the register. Otherwise load the whole register. Note
5597 that in the latter case we may pick up unwanted bits. It's not a
 5598     problem at the moment, but we may wish to revisit this.  */
5599
5600 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5601 {
0b5383eb
DJ
5602 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5603 MODE_INT, 0);
5604 if (mode == BLKmode)
5605 {
5606 /* We couldn't find an appropriate mode, which happens,
5607 e.g., in packed structs when there are 3 bytes to load.
5608 Back intoffset back to the beginning of the word in this
5609 case. */
5610 intoffset = intoffset & -BITS_PER_WORD;
5611 mode = word_mode;
5612 }
5613 }
5614 else
5615 mode = word_mode;
5616
5617 startbit = intoffset & -BITS_PER_WORD;
5618 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5619 intregs = (endbit - startbit) / BITS_PER_WORD;
5620 this_regno = cum->words + intoffset / BITS_PER_WORD;
5621
5622 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5623 cum->use_stack = 1;
bb8df8a6 5624
0b5383eb
DJ
5625 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5626 if (intregs <= 0)
5627 return;
5628
5629 intoffset /= BITS_PER_UNIT;
5630 do
5631 {
5632 regno = GP_ARG_MIN_REG + this_regno;
5633 reg = gen_rtx_REG (mode, regno);
5634 rvec[(*k)++] =
5635 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5636
5637 this_regno += 1;
5638 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5639 mode = word_mode;
5640 intregs -= 1;
5641 }
5642 while (intregs > 0);
5643}
5644
 5645/* Recursive workhorse for rs6000_darwin64_record_arg below.  */
5646
5647static void
586de218 5648rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5649 HOST_WIDE_INT startbitpos, rtx rvec[],
5650 int *k)
5651{
5652 tree f;
5653
5654 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5655 if (TREE_CODE (f) == FIELD_DECL)
5656 {
5657 HOST_WIDE_INT bitpos = startbitpos;
5658 tree ftype = TREE_TYPE (f);
70fb00df
AP
5659 enum machine_mode mode;
5660 if (ftype == error_mark_node)
5661 continue;
5662 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5663
5664 if (DECL_SIZE (f) != 0
5665 && host_integerp (bit_position (f), 1))
5666 bitpos += int_bit_position (f);
5667
5668 /* ??? FIXME: else assume zero offset. */
5669
5670 if (TREE_CODE (ftype) == RECORD_TYPE)
5671 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5672 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5673 {
0b5383eb
DJ
5674#if 0
5675 switch (mode)
594a51fe 5676 {
0b5383eb
DJ
5677 case SCmode: mode = SFmode; break;
5678 case DCmode: mode = DFmode; break;
5679 case TCmode: mode = TFmode; break;
5680 default: break;
594a51fe 5681 }
0b5383eb
DJ
5682#endif
5683 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5684 rvec[(*k)++]
bb8df8a6 5685 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5686 gen_rtx_REG (mode, cum->fregno++),
5687 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5688 if (mode == TFmode || mode == TDmode)
0b5383eb 5689 cum->fregno++;
594a51fe 5690 }
0b5383eb
DJ
5691 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5692 {
5693 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5694 rvec[(*k)++]
bb8df8a6
EC
5695 = gen_rtx_EXPR_LIST (VOIDmode,
5696 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5697 GEN_INT (bitpos / BITS_PER_UNIT));
5698 }
5699 else if (cum->intoffset == -1)
5700 cum->intoffset = bitpos;
5701 }
5702}
594a51fe 5703
0b5383eb
DJ
5704/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5705 the register(s) to be used for each field and subfield of a struct
5706 being passed by value, along with the offset of where the
5707 register's value may be found in the block. FP fields go in FP
 5708   registers, vector fields go in vector registers, and everything
bb8df8a6 5709 else goes in int registers, packed as in memory.
8ff40a74 5710
0b5383eb
DJ
5711 This code is also used for function return values. RETVAL indicates
5712 whether this is the case.
8ff40a74 5713
a4d05547 5714 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5715 calling convention. */
594a51fe 5716
0b5383eb 5717static rtx
586de218 5718rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5719 int named, bool retval)
5720{
5721 rtx rvec[FIRST_PSEUDO_REGISTER];
5722 int k = 1, kbase = 1;
5723 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5724 /* This is a copy; modifications are not visible to our caller. */
5725 CUMULATIVE_ARGS copy_cum = *orig_cum;
5726 CUMULATIVE_ARGS *cum = &copy_cum;
5727
5728 /* Pad to 16 byte boundary if needed. */
5729 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5730 && (cum->words % 2) != 0)
5731 cum->words++;
5732
5733 cum->intoffset = 0;
5734 cum->use_stack = 0;
5735 cum->named = named;
5736
5737 /* Put entries into rvec[] for individual FP and vector fields, and
5738 for the chunks of memory that go in int regs. Note we start at
5739 element 1; 0 is reserved for an indication of using memory, and
5740 may or may not be filled in below. */
5741 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5742 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5743
5744 /* If any part of the struct went on the stack put all of it there.
5745 This hack is because the generic code for
5746 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5747 parts of the struct are not at the beginning. */
5748 if (cum->use_stack)
5749 {
5750 if (retval)
5751 return NULL_RTX; /* doesn't go in registers at all */
5752 kbase = 0;
5753 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5754 }
5755 if (k > 1 || cum->use_stack)
5756 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5757 else
5758 return NULL_RTX;
5759}
5760
b78d48dd
FJ
5761/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5762
5763static rtx
ec6376ab 5764rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5765{
ec6376ab
AM
5766 int n_units;
5767 int i, k;
5768 rtx rvec[GP_ARG_NUM_REG + 1];
5769
5770 if (align_words >= GP_ARG_NUM_REG)
5771 return NULL_RTX;
5772
5773 n_units = rs6000_arg_size (mode, type);
5774
5775 /* Optimize the simple case where the arg fits in one gpr, except in
5776 the case of BLKmode due to assign_parms assuming that registers are
5777 BITS_PER_WORD wide. */
5778 if (n_units == 0
5779 || (n_units == 1 && mode != BLKmode))
5780 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5781
5782 k = 0;
5783 if (align_words + n_units > GP_ARG_NUM_REG)
5784 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5785 using a magic NULL_RTX component.
79773478
AM
5786 This is not strictly correct. Only some of the arg belongs in
5787 memory, not all of it. However, the normal scheme using
 5788      function_arg_partial_nregs can result in unusual subregs, e.g.
5789 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5790 store the whole arg to memory is often more efficient than code
5791 to store pieces, and we know that space is available in the right
5792 place for the whole arg. */
ec6376ab
AM
5793 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5794
5795 i = 0;
5796 do
36a454e1 5797 {
ec6376ab
AM
5798 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5799 rtx off = GEN_INT (i++ * 4);
5800 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5801 }
ec6376ab
AM
5802 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5803
5804 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5805}
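/* As an illustration (assuming the usual r3..r10 argument GPRs, i.e.
   GP_ARG_NUM_REG == 8): a 12-byte BLKmode argument starting at
   align_words == 7 has n_units == 3, so the PARALLEL contains the magic
   NULL_RTX element plus a single (reg:SI r10) at offset 0, and the rest
   of the argument is passed in memory.  */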
5806
4697a36c
MM
5807/* Determine where to put an argument to a function.
5808 Value is zero to push the argument on the stack,
5809 or a hard register in which to store the argument.
5810
5811 MODE is the argument's machine mode.
5812 TYPE is the data type of the argument (as a tree).
5813 This is null for libcalls where that information may
5814 not be available.
5815 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5816 the preceding args and about the function being called. It is
5817 not modified in this routine.
4697a36c
MM
5818 NAMED is nonzero if this argument is a named parameter
5819 (otherwise it is an extra parameter matching an ellipsis).
5820
5821 On RS/6000 the first eight words of non-FP are normally in registers
5822 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5823 Under V.4, the first 8 FP args are in registers.
5824
5825 If this is floating-point and no prototype is specified, we use
5826 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5827 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5828 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5829 doesn't support PARALLEL anyway.
5830
5831 Note that for args passed by reference, function_arg will be called
5832 with MODE and TYPE set to that of the pointer to the arg, not the arg
5833 itself. */
4697a36c 5834
9390387d 5835rtx
f676971a 5836function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5837 tree type, int named)
4697a36c 5838{
4cc833b7 5839 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5840
a4f6c312
SS
 5841  /* Return a marker to indicate whether the bit in CR1 that V.4 uses
 5842     to say FP args were passed in registers needs to be set or cleared.
5843 Assume that we don't need the marker for software floating point,
5844 or compiler generated library calls. */
4697a36c
MM
5845 if (mode == VOIDmode)
5846 {
f607bc57 5847 if (abi == ABI_V4
b9599e46 5848 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5849 && (cum->stdarg
5850 || (cum->nargs_prototype < 0
5851 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5852 {
a3170dc6
AH
5853 /* For the SPE, we need to crxor CR6 always. */
5854 if (TARGET_SPE_ABI)
5855 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5856 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5857 return GEN_INT (cum->call_cookie
5858 | ((cum->fregno == FP_ARG_MIN_REG)
5859 ? CALL_V4_SET_FP_ARGS
5860 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5861 }
4697a36c 5862
7509c759 5863 return GEN_INT (cum->call_cookie);
4697a36c
MM
5864 }
5865
0b5383eb
DJ
5866 if (rs6000_darwin64_abi && mode == BLKmode
5867 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5868 {
0b5383eb 5869 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5870 if (rslt != NULL_RTX)
5871 return rslt;
5872 /* Else fall through to usual handling. */
5873 }
5874
2858f73a 5875 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5876 if (TARGET_64BIT && ! cum->prototype)
5877 {
c4ad648e
AM
5878 /* Vector parameters get passed in vector register
5879 and also in GPRs or memory, in absence of prototype. */
5880 int align_words;
5881 rtx slot;
5882 align_words = (cum->words + 1) & ~1;
5883
5884 if (align_words >= GP_ARG_NUM_REG)
5885 {
5886 slot = NULL_RTX;
5887 }
5888 else
5889 {
5890 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5891 }
5892 return gen_rtx_PARALLEL (mode,
5893 gen_rtvec (2,
5894 gen_rtx_EXPR_LIST (VOIDmode,
5895 slot, const0_rtx),
5896 gen_rtx_EXPR_LIST (VOIDmode,
5897 gen_rtx_REG (mode, cum->vregno),
5898 const0_rtx)));
c72d6c26
HP
5899 }
5900 else
5901 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5902 else if (TARGET_ALTIVEC_ABI
5903 && (ALTIVEC_VECTOR_MODE (mode)
5904 || (type && TREE_CODE (type) == VECTOR_TYPE
5905 && int_size_in_bytes (type) == 16)))
0ac081f6 5906 {
2858f73a 5907 if (named || abi == ABI_V4)
a594a19c 5908 return NULL_RTX;
0ac081f6 5909 else
a594a19c
GK
5910 {
5911 /* Vector parameters to varargs functions under AIX or Darwin
5912 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5913 int align, align_words, n_words;
5914 enum machine_mode part_mode;
a594a19c
GK
5915
5916 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5917 2 mod 4 in terms of words in 32-bit mode, since the parameter
5918 save area starts at offset 24 from the stack. In 64-bit mode,
5919 they just have to start on an even word, since the parameter
5920 save area is 16-byte aligned. */
5921 if (TARGET_32BIT)
4ed78545 5922 align = (2 - cum->words) & 3;
2858f73a
GK
5923 else
5924 align = cum->words & 1;
a594a19c
GK
5925 align_words = cum->words + align;
5926
5927 /* Out of registers? Memory, then. */
5928 if (align_words >= GP_ARG_NUM_REG)
5929 return NULL_RTX;
ec6376ab
AM
5930
5931 if (TARGET_32BIT && TARGET_POWERPC64)
5932 return rs6000_mixed_function_arg (mode, type, align_words);
5933
2858f73a
GK
5934 /* The vector value goes in GPRs. Only the part of the
5935 value in GPRs is reported here. */
ec6376ab
AM
5936 part_mode = mode;
5937 n_words = rs6000_arg_size (mode, type);
5938 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5939 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
5940 is either wholly in GPRs or half in GPRs and half not. */
5941 part_mode = DImode;
ec6376ab
AM
5942
5943 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5944 }
0ac081f6 5945 }
f82f556d
AH
5946 else if (TARGET_SPE_ABI && TARGET_SPE
5947 && (SPE_VECTOR_MODE (mode)
18f63bfa 5948 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5949 || mode == DDmode
17caeff2
JM
5950 || mode == DCmode
5951 || mode == TFmode
7393f7f8 5952 || mode == TDmode
17caeff2 5953 || mode == TCmode))))
a6c9bed4 5954 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5955
f607bc57 5956 else if (abi == ABI_V4)
4697a36c 5957 {
a3170dc6 5958 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5959 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5960 || (mode == TFmode && !TARGET_IEEEQUAD)
5961 || mode == DDmode || mode == TDmode))
4cc833b7 5962 {
2d83f070
JJ
5963 /* _Decimal128 must use an even/odd register pair. This assumes
5964 that the register number is odd when fregno is odd. */
5965 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5966 cum->fregno++;
5967
5968 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5969 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5970 return gen_rtx_REG (mode, cum->fregno);
5971 else
b78d48dd 5972 return NULL_RTX;
4cc833b7
RH
5973 }
5974 else
5975 {
b2d04ecf 5976 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5977 int gregno = cum->sysv_gregno;
5978
4ed78545
AM
5979 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5980 (r7,r8) or (r9,r10). As does any other 2 word item such
5981 as complex int due to a historical mistake. */
5982 if (n_words == 2)
5983 gregno += (1 - gregno) & 1;
4cc833b7 5984
4ed78545 5985 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5986 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5987 return NULL_RTX;
ec6376ab
AM
5988
5989 if (TARGET_32BIT && TARGET_POWERPC64)
5990 return rs6000_mixed_function_arg (mode, type,
5991 gregno - GP_ARG_MIN_REG);
5992 return gen_rtx_REG (mode, gregno);
4cc833b7 5993 }
4697a36c 5994 }
4cc833b7
RH
5995 else
5996 {
294bd182 5997 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5998
2d83f070
JJ
5999 /* _Decimal128 must be passed in an even/odd float register pair.
6000 This assumes that the register number is odd when fregno is odd. */
6001 if (mode == TDmode && (cum->fregno % 2) == 1)
6002 cum->fregno++;
6003
2858f73a 6004 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6005 {
ec6376ab
AM
6006 rtx rvec[GP_ARG_NUM_REG + 1];
6007 rtx r;
6008 int k;
c53bdcf5
AM
6009 bool needs_psave;
6010 enum machine_mode fmode = mode;
c53bdcf5
AM
6011 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6012
6013 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6014 {
c53bdcf5
AM
6015 /* Currently, we only ever need one reg here because complex
6016 doubles are split. */
7393f7f8
BE
6017 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6018 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6019
7393f7f8
BE
6020 /* Long double or _Decimal128 split over regs and memory. */
6021 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6022 }
c53bdcf5
AM
6023
6024 /* Do we also need to pass this arg in the parameter save
6025 area? */
6026 needs_psave = (type
6027 && (cum->nargs_prototype <= 0
6028 || (DEFAULT_ABI == ABI_AIX
de17c25f 6029 && TARGET_XL_COMPAT
c53bdcf5
AM
6030 && align_words >= GP_ARG_NUM_REG)));
6031
6032 if (!needs_psave && mode == fmode)
ec6376ab 6033 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6034
ec6376ab 6035 k = 0;
c53bdcf5
AM
6036 if (needs_psave)
6037 {
ec6376ab 6038 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6039 This piece must come first, before the fprs. */
c53bdcf5
AM
6040 if (align_words < GP_ARG_NUM_REG)
6041 {
6042 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6043
6044 if (align_words + n_words > GP_ARG_NUM_REG
6045 || (TARGET_32BIT && TARGET_POWERPC64))
6046 {
6047 /* If this is partially on the stack, then we only
6048 include the portion actually in registers here. */
6049 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6050 rtx off;
79773478
AM
6051 int i = 0;
6052 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6053 /* Not all of the arg fits in gprs. Say that it
6054 goes in memory too, using a magic NULL_RTX
6055 component. Also see comment in
6056 rs6000_mixed_function_arg for why the normal
6057 function_arg_partial_nregs scheme doesn't work
6058 in this case. */
6059 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6060 const0_rtx);
ec6376ab
AM
6061 do
6062 {
6063 r = gen_rtx_REG (rmode,
6064 GP_ARG_MIN_REG + align_words);
2e6c9641 6065 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6066 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6067 }
6068 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6069 }
6070 else
6071 {
6072 /* The whole arg fits in gprs. */
6073 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6074 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6075 }
c53bdcf5 6076 }
ec6376ab
AM
6077 else
6078 /* It's entirely in memory. */
6079 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6080 }
6081
ec6376ab
AM
6082 /* Describe where this piece goes in the fprs. */
6083 r = gen_rtx_REG (fmode, cum->fregno);
6084 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6085
6086 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6087 }
6088 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6089 {
ec6376ab
AM
6090 if (TARGET_32BIT && TARGET_POWERPC64)
6091 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6092
4eeca74f
AM
6093 if (mode == BLKmode)
6094 mode = Pmode;
6095
b2d04ecf
AM
6096 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6097 }
4cc833b7
RH
6098 else
6099 return NULL_RTX;
4697a36c 6100 }
4697a36c
MM
6101}
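/* Example of the FP case above: under the AIX ABI a prototyped DFmode
   argument with a free FPR is described simply as
   gen_rtx_REG (DFmode, cum->fregno); for an unprototyped call the same
   argument comes back as a PARALLEL naming both the FPR and the GPR or
   stack words, so the value is passed in both places.  */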
6102\f
ec6376ab 6103/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6104 the number of bytes passed in registers. For args passed entirely in
6105 registers or entirely in memory, zero. When an arg is described by a
6106 PARALLEL, perhaps using more than one register type, this function
6107 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6108
78a52f11
RH
6109static int
6110rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6111 tree type, bool named)
4697a36c 6112{
c53bdcf5 6113 int ret = 0;
ec6376ab 6114 int align_words;
c53bdcf5 6115
f607bc57 6116 if (DEFAULT_ABI == ABI_V4)
4697a36c 6117 return 0;
4697a36c 6118
c53bdcf5
AM
6119 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6120 && cum->nargs_prototype >= 0)
6121 return 0;
6122
0b5383eb
DJ
6123 /* In this complicated case we just disable the partial_nregs code. */
6124 if (rs6000_darwin64_abi && mode == BLKmode
6125 && TREE_CODE (type) == RECORD_TYPE
6126 && int_size_in_bytes (type) > 0)
6127 return 0;
6128
294bd182 6129 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6130
79773478
AM
6131 if (USE_FP_FOR_ARG_P (cum, mode, type))
6132 {
fb63c729
AM
6133 /* If we are passing this arg in the fixed parameter save area
6134 (gprs or memory) as well as fprs, then this function should
79773478
AM
6135 return the number of partial bytes passed in the parameter
6136 save area rather than partial bytes passed in fprs. */
6137 if (type
6138 && (cum->nargs_prototype <= 0
6139 || (DEFAULT_ABI == ABI_AIX
6140 && TARGET_XL_COMPAT
6141 && align_words >= GP_ARG_NUM_REG)))
6142 return 0;
6143 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6144 > FP_ARG_MAX_REG + 1)
ac7e839c 6145 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6146 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6147 return 0;
6148 }
6149
ec6376ab
AM
6150 if (align_words < GP_ARG_NUM_REG
6151 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6152 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6153
c53bdcf5 6154 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6155 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6156
c53bdcf5 6157 return ret;
4697a36c
MM
6158}
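/* Example (assuming the usual eight GPR argument words): a 4-word
   argument whose first word falls at align_words == 6 has only two words
   left in GPRs, so this returns 2 * 8 = 16 bytes on a 64-bit target or
   2 * 4 = 8 bytes on a 32-bit one; under the V.4 ABI this function always
   returns 0, since multi-reg args are never split there.  */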
6159\f
6160/* A C expression that indicates when an argument must be passed by
6161 reference. If nonzero for an argument, a copy of that argument is
6162 made in memory and a pointer to the argument is passed instead of
6163 the argument itself. The pointer is passed in whatever way is
6164 appropriate for passing a pointer to that type.
6165
b2d04ecf
AM
6166 Under V.4, aggregates and long double are passed by reference.
6167
6168 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6169 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6170
6171 As an extension to all ABIs, variable sized types are passed by
6172 reference. */
4697a36c 6173
8cd5a4e0 6174static bool
f676971a 6175rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6176 enum machine_mode mode, const_tree type,
bada2eb8 6177 bool named ATTRIBUTE_UNUSED)
4697a36c 6178{
602ea4d3 6179 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6180 {
6181 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6182 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6183 return 1;
6184 }
6185
6186 if (!type)
6187 return 0;
4697a36c 6188
bada2eb8
DE
6189 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6190 {
6191 if (TARGET_DEBUG_ARG)
6192 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6193 return 1;
6194 }
6195
6196 if (int_size_in_bytes (type) < 0)
6197 {
6198 if (TARGET_DEBUG_ARG)
6199 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6200 return 1;
6201 }
6202
6203 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6204 modes only exist for GCC vector types if -maltivec. */
6205 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6206 {
6207 if (TARGET_DEBUG_ARG)
6208 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6209 return 1;
6210 }
b693336b
PB
6211
6212 /* Pass synthetic vectors in memory. */
bada2eb8 6213 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6214 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6215 {
6216 static bool warned_for_pass_big_vectors = false;
6217 if (TARGET_DEBUG_ARG)
6218 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6219 if (!warned_for_pass_big_vectors)
6220 {
d4ee4d25 6221 warning (0, "GCC vector passed by reference: "
b693336b
PB
6222 "non-standard ABI extension with no compatibility guarantee");
6223 warned_for_pass_big_vectors = true;
6224 }
6225 return 1;
6226 }
6227
b2d04ecf 6228 return 0;
4697a36c 6229}
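/* Summarizing the tests above: TFmode long double under the V.4 ABI when
   TARGET_IEEEQUAD, V.4 aggregates, variable-sized types, AltiVec vector
   modes on 32-bit targets without the AltiVec ABI, and over-sized
   synthetic vector types are all passed by reference; everything else is
   passed by value.  */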
5985c7a6
FJ
6230
6231static void
2d9db8eb 6232rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6233{
6234 int i;
6235 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6236
6237 if (nregs == 0)
6238 return;
6239
c4ad648e 6240 for (i = 0; i < nregs; i++)
5985c7a6 6241 {
9390387d 6242 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6243 if (reload_completed)
c4ad648e
AM
6244 {
6245 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6246 tem = NULL_RTX;
6247 else
6248 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6249 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6250 }
5985c7a6
FJ
6251 else
6252 tem = replace_equiv_address (tem, XEXP (tem, 0));
6253
37409796 6254 gcc_assert (tem);
5985c7a6
FJ
6255
6256 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6257 }
6258}
4697a36c
MM
6259\f
 6260/* Perform any actions needed for a function that is receiving a
f676971a 6261 variable number of arguments.
4697a36c
MM
6262
6263 CUM is as above.
6264
6265 MODE and TYPE are the mode and type of the current parameter.
6266
6267 PRETEND_SIZE is a variable that should be set to the amount of stack
6268 that must be pushed by the prolog to pretend that our caller pushed
6269 it.
6270
6271 Normally, this macro will push all remaining incoming registers on the
6272 stack and set PRETEND_SIZE to the length of the registers pushed. */
6273
c6e8c921 6274static void
f676971a 6275setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6276 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6277 int no_rtl)
4697a36c 6278{
4cc833b7
RH
6279 CUMULATIVE_ARGS next_cum;
6280 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6281 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6282 int first_reg_offset;
6283 alias_set_type set;
4697a36c 6284
f31bf321 6285 /* Skip the last named argument. */
d34c5b80 6286 next_cum = *cum;
594a51fe 6287 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6288
f607bc57 6289 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6290 {
5b667039
JJ
6291 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6292
60e2d0ca 6293 if (! no_rtl)
5b667039
JJ
6294 {
6295 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6296 HOST_WIDE_INT offset = 0;
6297
6298 /* Try to optimize the size of the varargs save area.
6299 The ABI requires that ap.reg_save_area is doubleword
6300 aligned, but we don't need to allocate space for all
6301 the bytes, only those to which we actually will save
6302 anything. */
6303 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6304 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6305 if (TARGET_HARD_FLOAT && TARGET_FPRS
6306 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6307 && cfun->va_list_fpr_size)
6308 {
6309 if (gpr_reg_num)
6310 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6311 * UNITS_PER_FP_WORD;
6312 if (cfun->va_list_fpr_size
6313 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6314 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6315 else
6316 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6317 * UNITS_PER_FP_WORD;
6318 }
6319 if (gpr_reg_num)
6320 {
6321 offset = -((first_reg_offset * reg_size) & ~7);
6322 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6323 {
6324 gpr_reg_num = cfun->va_list_gpr_size;
6325 if (reg_size == 4 && (first_reg_offset & 1))
6326 gpr_reg_num++;
6327 }
6328 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6329 }
6330 else if (fpr_size)
6331 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6332 * UNITS_PER_FP_WORD
6333 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6334
5b667039
JJ
6335 if (gpr_size + fpr_size)
6336 {
6337 rtx reg_save_area
6338 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6339 gcc_assert (GET_CODE (reg_save_area) == MEM);
6340 reg_save_area = XEXP (reg_save_area, 0);
6341 if (GET_CODE (reg_save_area) == PLUS)
6342 {
6343 gcc_assert (XEXP (reg_save_area, 0)
6344 == virtual_stack_vars_rtx);
6345 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6346 offset += INTVAL (XEXP (reg_save_area, 1));
6347 }
6348 else
6349 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6350 }
6351
6352 cfun->machine->varargs_save_offset = offset;
6353 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6354 }
4697a36c 6355 }
60e2d0ca 6356 else
4697a36c 6357 {
d34c5b80 6358 first_reg_offset = next_cum.words;
4cc833b7 6359 save_area = virtual_incoming_args_rtx;
4697a36c 6360
fe984136 6361 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6362 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6363 }
4697a36c 6364
dfafc897 6365 set = get_varargs_alias_set ();
9d30f3c1
JJ
6366 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6367 && cfun->va_list_gpr_size)
4cc833b7 6368 {
9d30f3c1
JJ
6369 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6370
6371 if (va_list_gpr_counter_field)
6372 {
6373 /* V4 va_list_gpr_size counts number of registers needed. */
6374 if (nregs > cfun->va_list_gpr_size)
6375 nregs = cfun->va_list_gpr_size;
6376 }
6377 else
6378 {
6379 /* char * va_list instead counts number of bytes needed. */
6380 if (nregs > cfun->va_list_gpr_size / reg_size)
6381 nregs = cfun->va_list_gpr_size / reg_size;
6382 }
6383
dfafc897 6384 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6385 plus_constant (save_area,
13e2e16e
DE
6386 first_reg_offset * reg_size));
6387 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6388 set_mem_alias_set (mem, set);
8ac61af7 6389 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6390
f676971a 6391 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6392 nregs);
4697a36c
MM
6393 }
6394
4697a36c 6395 /* Save FP registers if needed. */
f607bc57 6396 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6397 && TARGET_HARD_FLOAT && TARGET_FPRS
6398 && ! no_rtl
9d30f3c1
JJ
6399 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6400 && cfun->va_list_fpr_size)
4697a36c 6401 {
9d30f3c1 6402 int fregno = next_cum.fregno, nregs;
9ebbca7d 6403 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6404 rtx lab = gen_label_rtx ();
5b667039
JJ
6405 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6406 * UNITS_PER_FP_WORD);
4697a36c 6407
c4ad648e
AM
6408 emit_jump_insn
6409 (gen_rtx_SET (VOIDmode,
6410 pc_rtx,
6411 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6412 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6413 const0_rtx),
39403d82 6414 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6415 pc_rtx)));
6416
9d30f3c1
JJ
6417 for (nregs = 0;
6418 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6419 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6420 {
5496b36f 6421 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6422 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6423 set_mem_alias_set (mem, set);
94ff898d 6424 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6425 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6426 }
4cc833b7
RH
6427
6428 emit_label (lab);
4697a36c 6429 }
4697a36c 6430}
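/* For reference, a sketch of the V4 register save area laid out above
   (offsets relative to reg_save_area; reg_size is 4 on 32-bit targets):

     offset 0              up to 8 GP argument registers (r3..r10),
                           reg_size bytes each
     offset 8 * reg_size   up to 8 FP argument registers (f1..f8),
                           8 bytes each, when FP values are saved

   Only the part of this block that va_arg can actually reference is
   allocated, per the va_list_gpr_size / va_list_fpr_size logic above.  */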
4697a36c 6431
dfafc897 6432/* Create the va_list data type. */
2c4974b7 6433
c35d187f
RH
6434static tree
6435rs6000_build_builtin_va_list (void)
dfafc897 6436{
64c2816f 6437 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6438
9ebbca7d
GK
6439 /* For AIX, prefer 'char *' because that's what the system
6440 header files like. */
f607bc57 6441 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6442 return build_pointer_type (char_type_node);
dfafc897 6443
f1e639b1 6444 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6445 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6446
f676971a 6447 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6448 unsigned_char_type_node);
f676971a 6449 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6450 unsigned_char_type_node);
64c2816f
DT
6451 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6452 every user file. */
6453 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6454 short_unsigned_type_node);
dfafc897
FS
6455 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6456 ptr_type_node);
6457 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6458 ptr_type_node);
6459
9d30f3c1
JJ
6460 va_list_gpr_counter_field = f_gpr;
6461 va_list_fpr_counter_field = f_fpr;
6462
dfafc897
FS
6463 DECL_FIELD_CONTEXT (f_gpr) = record;
6464 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6465 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6466 DECL_FIELD_CONTEXT (f_ovf) = record;
6467 DECL_FIELD_CONTEXT (f_sav) = record;
6468
bab45a51
FS
6469 TREE_CHAIN (record) = type_decl;
6470 TYPE_NAME (record) = type_decl;
dfafc897
FS
6471 TYPE_FIELDS (record) = f_gpr;
6472 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6473 TREE_CHAIN (f_fpr) = f_res;
6474 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6475 TREE_CHAIN (f_ovf) = f_sav;
6476
6477 layout_type (record);
6478
6479 /* The correct type is an array type of one element. */
6480 return build_array_type (record, build_index_type (size_zero_node));
6481}
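/* For reference, on ABI_V4 the record built above corresponds to the C
   structure sketched below; the field names and types mirror the
   build_decl calls, while the typedef names themselves are only
   illustrative.  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;            /* next GP argument register to use */
  unsigned char fpr;            /* next FP argument register to use */
  unsigned short reserved;      /* named padding, keeps -Wpadded quiet */
  void *overflow_arg_area;      /* arguments that were passed on the stack */
  void *reg_save_area;          /* block laid out by setup_incoming_varargs */
} __va_list_tag;
typedef __va_list_tag rs6000_v4_va_list[1];  /* array type of one element */
#endif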
6482
6483/* Implement va_start. */
6484
6485void
a2369ed3 6486rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6487{
dfafc897 6488 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6489 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6490 tree gpr, fpr, ovf, sav, t;
2c4974b7 6491
dfafc897 6492 /* Only SVR4 needs something special. */
f607bc57 6493 if (DEFAULT_ABI != ABI_V4)
dfafc897 6494 {
e5faf155 6495 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6496 return;
6497 }
6498
973a648b 6499 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6500 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6501 f_res = TREE_CHAIN (f_fpr);
6502 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6503 f_sav = TREE_CHAIN (f_ovf);
6504
872a65b5 6505 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6506 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6507 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6508 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6509 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6510
6511 /* Count number of gp and fp argument registers used. */
4cc833b7 6512 words = current_function_args_info.words;
987732e0
DE
6513 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6514 GP_ARG_NUM_REG);
6515 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6516 FP_ARG_NUM_REG);
dfafc897
FS
6517
6518 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6519 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6520 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6521 words, n_gpr, n_fpr);
dfafc897 6522
9d30f3c1
JJ
6523 if (cfun->va_list_gpr_size)
6524 {
07beea0d 6525 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6526 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6527 TREE_SIDE_EFFECTS (t) = 1;
6528 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6529 }
58c8adc1 6530
9d30f3c1
JJ
6531 if (cfun->va_list_fpr_size)
6532 {
07beea0d 6533 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6534 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6535 TREE_SIDE_EFFECTS (t) = 1;
6536 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6537 }
dfafc897
FS
6538
6539 /* Find the overflow area. */
6540 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6541 if (words != 0)
5be014d5
AP
6542 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6543 size_int (words * UNITS_PER_WORD));
07beea0d 6544 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6545 TREE_SIDE_EFFECTS (t) = 1;
6546 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6547
9d30f3c1
JJ
6548 /* If there were no va_arg invocations, don't set up the register
6549 save area. */
6550 if (!cfun->va_list_gpr_size
6551 && !cfun->va_list_fpr_size
6552 && n_gpr < GP_ARG_NUM_REG
6553 && n_fpr < FP_ARG_V4_MAX_REG)
6554 return;
6555
dfafc897
FS
6556 /* Find the register save area. */
6557 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6558 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6559 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6560 size_int (cfun->machine->varargs_save_offset));
07beea0d 6561 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6562 TREE_SIDE_EFFECTS (t) = 1;
6563 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6564}
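/* Roughly, the V4 expansion built above behaves like the pseudo C below,
   written against the illustrative __va_list_tag layout sketched earlier
   (r3..r10 and f1..f8 are the SVR4 GP/FP argument registers; the right
   hand sides name the quantities computed above rather than real helpers):

     ap->gpr = n_gpr;           number of GP argument registers consumed
     ap->fpr = n_fpr;           number of FP argument registers consumed
     ap->overflow_arg_area = incoming arg area + words * UNITS_PER_WORD;
     ap->reg_save_area = frame base + cfun->machine->varargs_save_offset;  */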
6565
6566/* Implement va_arg. */
6567
23a60a04
JM
6568tree
6569rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6570{
cd3ce9b4
JM
6571 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6572 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6573 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6574 tree lab_false, lab_over, addr;
6575 int align;
6576 tree ptrtype = build_pointer_type (type);
7393f7f8 6577 int regalign = 0;
cd3ce9b4 6578
08b0dc1b
RH
6579 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6580 {
6581 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6582 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6583 }
6584
cd3ce9b4
JM
6585 if (DEFAULT_ABI != ABI_V4)
6586 {
08b0dc1b 6587 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6588 {
6589 tree elem_type = TREE_TYPE (type);
6590 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6591 int elem_size = GET_MODE_SIZE (elem_mode);
6592
6593 if (elem_size < UNITS_PER_WORD)
6594 {
23a60a04 6595 tree real_part, imag_part;
cd3ce9b4
JM
6596 tree post = NULL_TREE;
6597
23a60a04
JM
6598 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6599 &post);
6600 /* Copy the value into a temporary, lest the formal temporary
6601 be reused out from under us. */
6602 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6603 append_to_statement_list (post, pre_p);
6604
23a60a04
JM
6605 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6606 post_p);
cd3ce9b4 6607
47a25a46 6608 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6609 }
6610 }
6611
23a60a04 6612 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6613 }
6614
6615 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6616 f_fpr = TREE_CHAIN (f_gpr);
6617 f_res = TREE_CHAIN (f_fpr);
6618 f_ovf = TREE_CHAIN (f_res);
6619 f_sav = TREE_CHAIN (f_ovf);
6620
872a65b5 6621 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6622 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6623 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6624 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6625 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6626
6627 size = int_size_in_bytes (type);
6628 rsize = (size + 3) / 4;
6629 align = 1;
6630
08b0dc1b 6631 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6632 && (TYPE_MODE (type) == SFmode
6633 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6634 || TYPE_MODE (type) == TFmode
6635 || TYPE_MODE (type) == DDmode
6636 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6637 {
6638 /* FP args go in FP registers, if present. */
cd3ce9b4 6639 reg = fpr;
602ea4d3 6640 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6641 sav_ofs = 8*4;
6642 sav_scale = 8;
602ea4d3 6643 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6644 align = 8;
6645 }
6646 else
6647 {
6648 /* Otherwise into GP registers. */
cd3ce9b4
JM
6649 reg = gpr;
6650 n_reg = rsize;
6651 sav_ofs = 0;
6652 sav_scale = 4;
6653 if (n_reg == 2)
6654 align = 8;
6655 }
6656
6657 /* Pull the value out of the saved registers.... */
6658
6659 lab_over = NULL;
6660 addr = create_tmp_var (ptr_type_node, "addr");
6661 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6662
6663 /* AltiVec vectors never go in registers when -mabi=altivec. */
6664 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6665 align = 16;
6666 else
6667 {
6668 lab_false = create_artificial_label ();
6669 lab_over = create_artificial_label ();
6670
6671 /* Long long and SPE vectors are aligned in the registers.
6672 So are any other 2-GPR items, such as complex int, due to a
6673 historical mistake. */
6674 u = reg;
602ea4d3 6675 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6676 {
7393f7f8 6677 regalign = 1;
cd3ce9b4 6678 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6679 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6680 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6681 }
7393f7f8
BE
6682 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6683 reg number is 0 for f1, so we want to make it odd. */
6684 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6685 {
6686 regalign = 1;
6687 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6688 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6689 }
cd3ce9b4 6690
95674810 6691 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6692 t = build2 (GE_EXPR, boolean_type_node, u, t);
6693 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6694 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6695 gimplify_and_add (t, pre_p);
6696
6697 t = sav;
6698 if (sav_ofs)
5be014d5 6699 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6700
8fb632eb
ZD
6701 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6702 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6703 u = fold_convert (sizetype, u);
6704 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6705 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6706
07beea0d 6707 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6708 gimplify_and_add (t, pre_p);
6709
6710 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6711 gimplify_and_add (t, pre_p);
6712
6713 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6714 append_to_statement_list (t, pre_p);
6715
7393f7f8 6716 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6717 {
6718 /* Ensure that we don't find any more args in regs.
7393f7f8 6719 Alignment has been taken care of for special cases. */
07beea0d 6720 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6721 gimplify_and_add (t, pre_p);
6722 }
6723 }
6724
6725 /* ... otherwise out of the overflow area. */
6726
6727 /* Care for on-stack alignment if needed. */
6728 t = ovf;
6729 if (align != 1)
6730 {
5be014d5
AP
6731 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6732 t = fold_convert (sizetype, t);
4a90aeeb 6733 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6734 size_int (-align));
6735 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6736 }
6737 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6738
07beea0d 6739 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6740 gimplify_and_add (u, pre_p);
6741
5be014d5 6742 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6743 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6744 gimplify_and_add (t, pre_p);
6745
6746 if (lab_over)
6747 {
6748 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6749 append_to_statement_list (t, pre_p);
6750 }
6751
0cfbc62b
JM
6752 if (STRICT_ALIGNMENT
6753 && (TYPE_ALIGN (type)
6754 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6755 {
6756 /* The value (of type complex double, for example) may not be
6757 aligned in memory in the saved registers, so copy via a
6758 temporary. (This is the same code as used for SPARC.) */
6759 tree tmp = create_tmp_var (type, "va_arg_tmp");
6760 tree dest_addr = build_fold_addr_expr (tmp);
6761
5039610b
SL
6762 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6763 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6764
6765 gimplify_and_add (copy, pre_p);
6766 addr = dest_addr;
6767 }
6768
08b0dc1b 6769 addr = fold_convert (ptrtype, addr);
872a65b5 6770 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6771}
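/* In outline, the V4 va_arg sequence emitted above is (pseudo C; "counter"
   stands for the gpr or fpr byte of the va_list, n_reg, sav_ofs and
   sav_scale are as computed above, and either register class holds at most
   8 argument registers):

     if (value is an AltiVec vector under -mabi=altivec)
       fetch it from the 16-byte aligned overflow area;
     else if (counter + n_reg <= 8, after rounding counter up for items
              that need an even/odd register pair)
       addr = reg_save_area + sav_ofs + counter * sav_scale, counter += n_reg;
     else
       mark the register area exhausted where needed (counter = 8),
       addr = overflow_arg_area aligned up, overflow_arg_area += size;
     fetch the value from addr (via a temporary if it is under-aligned).  */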
6772
0ac081f6
AH
6773/* Builtins. */
6774
58646b77
PB
6775static void
6776def_builtin (int mask, const char *name, tree type, int code)
6777{
96038623 6778 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6779 {
6780 if (rs6000_builtin_decls[code])
6781 abort ();
6782
6783 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6784 add_builtin_function (name, type, code, BUILT_IN_MD,
6785 NULL, NULL_TREE);
58646b77
PB
6786 }
6787}
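/* def_builtin is driven from the builtin-initialization loops later in
   this file, which walk the bdesc_* tables below.  A single registration
   looks roughly like the call sketched here; the function-type tree is
   built elsewhere with build_function_type_list, so the name used for it
   is illustrative only:

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
                  v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);  */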
0ac081f6 6788
24408032
AH
6789/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6790
2212663f 6791static const struct builtin_description bdesc_3arg[] =
24408032
AH
6792{
6793 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6794 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6795 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6796 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6797 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6798 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6799 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6800 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6801 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6802 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6803 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6804 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6805 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6806 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6807 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6808 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6809 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6810 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6811 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6812 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6813 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6814 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6815 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6816
6817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6832
6833 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6834 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6835 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6836 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6837 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6838 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6839 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6840 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6841 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6842};
2212663f 6843
95385cbb
AH
6844/* DST operations: void foo (void *, const int, const char). */
6845
6846static const struct builtin_description bdesc_dst[] =
6847{
6848 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6849 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6850 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6851 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6852
6853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6857};
6858
2212663f 6859/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6860
a3170dc6 6861static struct builtin_description bdesc_2arg[] =
0ac081f6 6862{
f18c054f
DB
6863 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6864 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6865 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6866 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6867 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6868 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6869 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6870 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6871 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6872 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6873 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6874 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6875 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6876 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6877 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6878 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6879 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6880 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6881 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6882 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6883 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6884 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6885 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6886 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6887 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6888 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6889 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6890 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6891 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6892 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6893 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6894 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6895 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6896 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6897 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6898 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6899 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6900 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6901 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6902 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6903 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6904 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6905 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6906 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6907 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6908 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6909 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6910 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6911 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6912 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6913 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6914 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6915 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6916 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6917 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6918 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6919 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6920 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6921 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6922 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6923 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6924 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6925 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6926 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6927 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6928 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6929 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6930 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6931 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6932 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6933 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6934 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6935 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6936 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6937 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6938 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6939 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6940 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6941 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6942 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6943 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6944 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6945 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6946 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6947 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6948 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6949 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6950 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6951 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6952 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6953 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6954 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6955 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6956 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6957 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6958 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6959 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6960 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6961 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6962 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6963 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6964 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6965 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6966 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6967 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6968 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6969 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6970 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6971 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6972 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6973 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6974
58646b77
PB
6975 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6976 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6977 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6978 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6979 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6980 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6981 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6982 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6983 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6984 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6985 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6986 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6987 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6988 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6989 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6990 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6991 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6992 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6993 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6994 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6995 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6996 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6997 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6998 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6999 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7000 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7001 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7002 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7003 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7004 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7005 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7006 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7007 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7008 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7009 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7010 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7011 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7012 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7013 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7014 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7015 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7016 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7017 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7018 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7019 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7020 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7021 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7022 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7023 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7024 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7025 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7026 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7027 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7028 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7029 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7030 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7031 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7032 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7033 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7034 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7051 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7052 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7053 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7054 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7055 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7056 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7057 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7058 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7059 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7060 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7061 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7062 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7063 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7064 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7065 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7066 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7067 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7068 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7069 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7070 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7076 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7102
96038623
DE
7103 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7104 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7105 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7106 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7107 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7108 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7109 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7110 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7111 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7112 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7113
a3170dc6
AH
7114 /* Placeholder; leave as first SPE builtin. */
7115 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7116 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7117 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7118 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7119 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7120 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7121 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7122 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7123 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7124 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7125 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7126 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7127 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7128 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7129 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7130 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7131 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7132 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7133 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7134 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7135 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7136 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7137 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7138 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7139 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7140 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7141 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7142 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7143 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7144 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7145 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7146 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7147 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7148 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7149 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7150 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7151 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7152 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7153 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7154 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7155 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7156 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7157 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7158 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7159 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7160 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7161 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7162 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7163 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7164 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7165 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7166 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7167 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7168 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7169 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7170 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7171 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7172 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7173 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7174 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7175 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7176 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7177 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7178 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7179 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7180 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7181 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7182 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7183 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7184 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7185 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7186 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7187 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7188 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7189 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7190 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7191 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7192 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7193 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7194 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7195 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7196 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7197 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7198 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7199 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7200 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7201 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7202 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7203 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7204 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7205 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7206 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7207 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7208 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7209 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7210 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7211 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7212 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7213 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7214 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7215 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7216 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7217 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7218 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7219 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7220 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7221 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7222 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7223 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7224
7225 /* SPE binary operations expecting a 5-bit unsigned literal. */
7226 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7227
7228 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7229 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7230 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7231 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7232 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7233 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7234 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7235 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7236 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7237 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7238 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7239 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7240 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7241 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7242 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7243 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7244 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7245 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7246 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7247 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7248 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7249 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7250 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7251 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7252 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7253 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7254
7255 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7256 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7257};
7258
7259/* AltiVec predicates. */
7260
7261struct builtin_description_predicates
7262{
7263 const unsigned int mask;
7264 const enum insn_code icode;
7265 const char *opcode;
7266 const char *const name;
7267 const enum rs6000_builtins code;
7268};
7269
7270static const struct builtin_description_predicates bdesc_altivec_preds[] =
7271{
7272 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7273 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7274 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7275 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7277 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7278 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7279 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7280 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7281 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7282 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7283 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7284 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7285
7286 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7287 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7288 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7289};
24408032 7290
a3170dc6
AH
7291/* SPE predicates. */
7292static struct builtin_description bdesc_spe_predicates[] =
7293{
7294 /* Place-holder. Leave as first. */
7295 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7296 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7297 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7298 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7299 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7300 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7301 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7302 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7303 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7304 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7305 /* Place-holder. Leave as last. */
7306 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7307};
7308
7309/* SPE evsel predicates. */
7310static struct builtin_description bdesc_spe_evsel[] =
7311{
7312 /* Place-holder. Leave as first. */
7313 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7314 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7315 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7316 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7317 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7318 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7319 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7320 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7321 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7322 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7323 /* Place-holder. Leave as last. */
7324 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7325};
7326
96038623
DE
7327/* PAIRED predicates. */
7328static const struct builtin_description bdesc_paired_preds[] =
7329{
7330 /* Place-holder. Leave as first. */
7331 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7332 /* Place-holder. Leave as last. */
7333 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7334};
7335
b6d08ca1 7336/* ABS* operations. */
100c4561
AH
7337
7338static const struct builtin_description bdesc_abs[] =
7339{
7340 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7341 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7342 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7343 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7345 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7347};
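/* A hedged illustration of how these entries are used (the user-level call
   and types are assumptions, not taken from this table): with AltiVec
   enabled, a call such as

       vector signed int x, y;
       y = __builtin_altivec_abs_v4si (x);

   matches the ABS_V4SI entry above and is expanded by
   altivec_expand_abs_builtin () later in this file.  */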
7348
617e0e1d
DB
7349/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7350 foo (VECa). */
24408032 7351
a3170dc6 7352static struct builtin_description bdesc_1arg[] =
2212663f 7353{
617e0e1d
DB
7354 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7355 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7356 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7357 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7358 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7359 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7360 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7361 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7362 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7363 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7364 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7365 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7366 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7367 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7368 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7369 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7370 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7371
58646b77
PB
7372 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7373 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7374 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7375 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7376 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7377 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7378 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7391
a3170dc6
AH
7392 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7393 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7394 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7395 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7396 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7397 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7398 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7399 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7400 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7401 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7402 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7403 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7404 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7405 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7406 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7407 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7408 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7409 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7410 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7411 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7412 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7413 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7414 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7415 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7416 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7417 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7418 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7419 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7420 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7421 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7422
7423 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7424 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7425
7426 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7427 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7428 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7429 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7430 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7431};
7432
7433static rtx
5039610b 7434rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7435{
7436 rtx pat;
5039610b 7437 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7438 rtx op0 = expand_normal (arg0);
2212663f
DB
7439 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7440 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7441
0559cc77
DE
7442 if (icode == CODE_FOR_nothing)
7443 /* Builtin not supported on this processor. */
7444 return 0;
7445
20e26713
AH
7446 /* If we got invalid arguments, bail out before generating bad rtl. */
7447 if (arg0 == error_mark_node)
9a171fcd 7448 return const0_rtx;
20e26713 7449
0559cc77
DE
7450 if (icode == CODE_FOR_altivec_vspltisb
7451 || icode == CODE_FOR_altivec_vspltish
7452 || icode == CODE_FOR_altivec_vspltisw
7453 || icode == CODE_FOR_spe_evsplatfi
7454 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7455 {
7456 /* Only allow 5-bit *signed* literals. */
b44140e7 7457 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7458 || INTVAL (op0) > 15
7459 || INTVAL (op0) < -16)
b44140e7
AH
7460 {
7461 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7462 return const0_rtx;
b44140e7 7463 }
b44140e7
AH
7464 }
7465
c62f2db5 7466 if (target == 0
2212663f
DB
7467 || GET_MODE (target) != tmode
7468 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7469 target = gen_reg_rtx (tmode);
7470
7471 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7472 op0 = copy_to_mode_reg (mode0, op0);
7473
7474 pat = GEN_FCN (icode) (target, op0);
7475 if (! pat)
7476 return 0;
7477 emit_insn (pat);
0ac081f6 7478
2212663f
DB
7479 return target;
7480}
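/* A hedged usage sketch of the literal check above (the user-level types are
   assumptions; the range check itself is the code above): the
   splat-immediate builtins require a compile-time constant in [-16, 15], so

       vector signed char ok  = __builtin_altivec_vspltisb (5);
       vector signed char bad = __builtin_altivec_vspltisb (99);

   the first call expands normally while the second is rejected with
   "argument 1 must be a 5-bit signed literal" and yields const0_rtx.  */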
ae4b4a02 7481
100c4561 7482static rtx
5039610b 7483altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7484{
7485 rtx pat, scratch1, scratch2;
5039610b 7486 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7487 rtx op0 = expand_normal (arg0);
100c4561
AH
7488 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7489 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7490
7491 /* If we have invalid arguments, bail out before generating bad rtl. */
7492 if (arg0 == error_mark_node)
9a171fcd 7493 return const0_rtx;
100c4561
AH
7494
7495 if (target == 0
7496 || GET_MODE (target) != tmode
7497 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7498 target = gen_reg_rtx (tmode);
7499
7500 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7501 op0 = copy_to_mode_reg (mode0, op0);
7502
7503 scratch1 = gen_reg_rtx (mode0);
7504 scratch2 = gen_reg_rtx (mode0);
7505
7506 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7507 if (! pat)
7508 return 0;
7509 emit_insn (pat);
7510
7511 return target;
7512}
7513
0ac081f6 7514static rtx
5039610b 7515rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7516{
7517 rtx pat;
5039610b
SL
7518 tree arg0 = CALL_EXPR_ARG (exp, 0);
7519 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7520 rtx op0 = expand_normal (arg0);
7521 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7522 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7523 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7524 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7525
0559cc77
DE
7526 if (icode == CODE_FOR_nothing)
7527 /* Builtin not supported on this processor. */
7528 return 0;
7529
20e26713
AH
7530 /* If we got invalid arguments, bail out before generating bad rtl. */
7531 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7532 return const0_rtx;
20e26713 7533
0559cc77
DE
7534 if (icode == CODE_FOR_altivec_vcfux
7535 || icode == CODE_FOR_altivec_vcfsx
7536 || icode == CODE_FOR_altivec_vctsxs
7537 || icode == CODE_FOR_altivec_vctuxs
7538 || icode == CODE_FOR_altivec_vspltb
7539 || icode == CODE_FOR_altivec_vsplth
7540 || icode == CODE_FOR_altivec_vspltw
7541 || icode == CODE_FOR_spe_evaddiw
7542 || icode == CODE_FOR_spe_evldd
7543 || icode == CODE_FOR_spe_evldh
7544 || icode == CODE_FOR_spe_evldw
7545 || icode == CODE_FOR_spe_evlhhesplat
7546 || icode == CODE_FOR_spe_evlhhossplat
7547 || icode == CODE_FOR_spe_evlhhousplat
7548 || icode == CODE_FOR_spe_evlwhe
7549 || icode == CODE_FOR_spe_evlwhos
7550 || icode == CODE_FOR_spe_evlwhou
7551 || icode == CODE_FOR_spe_evlwhsplat
7552 || icode == CODE_FOR_spe_evlwwsplat
7553 || icode == CODE_FOR_spe_evrlwi
7554 || icode == CODE_FOR_spe_evslwi
7555 || icode == CODE_FOR_spe_evsrwis
f5119d10 7556 || icode == CODE_FOR_spe_evsubifw
0559cc77 7557 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7558 {
7559 /* Only allow 5-bit unsigned literals. */
8bb418a3 7560 STRIP_NOPS (arg1);
b44140e7
AH
7561 if (TREE_CODE (arg1) != INTEGER_CST
7562 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7563 {
7564 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7565 return const0_rtx;
b44140e7 7566 }
b44140e7
AH
7567 }
7568
c62f2db5 7569 if (target == 0
0ac081f6
AH
7570 || GET_MODE (target) != tmode
7571 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7572 target = gen_reg_rtx (tmode);
7573
7574 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7575 op0 = copy_to_mode_reg (mode0, op0);
7576 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7577 op1 = copy_to_mode_reg (mode1, op1);
7578
7579 pat = GEN_FCN (icode) (target, op0, op1);
7580 if (! pat)
7581 return 0;
7582 emit_insn (pat);
7583
7584 return target;
7585}
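/* A hedged sketch of the 5-bit unsigned check above (user-level spelling
   assumed): for the splat and convert forms listed, argument 2 must be a
   literal in 0..31, e.g.

       vector signed int v, s;
       s = __builtin_altivec_vspltw (v, 3);

   A non-constant selector, or one with bits above the low five set, produces
   "argument 2 must be a 5-bit unsigned literal" instead of rtl.  */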
6525c0e7 7586
ae4b4a02 7587static rtx
f676971a 7588altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7589 tree exp, rtx target)
ae4b4a02
AH
7590{
7591 rtx pat, scratch;
5039610b
SL
7592 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7593 tree arg0 = CALL_EXPR_ARG (exp, 1);
7594 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7595 rtx op0 = expand_normal (arg0);
7596 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7597 enum machine_mode tmode = SImode;
7598 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7599 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7600 int cr6_form_int;
7601
7602 if (TREE_CODE (cr6_form) != INTEGER_CST)
7603 {
7604 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7605 return const0_rtx;
ae4b4a02
AH
7606 }
7607 else
7608 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7609
37409796 7610 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7611
7612 /* If we have invalid arguments, bail out before generating bad rtl. */
7613 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7614 return const0_rtx;
ae4b4a02
AH
7615
7616 if (target == 0
7617 || GET_MODE (target) != tmode
7618 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7619 target = gen_reg_rtx (tmode);
7620
7621 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7622 op0 = copy_to_mode_reg (mode0, op0);
7623 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7624 op1 = copy_to_mode_reg (mode1, op1);
7625
7626 scratch = gen_reg_rtx (mode0);
7627
7628 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7629 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7630 if (! pat)
7631 return 0;
7632 emit_insn (pat);
7633
7634 /* The vec_any* and vec_all* predicates use the same opcodes for two
7635 different operations, but the bits in CR6 will be different
7636 depending on what information we want. So we have to play tricks
7637 with CR6 to get the right bits out.
7638
7639 If you think this is disgusting, look at the specs for the
7640 AltiVec predicates. */
7641
c4ad648e
AM
7642 switch (cr6_form_int)
7643 {
7644 case 0:
7645 emit_insn (gen_cr6_test_for_zero (target));
7646 break;
7647 case 1:
7648 emit_insn (gen_cr6_test_for_zero_reverse (target));
7649 break;
7650 case 2:
7651 emit_insn (gen_cr6_test_for_lt (target));
7652 break;
7653 case 3:
7654 emit_insn (gen_cr6_test_for_lt_reverse (target));
7655 break;
7656 default:
7657 error ("argument 1 of __builtin_altivec_predicate is out of range");
7658 break;
7659 }
ae4b4a02
AH
7660
7661 return target;
7662}
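/* A hedged worked example of the CR6 handling above (the numeric codes are
   the ones handled by the switch; the altivec.h wrappers that normally
   supply them are not shown here): given vector unsigned int a, b, both of

       int all_eq = __builtin_altivec_vcmpequw_p (2, a, b);
       int any_eq = __builtin_altivec_vcmpequw_p (1, a, b);

   emit the same vcmpequw. instruction.  cr6_form 2 then reads the CR6 bit
   that is set when every element compared true, while cr6_form 1 reads the
   inverse of the bit that is set when no element compared true, i.e.
   "at least one element equal".  */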
7663
96038623
DE
7664static rtx
7665paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7666{
7667 rtx pat, addr;
7668 tree arg0 = CALL_EXPR_ARG (exp, 0);
7669 tree arg1 = CALL_EXPR_ARG (exp, 1);
7670 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7671 enum machine_mode mode0 = Pmode;
7672 enum machine_mode mode1 = Pmode;
7673 rtx op0 = expand_normal (arg0);
7674 rtx op1 = expand_normal (arg1);
7675
7676 if (icode == CODE_FOR_nothing)
7677 /* Builtin not supported on this processor. */
7678 return 0;
7679
7680 /* If we got invalid arguments, bail out before generating bad rtl. */
7681 if (arg0 == error_mark_node || arg1 == error_mark_node)
7682 return const0_rtx;
7683
7684 if (target == 0
7685 || GET_MODE (target) != tmode
7686 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7687 target = gen_reg_rtx (tmode);
7688
7689 op1 = copy_to_mode_reg (mode1, op1);
7690
7691 if (op0 == const0_rtx)
7692 {
7693 addr = gen_rtx_MEM (tmode, op1);
7694 }
7695 else
7696 {
7697 op0 = copy_to_mode_reg (mode0, op0);
7698 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7699 }
7700
7701 pat = GEN_FCN (icode) (target, addr);
7702
7703 if (! pat)
7704 return 0;
7705 emit_insn (pat);
7706
7707 return target;
7708}
7709
b4a62fa0 7710static rtx
5039610b 7711altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7712{
7713 rtx pat, addr;
5039610b
SL
7714 tree arg0 = CALL_EXPR_ARG (exp, 0);
7715 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7716 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7717 enum machine_mode mode0 = Pmode;
7718 enum machine_mode mode1 = Pmode;
84217346
MD
7719 rtx op0 = expand_normal (arg0);
7720 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7721
7722 if (icode == CODE_FOR_nothing)
7723 /* Builtin not supported on this processor. */
7724 return 0;
7725
7726 /* If we got invalid arguments, bail out before generating bad rtl. */
7727 if (arg0 == error_mark_node || arg1 == error_mark_node)
7728 return const0_rtx;
7729
7730 if (target == 0
7731 || GET_MODE (target) != tmode
7732 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7733 target = gen_reg_rtx (tmode);
7734
f676971a 7735 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7736
7737 if (op0 == const0_rtx)
7738 {
7739 addr = gen_rtx_MEM (tmode, op1);
7740 }
7741 else
7742 {
7743 op0 = copy_to_mode_reg (mode0, op0);
7744 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7745 }
7746
7747 pat = GEN_FCN (icode) (target, addr);
7748
7749 if (! pat)
7750 return 0;
7751 emit_insn (pat);
7752
7753 return target;
7754}
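/* A hedged sketch of the address formation above (vec_ld-style argument
   order is assumed): a call such as

       vector float v = __builtin_altivec_lvx (16, p);

   reaches this expander with op0 = the byte offset and op1 = the pointer.
   A zero offset gives a plain (mem (reg)) address; anything else gives the
   (mem (plus reg reg)) form that matches the lvx reg+reg addressing.  */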
7755
61bea3b0 7756static rtx
5039610b 7757spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7758{
5039610b
SL
7759 tree arg0 = CALL_EXPR_ARG (exp, 0);
7760 tree arg1 = CALL_EXPR_ARG (exp, 1);
7761 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7762 rtx op0 = expand_normal (arg0);
7763 rtx op1 = expand_normal (arg1);
7764 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7765 rtx pat;
7766 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7767 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7768 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7769
7770 /* Invalid arguments. Bail before doing anything stoopid! */
7771 if (arg0 == error_mark_node
7772 || arg1 == error_mark_node
7773 || arg2 == error_mark_node)
7774 return const0_rtx;
7775
7776 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7777 op0 = copy_to_mode_reg (mode2, op0);
7778 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7779 op1 = copy_to_mode_reg (mode0, op1);
7780 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7781 op2 = copy_to_mode_reg (mode1, op2);
7782
7783 pat = GEN_FCN (icode) (op1, op2, op0);
7784 if (pat)
7785 emit_insn (pat);
7786 return NULL_RTX;
7787}
7788
96038623
DE
7789static rtx
7790paired_expand_stv_builtin (enum insn_code icode, tree exp)
7791{
7792 tree arg0 = CALL_EXPR_ARG (exp, 0);
7793 tree arg1 = CALL_EXPR_ARG (exp, 1);
7794 tree arg2 = CALL_EXPR_ARG (exp, 2);
7795 rtx op0 = expand_normal (arg0);
7796 rtx op1 = expand_normal (arg1);
7797 rtx op2 = expand_normal (arg2);
7798 rtx pat, addr;
7799 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7800 enum machine_mode mode1 = Pmode;
7801 enum machine_mode mode2 = Pmode;
7802
7803 /* Invalid arguments. Bail before doing anything stoopid! */
7804 if (arg0 == error_mark_node
7805 || arg1 == error_mark_node
7806 || arg2 == error_mark_node)
7807 return const0_rtx;
7808
7809 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7810 op0 = copy_to_mode_reg (tmode, op0);
7811
7812 op2 = copy_to_mode_reg (mode2, op2);
7813
7814 if (op1 == const0_rtx)
7815 {
7816 addr = gen_rtx_MEM (tmode, op2);
7817 }
7818 else
7819 {
7820 op1 = copy_to_mode_reg (mode1, op1);
7821 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7822 }
7823
7824 pat = GEN_FCN (icode) (addr, op0);
7825 if (pat)
7826 emit_insn (pat);
7827 return NULL_RTX;
7828}
7829
6525c0e7 7830static rtx
5039610b 7831altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7832{
5039610b
SL
7833 tree arg0 = CALL_EXPR_ARG (exp, 0);
7834 tree arg1 = CALL_EXPR_ARG (exp, 1);
7835 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7836 rtx op0 = expand_normal (arg0);
7837 rtx op1 = expand_normal (arg1);
7838 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7839 rtx pat, addr;
7840 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7841 enum machine_mode mode1 = Pmode;
7842 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7843
7844 /* Invalid arguments. Bail before doing anything stoopid! */
7845 if (arg0 == error_mark_node
7846 || arg1 == error_mark_node
7847 || arg2 == error_mark_node)
9a171fcd 7848 return const0_rtx;
6525c0e7 7849
b4a62fa0
SB
7850 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7851 op0 = copy_to_mode_reg (tmode, op0);
7852
f676971a 7853 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7854
7855 if (op1 == const0_rtx)
7856 {
7857 addr = gen_rtx_MEM (tmode, op2);
7858 }
7859 else
7860 {
7861 op1 = copy_to_mode_reg (mode1, op1);
7862 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7863 }
6525c0e7 7864
b4a62fa0 7865 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7866 if (pat)
7867 emit_insn (pat);
7868 return NULL_RTX;
7869}
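/* The store side mirrors the load expanders above; a hedged example
   (vec_st-style argument order assumed):

       __builtin_altivec_stvx (v, 0, p);

   arrives with op0 = the vector value, op1 = the byte offset and op2 = the
   pointer, so the zero offset here stores through (mem (reg)) rather than
   the reg+reg form.  */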
7870
2212663f 7871static rtx
5039610b 7872rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7873{
7874 rtx pat;
5039610b
SL
7875 tree arg0 = CALL_EXPR_ARG (exp, 0);
7876 tree arg1 = CALL_EXPR_ARG (exp, 1);
7877 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7878 rtx op0 = expand_normal (arg0);
7879 rtx op1 = expand_normal (arg1);
7880 rtx op2 = expand_normal (arg2);
2212663f
DB
7881 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7882 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7883 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7884 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7885
774b5662
DE
7886 if (icode == CODE_FOR_nothing)
7887 /* Builtin not supported on this processor. */
7888 return 0;
7889
20e26713
AH
7890 /* If we got invalid arguments, bail out before generating bad rtl. */
7891 if (arg0 == error_mark_node
7892 || arg1 == error_mark_node
7893 || arg2 == error_mark_node)
9a171fcd 7894 return const0_rtx;
20e26713 7895
aba5fb01
NS
7896 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7897 || icode == CODE_FOR_altivec_vsldoi_v4si
7898 || icode == CODE_FOR_altivec_vsldoi_v8hi
7899 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7900 {
7901 /* Only allow 4-bit unsigned literals. */
8bb418a3 7902 STRIP_NOPS (arg2);
b44140e7
AH
7903 if (TREE_CODE (arg2) != INTEGER_CST
7904 || TREE_INT_CST_LOW (arg2) & ~0xf)
7905 {
7906 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7907 return const0_rtx;
b44140e7 7908 }
b44140e7
AH
7909 }
7910
c62f2db5 7911 if (target == 0
2212663f
DB
7912 || GET_MODE (target) != tmode
7913 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7914 target = gen_reg_rtx (tmode);
7915
7916 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7917 op0 = copy_to_mode_reg (mode0, op0);
7918 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7919 op1 = copy_to_mode_reg (mode1, op1);
7920 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7921 op2 = copy_to_mode_reg (mode2, op2);
7922
49e39588
RE
7923 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
7924 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
7925 else
7926 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
7927 if (! pat)
7928 return 0;
7929 emit_insn (pat);
7930
7931 return target;
7932}
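/* A hedged sketch of the 4-bit check above (the vec_sld spelling is the
   usual user-level entry point and is assumed here): a shift such as

       vector signed int r = vec_sld (a, b, 4);

   resolves to one of the vsldoi builtins, whose third operand must be a
   literal in 0..15; anything else triggers
   "argument 3 must be a 4-bit unsigned literal".  */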
92898235 7933
3a9b8c7e 7934/* Expand the lvx builtins. */
0ac081f6 7935static rtx
a2369ed3 7936altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7937{
5039610b 7938 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7939 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7940 tree arg0;
7941 enum machine_mode tmode, mode0;
7c3abc73 7942 rtx pat, op0;
3a9b8c7e 7943 enum insn_code icode;
92898235 7944
0ac081f6
AH
7945 switch (fcode)
7946 {
f18c054f 7947 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7948 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7949 break;
f18c054f 7950 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7951 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7952 break;
7953 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7954 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7955 break;
7956 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7957 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7958 break;
7959 default:
7960 *expandedp = false;
7961 return NULL_RTX;
7962 }
0ac081f6 7963
3a9b8c7e 7964 *expandedp = true;
f18c054f 7965
5039610b 7966 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7967 op0 = expand_normal (arg0);
3a9b8c7e
AH
7968 tmode = insn_data[icode].operand[0].mode;
7969 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7970
3a9b8c7e
AH
7971 if (target == 0
7972 || GET_MODE (target) != tmode
7973 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7974 target = gen_reg_rtx (tmode);
24408032 7975
3a9b8c7e
AH
7976 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7977 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7978
3a9b8c7e
AH
7979 pat = GEN_FCN (icode) (target, op0);
7980 if (! pat)
7981 return 0;
7982 emit_insn (pat);
7983 return target;
7984}
f18c054f 7985
3a9b8c7e
AH
7986/* Expand the stvx builtins. */
7987static rtx
f676971a 7988altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7989 bool *expandedp)
3a9b8c7e 7990{
5039610b 7991 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7992 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7993 tree arg0, arg1;
7994 enum machine_mode mode0, mode1;
7c3abc73 7995 rtx pat, op0, op1;
3a9b8c7e 7996 enum insn_code icode;
f18c054f 7997
3a9b8c7e
AH
7998 switch (fcode)
7999 {
8000 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8001 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8002 break;
8003 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8004 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8005 break;
8006 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8007 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8008 break;
8009 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8010 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8011 break;
8012 default:
8013 *expandedp = false;
8014 return NULL_RTX;
8015 }
24408032 8016
5039610b
SL
8017 arg0 = CALL_EXPR_ARG (exp, 0);
8018 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8019 op0 = expand_normal (arg0);
8020 op1 = expand_normal (arg1);
3a9b8c7e
AH
8021 mode0 = insn_data[icode].operand[0].mode;
8022 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8023
3a9b8c7e
AH
8024 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8025 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8026 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8027 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8028
3a9b8c7e
AH
8029 pat = GEN_FCN (icode) (op0, op1);
8030 if (pat)
8031 emit_insn (pat);
f18c054f 8032
3a9b8c7e
AH
8033 *expandedp = true;
8034 return NULL_RTX;
8035}
f18c054f 8036
3a9b8c7e
AH
8037/* Expand the dst builtins. */
8038static rtx
f676971a 8039altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8040 bool *expandedp)
3a9b8c7e 8041{
5039610b 8042 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8043 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8044 tree arg0, arg1, arg2;
8045 enum machine_mode mode0, mode1, mode2;
7c3abc73 8046 rtx pat, op0, op1, op2;
586de218 8047 const struct builtin_description *d;
a3170dc6 8048 size_t i;
f18c054f 8049
3a9b8c7e 8050 *expandedp = false;
f18c054f 8051
3a9b8c7e 8052 /* Handle DST variants. */
586de218 8053 d = bdesc_dst;
3a9b8c7e
AH
8054 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8055 if (d->code == fcode)
8056 {
5039610b
SL
8057 arg0 = CALL_EXPR_ARG (exp, 0);
8058 arg1 = CALL_EXPR_ARG (exp, 1);
8059 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8060 op0 = expand_normal (arg0);
8061 op1 = expand_normal (arg1);
8062 op2 = expand_normal (arg2);
3a9b8c7e
AH
8063 mode0 = insn_data[d->icode].operand[0].mode;
8064 mode1 = insn_data[d->icode].operand[1].mode;
8065 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8066
3a9b8c7e
AH
8067 /* Invalid arguments, bail out before generating bad rtl. */
8068 if (arg0 == error_mark_node
8069 || arg1 == error_mark_node
8070 || arg2 == error_mark_node)
8071 return const0_rtx;
f18c054f 8072
86e7df90 8073 *expandedp = true;
8bb418a3 8074 STRIP_NOPS (arg2);
3a9b8c7e
AH
8075 if (TREE_CODE (arg2) != INTEGER_CST
8076 || TREE_INT_CST_LOW (arg2) & ~0x3)
8077 {
9e637a26 8078 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8079 return const0_rtx;
8080 }
f18c054f 8081
3a9b8c7e 8082 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8083 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8084 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8085 op1 = copy_to_mode_reg (mode1, op1);
24408032 8086
3a9b8c7e
AH
8087 pat = GEN_FCN (d->icode) (op0, op1, op2);
8088 if (pat != 0)
8089 emit_insn (pat);
f18c054f 8090
3a9b8c7e
AH
8091 return NULL_RTX;
8092 }
f18c054f 8093
3a9b8c7e
AH
8094 return NULL_RTX;
8095}
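/* A hedged example of the 2-bit tag check above (vec_dst-style argument
   order assumed): the data stream touch builtins name one of four streams,

       __builtin_altivec_dst (p, control, 0);

   and a tag outside 0..3 is rejected with the "must be a 2-bit unsigned
   literal" diagnostic emitted in the loop above.  */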
24408032 8096
7a4eca66
DE
8097/* Expand vec_init builtin. */
8098static rtx
5039610b 8099altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8100{
8101 enum machine_mode tmode = TYPE_MODE (type);
8102 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8103 int i, n_elt = GET_MODE_NUNITS (tmode);
8104 rtvec v = rtvec_alloc (n_elt);
8105
8106 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8107 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8108
5039610b 8109 for (i = 0; i < n_elt; ++i)
7a4eca66 8110 {
5039610b 8111 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8112 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8113 }
8114
7a4eca66
DE
8115 if (!target || !register_operand (target, tmode))
8116 target = gen_reg_rtx (tmode);
8117
8118 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8119 return target;
8120}
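/* A hedged illustration: for a V4SI vector built from the four constants
   1, 2, 3 and 4, the loop above expands each call argument, narrows it to
   the inner mode and collects roughly

       (parallel [(const_int 1) (const_int 2) (const_int 3) (const_int 4)])

   which rs6000_expand_vector_init then turns into the actual vector
   construction sequence.  */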
8121
8122/* Return the integer constant in ARG. Constrain it to be in the range
8123 of the subparts of VEC_TYPE; issue an error if not. */
8124
8125static int
8126get_element_number (tree vec_type, tree arg)
8127{
8128 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8129
8130 if (!host_integerp (arg, 1)
8131 || (elt = tree_low_cst (arg, 1), elt > max))
8132 {
8133 error ("selector must be an integer constant in the range 0..%wi", max);
8134 return 0;
8135 }
8136
8137 return elt;
8138}
8139
8140/* Expand vec_set builtin. */
8141static rtx
5039610b 8142altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8143{
8144 enum machine_mode tmode, mode1;
8145 tree arg0, arg1, arg2;
8146 int elt;
8147 rtx op0, op1;
8148
5039610b
SL
8149 arg0 = CALL_EXPR_ARG (exp, 0);
8150 arg1 = CALL_EXPR_ARG (exp, 1);
8151 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8152
8153 tmode = TYPE_MODE (TREE_TYPE (arg0));
8154 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8155 gcc_assert (VECTOR_MODE_P (tmode));
8156
8157 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8158 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8159 elt = get_element_number (TREE_TYPE (arg0), arg2);
8160
8161 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8162 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8163
8164 op0 = force_reg (tmode, op0);
8165 op1 = force_reg (mode1, op1);
8166
8167 rs6000_expand_vector_set (op0, op1, elt);
8168
8169 return op0;
8170}
8171
8172/* Expand vec_ext builtin. */
8173static rtx
5039610b 8174altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8175{
8176 enum machine_mode tmode, mode0;
8177 tree arg0, arg1;
8178 int elt;
8179 rtx op0;
8180
5039610b
SL
8181 arg0 = CALL_EXPR_ARG (exp, 0);
8182 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8183
84217346 8184 op0 = expand_normal (arg0);
7a4eca66
DE
8185 elt = get_element_number (TREE_TYPE (arg0), arg1);
8186
8187 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8188 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8189 gcc_assert (VECTOR_MODE_P (mode0));
8190
8191 op0 = force_reg (mode0, op0);
8192
8193 if (optimize || !target || !register_operand (target, tmode))
8194 target = gen_reg_rtx (tmode);
8195
8196 rs6000_expand_vector_extract (target, op0, elt);
8197
8198 return target;
8199}
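/* A hedged illustration: extracting element 2 of a V4SI value through this
   path ends up as

       rs6000_expand_vector_extract (target, op0, 2);

   after get_element_number has verified that the selector is an integer
   literal no larger than TYPE_VECTOR_SUBPARTS - 1 (3 for V4SI).  */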
8200
3a9b8c7e
AH
8201/* Expand the builtin in EXP and store the result in TARGET. Store
8202 true in *EXPANDEDP if we found a builtin to expand. */
8203static rtx
a2369ed3 8204altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8205{
586de218
KG
8206 const struct builtin_description *d;
8207 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8208 size_t i;
8209 enum insn_code icode;
5039610b 8210 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8211 tree arg0;
8212 rtx op0, pat;
8213 enum machine_mode tmode, mode0;
3a9b8c7e 8214 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8215
58646b77
PB
8216 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8217 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8218 {
8219 *expandedp = true;
ea40ba9c 8220 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8221 return const0_rtx;
8222 }
8223
3a9b8c7e
AH
8224 target = altivec_expand_ld_builtin (exp, target, expandedp);
8225 if (*expandedp)
8226 return target;
0ac081f6 8227
3a9b8c7e
AH
8228 target = altivec_expand_st_builtin (exp, target, expandedp);
8229 if (*expandedp)
8230 return target;
8231
8232 target = altivec_expand_dst_builtin (exp, target, expandedp);
8233 if (*expandedp)
8234 return target;
8235
8236 *expandedp = true;
95385cbb 8237
3a9b8c7e
AH
8238 switch (fcode)
8239 {
6525c0e7 8240 case ALTIVEC_BUILTIN_STVX:
5039610b 8241 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8242 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8243 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8244 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8245 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8246 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8247 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8248 case ALTIVEC_BUILTIN_STVXL:
5039610b 8249 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8250
95385cbb
AH
8251 case ALTIVEC_BUILTIN_MFVSCR:
8252 icode = CODE_FOR_altivec_mfvscr;
8253 tmode = insn_data[icode].operand[0].mode;
8254
8255 if (target == 0
8256 || GET_MODE (target) != tmode
8257 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8258 target = gen_reg_rtx (tmode);
f676971a 8259
95385cbb 8260 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8261 if (! pat)
8262 return 0;
8263 emit_insn (pat);
95385cbb
AH
8264 return target;
8265
8266 case ALTIVEC_BUILTIN_MTVSCR:
8267 icode = CODE_FOR_altivec_mtvscr;
5039610b 8268 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8269 op0 = expand_normal (arg0);
95385cbb
AH
8270 mode0 = insn_data[icode].operand[0].mode;
8271
8272 /* If we got invalid arguments, bail out before generating bad rtl. */
8273 if (arg0 == error_mark_node)
9a171fcd 8274 return const0_rtx;
95385cbb
AH
8275
8276 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8277 op0 = copy_to_mode_reg (mode0, op0);
8278
8279 pat = GEN_FCN (icode) (op0);
8280 if (pat)
8281 emit_insn (pat);
8282 return NULL_RTX;
3a9b8c7e 8283
95385cbb
AH
8284 case ALTIVEC_BUILTIN_DSSALL:
8285 emit_insn (gen_altivec_dssall ());
8286 return NULL_RTX;
8287
8288 case ALTIVEC_BUILTIN_DSS:
8289 icode = CODE_FOR_altivec_dss;
5039610b 8290 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8291 STRIP_NOPS (arg0);
84217346 8292 op0 = expand_normal (arg0);
95385cbb
AH
8293 mode0 = insn_data[icode].operand[0].mode;
8294
8295 /* If we got invalid arguments, bail out before generating bad rtl. */
8296 if (arg0 == error_mark_node)
9a171fcd 8297 return const0_rtx;
95385cbb 8298
b44140e7
AH
8299 if (TREE_CODE (arg0) != INTEGER_CST
8300 || TREE_INT_CST_LOW (arg0) & ~0x3)
8301 {
8302 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8303 return const0_rtx;
b44140e7
AH
8304 }
8305
95385cbb
AH
8306 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8307 op0 = copy_to_mode_reg (mode0, op0);
8308
8309 emit_insn (gen_altivec_dss (op0));
0ac081f6 8310 return NULL_RTX;
7a4eca66
DE
8311
8312 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8313 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8314 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8315 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8316 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8317
8318 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8319 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8320 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8321 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8322 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8323
8324 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8325 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8326 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8327 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8328 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8329
8330 default:
8331 break;
8332 /* Fall through. */
0ac081f6 8333 }
24408032 8334
100c4561 8335 /* Expand abs* operations. */
586de218 8336 d = bdesc_abs;
ca7558fc 8337 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8338 if (d->code == fcode)
5039610b 8339 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8340
ae4b4a02 8341 /* Expand the AltiVec predicates. */
586de218 8342 dp = bdesc_altivec_preds;
ca7558fc 8343 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8344 if (dp->code == fcode)
c4ad648e 8345 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8346 exp, target);
ae4b4a02 8347
6525c0e7
AH
8348 /* LV* are funky. We initialized them differently. */
8349 switch (fcode)
8350 {
8351 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8352 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8353 exp, target);
6525c0e7 8354 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8355 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8356 exp, target);
6525c0e7 8357 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8358 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8359 exp, target);
6525c0e7 8360 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8361 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8362 exp, target);
6525c0e7 8363 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8364 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8365 exp, target);
6525c0e7 8366 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8367 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8368 exp, target);
6525c0e7 8369 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8370 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8371 exp, target);
6525c0e7
AH
8372 default:
8373 break;
8374 /* Fall through. */
8375 }
95385cbb 8376
92898235 8377 *expandedp = false;
0ac081f6
AH
8378 return NULL_RTX;
8379}
8380
96038623
DE
8381/* Expand the builtin in EXP and store the result in TARGET. Store
8382 true in *EXPANDEDP if we found a builtin to expand. */
8383static rtx
8384paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8385{
8386 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8387 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8388 const struct builtin_description *d;
96038623
DE
8389 size_t i;
8390
8391 *expandedp = true;
8392
8393 switch (fcode)
8394 {
8395 case PAIRED_BUILTIN_STX:
8396 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8397 case PAIRED_BUILTIN_LX:
8398 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8399 default:
8400 break;
8401 /* Fall through. */
8402 }
8403
8404 /* Expand the paired predicates. */
23a651fc 8405 d = bdesc_paired_preds;
96038623
DE
8406 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8407 if (d->code == fcode)
8408 return paired_expand_predicate_builtin (d->icode, exp, target);
8409
8410 *expandedp = false;
8411 return NULL_RTX;
8412}
8413
a3170dc6
AH
8414/* Binops that need to be initialized manually, but can be expanded
8415 automagically by rs6000_expand_binop_builtin. */
8416static struct builtin_description bdesc_2arg_spe[] =
8417{
8418 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8419 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8420 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8421 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8422 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8423 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8424 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8425 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8426 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8427 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8428 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8429 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8430 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8431 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8432 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8433 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8434 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8435 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8436 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8437 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8438 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8439 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8440};
8441
8442/* Expand the builtin in EXP and store the result in TARGET. Store
8443 true in *EXPANDEDP if we found a builtin to expand.
8444
8445 This expands the SPE builtins that are not simple unary and binary
8446 operations. */
8447static rtx
a2369ed3 8448spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8449{
5039610b 8450 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8451 tree arg1, arg0;
8452 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8453 enum insn_code icode;
8454 enum machine_mode tmode, mode0;
8455 rtx pat, op0;
8456 struct builtin_description *d;
8457 size_t i;
8458
8459 *expandedp = true;
8460
8461 /* Syntax check for a 5-bit unsigned immediate. */
8462 switch (fcode)
8463 {
8464 case SPE_BUILTIN_EVSTDD:
8465 case SPE_BUILTIN_EVSTDH:
8466 case SPE_BUILTIN_EVSTDW:
8467 case SPE_BUILTIN_EVSTWHE:
8468 case SPE_BUILTIN_EVSTWHO:
8469 case SPE_BUILTIN_EVSTWWE:
8470 case SPE_BUILTIN_EVSTWWO:
5039610b 8471 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8472 if (TREE_CODE (arg1) != INTEGER_CST
8473 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8474 {
8475 error ("argument 2 must be a 5-bit unsigned literal");
8476 return const0_rtx;
8477 }
8478 break;
8479 default:
8480 break;
8481 }
8482
00332c9f
AH
8483 /* The evsplat*i instructions are not quite generic. */
8484 switch (fcode)
8485 {
8486 case SPE_BUILTIN_EVSPLATFI:
8487 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8488 exp, target);
00332c9f
AH
8489 case SPE_BUILTIN_EVSPLATI:
8490 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8491 exp, target);
00332c9f
AH
8492 default:
8493 break;
8494 }
8495
a3170dc6
AH
8496 d = (struct builtin_description *) bdesc_2arg_spe;
8497 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8498 if (d->code == fcode)
5039610b 8499 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8500
8501 d = (struct builtin_description *) bdesc_spe_predicates;
8502 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8503 if (d->code == fcode)
5039610b 8504 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8505
8506 d = (struct builtin_description *) bdesc_spe_evsel;
8507 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8508 if (d->code == fcode)
5039610b 8509 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8510
8511 switch (fcode)
8512 {
8513 case SPE_BUILTIN_EVSTDDX:
5039610b 8514 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8515 case SPE_BUILTIN_EVSTDHX:
5039610b 8516 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8517 case SPE_BUILTIN_EVSTDWX:
5039610b 8518 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8519 case SPE_BUILTIN_EVSTWHEX:
5039610b 8520 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8521 case SPE_BUILTIN_EVSTWHOX:
5039610b 8522 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8523 case SPE_BUILTIN_EVSTWWEX:
5039610b 8524 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8525 case SPE_BUILTIN_EVSTWWOX:
5039610b 8526 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8527 case SPE_BUILTIN_EVSTDD:
5039610b 8528 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8529 case SPE_BUILTIN_EVSTDH:
5039610b 8530 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8531 case SPE_BUILTIN_EVSTDW:
5039610b 8532 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8533 case SPE_BUILTIN_EVSTWHE:
5039610b 8534 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8535 case SPE_BUILTIN_EVSTWHO:
5039610b 8536 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8537 case SPE_BUILTIN_EVSTWWE:
5039610b 8538 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8539 case SPE_BUILTIN_EVSTWWO:
5039610b 8540 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8541 case SPE_BUILTIN_MFSPEFSCR:
8542 icode = CODE_FOR_spe_mfspefscr;
8543 tmode = insn_data[icode].operand[0].mode;
8544
8545 if (target == 0
8546 || GET_MODE (target) != tmode
8547 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8548 target = gen_reg_rtx (tmode);
f676971a 8549
a3170dc6
AH
8550 pat = GEN_FCN (icode) (target);
8551 if (! pat)
8552 return 0;
8553 emit_insn (pat);
8554 return target;
8555 case SPE_BUILTIN_MTSPEFSCR:
8556 icode = CODE_FOR_spe_mtspefscr;
5039610b 8557 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8558 op0 = expand_normal (arg0);
a3170dc6
AH
8559 mode0 = insn_data[icode].operand[0].mode;
8560
8561 if (arg0 == error_mark_node)
8562 return const0_rtx;
8563
8564 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8565 op0 = copy_to_mode_reg (mode0, op0);
8566
8567 pat = GEN_FCN (icode) (op0);
8568 if (pat)
8569 emit_insn (pat);
8570 return NULL_RTX;
8571 default:
8572 break;
8573 }
8574
8575 *expandedp = false;
8576 return NULL_RTX;
8577}
8578
96038623
DE
8579static rtx
8580paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8581{
8582 rtx pat, scratch, tmp;
8583 tree form = CALL_EXPR_ARG (exp, 0);
8584 tree arg0 = CALL_EXPR_ARG (exp, 1);
8585 tree arg1 = CALL_EXPR_ARG (exp, 2);
8586 rtx op0 = expand_normal (arg0);
8587 rtx op1 = expand_normal (arg1);
8588 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8589 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8590 int form_int;
8591 enum rtx_code code;
8592
8593 if (TREE_CODE (form) != INTEGER_CST)
8594 {
8595 error ("argument 1 of __builtin_paired_predicate must be a constant");
8596 return const0_rtx;
8597 }
8598 else
8599 form_int = TREE_INT_CST_LOW (form);
8600
8601 gcc_assert (mode0 == mode1);
8602
8603 if (arg0 == error_mark_node || arg1 == error_mark_node)
8604 return const0_rtx;
8605
8606 if (target == 0
8607 || GET_MODE (target) != SImode
8608 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8609 target = gen_reg_rtx (SImode);
8610 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8611 op0 = copy_to_mode_reg (mode0, op0);
8612 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8613 op1 = copy_to_mode_reg (mode1, op1);
8614
8615 scratch = gen_reg_rtx (CCFPmode);
8616
8617 pat = GEN_FCN (icode) (scratch, op0, op1);
8618 if (!pat)
8619 return const0_rtx;
8620
8621 emit_insn (pat);
8622
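 /* A descriptive note: the form constant selects which bit of the CCFP
    compare result is tested -- 0 = LT, 1 = GT, 2 = EQ, 3 = UN (the
    unordered bit, read through the CR-OV move pattern) -- mirroring the
    cases handled below.  */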
8623 switch (form_int)
8624 {
8625 /* LT bit. */
8626 case 0:
8627 code = LT;
8628 break;
8629 /* GT bit. */
8630 case 1:
8631 code = GT;
8632 break;
8633 /* EQ bit. */
8634 case 2:
8635 code = EQ;
8636 break;
8637 /* UN bit. */
8638 case 3:
8639 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8640 return target;
8641 default:
8642 error ("argument 1 of __builtin_paired_predicate is out of range");
8643 return const0_rtx;
8644 }
8645
8646 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8647 emit_move_insn (target, tmp);
8648 return target;
8649}
8650
a3170dc6 8651static rtx
5039610b 8652spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8653{
8654 rtx pat, scratch, tmp;
5039610b
SL
8655 tree form = CALL_EXPR_ARG (exp, 0);
8656 tree arg0 = CALL_EXPR_ARG (exp, 1);
8657 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8658 rtx op0 = expand_normal (arg0);
8659 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8660 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8661 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8662 int form_int;
8663 enum rtx_code code;
8664
8665 if (TREE_CODE (form) != INTEGER_CST)
8666 {
8667 error ("argument 1 of __builtin_spe_predicate must be a constant");
8668 return const0_rtx;
8669 }
8670 else
8671 form_int = TREE_INT_CST_LOW (form);
8672
37409796 8673 gcc_assert (mode0 == mode1);
a3170dc6
AH
8674
8675 if (arg0 == error_mark_node || arg1 == error_mark_node)
8676 return const0_rtx;
8677
8678 if (target == 0
8679 || GET_MODE (target) != SImode
8680 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8681 target = gen_reg_rtx (SImode);
8682
8683 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8684 op0 = copy_to_mode_reg (mode0, op0);
8685 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8686 op1 = copy_to_mode_reg (mode1, op1);
8687
8688 scratch = gen_reg_rtx (CCmode);
8689
8690 pat = GEN_FCN (icode) (scratch, op0, op1);
8691 if (! pat)
8692 return const0_rtx;
8693 emit_insn (pat);
8694
8695 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8696 _lower_. We use one compare, but look in different bits of the
8697 CR for each variant.
8698
8699 There are 2 elements in each SPE simd type (upper/lower). The CR
8700 bits are set as follows:
8701
8702 BIT0 | BIT 1 | BIT 2 | BIT 3
8703 U | L | (U | L) | (U & L)
8704
8705 So, for an "all" relationship, BIT 3 would be set.
8706 For an "any" relationship, BIT 2 would be set. Etc.
8707
8708 Following traditional nomenclature, these bits map to:
8709
8710 BIT0 | BIT 1 | BIT 2 | BIT 3
8711 LT | GT | EQ | OV
8712
8713 Later, we will generate rtl to look in the OV, EQ, LT or GT bit (for the "all", "any", "upper" and "lower" variants, respectively).
8714 */
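 /* A usage sketch of the predicate builtins handled here (the builtin
    name is illustrative; the real names come from bdesc_spe_predicates,
    and their type is int (int, v2si, v2si) as registered in
    spe_init_builtins).  The first argument is the form constant decoded
    below:

      int all_gt   = __builtin_spe_evcmpgts (0, a, b);   -- all variant, OV bit
      int any_gt   = __builtin_spe_evcmpgts (1, a, b);   -- any variant, EQ bit
      int upper_gt = __builtin_spe_evcmpgts (2, a, b);   -- upper element, LT bit
      int lower_gt = __builtin_spe_evcmpgts (3, a, b);   -- lower element, GT bit  */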
8715
8716 switch (form_int)
8717 {
8718 /* All variant. OV bit. */
8719 case 0:
8720 /* We need to get to the OV bit, which is the ORDERED bit. We
8721 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8722 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8723 So let's just use another pattern. */
8724 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8725 return target;
8726 /* Any variant. EQ bit. */
8727 case 1:
8728 code = EQ;
8729 break;
8730 /* Upper variant. LT bit. */
8731 case 2:
8732 code = LT;
8733 break;
8734 /* Lower variant. GT bit. */
8735 case 3:
8736 code = GT;
8737 break;
8738 default:
8739 error ("argument 1 of __builtin_spe_predicate is out of range");
8740 return const0_rtx;
8741 }
8742
8743 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8744 emit_move_insn (target, tmp);
8745
8746 return target;
8747}
8748
8749/* The evsel builtins look like this:
8750
8751 e = __builtin_spe_evsel_OP (a, b, c, d);
8752
8753 and work like this:
8754
8755 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8756 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8757*/
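/* For instance (a sketch only; __builtin_spe_evsel_gts is an assumed
   spelling of the greater-than-signed entry in bdesc_spe_evsel, and
   __ev64_opaque__ is the opaque type registered in spe_init_builtins):

     __ev64_opaque__ a, b, c, d, e;
     e = __builtin_spe_evsel_gts (a, b, c, d);

   picks c[upper] when a[upper] > b[upper] and d[upper] otherwise, and
   makes the same choice independently for the lower element.  */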
8758
8759static rtx
5039610b 8760spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8761{
8762 rtx pat, scratch;
5039610b
SL
8763 tree arg0 = CALL_EXPR_ARG (exp, 0);
8764 tree arg1 = CALL_EXPR_ARG (exp, 1);
8765 tree arg2 = CALL_EXPR_ARG (exp, 2);
8766 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8767 rtx op0 = expand_normal (arg0);
8768 rtx op1 = expand_normal (arg1);
8769 rtx op2 = expand_normal (arg2);
8770 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8771 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8772 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8773
37409796 8774 gcc_assert (mode0 == mode1);
a3170dc6
AH
8775
8776 if (arg0 == error_mark_node || arg1 == error_mark_node
8777 || arg2 == error_mark_node || arg3 == error_mark_node)
8778 return const0_rtx;
8779
8780 if (target == 0
8781 || GET_MODE (target) != mode0
8782 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8783 target = gen_reg_rtx (mode0);
8784
8785 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8786 op0 = copy_to_mode_reg (mode0, op0);
8787 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8788 op1 = copy_to_mode_reg (mode0, op1);
8789 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8790 op2 = copy_to_mode_reg (mode0, op2);
8791 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8792 op3 = copy_to_mode_reg (mode0, op3);
8793
8794 /* Generate the compare. */
8795 scratch = gen_reg_rtx (CCmode);
8796 pat = GEN_FCN (icode) (scratch, op0, op1);
8797 if (! pat)
8798 return const0_rtx;
8799 emit_insn (pat);
8800
8801 if (mode0 == V2SImode)
8802 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8803 else
8804 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8805
8806 return target;
8807}
8808
0ac081f6
AH
8809/* Expand an expression EXP that calls a built-in function,
8810 with result going to TARGET if that's convenient
8811 (and in mode MODE if that's convenient).
8812 SUBTARGET may be used as the target for computing one of EXP's operands.
8813 IGNORE is nonzero if the value is to be ignored. */
8814
8815static rtx
a2369ed3 8816rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8817 enum machine_mode mode ATTRIBUTE_UNUSED,
8818 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8819{
5039610b 8820 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8821 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8822 const struct builtin_description *d;
92898235
AH
8823 size_t i;
8824 rtx ret;
8825 bool success;
f676971a 8826
9c78b944
DE
8827 if (fcode == RS6000_BUILTIN_RECIP)
8828 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8829
8830 if (fcode == RS6000_BUILTIN_RECIPF)
8831 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8832
8833 if (fcode == RS6000_BUILTIN_RSQRTF)
8834 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8835
7ccf35ed
DN
8836 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8837 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8838 {
8839 int icode = (int) CODE_FOR_altivec_lvsr;
8840 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8841 enum machine_mode mode = insn_data[icode].operand[1].mode;
8842 tree arg;
8843 rtx op, addr, pat;
8844
37409796 8845 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8846
5039610b 8847 arg = CALL_EXPR_ARG (exp, 0);
37409796 8848 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8849 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8850 addr = memory_address (mode, op);
8851 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8852 op = addr;
8853 else
8854 {
8855 /* For the load case we need to negate the address. */
8856 op = gen_reg_rtx (GET_MODE (addr));
8857 emit_insn (gen_rtx_SET (VOIDmode, op,
8858 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8859 }
7ccf35ed
DN
8860 op = gen_rtx_MEM (mode, op);
8861
8862 if (target == 0
8863 || GET_MODE (target) != tmode
8864 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8865 target = gen_reg_rtx (tmode);
8866
8867 /*pat = gen_altivec_lvsr (target, op);*/
8868 pat = GEN_FCN (icode) (target, op);
8869 if (!pat)
8870 return 0;
8871 emit_insn (pat);
8872
8873 return target;
8874 }
5039610b
SL
8875
8876 /* FIXME: There's got to be a nicer way to handle this case than
8877 constructing a new CALL_EXPR. */
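/* Sketched at the source level (using the conventional
   __builtin_altivec_vcfux spelling for ALTIVEC_BUILTIN_VCFUX): a
   one-argument call such as __builtin_altivec_vcfux (v) is rewritten
   below into the two-argument form __builtin_altivec_vcfux (v, 0),
   i.e. the omitted scale-factor argument defaults to zero.  */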
f57d17f1
TM
8878 if (fcode == ALTIVEC_BUILTIN_VCFUX
8879 || fcode == ALTIVEC_BUILTIN_VCFSX)
8880 {
5039610b
SL
8881 if (call_expr_nargs (exp) == 1)
8882 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8883 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8884 }
7ccf35ed 8885
0ac081f6 8886 if (TARGET_ALTIVEC)
92898235
AH
8887 {
8888 ret = altivec_expand_builtin (exp, target, &success);
8889
a3170dc6
AH
8890 if (success)
8891 return ret;
8892 }
8893 if (TARGET_SPE)
8894 {
8895 ret = spe_expand_builtin (exp, target, &success);
8896
92898235
AH
8897 if (success)
8898 return ret;
8899 }
96038623
DE
8900 if (TARGET_PAIRED_FLOAT)
8901 {
8902 ret = paired_expand_builtin (exp, target, &success);
8903
8904 if (success)
8905 return ret;
8906 }
92898235 8907
96038623 8908 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 8909
37409796
NS
8910 /* Handle simple unary operations. */
8911 d = (struct builtin_description *) bdesc_1arg;
8912 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8913 if (d->code == fcode)
5039610b 8914 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8915
37409796
NS
8916 /* Handle simple binary operations. */
8917 d = (struct builtin_description *) bdesc_2arg;
8918 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8919 if (d->code == fcode)
5039610b 8920 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8921
37409796 8922 /* Handle simple ternary operations. */
586de218 8923 d = bdesc_3arg;
37409796
NS
8924 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8925 if (d->code == fcode)
5039610b 8926 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8927
37409796 8928 gcc_unreachable ();
0ac081f6
AH
8929}
8930
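/* Return a vector type of NUNITS elements whose element type is a fresh
   copy of NODE that is its own main variant, so the result stays
   distinct from the ordinary vector types built directly on NODE.  */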
7c62e993
PB
8931static tree
8932build_opaque_vector_type (tree node, int nunits)
8933{
8934 node = copy_node (node);
8935 TYPE_MAIN_VARIANT (node) = node;
8936 return build_vector_type (node, nunits);
8937}
8938
0ac081f6 8939static void
863d938c 8940rs6000_init_builtins (void)
0ac081f6 8941{
4a5eab38
PB
8942 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8943 V2SF_type_node = build_vector_type (float_type_node, 2);
8944 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8945 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8946 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8947 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8948 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8949
8950 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8951 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8952 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8953
7c62e993
PB
8954 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8955 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8956 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8957 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8958
8bb418a3
ZL
8959 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8960 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8961 'vector unsigned short'. */
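 /* For example (a sketch of the user-visible effect): given the distinct
    type copies built below, `__vector __bool char' and
    `__vector unsigned char' are different types, so C++ overloads on the
    bool/pixel vector types do not collide with the plain unsigned
    vector types.  */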
8962
8dd16ecc
NS
8963 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8964 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8965 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8966 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8967
58646b77
PB
8968 long_integer_type_internal_node = long_integer_type_node;
8969 long_unsigned_type_internal_node = long_unsigned_type_node;
8970 intQI_type_internal_node = intQI_type_node;
8971 uintQI_type_internal_node = unsigned_intQI_type_node;
8972 intHI_type_internal_node = intHI_type_node;
8973 uintHI_type_internal_node = unsigned_intHI_type_node;
8974 intSI_type_internal_node = intSI_type_node;
8975 uintSI_type_internal_node = unsigned_intSI_type_node;
8976 float_type_internal_node = float_type_node;
8977 void_type_internal_node = void_type_node;
8978
8bb418a3
ZL
8979 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8980 get_identifier ("__bool char"),
8981 bool_char_type_node));
8982 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8983 get_identifier ("__bool short"),
8984 bool_short_type_node));
8985 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8986 get_identifier ("__bool int"),
8987 bool_int_type_node));
8988 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8989 get_identifier ("__pixel"),
8990 pixel_type_node));
8991
4a5eab38
PB
8992 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8993 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8994 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8995 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8996
8997 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8998 get_identifier ("__vector unsigned char"),
8999 unsigned_V16QI_type_node));
9000 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9001 get_identifier ("__vector signed char"),
9002 V16QI_type_node));
9003 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9004 get_identifier ("__vector __bool char"),
9005 bool_V16QI_type_node));
9006
9007 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9008 get_identifier ("__vector unsigned short"),
9009 unsigned_V8HI_type_node));
9010 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9011 get_identifier ("__vector signed short"),
9012 V8HI_type_node));
9013 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9014 get_identifier ("__vector __bool short"),
9015 bool_V8HI_type_node));
9016
9017 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9018 get_identifier ("__vector unsigned int"),
9019 unsigned_V4SI_type_node));
9020 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9021 get_identifier ("__vector signed int"),
9022 V4SI_type_node));
9023 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9024 get_identifier ("__vector __bool int"),
9025 bool_V4SI_type_node));
9026
9027 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9028 get_identifier ("__vector float"),
9029 V4SF_type_node));
9030 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9031 get_identifier ("__vector __pixel"),
9032 pixel_V8HI_type_node));
9033
96038623
DE
9034 if (TARGET_PAIRED_FLOAT)
9035 paired_init_builtins ();
a3170dc6 9036 if (TARGET_SPE)
3fdaa45a 9037 spe_init_builtins ();
0ac081f6
AH
9038 if (TARGET_ALTIVEC)
9039 altivec_init_builtins ();
96038623 9040 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9041 rs6000_common_init_builtins ();
9c78b944
DE
9042 if (TARGET_PPC_GFXOPT)
9043 {
9044 tree ftype = build_function_type_list (float_type_node,
9045 float_type_node,
9046 float_type_node,
9047 NULL_TREE);
9048 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9049 RS6000_BUILTIN_RECIPF);
9050
9051 ftype = build_function_type_list (float_type_node,
9052 float_type_node,
9053 NULL_TREE);
9054 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9055 RS6000_BUILTIN_RSQRTF);
9056 }
9057 if (TARGET_POPCNTB)
9058 {
9059 tree ftype = build_function_type_list (double_type_node,
9060 double_type_node,
9061 double_type_node,
9062 NULL_TREE);
9063 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9064 RS6000_BUILTIN_RECIP);
9065
9066 }
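  /* A usage sketch for the builtins defined above (names as passed to
     def_builtin; the expansions go through the patterns selected at the
     top of rs6000_expand_builtin):

       float  qf = __builtin_recipdivf (a, b);   -- via CODE_FOR_recipsf3
       float  rf = __builtin_rsqrtf (a);         -- via CODE_FOR_rsqrtsf2
       double qd = __builtin_recipdiv (x, y);    -- via CODE_FOR_recipdf3  */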
69ca3549
DE
9067
9068#if TARGET_XCOFF
9069 /* AIX libm provides clog as __clog. */
9070 if (built_in_decls [BUILT_IN_CLOG])
9071 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9072#endif
0ac081f6
AH
9073}
9074
a3170dc6
AH
9075/* Search through a set of builtins and enable the mask bits.
9076 DESC is an array of builtins.
b6d08ca1 9077 SIZE is the total number of builtins.
a3170dc6
AH
9078 START is the builtin enum at which to start.
9079 END is the builtin enum at which to end. */
0ac081f6 9080static void
a2369ed3 9081enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9082 enum rs6000_builtins start,
a2369ed3 9083 enum rs6000_builtins end)
a3170dc6
AH
9084{
9085 int i;
9086
9087 for (i = 0; i < size; ++i)
9088 if (desc[i].code == start)
9089 break;
9090
9091 if (i == size)
9092 return;
9093
9094 for (; i < size; ++i)
9095 {
9096 /* Flip all the bits on. */
9097 desc[i].mask = target_flags;
9098 if (desc[i].code == end)
9099 break;
9100 }
9101}
9102
9103static void
863d938c 9104spe_init_builtins (void)
0ac081f6 9105{
a3170dc6
AH
9106 tree endlink = void_list_node;
9107 tree puint_type_node = build_pointer_type (unsigned_type_node);
9108 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9109 struct builtin_description *d;
0ac081f6
AH
9110 size_t i;
9111
a3170dc6
AH
9112 tree v2si_ftype_4_v2si
9113 = build_function_type
3fdaa45a
AH
9114 (opaque_V2SI_type_node,
9115 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9116 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9117 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9118 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9119 endlink)))));
9120
9121 tree v2sf_ftype_4_v2sf
9122 = build_function_type
3fdaa45a
AH
9123 (opaque_V2SF_type_node,
9124 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9125 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9126 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9127 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9128 endlink)))));
9129
9130 tree int_ftype_int_v2si_v2si
9131 = build_function_type
9132 (integer_type_node,
9133 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9134 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9135 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9136 endlink))));
9137
9138 tree int_ftype_int_v2sf_v2sf
9139 = build_function_type
9140 (integer_type_node,
9141 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9142 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9143 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9144 endlink))));
9145
9146 tree void_ftype_v2si_puint_int
9147 = build_function_type (void_type_node,
3fdaa45a 9148 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9149 tree_cons (NULL_TREE, puint_type_node,
9150 tree_cons (NULL_TREE,
9151 integer_type_node,
9152 endlink))));
9153
9154 tree void_ftype_v2si_puint_char
9155 = build_function_type (void_type_node,
3fdaa45a 9156 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9157 tree_cons (NULL_TREE, puint_type_node,
9158 tree_cons (NULL_TREE,
9159 char_type_node,
9160 endlink))));
9161
9162 tree void_ftype_v2si_pv2si_int
9163 = build_function_type (void_type_node,
3fdaa45a 9164 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9165 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9166 tree_cons (NULL_TREE,
9167 integer_type_node,
9168 endlink))));
9169
9170 tree void_ftype_v2si_pv2si_char
9171 = build_function_type (void_type_node,
3fdaa45a 9172 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9173 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9174 tree_cons (NULL_TREE,
9175 char_type_node,
9176 endlink))));
9177
9178 tree void_ftype_int
9179 = build_function_type (void_type_node,
9180 tree_cons (NULL_TREE, integer_type_node, endlink));
9181
9182 tree int_ftype_void
36e8d515 9183 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9184
9185 tree v2si_ftype_pv2si_int
3fdaa45a 9186 = build_function_type (opaque_V2SI_type_node,
6035d635 9187 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9188 tree_cons (NULL_TREE, integer_type_node,
9189 endlink)));
9190
9191 tree v2si_ftype_puint_int
3fdaa45a 9192 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9193 tree_cons (NULL_TREE, puint_type_node,
9194 tree_cons (NULL_TREE, integer_type_node,
9195 endlink)));
9196
9197 tree v2si_ftype_pushort_int
3fdaa45a 9198 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9199 tree_cons (NULL_TREE, pushort_type_node,
9200 tree_cons (NULL_TREE, integer_type_node,
9201 endlink)));
9202
00332c9f
AH
9203 tree v2si_ftype_signed_char
9204 = build_function_type (opaque_V2SI_type_node,
9205 tree_cons (NULL_TREE, signed_char_type_node,
9206 endlink));
9207
a3170dc6
AH
9208 /* The initialization of the simple binary and unary builtins is
9209 done in rs6000_common_init_builtins, but we have to enable the
9210 mask bits here manually because we have run out of `target_flags'
9211 bits. We really need to redesign this mask business. */
9212
9213 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9214 ARRAY_SIZE (bdesc_2arg),
9215 SPE_BUILTIN_EVADDW,
9216 SPE_BUILTIN_EVXOR);
9217 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9218 ARRAY_SIZE (bdesc_1arg),
9219 SPE_BUILTIN_EVABS,
9220 SPE_BUILTIN_EVSUBFUSIAAW);
9221 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9222 ARRAY_SIZE (bdesc_spe_predicates),
9223 SPE_BUILTIN_EVCMPEQ,
9224 SPE_BUILTIN_EVFSTSTLT);
9225 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9226 ARRAY_SIZE (bdesc_spe_evsel),
9227 SPE_BUILTIN_EVSEL_CMPGTS,
9228 SPE_BUILTIN_EVSEL_FSTSTEQ);
9229
36252949
AH
9230 (*lang_hooks.decls.pushdecl)
9231 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9232 opaque_V2SI_type_node));
9233
a3170dc6 9234 /* Initialize irregular SPE builtins. */
f676971a 9235
a3170dc6
AH
9236 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9237 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9238 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9239 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9240 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9241 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9242 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9243 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9244 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9245 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9246 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9247 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9248 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9249 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9250 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9251 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9252 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9253 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9254
9255 /* Loads. */
9256 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9257 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9258 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9259 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9260 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9261 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9262 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9263 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9264 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9265 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9266 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9267 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9268 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9269 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9270 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9271 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9272 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9273 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9274 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9275 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9276 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9277 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9278
9279 /* Predicates. */
9280 d = (struct builtin_description *) bdesc_spe_predicates;
9281 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9282 {
9283 tree type;
9284
9285 switch (insn_data[d->icode].operand[1].mode)
9286 {
9287 case V2SImode:
9288 type = int_ftype_int_v2si_v2si;
9289 break;
9290 case V2SFmode:
9291 type = int_ftype_int_v2sf_v2sf;
9292 break;
9293 default:
37409796 9294 gcc_unreachable ();
a3170dc6
AH
9295 }
9296
9297 def_builtin (d->mask, d->name, type, d->code);
9298 }
9299
9300 /* Evsel predicates. */
9301 d = (struct builtin_description *) bdesc_spe_evsel;
9302 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9303 {
9304 tree type;
9305
9306 switch (insn_data[d->icode].operand[1].mode)
9307 {
9308 case V2SImode:
9309 type = v2si_ftype_4_v2si;
9310 break;
9311 case V2SFmode:
9312 type = v2sf_ftype_4_v2sf;
9313 break;
9314 default:
37409796 9315 gcc_unreachable ();
a3170dc6
AH
9316 }
9317
9318 def_builtin (d->mask, d->name, type, d->code);
9319 }
9320}
9321
96038623
DE
9322static void
9323paired_init_builtins (void)
9324{
23a651fc 9325 const struct builtin_description *d;
96038623
DE
9326 size_t i;
9327 tree endlink = void_list_node;
9328
9329 tree int_ftype_int_v2sf_v2sf
9330 = build_function_type
9331 (integer_type_node,
9332 tree_cons (NULL_TREE, integer_type_node,
9333 tree_cons (NULL_TREE, V2SF_type_node,
9334 tree_cons (NULL_TREE, V2SF_type_node,
9335 endlink))));
9336 tree pcfloat_type_node =
9337 build_pointer_type (build_qualified_type
9338 (float_type_node, TYPE_QUAL_CONST));
9339
9340 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9341 long_integer_type_node,
9342 pcfloat_type_node,
9343 NULL_TREE);
9344 tree void_ftype_v2sf_long_pcfloat =
9345 build_function_type_list (void_type_node,
9346 V2SF_type_node,
9347 long_integer_type_node,
9348 pcfloat_type_node,
9349 NULL_TREE);
9350
9351
9352 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9353 PAIRED_BUILTIN_LX);
9354
9355
9356 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9357 PAIRED_BUILTIN_STX);
9358
9359 /* Predicates. */
23a651fc 9360 d = bdesc_paired_preds;
96038623
DE
9361 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9362 {
9363 tree type;
9364
9365 switch (insn_data[d->icode].operand[1].mode)
9366 {
9367 case V2SFmode:
9368 type = int_ftype_int_v2sf_v2sf;
9369 break;
9370 default:
9371 gcc_unreachable ();
9372 }
9373
9374 def_builtin (d->mask, d->name, type, d->code);
9375 }
9376}
9377
a3170dc6 9378static void
863d938c 9379altivec_init_builtins (void)
a3170dc6 9380{
586de218
KG
9381 const struct builtin_description *d;
9382 const struct builtin_description_predicates *dp;
a3170dc6 9383 size_t i;
7a4eca66
DE
9384 tree ftype;
9385
a3170dc6
AH
9386 tree pfloat_type_node = build_pointer_type (float_type_node);
9387 tree pint_type_node = build_pointer_type (integer_type_node);
9388 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9389 tree pchar_type_node = build_pointer_type (char_type_node);
9390
9391 tree pvoid_type_node = build_pointer_type (void_type_node);
9392
0dbc3651
ZW
9393 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9394 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9395 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9396 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9397
9398 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9399
58646b77
PB
9400 tree int_ftype_opaque
9401 = build_function_type_list (integer_type_node,
9402 opaque_V4SI_type_node, NULL_TREE);
9403
9404 tree opaque_ftype_opaque_int
9405 = build_function_type_list (opaque_V4SI_type_node,
9406 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9407 tree opaque_ftype_opaque_opaque_int
9408 = build_function_type_list (opaque_V4SI_type_node,
9409 opaque_V4SI_type_node, opaque_V4SI_type_node,
9410 integer_type_node, NULL_TREE);
9411 tree int_ftype_int_opaque_opaque
9412 = build_function_type_list (integer_type_node,
9413 integer_type_node, opaque_V4SI_type_node,
9414 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9415 tree int_ftype_int_v4si_v4si
9416 = build_function_type_list (integer_type_node,
9417 integer_type_node, V4SI_type_node,
9418 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9419 tree v4sf_ftype_pcfloat
9420 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9421 tree void_ftype_pfloat_v4sf
b4de2f7d 9422 = build_function_type_list (void_type_node,
a3170dc6 9423 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9424 tree v4si_ftype_pcint
9425 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9426 tree void_ftype_pint_v4si
b4de2f7d
AH
9427 = build_function_type_list (void_type_node,
9428 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9429 tree v8hi_ftype_pcshort
9430 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9431 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9432 = build_function_type_list (void_type_node,
9433 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9434 tree v16qi_ftype_pcchar
9435 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9436 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9437 = build_function_type_list (void_type_node,
9438 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9439 tree void_ftype_v4si
b4de2f7d 9440 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9441 tree v8hi_ftype_void
9442 = build_function_type (V8HI_type_node, void_list_node);
9443 tree void_ftype_void
9444 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9445 tree void_ftype_int
9446 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9447
58646b77
PB
9448 tree opaque_ftype_long_pcvoid
9449 = build_function_type_list (opaque_V4SI_type_node,
9450 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9451 tree v16qi_ftype_long_pcvoid
a3170dc6 9452 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9453 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9454 tree v8hi_ftype_long_pcvoid
a3170dc6 9455 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9456 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9457 tree v4si_ftype_long_pcvoid
a3170dc6 9458 = build_function_type_list (V4SI_type_node,
b4a62fa0 9459 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9460
58646b77
PB
9461 tree void_ftype_opaque_long_pvoid
9462 = build_function_type_list (void_type_node,
9463 opaque_V4SI_type_node, long_integer_type_node,
9464 pvoid_type_node, NULL_TREE);
b4a62fa0 9465 tree void_ftype_v4si_long_pvoid
b4de2f7d 9466 = build_function_type_list (void_type_node,
b4a62fa0 9467 V4SI_type_node, long_integer_type_node,
b4de2f7d 9468 pvoid_type_node, NULL_TREE);
b4a62fa0 9469 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9470 = build_function_type_list (void_type_node,
b4a62fa0 9471 V16QI_type_node, long_integer_type_node,
b4de2f7d 9472 pvoid_type_node, NULL_TREE);
b4a62fa0 9473 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9474 = build_function_type_list (void_type_node,
b4a62fa0 9475 V8HI_type_node, long_integer_type_node,
b4de2f7d 9476 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9477 tree int_ftype_int_v8hi_v8hi
9478 = build_function_type_list (integer_type_node,
9479 integer_type_node, V8HI_type_node,
9480 V8HI_type_node, NULL_TREE);
9481 tree int_ftype_int_v16qi_v16qi
9482 = build_function_type_list (integer_type_node,
9483 integer_type_node, V16QI_type_node,
9484 V16QI_type_node, NULL_TREE);
9485 tree int_ftype_int_v4sf_v4sf
9486 = build_function_type_list (integer_type_node,
9487 integer_type_node, V4SF_type_node,
9488 V4SF_type_node, NULL_TREE);
9489 tree v4si_ftype_v4si
9490 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9491 tree v8hi_ftype_v8hi
9492 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9493 tree v16qi_ftype_v16qi
9494 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9495 tree v4sf_ftype_v4sf
9496 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9497 tree void_ftype_pcvoid_int_int
a3170dc6 9498 = build_function_type_list (void_type_node,
0dbc3651 9499 pcvoid_type_node, integer_type_node,
8bb418a3 9500 integer_type_node, NULL_TREE);
8bb418a3 9501
0dbc3651
ZW
9502 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9503 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9504 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9505 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9506 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9507 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9509 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9511 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9513 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9515 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9517 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9525 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9526 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9527 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9528 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9529 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9530 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9531 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9532 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9533 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9534 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9535 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9536 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9537 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9538 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9539 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9540 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9541 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9542 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9543 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9544 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9545 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9546 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9547 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9548
9549 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9550
9551 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9552 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9553 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9554 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9555 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9556 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9557 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9558 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9559 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9560 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9561
a3170dc6 9562 /* Add the DST variants. */
586de218 9563 d = bdesc_dst;
a3170dc6 9564 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9565 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9566
9567 /* Initialize the predicates. */
586de218 9568 dp = bdesc_altivec_preds;
a3170dc6
AH
9569 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9570 {
9571 enum machine_mode mode1;
9572 tree type;
58646b77
PB
9573 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9574 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9575
58646b77
PB
9576 if (is_overloaded)
9577 mode1 = VOIDmode;
9578 else
9579 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9580
9581 switch (mode1)
9582 {
58646b77
PB
9583 case VOIDmode:
9584 type = int_ftype_int_opaque_opaque;
9585 break;
a3170dc6
AH
9586 case V4SImode:
9587 type = int_ftype_int_v4si_v4si;
9588 break;
9589 case V8HImode:
9590 type = int_ftype_int_v8hi_v8hi;
9591 break;
9592 case V16QImode:
9593 type = int_ftype_int_v16qi_v16qi;
9594 break;
9595 case V4SFmode:
9596 type = int_ftype_int_v4sf_v4sf;
9597 break;
9598 default:
37409796 9599 gcc_unreachable ();
a3170dc6 9600 }
f676971a 9601
a3170dc6
AH
9602 def_builtin (dp->mask, dp->name, type, dp->code);
9603 }
9604
9605 /* Initialize the abs* operators. */
586de218 9606 d = bdesc_abs;
a3170dc6
AH
9607 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9608 {
9609 enum machine_mode mode0;
9610 tree type;
9611
9612 mode0 = insn_data[d->icode].operand[0].mode;
9613
9614 switch (mode0)
9615 {
9616 case V4SImode:
9617 type = v4si_ftype_v4si;
9618 break;
9619 case V8HImode:
9620 type = v8hi_ftype_v8hi;
9621 break;
9622 case V16QImode:
9623 type = v16qi_ftype_v16qi;
9624 break;
9625 case V4SFmode:
9626 type = v4sf_ftype_v4sf;
9627 break;
9628 default:
37409796 9629 gcc_unreachable ();
a3170dc6 9630 }
f676971a 9631
a3170dc6
AH
9632 def_builtin (d->mask, d->name, type, d->code);
9633 }
7ccf35ed 9634
13c62176
DN
9635 if (TARGET_ALTIVEC)
9636 {
9637 tree decl;
9638
9639 /* Initialize target builtin that implements
9640 targetm.vectorize.builtin_mask_for_load. */
9641
c79efc4d
RÁE
9642 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9643 v16qi_ftype_long_pcvoid,
9644 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9645 BUILT_IN_MD, NULL, NULL_TREE);
9646 TREE_READONLY (decl) = 1;
13c62176
DN
9647 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9648 altivec_builtin_mask_for_load = decl;
13c62176 9649 }
7a4eca66
DE
9650
9651 /* Access to the vec_init patterns. */
9652 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9653 integer_type_node, integer_type_node,
9654 integer_type_node, NULL_TREE);
9655 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9656 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9657
9658 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9659 short_integer_type_node,
9660 short_integer_type_node,
9661 short_integer_type_node,
9662 short_integer_type_node,
9663 short_integer_type_node,
9664 short_integer_type_node,
9665 short_integer_type_node, NULL_TREE);
9666 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9667 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9668
9669 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9670 char_type_node, char_type_node,
9671 char_type_node, char_type_node,
9672 char_type_node, char_type_node,
9673 char_type_node, char_type_node,
9674 char_type_node, char_type_node,
9675 char_type_node, char_type_node,
9676 char_type_node, char_type_node,
9677 char_type_node, NULL_TREE);
9678 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9679 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9680
9681 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9682 float_type_node, float_type_node,
9683 float_type_node, NULL_TREE);
9684 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9685 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9686
9687 /* Access to the vec_set patterns. */
9688 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9689 intSI_type_node,
9690 integer_type_node, NULL_TREE);
9691 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9692 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9693
9694 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9695 intHI_type_node,
9696 integer_type_node, NULL_TREE);
9697 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9698 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9699
9700 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9701 intQI_type_node,
9702 integer_type_node, NULL_TREE);
9703 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9704 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9705
9706 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9707 float_type_node,
9708 integer_type_node, NULL_TREE);
9709 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9710 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9711
9712 /* Access to the vec_extract patterns. */
9713 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9714 integer_type_node, NULL_TREE);
9715 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9716 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9717
9718 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9719 integer_type_node, NULL_TREE);
9720 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9721 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9722
9723 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9724 integer_type_node, NULL_TREE);
9725 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9726 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9727
9728 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9729 integer_type_node, NULL_TREE);
9730 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9731 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
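  /* A usage sketch for the vec_init/vec_set/vec_extract builtins defined
     above (argument order as given by the function types; shown for the
     V4SI flavour only):

       __vector signed int v = __builtin_vec_init_v4si (1, 2, 3, 4);
       v = __builtin_vec_set_v4si (v, 42, 3);    -- store 42 in element 3
       int x = __builtin_vec_ext_v4si (v, 0);    -- read element 0  */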
a3170dc6
AH
9732}
9733
9734static void
863d938c 9735rs6000_common_init_builtins (void)
a3170dc6 9736{
586de218 9737 const struct builtin_description *d;
a3170dc6
AH
9738 size_t i;
9739
96038623
DE
9740 tree v2sf_ftype_v2sf_v2sf_v2sf
9741 = build_function_type_list (V2SF_type_node,
9742 V2SF_type_node, V2SF_type_node,
9743 V2SF_type_node, NULL_TREE);
9744
a3170dc6
AH
9745 tree v4sf_ftype_v4sf_v4sf_v16qi
9746 = build_function_type_list (V4SF_type_node,
9747 V4SF_type_node, V4SF_type_node,
9748 V16QI_type_node, NULL_TREE);
9749 tree v4si_ftype_v4si_v4si_v16qi
9750 = build_function_type_list (V4SI_type_node,
9751 V4SI_type_node, V4SI_type_node,
9752 V16QI_type_node, NULL_TREE);
9753 tree v8hi_ftype_v8hi_v8hi_v16qi
9754 = build_function_type_list (V8HI_type_node,
9755 V8HI_type_node, V8HI_type_node,
9756 V16QI_type_node, NULL_TREE);
9757 tree v16qi_ftype_v16qi_v16qi_v16qi
9758 = build_function_type_list (V16QI_type_node,
9759 V16QI_type_node, V16QI_type_node,
9760 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9761 tree v4si_ftype_int
9762 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9763 tree v8hi_ftype_int
9764 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9765 tree v16qi_ftype_int
9766 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9767 tree v8hi_ftype_v16qi
9768 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9769 tree v4sf_ftype_v4sf
9770 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9771
9772 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9773 = build_function_type_list (opaque_V2SI_type_node,
9774 opaque_V2SI_type_node,
9775 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9776
96038623 9777 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
9778 = build_function_type_list (opaque_V2SF_type_node,
9779 opaque_V2SF_type_node,
9780 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9781
96038623
DE
9782 tree v2sf_ftype_v2sf_v2sf
9783 = build_function_type_list (V2SF_type_node,
9784 V2SF_type_node,
9785 V2SF_type_node, NULL_TREE);
9786
9787
a3170dc6 9788 tree v2si_ftype_int_int
2abe3e28 9789 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9790 integer_type_node, integer_type_node,
9791 NULL_TREE);
9792
58646b77
PB
9793 tree opaque_ftype_opaque
9794 = build_function_type_list (opaque_V4SI_type_node,
9795 opaque_V4SI_type_node, NULL_TREE);
9796
a3170dc6 9797 tree v2si_ftype_v2si
2abe3e28
AH
9798 = build_function_type_list (opaque_V2SI_type_node,
9799 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9800
96038623 9801 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
9802 = build_function_type_list (opaque_V2SF_type_node,
9803 opaque_V2SF_type_node, NULL_TREE);
f676971a 9804
96038623
DE
9805 tree v2sf_ftype_v2sf
9806 = build_function_type_list (V2SF_type_node,
9807 V2SF_type_node, NULL_TREE);
9808
a3170dc6 9809 tree v2sf_ftype_v2si
2abe3e28
AH
9810 = build_function_type_list (opaque_V2SF_type_node,
9811 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9812
9813 tree v2si_ftype_v2sf
2abe3e28
AH
9814 = build_function_type_list (opaque_V2SI_type_node,
9815 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9816
9817 tree v2si_ftype_v2si_char
2abe3e28
AH
9818 = build_function_type_list (opaque_V2SI_type_node,
9819 opaque_V2SI_type_node,
9820 char_type_node, NULL_TREE);
a3170dc6
AH
9821
9822 tree v2si_ftype_int_char
2abe3e28 9823 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9824 integer_type_node, char_type_node, NULL_TREE);
9825
9826 tree v2si_ftype_char
2abe3e28
AH
9827 = build_function_type_list (opaque_V2SI_type_node,
9828 char_type_node, NULL_TREE);
a3170dc6
AH
9829
9830 tree int_ftype_int_int
9831 = build_function_type_list (integer_type_node,
9832 integer_type_node, integer_type_node,
9833 NULL_TREE);
95385cbb 9834
58646b77
PB
9835 tree opaque_ftype_opaque_opaque
9836 = build_function_type_list (opaque_V4SI_type_node,
9837 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9838 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9839 = build_function_type_list (V4SI_type_node,
9840 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9841 tree v4sf_ftype_v4si_int
b4de2f7d 9842 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9843 V4SI_type_node, integer_type_node, NULL_TREE);
9844 tree v4si_ftype_v4sf_int
b4de2f7d 9845 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9846 V4SF_type_node, integer_type_node, NULL_TREE);
9847 tree v4si_ftype_v4si_int
b4de2f7d 9848 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9849 V4SI_type_node, integer_type_node, NULL_TREE);
9850 tree v8hi_ftype_v8hi_int
b4de2f7d 9851 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9852 V8HI_type_node, integer_type_node, NULL_TREE);
9853 tree v16qi_ftype_v16qi_int
b4de2f7d 9854 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9855 V16QI_type_node, integer_type_node, NULL_TREE);
9856 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9857 = build_function_type_list (V16QI_type_node,
9858 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9859 integer_type_node, NULL_TREE);
9860 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9861 = build_function_type_list (V8HI_type_node,
9862 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9863 integer_type_node, NULL_TREE);
9864 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9865 = build_function_type_list (V4SI_type_node,
9866 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9867 integer_type_node, NULL_TREE);
9868 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9869 = build_function_type_list (V4SF_type_node,
9870 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9871 integer_type_node, NULL_TREE);
0ac081f6 9872 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9873 = build_function_type_list (V4SF_type_node,
9874 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9875 tree opaque_ftype_opaque_opaque_opaque
9876 = build_function_type_list (opaque_V4SI_type_node,
9877 opaque_V4SI_type_node, opaque_V4SI_type_node,
9878 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9879 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9880 = build_function_type_list (V4SF_type_node,
9881 V4SF_type_node, V4SF_type_node,
9882 V4SI_type_node, NULL_TREE);
2212663f 9883 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9884 = build_function_type_list (V4SF_type_node,
9885 V4SF_type_node, V4SF_type_node,
9886 V4SF_type_node, NULL_TREE);
f676971a 9887 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9888 = build_function_type_list (V4SI_type_node,
9889 V4SI_type_node, V4SI_type_node,
9890 V4SI_type_node, NULL_TREE);
0ac081f6 9891 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9892 = build_function_type_list (V8HI_type_node,
9893 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9894 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9895 = build_function_type_list (V8HI_type_node,
9896 V8HI_type_node, V8HI_type_node,
9897 V8HI_type_node, NULL_TREE);
c4ad648e 9898 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9899 = build_function_type_list (V4SI_type_node,
9900 V8HI_type_node, V8HI_type_node,
9901 V4SI_type_node, NULL_TREE);
c4ad648e 9902 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9903 = build_function_type_list (V4SI_type_node,
9904 V16QI_type_node, V16QI_type_node,
9905 V4SI_type_node, NULL_TREE);
0ac081f6 9906 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9907 = build_function_type_list (V16QI_type_node,
9908 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9909 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9910 = build_function_type_list (V4SI_type_node,
9911 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9912 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9913 = build_function_type_list (V8HI_type_node,
9914 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9915 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9916 = build_function_type_list (V4SI_type_node,
9917 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9918 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9919 = build_function_type_list (V8HI_type_node,
9920 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9921 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9922 = build_function_type_list (V16QI_type_node,
9923 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9924 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9925 = build_function_type_list (V4SI_type_node,
9926 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9927 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9928 = build_function_type_list (V4SI_type_node,
9929 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9930 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9931 = build_function_type_list (V4SI_type_node,
9932 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9933 tree v4si_ftype_v8hi
9934 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9935 tree int_ftype_v4si_v4si
9936 = build_function_type_list (integer_type_node,
9937 V4SI_type_node, V4SI_type_node, NULL_TREE);
9938 tree int_ftype_v4sf_v4sf
9939 = build_function_type_list (integer_type_node,
9940 V4SF_type_node, V4SF_type_node, NULL_TREE);
9941 tree int_ftype_v16qi_v16qi
9942 = build_function_type_list (integer_type_node,
9943 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9944 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9945 = build_function_type_list (integer_type_node,
9946 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9947
6f317ef3 9948 /* Add the simple ternary operators. */
586de218 9949 d = bdesc_3arg;
ca7558fc 9950 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9951 {
2212663f
DB
9952 enum machine_mode mode0, mode1, mode2, mode3;
9953 tree type;
58646b77
PB
9954 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9955 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9956
58646b77
PB
9957 if (is_overloaded)
9958 {
9959 mode0 = VOIDmode;
9960 mode1 = VOIDmode;
9961 mode2 = VOIDmode;
9962 mode3 = VOIDmode;
9963 }
9964 else
9965 {
9966 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9967 continue;
f676971a 9968
58646b77
PB
9969 mode0 = insn_data[d->icode].operand[0].mode;
9970 mode1 = insn_data[d->icode].operand[1].mode;
9971 mode2 = insn_data[d->icode].operand[2].mode;
9972 mode3 = insn_data[d->icode].operand[3].mode;
9973 }
bb8df8a6 9974
2212663f
DB
9975 /* When the result and all three source operands have the same mode. */
9976 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9977 {
9978 switch (mode0)
9979 {
58646b77
PB
9980 case VOIDmode:
9981 type = opaque_ftype_opaque_opaque_opaque;
9982 break;
617e0e1d
DB
9983 case V4SImode:
9984 type = v4si_ftype_v4si_v4si_v4si;
9985 break;
2212663f
DB
9986 case V4SFmode:
9987 type = v4sf_ftype_v4sf_v4sf_v4sf;
9988 break;
9989 case V8HImode:
9990 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9991 break;
2212663f
DB
9992 case V16QImode:
9993 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9994 break;
96038623
DE
9995 case V2SFmode:
9996 type = v2sf_ftype_v2sf_v2sf_v2sf;
9997 break;
2212663f 9998 default:
37409796 9999 gcc_unreachable ();
2212663f
DB
10000 }
10001 }
10002 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10003 {
2212663f
DB
10004 switch (mode0)
10005 {
10006 case V4SImode:
10007 type = v4si_ftype_v4si_v4si_v16qi;
10008 break;
10009 case V4SFmode:
10010 type = v4sf_ftype_v4sf_v4sf_v16qi;
10011 break;
10012 case V8HImode:
10013 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10014 break;
2212663f
DB
10015 case V16QImode:
10016 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10017 break;
2212663f 10018 default:
37409796 10019 gcc_unreachable ();
2212663f
DB
10020 }
10021 }
f676971a 10022 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10023 && mode3 == V4SImode)
24408032 10024 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10025 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10026 && mode3 == V4SImode)
24408032 10027 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10028 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10029 && mode3 == V4SImode)
24408032
AH
10030 type = v4sf_ftype_v4sf_v4sf_v4si;
10031
a7b376ee 10032 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10033 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10034 && mode3 == QImode)
b9e4e5d1 10035 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10036
a7b376ee 10037 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10038 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10039 && mode3 == QImode)
b9e4e5d1 10040 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10041
a7b376ee 10042 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10043 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10044 && mode3 == QImode)
b9e4e5d1 10045 type = v4si_ftype_v4si_v4si_int;
24408032 10046
a7b376ee 10047 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10048 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10049 && mode3 == QImode)
b9e4e5d1 10050 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10051
2212663f 10052 else
37409796 10053 gcc_unreachable ();
2212663f
DB
10054
10055 def_builtin (d->mask, d->name, type, d->code);
10056 }
10057
0ac081f6 10058 /* Add the simple binary operators. */
00b960c7 10059 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10060 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10061 {
10062 enum machine_mode mode0, mode1, mode2;
10063 tree type;
58646b77
PB
10064 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10065 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10066
58646b77
PB
10067 if (is_overloaded)
10068 {
10069 mode0 = VOIDmode;
10070 mode1 = VOIDmode;
10071 mode2 = VOIDmode;
10072 }
10073 else
bb8df8a6 10074 {
58646b77
PB
10075 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10076 continue;
f676971a 10077
58646b77
PB
10078 mode0 = insn_data[d->icode].operand[0].mode;
10079 mode1 = insn_data[d->icode].operand[1].mode;
10080 mode2 = insn_data[d->icode].operand[2].mode;
10081 }
0ac081f6
AH
10082
10083 /* When all three operands are of the same mode. */
10084 if (mode0 == mode1 && mode1 == mode2)
10085 {
10086 switch (mode0)
10087 {
58646b77
PB
10088 case VOIDmode:
10089 type = opaque_ftype_opaque_opaque;
10090 break;
0ac081f6
AH
10091 case V4SFmode:
10092 type = v4sf_ftype_v4sf_v4sf;
10093 break;
10094 case V4SImode:
10095 type = v4si_ftype_v4si_v4si;
10096 break;
10097 case V16QImode:
10098 type = v16qi_ftype_v16qi_v16qi;
10099 break;
10100 case V8HImode:
10101 type = v8hi_ftype_v8hi_v8hi;
10102 break;
a3170dc6
AH
10103 case V2SImode:
10104 type = v2si_ftype_v2si_v2si;
10105 break;
96038623
DE
10106 case V2SFmode:
10107 if (TARGET_PAIRED_FLOAT)
10108 type = v2sf_ftype_v2sf_v2sf;
10109 else
10110 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10111 break;
10112 case SImode:
10113 type = int_ftype_int_int;
10114 break;
0ac081f6 10115 default:
37409796 10116 gcc_unreachable ();
0ac081f6
AH
10117 }
10118 }
10119
10120 /* A few other combos we really don't want to do manually. */
10121
10122 /* vint, vfloat, vfloat. */
10123 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10124 type = v4si_ftype_v4sf_v4sf;
10125
10126 /* vshort, vchar, vchar. */
10127 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10128 type = v8hi_ftype_v16qi_v16qi;
10129
10130 /* vint, vshort, vshort. */
10131 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10132 type = v4si_ftype_v8hi_v8hi;
10133
10134 /* vshort, vint, vint. */
10135 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10136 type = v8hi_ftype_v4si_v4si;
10137
10138 /* vchar, vshort, vshort. */
10139 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10140 type = v16qi_ftype_v8hi_v8hi;
10141
10142 /* vint, vchar, vint. */
10143 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10144 type = v4si_ftype_v16qi_v4si;
10145
fa066a23
AH
10146 /* vint, vchar, vchar. */
10147 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10148 type = v4si_ftype_v16qi_v16qi;
10149
0ac081f6
AH
10150 /* vint, vshort, vint. */
10151 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10152 type = v4si_ftype_v8hi_v4si;
f676971a 10153
a7b376ee 10154 /* vint, vint, 5-bit literal. */
2212663f 10155 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10156 type = v4si_ftype_v4si_int;
f676971a 10157
a7b376ee 10158 /* vshort, vshort, 5-bit literal. */
2212663f 10159 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10160 type = v8hi_ftype_v8hi_int;
f676971a 10161
a7b376ee 10162 /* vchar, vchar, 5-bit literal. */
2212663f 10163 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10164 type = v16qi_ftype_v16qi_int;
0ac081f6 10165
a7b376ee 10166 /* vfloat, vint, 5-bit literal. */
617e0e1d 10167 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10168 type = v4sf_ftype_v4si_int;
f676971a 10169
a7b376ee 10170 /* vint, vfloat, 5-bit literal. */
617e0e1d 10171 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10172 type = v4si_ftype_v4sf_int;
617e0e1d 10173
a3170dc6
AH
10174 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10175 type = v2si_ftype_int_int;
10176
10177 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10178 type = v2si_ftype_v2si_char;
10179
10180 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10181 type = v2si_ftype_int_char;
10182
37409796 10183 else
0ac081f6 10184 {
37409796
NS
10185 /* int, x, x. */
10186 gcc_assert (mode0 == SImode);
0ac081f6
AH
10187 switch (mode1)
10188 {
10189 case V4SImode:
10190 type = int_ftype_v4si_v4si;
10191 break;
10192 case V4SFmode:
10193 type = int_ftype_v4sf_v4sf;
10194 break;
10195 case V16QImode:
10196 type = int_ftype_v16qi_v16qi;
10197 break;
10198 case V8HImode:
10199 type = int_ftype_v8hi_v8hi;
10200 break;
10201 default:
37409796 10202 gcc_unreachable ();
0ac081f6
AH
10203 }
10204 }
10205
2212663f
DB
10206 def_builtin (d->mask, d->name, type, d->code);
10207 }
24408032 10208
2212663f
DB
10209 /* Add the simple unary operators. */
10210 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10211 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10212 {
10213 enum machine_mode mode0, mode1;
10214 tree type;
58646b77
PB
10215 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10216 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10217
10218 if (is_overloaded)
10219 {
10220 mode0 = VOIDmode;
10221 mode1 = VOIDmode;
10222 }
10223 else
10224 {
10225 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10226 continue;
bb8df8a6 10227
58646b77
PB
10228 mode0 = insn_data[d->icode].operand[0].mode;
10229 mode1 = insn_data[d->icode].operand[1].mode;
10230 }
2212663f
DB
10231
10232 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10233 type = v4si_ftype_int;
2212663f 10234 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10235 type = v8hi_ftype_int;
2212663f 10236 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10237 type = v16qi_ftype_int;
58646b77
PB
10238 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10239 type = opaque_ftype_opaque;
617e0e1d
DB
10240 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10241 type = v4sf_ftype_v4sf;
20e26713
AH
10242 else if (mode0 == V8HImode && mode1 == V16QImode)
10243 type = v8hi_ftype_v16qi;
10244 else if (mode0 == V4SImode && mode1 == V8HImode)
10245 type = v4si_ftype_v8hi;
a3170dc6
AH
10246 else if (mode0 == V2SImode && mode1 == V2SImode)
10247 type = v2si_ftype_v2si;
10248 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10249 {
10250 if (TARGET_PAIRED_FLOAT)
10251 type = v2sf_ftype_v2sf;
10252 else
10253 type = v2sf_ftype_v2sf_spe;
10254 }
a3170dc6
AH
10255 else if (mode0 == V2SFmode && mode1 == V2SImode)
10256 type = v2sf_ftype_v2si;
10257 else if (mode0 == V2SImode && mode1 == V2SFmode)
10258 type = v2si_ftype_v2sf;
10259 else if (mode0 == V2SImode && mode1 == QImode)
10260 type = v2si_ftype_char;
2212663f 10261 else
37409796 10262 gcc_unreachable ();
2212663f 10263
0ac081f6
AH
10264 def_builtin (d->mask, d->name, type, d->code);
10265 }
10266}
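The function types built in the loops above are what the AltiVec builtins expose to user code. As a rough user-level illustration (assuming an AltiVec target and -maltivec; the wrapper name add_v4si is made up, but vec_add and altivec.h are the real interface), the v4si_ftype_v4si_v4si shape corresponds to:

#include <altivec.h>

/* Illustrative wrapper only: one binary AltiVec operation with the
   V4SI x V4SI -> V4SI signature registered above.  */
vector signed int
add_v4si (vector signed int a, vector signed int b)
{
  return vec_add (a, b);
}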
10267
c15c90bb
ZW
10268static void
10269rs6000_init_libfuncs (void)
10270{
602ea4d3
JJ
10271 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10272 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10273 {
602ea4d3
JJ
10274 /* AIX library routines for float->int conversion. */
10275 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10276 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10277 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10278 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10279 }
c15c90bb 10280
602ea4d3 10281 if (!TARGET_IEEEQUAD)
98c41d98 10282 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10283 if (!TARGET_XL_COMPAT)
10284 {
10285 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10286 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10287 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10288 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10289
17caeff2 10290 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10291 {
10292 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10293 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10294 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10295 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10296 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10297 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10298 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10299
10300 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10301 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10302 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10303 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10304 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10305 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10306 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10307 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10308 }
b26941b4
JM
10309
10310 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10311 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10312 }
10313 else
10314 {
10315 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10316 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10317 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10318 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10319 }
c9034561 10320 else
c15c90bb 10321 {
c9034561 10322 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10323
10324 set_optab_libfunc (add_optab, TFmode, "_q_add");
10325 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10326 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10327 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10328 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10329 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10330 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10331
c9034561
ZW
10332 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10333 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10334 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10335 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10336 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10337 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10338
85363ca0
ZW
10339 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10340 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10341 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10342 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10343 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10344 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10345 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10346 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10347 }
10348}
fba73eb1
DE
10349
10350\f
10351/* Expand a block clear operation, and return 1 if successful. Return 0
10352 if we should let the compiler generate normal code.
10353
10354 operands[0] is the destination
10355 operands[1] is the length
57e84f18 10356 operands[3] is the alignment */
fba73eb1
DE
10357
10358int
10359expand_block_clear (rtx operands[])
10360{
10361 rtx orig_dest = operands[0];
10362 rtx bytes_rtx = operands[1];
57e84f18 10363 rtx align_rtx = operands[3];
5514620a
GK
10364 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10365 HOST_WIDE_INT align;
10366 HOST_WIDE_INT bytes;
fba73eb1
DE
10367 int offset;
10368 int clear_bytes;
5514620a 10369 int clear_step;
fba73eb1
DE
10370
10371 /* If this is not a fixed size clear, just call memset */
10372 if (! constp)
10373 return 0;
10374
37409796
NS
10375 /* This must be a fixed size alignment */
10376 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10377 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10378
10379 /* Anything to clear? */
10380 bytes = INTVAL (bytes_rtx);
10381 if (bytes <= 0)
10382 return 1;
10383
5514620a
GK
10384 /* Use the builtin memset after a point, to avoid huge code bloat.
10385 When optimize_size, avoid any significant code bloat; calling
10386 memset is about 4 instructions, so allow for one instruction to
10387 load zero and three to do clearing. */
10388 if (TARGET_ALTIVEC && align >= 128)
10389 clear_step = 16;
10390 else if (TARGET_POWERPC64 && align >= 32)
10391 clear_step = 8;
21d818ff
NF
10392 else if (TARGET_SPE && align >= 64)
10393 clear_step = 8;
5514620a
GK
10394 else
10395 clear_step = 4;
fba73eb1 10396
5514620a
GK
10397 if (optimize_size && bytes > 3 * clear_step)
10398 return 0;
10399 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10400 return 0;
10401
10402 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10403 {
fba73eb1
DE
10404 enum machine_mode mode = BLKmode;
10405 rtx dest;
f676971a 10406
5514620a
GK
10407 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10408 {
10409 clear_bytes = 16;
10410 mode = V4SImode;
10411 }
21d818ff
NF
10412 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10413 {
10414 clear_bytes = 8;
10415 mode = V2SImode;
10416 }
5514620a 10417 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10418 /* 64-bit loads and stores require word-aligned
10419 displacements. */
10420 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10421 {
10422 clear_bytes = 8;
10423 mode = DImode;
fba73eb1 10424 }
5514620a 10425 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10426 { /* move 4 bytes */
10427 clear_bytes = 4;
10428 mode = SImode;
fba73eb1 10429 }
ec53fc93 10430 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10431 { /* move 2 bytes */
10432 clear_bytes = 2;
10433 mode = HImode;
fba73eb1
DE
10434 }
10435 else /* move 1 byte at a time */
10436 {
10437 clear_bytes = 1;
10438 mode = QImode;
fba73eb1 10439 }
f676971a 10440
fba73eb1 10441 dest = adjust_address (orig_dest, mode, offset);
f676971a 10442
5514620a 10443 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10444 }
10445
10446 return 1;
10447}
10448
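As a standalone sketch of the chunking loop above (not part of rs6000.c; it assumes a 32-bit target without AltiVec, SPE or 64-bit stores, so clear_step is 4), a 22-byte word-aligned clear breaks into five word stores plus one halfword store, well under the !optimize_size limit of 8 * clear_step bytes:

#include <stdio.h>

int
main (void)
{
  int bytes = 22, align = 32, offset = 0;

  while (bytes > 0)
    {
      int clear_bytes;

      if (bytes >= 4 && align >= 32)
        clear_bytes = 4;        /* SImode store of zero */
      else if (bytes >= 2 && align >= 16)
        clear_bytes = 2;        /* HImode store of zero */
      else
        clear_bytes = 1;        /* QImode store of zero */

      printf ("clear %d bytes at offset %d\n", clear_bytes, offset);
      offset += clear_bytes;
      bytes -= clear_bytes;
    }
  return 0;
}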
35aff10b 10449\f
7e69e155
MM
10450/* Expand a block move operation, and return 1 if successful. Return 0
10451 if we should let the compiler generate normal code.
10452
10453 operands[0] is the destination
10454 operands[1] is the source
10455 operands[2] is the length
10456 operands[3] is the alignment */
10457
3933e0e1
MM
10458#define MAX_MOVE_REG 4
10459
7e69e155 10460int
a2369ed3 10461expand_block_move (rtx operands[])
7e69e155 10462{
b6c9286a
MM
10463 rtx orig_dest = operands[0];
10464 rtx orig_src = operands[1];
7e69e155 10465 rtx bytes_rtx = operands[2];
7e69e155 10466 rtx align_rtx = operands[3];
3933e0e1 10467 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10468 int align;
3933e0e1
MM
10469 int bytes;
10470 int offset;
7e69e155 10471 int move_bytes;
cabfd258
GK
10472 rtx stores[MAX_MOVE_REG];
10473 int num_reg = 0;
7e69e155 10474
3933e0e1 10475 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10476 if (! constp)
3933e0e1
MM
10477 return 0;
10478
37409796
NS
10479 /* This must be a fixed size alignment */
10480 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10481 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10482
7e69e155 10483 /* Anything to move? */
3933e0e1
MM
10484 bytes = INTVAL (bytes_rtx);
10485 if (bytes <= 0)
7e69e155
MM
10486 return 1;
10487
ea9982a8 10488 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10489 reg_parm_stack_space. */
ea9982a8 10490 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10491 return 0;
10492
cabfd258 10493 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10494 {
cabfd258 10495 union {
70128ad9 10496 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10497 rtx (*mov) (rtx, rtx);
cabfd258
GK
10498 } gen_func;
10499 enum machine_mode mode = BLKmode;
10500 rtx src, dest;
f676971a 10501
5514620a
GK
10502 /* Altivec first, since it will be faster than a string move
10503 when it applies, and usually not significantly larger. */
10504 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10505 {
10506 move_bytes = 16;
10507 mode = V4SImode;
10508 gen_func.mov = gen_movv4si;
10509 }
21d818ff
NF
10510 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10511 {
10512 move_bytes = 8;
10513 mode = V2SImode;
10514 gen_func.mov = gen_movv2si;
10515 }
5514620a 10516 else if (TARGET_STRING
cabfd258
GK
10517 && bytes > 24 /* move up to 32 bytes at a time */
10518 && ! fixed_regs[5]
10519 && ! fixed_regs[6]
10520 && ! fixed_regs[7]
10521 && ! fixed_regs[8]
10522 && ! fixed_regs[9]
10523 && ! fixed_regs[10]
10524 && ! fixed_regs[11]
10525 && ! fixed_regs[12])
7e69e155 10526 {
cabfd258 10527 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10528 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10529 }
10530 else if (TARGET_STRING
10531 && bytes > 16 /* move up to 24 bytes at a time */
10532 && ! fixed_regs[5]
10533 && ! fixed_regs[6]
10534 && ! fixed_regs[7]
10535 && ! fixed_regs[8]
10536 && ! fixed_regs[9]
10537 && ! fixed_regs[10])
10538 {
10539 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10540 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10541 }
10542 else if (TARGET_STRING
10543 && bytes > 8 /* move up to 16 bytes at a time */
10544 && ! fixed_regs[5]
10545 && ! fixed_regs[6]
10546 && ! fixed_regs[7]
10547 && ! fixed_regs[8])
10548 {
10549 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10550 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10551 }
10552 else if (bytes >= 8 && TARGET_POWERPC64
10553 /* 64-bit loads and stores require word-aligned
10554 displacements. */
fba73eb1 10555 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10556 {
10557 move_bytes = 8;
10558 mode = DImode;
10559 gen_func.mov = gen_movdi;
10560 }
10561 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10562 { /* move up to 8 bytes at a time */
10563 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10564 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10565 }
cd7d9ca4 10566 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10567 { /* move 4 bytes */
10568 move_bytes = 4;
10569 mode = SImode;
10570 gen_func.mov = gen_movsi;
10571 }
ec53fc93 10572 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10573 { /* move 2 bytes */
10574 move_bytes = 2;
10575 mode = HImode;
10576 gen_func.mov = gen_movhi;
10577 }
10578 else if (TARGET_STRING && bytes > 1)
10579 { /* move up to 4 bytes at a time */
10580 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10581 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10582 }
10583 else /* move 1 byte at a time */
10584 {
10585 move_bytes = 1;
10586 mode = QImode;
10587 gen_func.mov = gen_movqi;
10588 }
f676971a 10589
cabfd258
GK
10590 src = adjust_address (orig_src, mode, offset);
10591 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10592
10593 if (mode != BLKmode)
cabfd258
GK
10594 {
10595 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10596
cabfd258
GK
10597 emit_insn ((*gen_func.mov) (tmp_reg, src));
10598 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10599 }
3933e0e1 10600
cabfd258
GK
10601 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10602 {
10603 int i;
10604 for (i = 0; i < num_reg; i++)
10605 emit_insn (stores[i]);
10606 num_reg = 0;
10607 }
35aff10b 10608
cabfd258 10609 if (mode == BLKmode)
7e69e155 10610 {
70128ad9 10611 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10612 patterns require zero offset. */
10613 if (!REG_P (XEXP (src, 0)))
b6c9286a 10614 {
cabfd258
GK
10615 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10616 src = replace_equiv_address (src, src_reg);
b6c9286a 10617 }
cabfd258 10618 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10619
cabfd258 10620 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10621 {
cabfd258
GK
10622 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10623 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10624 }
cabfd258 10625 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10626
70128ad9 10627 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10628 GEN_INT (move_bytes & 31),
10629 align_rtx));
7e69e155 10630 }
7e69e155
MM
10631 }
10632
10633 return 1;
10634}
10635
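A standalone sketch (not part of rs6000.c) of the batching pattern above: each chunk's load is emitted immediately, while the matching store is queued and flushed in groups of MAX_MOVE_REG so consecutive loads can issue back to back. The printf calls merely stand in for emit_insn, and the register names are invented:

#include <stdio.h>

#define MAX_MOVE_REG 4

int
main (void)
{
  int pending[MAX_MOVE_REG];
  int num_reg = 0, offset, bytes = 24;

  for (offset = 0; offset < bytes; offset += 4)
    {
      printf ("load  tmp%d, src+%d\n", num_reg, offset);   /* emitted now */
      pending[num_reg++] = offset;                          /* store queued */

      if (num_reg == MAX_MOVE_REG || offset + 4 >= bytes)
        {
          int i;
          for (i = 0; i < num_reg; i++)
            printf ("store tmp%d, dest+%d\n", i, pending[i]);
          num_reg = 0;
        }
    }
  return 0;
}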
d62294f5 10636\f
9caa3eb2
DE
10637/* Return a string to perform a load_multiple operation.
10638 operands[0] is the vector.
10639 operands[1] is the source address.
10640 operands[2] is the first destination register. */
10641
10642const char *
a2369ed3 10643rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10644{
10645 /* We have to handle the case where the pseudo used to contain the address
10646 is assigned to one of the output registers. */
10647 int i, j;
10648 int words = XVECLEN (operands[0], 0);
10649 rtx xop[10];
10650
10651 if (XVECLEN (operands[0], 0) == 1)
10652 return "{l|lwz} %2,0(%1)";
10653
10654 for (i = 0; i < words; i++)
10655 if (refers_to_regno_p (REGNO (operands[2]) + i,
10656 REGNO (operands[2]) + i + 1, operands[1], 0))
10657 {
10658 if (i == words-1)
10659 {
10660 xop[0] = GEN_INT (4 * (words-1));
10661 xop[1] = operands[1];
10662 xop[2] = operands[2];
10663 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10664 return "";
10665 }
10666 else if (i == 0)
10667 {
10668 xop[0] = GEN_INT (4 * (words-1));
10669 xop[1] = operands[1];
10670 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10671 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10672 return "";
10673 }
10674 else
10675 {
10676 for (j = 0; j < words; j++)
10677 if (j != i)
10678 {
10679 xop[0] = GEN_INT (j * 4);
10680 xop[1] = operands[1];
10681 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10682 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10683 }
10684 xop[0] = GEN_INT (i * 4);
10685 xop[1] = operands[1];
10686 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10687 return "";
10688 }
10689 }
10690
10691 return "{lsi|lswi} %2,%1,%N0";
10692}
10693
9878760c 10694\f
a4f6c312
SS
10695/* A validation routine: say whether CODE, a condition code, and MODE
10696 match. The other alternatives either don't make sense or should
10697 never be generated. */
39a10a29 10698
48d72335 10699void
a2369ed3 10700validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10701{
37409796
NS
10702 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10703 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10704 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10705
10706 /* These don't make sense. */
37409796
NS
10707 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10708 || mode != CCUNSmode);
39a10a29 10709
37409796
NS
10710 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10711 || mode == CCUNSmode);
39a10a29 10712
37409796
NS
10713 gcc_assert (mode == CCFPmode
10714 || (code != ORDERED && code != UNORDERED
10715 && code != UNEQ && code != LTGT
10716 && code != UNGT && code != UNLT
10717 && code != UNGE && code != UNLE));
f676971a
EC
10718
10719 /* These should never be generated except for
bc9ec0e0 10720 flag_finite_math_only. */
37409796
NS
10721 gcc_assert (mode != CCFPmode
10722 || flag_finite_math_only
10723 || (code != LE && code != GE
10724 && code != UNEQ && code != LTGT
10725 && code != UNGT && code != UNLT));
39a10a29
GK
10726
10727 /* These are invalid; the information is not there. */
37409796 10728 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10729}
10730
9878760c
RK
10731\f
10732/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10733 mask required to convert the result of a rotate insn into a shift
b1765bde 10734 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10735
10736int
a2369ed3 10737includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10738{
e2c953b6
DE
10739 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10740
10741 shift_mask <<= INTVAL (shiftop);
9878760c 10742
b1765bde 10743 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10744}
10745
10746/* Similar, but for right shift. */
10747
10748int
a2369ed3 10749includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10750{
a7653a2c 10751 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10752
10753 shift_mask >>= INTVAL (shiftop);
10754
b1765bde 10755 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10756}
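The two predicates above rely on the fact that a rotate followed by an AND equals the corresponding shift followed by the same AND whenever the mask has no bits set in the positions where the wrapped-around bits land. A standalone check of the left-shift case (not part of rs6000.c; the helper name rotl32 is made up):

#include <stdio.h>
#include <stdint.h>

/* Illustrative 32-bit rotate left; S must be 1..31 here.  */
static uint32_t
rotl32 (uint32_t x, int s)
{
  return (x << s) | (x >> (32 - s));
}

int
main (void)
{
  uint32_t x = 0xdeadbeef, mask = 0xffffff00;   /* no bits below bit 4 */
  int s = 4;

  printf ("%08x %08x\n",
          (unsigned) (rotl32 (x, s) & mask),
          (unsigned) ((x << s) & mask));        /* both print eadbee00 */
  return 0;
}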
10757
c5059423
AM
10758/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10759 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10760 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10761
10762int
a2369ed3 10763includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10764{
c5059423
AM
10765 if (GET_CODE (andop) == CONST_INT)
10766 {
02071907 10767 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10768
c5059423 10769 c = INTVAL (andop);
02071907 10770 if (c == 0 || c == ~0)
c5059423 10771 return 0;
e2c953b6 10772
02071907 10773 shift_mask = ~0;
c5059423
AM
10774 shift_mask <<= INTVAL (shiftop);
10775
b6d08ca1 10776 /* Find the least significant one bit. */
c5059423
AM
10777 lsb = c & -c;
10778
10779 /* It must coincide with the LSB of the shift mask. */
10780 if (-lsb != shift_mask)
10781 return 0;
e2c953b6 10782
c5059423
AM
10783 /* Invert to look for the next transition (if any). */
10784 c = ~c;
10785
10786 /* Remove the low group of ones (originally low group of zeros). */
10787 c &= -lsb;
10788
10789 /* Again find the lsb, and check we have all 1's above. */
10790 lsb = c & -c;
10791 return c == -lsb;
10792 }
10793 else if (GET_CODE (andop) == CONST_DOUBLE
10794 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10795 {
02071907
AM
10796 HOST_WIDE_INT low, high, lsb;
10797 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10798
10799 low = CONST_DOUBLE_LOW (andop);
10800 if (HOST_BITS_PER_WIDE_INT < 64)
10801 high = CONST_DOUBLE_HIGH (andop);
10802
10803 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10804 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10805 return 0;
10806
10807 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10808 {
02071907 10809 shift_mask_high = ~0;
c5059423
AM
10810 if (INTVAL (shiftop) > 32)
10811 shift_mask_high <<= INTVAL (shiftop) - 32;
10812
10813 lsb = high & -high;
10814
10815 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10816 return 0;
10817
10818 high = ~high;
10819 high &= -lsb;
10820
10821 lsb = high & -high;
10822 return high == -lsb;
10823 }
10824
02071907 10825 shift_mask_low = ~0;
c5059423
AM
10826 shift_mask_low <<= INTVAL (shiftop);
10827
10828 lsb = low & -low;
10829
10830 if (-lsb != shift_mask_low)
10831 return 0;
10832
10833 if (HOST_BITS_PER_WIDE_INT < 64)
10834 high = ~high;
10835 low = ~low;
10836 low &= -lsb;
10837
10838 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10839 {
10840 lsb = high & -high;
10841 return high == -lsb;
10842 }
10843
10844 lsb = low & -low;
10845 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10846 }
10847 else
10848 return 0;
10849}
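A standalone sketch (not part of rs6000.c; the helper name is_rldic_lshift_mask is made up) of the CONST_INT case above, using the same lsb = c & -c trick. With shift = 4 it accepts 0x0ff0 (exactly four low zeros, one block of ones, zeros above) and rejects 0x0ff1:

#include <stdio.h>

static int
is_rldic_lshift_mask (unsigned long long mask, int shift)
{
  unsigned long long lsb;

  if (mask == 0 || mask == ~0ULL)
    return 0;

  lsb = mask & -mask;                   /* least significant one bit */
  if (lsb != (1ULL << shift))           /* must sit exactly at bit SHIFT */
    return 0;

  mask = ~mask;                         /* look for the next transition */
  mask &= -lsb;                         /* drop the low run of ones */
  lsb = mask & -mask;
  return mask == -lsb;                  /* all ones above, no second group */
}

int
main (void)
{
  printf ("%d %d\n",
          is_rldic_lshift_mask (0x0ff0, 4),     /* 1 */
          is_rldic_lshift_mask (0x0ff1, 4));    /* 0 */
  return 0;
}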
e2c953b6 10850
c5059423
AM
10851/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10852 to perform a left shift. It must have SHIFTOP or more least
c1207243 10853 significant 0's, with the remainder of the word 1's. */
e2c953b6 10854
c5059423 10855int
a2369ed3 10856includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10857{
e2c953b6 10858 if (GET_CODE (andop) == CONST_INT)
c5059423 10859 {
02071907 10860 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10861
02071907 10862 shift_mask = ~0;
c5059423
AM
10863 shift_mask <<= INTVAL (shiftop);
10864 c = INTVAL (andop);
10865
c1207243 10866 /* Find the least significant one bit. */
c5059423
AM
10867 lsb = c & -c;
10868
10869 /* It must be covered by the shift mask.
a4f6c312 10870 This test also rejects c == 0. */
c5059423
AM
10871 if ((lsb & shift_mask) == 0)
10872 return 0;
10873
10874 /* Check we have all 1's above the transition, and reject all 1's. */
10875 return c == -lsb && lsb != 1;
10876 }
10877 else if (GET_CODE (andop) == CONST_DOUBLE
10878 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10879 {
02071907 10880 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10881
10882 low = CONST_DOUBLE_LOW (andop);
10883
10884 if (HOST_BITS_PER_WIDE_INT < 64)
10885 {
02071907 10886 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10887
10888 high = CONST_DOUBLE_HIGH (andop);
10889
10890 if (low == 0)
10891 {
02071907 10892 shift_mask_high = ~0;
c5059423
AM
10893 if (INTVAL (shiftop) > 32)
10894 shift_mask_high <<= INTVAL (shiftop) - 32;
10895
10896 lsb = high & -high;
10897
10898 if ((lsb & shift_mask_high) == 0)
10899 return 0;
10900
10901 return high == -lsb;
10902 }
10903 if (high != ~0)
10904 return 0;
10905 }
10906
02071907 10907 shift_mask_low = ~0;
c5059423
AM
10908 shift_mask_low <<= INTVAL (shiftop);
10909
10910 lsb = low & -low;
10911
10912 if ((lsb & shift_mask_low) == 0)
10913 return 0;
10914
10915 return low == -lsb && lsb != 1;
10916 }
e2c953b6 10917 else
c5059423 10918 return 0;
9878760c 10919}
35068b43 10920
11ac38b2
DE
10921/* Return 1 if the operands will generate valid arguments to the rlwimi
10922instruction for an insert with right shift in 64-bit mode. The mask may
10923not start on the first bit or stop on the last bit because the wrap-around
10924effects of the instruction do not correspond to the semantics of the RTL insn. */
10925
10926int
10927insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10928{
429ec7dc
DE
10929 if (INTVAL (startop) > 32
10930 && INTVAL (startop) < 64
10931 && INTVAL (sizeop) > 1
10932 && INTVAL (sizeop) + INTVAL (startop) < 64
10933 && INTVAL (shiftop) > 0
10934 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10935 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10936 return 1;
10937
10938 return 0;
10939}
10940
35068b43 10941/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10942 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10943
10944int
a2369ed3 10945registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10946{
10947 /* We might have been passed a SUBREG. */
f676971a 10948 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10949 return 0;
f676971a 10950
90f81f99
AP
10951 /* We might have been passed non floating point registers. */
10952 if (!FP_REGNO_P (REGNO (reg1))
10953 || !FP_REGNO_P (REGNO (reg2)))
10954 return 0;
35068b43
RK
10955
10956 return (REGNO (reg1) == REGNO (reg2) - 1);
10957}
10958
a4f6c312
SS
10959/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10960 addr1 and addr2 must be in consecutive memory locations
10961 (addr2 == addr1 + 8). */
35068b43
RK
10962
10963int
90f81f99 10964mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10965{
90f81f99 10966 rtx addr1, addr2;
bb8df8a6
EC
10967 unsigned int reg1, reg2;
10968 int offset1, offset2;
35068b43 10969
90f81f99
AP
10970 /* The mems cannot be volatile. */
10971 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10972 return 0;
f676971a 10973
90f81f99
AP
10974 addr1 = XEXP (mem1, 0);
10975 addr2 = XEXP (mem2, 0);
10976
35068b43
RK
10977 /* Extract an offset (if used) from the first addr. */
10978 if (GET_CODE (addr1) == PLUS)
10979 {
10980 /* If not a REG, return zero. */
10981 if (GET_CODE (XEXP (addr1, 0)) != REG)
10982 return 0;
10983 else
10984 {
c4ad648e 10985 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10986 /* The offset must be constant! */
10987 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10988 return 0;
10989 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10990 }
10991 }
10992 else if (GET_CODE (addr1) != REG)
10993 return 0;
10994 else
10995 {
10996 reg1 = REGNO (addr1);
10997 /* This was a simple (mem (reg)) expression. Offset is 0. */
10998 offset1 = 0;
10999 }
11000
bb8df8a6
EC
11001 /* And now for the second addr. */
11002 if (GET_CODE (addr2) == PLUS)
11003 {
11004 /* If not a REG, return zero. */
11005 if (GET_CODE (XEXP (addr2, 0)) != REG)
11006 return 0;
11007 else
11008 {
11009 reg2 = REGNO (XEXP (addr2, 0));
11010 /* The offset must be constant. */
11011 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11012 return 0;
11013 offset2 = INTVAL (XEXP (addr2, 1));
11014 }
11015 }
11016 else if (GET_CODE (addr2) != REG)
35068b43 11017 return 0;
bb8df8a6
EC
11018 else
11019 {
11020 reg2 = REGNO (addr2);
11021 /* This was a simple (mem (reg)) expression. Offset is 0. */
11022 offset2 = 0;
11023 }
35068b43 11024
bb8df8a6
EC
11025 /* Both of these must have the same base register. */
11026 if (reg1 != reg2)
35068b43
RK
11027 return 0;
11028
11029 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11030 if (offset2 != offset1 + 8)
35068b43
RK
11031 return 0;
11032
11033 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11034 instructions. */
11035 return 1;
11036}
9878760c
RK
11037\f
11038/* Return the register class of a scratch register needed to copy IN into
11039 or out of a register in CLASS in MODE. If it can be done directly,
11040 NO_REGS is returned. */
11041
11042enum reg_class
3c4774e0
R
11043rs6000_secondary_reload_class (enum reg_class class,
11044 enum machine_mode mode ATTRIBUTE_UNUSED,
11045 rtx in)
9878760c 11046{
5accd822 11047 int regno;
9878760c 11048
ab82a49f
AP
11049 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11050#if TARGET_MACHO
c4ad648e 11051 && MACHOPIC_INDIRECT
ab82a49f 11052#endif
c4ad648e 11053 ))
46fad5b7
DJ
11054 {
11055 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11056 other than BASE_REGS for TARGET_ELF. So indicate that a
11057 register from BASE_REGS is needed as an intermediate
11058 register.
f676971a 11059
46fad5b7
DJ
11060 On Darwin, pic addresses require a load from memory, which
11061 needs a base register. */
11062 if (class != BASE_REGS
c4ad648e
AM
11063 && (GET_CODE (in) == SYMBOL_REF
11064 || GET_CODE (in) == HIGH
11065 || GET_CODE (in) == LABEL_REF
11066 || GET_CODE (in) == CONST))
11067 return BASE_REGS;
46fad5b7 11068 }
e7b7998a 11069
5accd822
DE
11070 if (GET_CODE (in) == REG)
11071 {
11072 regno = REGNO (in);
11073 if (regno >= FIRST_PSEUDO_REGISTER)
11074 {
11075 regno = true_regnum (in);
11076 if (regno >= FIRST_PSEUDO_REGISTER)
11077 regno = -1;
11078 }
11079 }
11080 else if (GET_CODE (in) == SUBREG)
11081 {
11082 regno = true_regnum (in);
11083 if (regno >= FIRST_PSEUDO_REGISTER)
11084 regno = -1;
11085 }
11086 else
11087 regno = -1;
11088
9878760c
RK
11089 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11090 into anything. */
11091 if (class == GENERAL_REGS || class == BASE_REGS
11092 || (regno >= 0 && INT_REGNO_P (regno)))
11093 return NO_REGS;
11094
11095 /* Constants, memory, and FP registers can go into FP registers. */
11096 if ((regno == -1 || FP_REGNO_P (regno))
11097 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
11098 return NO_REGS;
11099
0ac081f6
AH
11100 /* Memory, and AltiVec registers can go into AltiVec registers. */
11101 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11102 && class == ALTIVEC_REGS)
11103 return NO_REGS;
11104
9878760c
RK
11105 /* We can copy among the CR registers. */
11106 if ((class == CR_REGS || class == CR0_REGS)
11107 && regno >= 0 && CR_REGNO_P (regno))
11108 return NO_REGS;
11109
11110 /* Otherwise, we need GENERAL_REGS. */
11111 return GENERAL_REGS;
11112}
11113\f
11114/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11115 know this is a valid comparison.
9878760c
RK
11116
11117 SCC_P is 1 if this is for an scc. That means that %D will have been
11118 used instead of %C, so the bits will be in different places.
11119
b4ac57ab 11120 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11121
11122int
a2369ed3 11123ccr_bit (rtx op, int scc_p)
9878760c
RK
11124{
11125 enum rtx_code code = GET_CODE (op);
11126 enum machine_mode cc_mode;
11127 int cc_regnum;
11128 int base_bit;
9ebbca7d 11129 rtx reg;
9878760c 11130
ec8e098d 11131 if (!COMPARISON_P (op))
9878760c
RK
11132 return -1;
11133
9ebbca7d
GK
11134 reg = XEXP (op, 0);
11135
37409796 11136 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11137
11138 cc_mode = GET_MODE (reg);
11139 cc_regnum = REGNO (reg);
11140 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11141
39a10a29 11142 validate_condition_mode (code, cc_mode);
c5defebb 11143
b7053a3f
GK
11144 /* When generating a sCOND operation, only positive conditions are
11145 allowed. */
37409796
NS
11146 gcc_assert (!scc_p
11147 || code == EQ || code == GT || code == LT || code == UNORDERED
11148 || code == GTU || code == LTU);
f676971a 11149
9878760c
RK
11150 switch (code)
11151 {
11152 case NE:
11153 return scc_p ? base_bit + 3 : base_bit + 2;
11154 case EQ:
11155 return base_bit + 2;
1c882ea4 11156 case GT: case GTU: case UNLE:
9878760c 11157 return base_bit + 1;
1c882ea4 11158 case LT: case LTU: case UNGE:
9878760c 11159 return base_bit;
1c882ea4
GK
11160 case ORDERED: case UNORDERED:
11161 return base_bit + 3;
9878760c
RK
11162
11163 case GE: case GEU:
39a10a29 11164 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11165 unordered position. So test that bit. For integer, this is ! LT
11166 unless this is an scc insn. */
39a10a29 11167 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11168
11169 case LE: case LEU:
39a10a29 11170 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11171
9878760c 11172 default:
37409796 11173 gcc_unreachable ();
9878760c
RK
11174 }
11175}
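A worked example (not part of rs6000.c) of the numbering above: each CR field is four bits wide, ordered LT, GT, EQ, SO/UNORDERED, so base_bit is 4 * (field - CR0) and, for instance, testing EQ of cr6 means CCR bit 26:

#include <stdio.h>

int
main (void)
{
  int field = 6;                /* cr6, the field AltiVec predicates set */
  int base_bit = 4 * field;

  printf ("LT=%d GT=%d EQ=%d SO=%d\n",
          base_bit, base_bit + 1, base_bit + 2, base_bit + 3);
  /* prints LT=24 GT=25 EQ=26 SO=27 */
  return 0;
}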
1ff7789b 11176\f
8d30c4ee 11177/* Return the GOT register. */
1ff7789b 11178
9390387d 11179rtx
a2369ed3 11180rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11181{
a4f6c312
SS
11182 /* The second flow pass currently (June 1999) can't update
11183 regs_ever_live without disturbing other parts of the compiler, so
11184 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11185 if (!can_create_pseudo_p ()
11186 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11187 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11188
8d30c4ee 11189 current_function_uses_pic_offset_table = 1;
3cb999d8 11190
1ff7789b
MM
11191 return pic_offset_table_rtx;
11192}
a7df97e6 11193\f
e2500fed
GK
11194/* Function to init struct machine_function.
11195 This will be called, via a pointer variable,
11196 from push_function_context. */
a7df97e6 11197
e2500fed 11198static struct machine_function *
863d938c 11199rs6000_init_machine_status (void)
a7df97e6 11200{
e2500fed 11201 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11202}
9878760c 11203\f
0ba1b2ff
AM
11204/* These macros test for integers and extract the low-order bits. */
11205#define INT_P(X) \
11206((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11207 && GET_MODE (X) == VOIDmode)
11208
11209#define INT_LOWPART(X) \
11210 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11211
11212int
a2369ed3 11213extract_MB (rtx op)
0ba1b2ff
AM
11214{
11215 int i;
11216 unsigned long val = INT_LOWPART (op);
11217
11218 /* If the high bit is zero, the value is the first 1 bit we find
11219 from the left. */
11220 if ((val & 0x80000000) == 0)
11221 {
37409796 11222 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11223
11224 i = 1;
11225 while (((val <<= 1) & 0x80000000) == 0)
11226 ++i;
11227 return i;
11228 }
11229
11230 /* If the high bit is set and the low bit is not, or the mask is all
11231 1's, the value is zero. */
11232 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11233 return 0;
11234
11235 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11236 from the right. */
11237 i = 31;
11238 while (((val >>= 1) & 1) != 0)
11239 --i;
11240
11241 return i;
11242}
11243
11244int
a2369ed3 11245extract_ME (rtx op)
0ba1b2ff
AM
11246{
11247 int i;
11248 unsigned long val = INT_LOWPART (op);
11249
11250 /* If the low bit is zero, the value is the first 1 bit we find from
11251 the right. */
11252 if ((val & 1) == 0)
11253 {
37409796 11254 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11255
11256 i = 30;
11257 while (((val >>= 1) & 1) == 0)
11258 --i;
11259
11260 return i;
11261 }
11262
11263 /* If the low bit is set and the high bit is not, or the mask is all
11264 1's, the value is 31. */
11265 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11266 return 31;
11267
11268 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11269 from the left. */
11270 i = 0;
11271 while (((val <<= 1) & 0x80000000) != 0)
11272 ++i;
11273
11274 return i;
11275}
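For a contiguous (non-wrap-around) mask, the two extractors above reduce to counting leading and trailing zeros in big-endian bit numbering. A standalone sketch (not part of rs6000.c, and unlike the functions above it does not handle wrap-around masks):

#include <stdio.h>

int
main (void)
{
  unsigned int mask = 0x0ff00000;
  int mb = __builtin_clz (mask);        /* first 1 from the left  -> 4  */
  int me = 31 - __builtin_ctz (mask);   /* last 1 from the left   -> 11 */

  printf ("MB=%d ME=%d\n", mb, me);     /* rlwinm would use MB=4, ME=11 */
  return 0;
}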
11276
c4501e62
JJ
11277/* Locate some local-dynamic symbol still in use by this function
11278 so that we can print its name in some tls_ld pattern. */
11279
11280static const char *
863d938c 11281rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11282{
11283 rtx insn;
11284
11285 if (cfun->machine->some_ld_name)
11286 return cfun->machine->some_ld_name;
11287
11288 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11289 if (INSN_P (insn)
11290 && for_each_rtx (&PATTERN (insn),
11291 rs6000_get_some_local_dynamic_name_1, 0))
11292 return cfun->machine->some_ld_name;
11293
37409796 11294 gcc_unreachable ();
c4501e62
JJ
11295}
11296
11297/* Helper function for rs6000_get_some_local_dynamic_name. */
11298
11299static int
a2369ed3 11300rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11301{
11302 rtx x = *px;
11303
11304 if (GET_CODE (x) == SYMBOL_REF)
11305 {
11306 const char *str = XSTR (x, 0);
11307 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11308 {
11309 cfun->machine->some_ld_name = str;
11310 return 1;
11311 }
11312 }
11313
11314 return 0;
11315}
11316
85b776df
AM
11317/* Write out a function code label. */
11318
11319void
11320rs6000_output_function_entry (FILE *file, const char *fname)
11321{
11322 if (fname[0] != '.')
11323 {
11324 switch (DEFAULT_ABI)
11325 {
11326 default:
37409796 11327 gcc_unreachable ();
85b776df
AM
11328
11329 case ABI_AIX:
11330 if (DOT_SYMBOLS)
11331 putc ('.', file);
11332 else
11333 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11334 break;
11335
11336 case ABI_V4:
11337 case ABI_DARWIN:
11338 break;
11339 }
11340 }
11341 if (TARGET_AIX)
11342 RS6000_OUTPUT_BASENAME (file, fname);
11343 else
11344 assemble_name (file, fname);
11345}
11346
9878760c
RK
11347/* Print an operand. Recognize special options, documented below. */
11348
38c1f2d7 11349#if TARGET_ELF
d9407988 11350#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11351#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11352#else
11353#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11354#define SMALL_DATA_REG 0
ba5e43aa
MM
11355#endif
11356
9878760c 11357void
a2369ed3 11358print_operand (FILE *file, rtx x, int code)
9878760c
RK
11359{
11360 int i;
a260abc9 11361 HOST_WIDE_INT val;
0ba1b2ff 11362 unsigned HOST_WIDE_INT uval;
9878760c
RK
11363
11364 switch (code)
11365 {
a8b3aeda 11366 case '.':
a85d226b
RK
11367 /* Write out an instruction after the call which may be replaced
11368 with glue code by the loader. This depends on the AIX version. */
11369 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11370 return;
11371
81eace42
GK
11372 /* %a is output_address. */
11373
9854d9ed
RK
11374 case 'A':
11375 /* If X is a constant integer whose low-order 5 bits are zero,
11376 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11377 in the AIX assembler where "sri" with a zero shift count
20e26713 11378 writes a trash instruction. */
9854d9ed 11379 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11380 putc ('l', file);
9854d9ed 11381 else
76229ac8 11382 putc ('r', file);
9854d9ed
RK
11383 return;
11384
11385 case 'b':
e2c953b6
DE
11386 /* If constant, low-order 16 bits of constant, unsigned.
11387 Otherwise, write normally. */
11388 if (INT_P (x))
11389 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11390 else
11391 print_operand (file, x, 0);
cad12a8d
RK
11392 return;
11393
a260abc9
DE
11394 case 'B':
11395 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11396 for 64-bit mask direction. */
9390387d 11397 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11398 return;
a260abc9 11399
81eace42
GK
11400 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11401 output_operand. */
11402
423c1189
AH
11403 case 'c':
11404 /* X is a CR register. Print the number of the GT bit of the CR. */
11405 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11406 output_operand_lossage ("invalid %%c value");
11407 else
11408 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11409 return;
11410
11411 case 'D':
cef6b86c 11412 /* Like 'J' but get to the GT bit only. */
37409796 11413 gcc_assert (GET_CODE (x) == REG);
423c1189 11414
cef6b86c
EB
11415 /* Bit 1 is GT bit. */
11416 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11417
cef6b86c
EB
11418 /* Add one for shift count in rlinm for scc. */
11419 fprintf (file, "%d", i + 1);
423c1189
AH
11420 return;
11421
9854d9ed 11422 case 'E':
39a10a29 11423 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11424 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11425 output_operand_lossage ("invalid %%E value");
78fbdbf7 11426 else
39a10a29 11427 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11428 return;
9854d9ed
RK
11429
11430 case 'f':
11431 /* X is a CR register. Print the shift count needed to move it
11432 to the high-order four bits. */
11433 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11434 output_operand_lossage ("invalid %%f value");
11435 else
9ebbca7d 11436 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11437 return;
11438
11439 case 'F':
11440 /* Similar, but print the count for the rotate in the opposite
11441 direction. */
11442 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11443 output_operand_lossage ("invalid %%F value");
11444 else
9ebbca7d 11445 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11446 return;
11447
11448 case 'G':
11449 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11450 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11451 if (GET_CODE (x) != CONST_INT)
11452 output_operand_lossage ("invalid %%G value");
11453 else if (INTVAL (x) >= 0)
76229ac8 11454 putc ('z', file);
9854d9ed 11455 else
76229ac8 11456 putc ('m', file);
9854d9ed 11457 return;
e2c953b6 11458
9878760c 11459 case 'h':
a4f6c312
SS
11460 /* If constant, output low-order five bits. Otherwise, write
11461 normally. */
9878760c 11462 if (INT_P (x))
5f59ecb7 11463 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11464 else
11465 print_operand (file, x, 0);
11466 return;
11467
64305719 11468 case 'H':
a4f6c312
SS
11469 /* If constant, output low-order six bits. Otherwise, write
11470 normally. */
64305719 11471 if (INT_P (x))
5f59ecb7 11472 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11473 else
11474 print_operand (file, x, 0);
11475 return;
11476
9854d9ed
RK
11477 case 'I':
11478 /* Print `i' if this is a constant, else nothing. */
9878760c 11479 if (INT_P (x))
76229ac8 11480 putc ('i', file);
9878760c
RK
11481 return;
11482
9854d9ed
RK
11483 case 'j':
11484 /* Write the bit number in CCR for jump. */
11485 i = ccr_bit (x, 0);
11486 if (i == -1)
11487 output_operand_lossage ("invalid %%j code");
9878760c 11488 else
9854d9ed 11489 fprintf (file, "%d", i);
9878760c
RK
11490 return;
11491
9854d9ed
RK
11492 case 'J':
11493 /* Similar, but add one for shift count in rlinm for scc and pass
11494 scc flag to `ccr_bit'. */
11495 i = ccr_bit (x, 1);
11496 if (i == -1)
11497 output_operand_lossage ("invalid %%J code");
11498 else
a0466a68
RK
11499 /* If we want bit 31, write a shift count of zero, not 32. */
11500 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11501 return;
11502
9854d9ed
RK
11503 case 'k':
11504 /* X must be a constant. Write the 1's complement of the
11505 constant. */
9878760c 11506 if (! INT_P (x))
9854d9ed 11507 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11508 else
11509 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11510 return;
11511
81eace42 11512 case 'K':
9ebbca7d
GK
11513 /* X must be a symbolic constant on ELF. Write an
11514 expression suitable for an 'addi' that adds in the low 16
11515 bits of the MEM. */
11516 if (GET_CODE (x) != CONST)
11517 {
11518 print_operand_address (file, x);
11519 fputs ("@l", file);
11520 }
11521 else
11522 {
11523 if (GET_CODE (XEXP (x, 0)) != PLUS
11524 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11525 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11526 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11527 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11528 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11529 fputs ("@l", file);
ed8d2920
MM
11530 /* For GNU as, there must be a non-alphanumeric character
11531 between 'l' and the number. The '-' is added by
11532 print_operand() already. */
11533 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11534 fputs ("+", file);
9ebbca7d
GK
11535 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11536 }
81eace42
GK
11537 return;
11538
11539 /* %l is output_asm_label. */
9ebbca7d 11540
9854d9ed
RK
11541 case 'L':
11542 /* Write second word of DImode or DFmode reference. Works on register
11543 or non-indexed memory only. */
11544 if (GET_CODE (x) == REG)
fb5c67a7 11545 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11546 else if (GET_CODE (x) == MEM)
11547 {
11548 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11549 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11550 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11551 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11552 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11553 UNITS_PER_WORD));
6fb5fa3c
DB
11554 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11555 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11556 UNITS_PER_WORD));
9854d9ed 11557 else
d7624dc0
RK
11558 output_address (XEXP (adjust_address_nv (x, SImode,
11559 UNITS_PER_WORD),
11560 0));
ed8908e7 11561
ba5e43aa 11562 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11563 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11564 reg_names[SMALL_DATA_REG]);
9854d9ed 11565 }
9878760c 11566 return;
f676971a 11567
9878760c
RK
11568 case 'm':
11569 /* MB value for a mask operand. */
b1765bde 11570 if (! mask_operand (x, SImode))
9878760c
RK
11571 output_operand_lossage ("invalid %%m value");
11572
0ba1b2ff 11573 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11574 return;
11575
11576 case 'M':
11577 /* ME value for a mask operand. */
b1765bde 11578 if (! mask_operand (x, SImode))
a260abc9 11579 output_operand_lossage ("invalid %%M value");
9878760c 11580
0ba1b2ff 11581 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11582 return;
11583
81eace42
GK
11584 /* %n outputs the negative of its operand. */
11585
9878760c
RK
11586 case 'N':
11587 /* Write the number of elements in the vector times 4. */
11588 if (GET_CODE (x) != PARALLEL)
11589 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11590 else
11591 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11592 return;
11593
11594 case 'O':
11595 /* Similar, but subtract 1 first. */
11596 if (GET_CODE (x) != PARALLEL)
1427100a 11597 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11598 else
11599 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11600 return;
11601
9854d9ed
RK
11602 case 'p':
11603 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11604 if (! INT_P (x)
2bfcf297 11605 || INT_LOWPART (x) < 0
9854d9ed
RK
11606 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11607 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11608 else
11609 fprintf (file, "%d", i);
9854d9ed
RK
11610 return;
11611
9878760c
RK
11612 case 'P':
11613 /* The operand must be an indirect memory reference. The result
8bb418a3 11614 is the register name. */
9878760c
RK
11615 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11616 || REGNO (XEXP (x, 0)) >= 32)
11617 output_operand_lossage ("invalid %%P value");
e2c953b6 11618 else
fb5c67a7 11619 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11620 return;
11621
dfbdccdb
GK
11622 case 'q':
11623 /* This outputs the logical code corresponding to a boolean
11624 expression. The expression may have one or both operands
39a10a29 11625 negated (if one, only the first one). For condition register
c4ad648e
AM
11626 logical operations, it will also treat the negated
11627 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11628 {
63bc1d05 11629 const char *const *t = 0;
dfbdccdb
GK
11630 const char *s;
11631 enum rtx_code code = GET_CODE (x);
11632 static const char * const tbl[3][3] = {
11633 { "and", "andc", "nor" },
11634 { "or", "orc", "nand" },
11635 { "xor", "eqv", "xor" } };
11636
11637 if (code == AND)
11638 t = tbl[0];
11639 else if (code == IOR)
11640 t = tbl[1];
11641 else if (code == XOR)
11642 t = tbl[2];
11643 else
11644 output_operand_lossage ("invalid %%q value");
11645
11646 if (GET_CODE (XEXP (x, 0)) != NOT)
11647 s = t[0];
11648 else
11649 {
11650 if (GET_CODE (XEXP (x, 1)) == NOT)
11651 s = t[2];
11652 else
11653 s = t[1];
11654 }
f676971a 11655
dfbdccdb
GK
11656 fputs (s, file);
11657 }
11658 return;
11659
2c4a9cff
DE
11660 case 'Q':
11661 if (TARGET_MFCRF)
3b6ce0af 11662 fputc (',', file);
5efb1046 11663 /* FALLTHRU */
2c4a9cff
DE
11664 else
11665 return;
11666
9854d9ed
RK
11667 case 'R':
11668 /* X is a CR register. Print the mask for `mtcrf'. */
11669 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11670 output_operand_lossage ("invalid %%R value");
11671 else
9ebbca7d 11672 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11673 return;
9854d9ed
RK
11674
11675 case 's':
11676 /* Low 5 bits of 32 - value */
11677 if (! INT_P (x))
11678 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11679 else
11680 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11681 return;
9854d9ed 11682
a260abc9 11683 case 'S':
0ba1b2ff 11684 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11685 CONST_INT 32-bit mask is considered sign-extended so any
11686 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11687 if (! mask64_operand (x, DImode))
a260abc9
DE
11688 output_operand_lossage ("invalid %%S value");
11689
0ba1b2ff 11690 uval = INT_LOWPART (x);
a260abc9 11691
0ba1b2ff 11692 if (uval & 1) /* Clear Left */
a260abc9 11693 {
f099d360
GK
11694#if HOST_BITS_PER_WIDE_INT > 64
11695 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11696#endif
0ba1b2ff 11697 i = 64;
a260abc9 11698 }
0ba1b2ff 11699 else /* Clear Right */
a260abc9 11700 {
0ba1b2ff 11701 uval = ~uval;
f099d360
GK
11702#if HOST_BITS_PER_WIDE_INT > 64
11703 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11704#endif
0ba1b2ff 11705 i = 63;
a260abc9 11706 }
0ba1b2ff
AM
11707 while (uval != 0)
11708 --i, uval >>= 1;
37409796 11709 gcc_assert (i >= 0);
0ba1b2ff
AM
11710 fprintf (file, "%d", i);
11711 return;
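      /* Worked example (added for illustration; not in the original source):
         for the clear-left mask 0x000000000000ffff the low bit is set, so i
         starts at 64 and the sixteen one-bits bring it down to 48, the MB
         value for rldicl; for the clear-right mask 0xffffffff00000000 the
         complement has thirty-two one-bits, so i goes from 63 down to 31,
         the ME value for rldicr.  */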
a260abc9 11712
a3170dc6
AH
11713 case 't':
11714 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11715 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11716
11717 /* Bit 3 is OV bit. */
11718 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11719
11720 /* If we want bit 31, write a shift count of zero, not 32. */
11721 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11722 return;
11723
cccf3bdc
DE
11724 case 'T':
11725 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11726 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11727 && REGNO (x) != CTR_REGNO))
cccf3bdc 11728 output_operand_lossage ("invalid %%T value");
1de43f85 11729 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11730 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11731 else
11732 fputs ("ctr", file);
11733 return;
11734
9854d9ed 11735 case 'u':
802a0058 11736 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11737 if (! INT_P (x))
11738 output_operand_lossage ("invalid %%u value");
e2c953b6 11739 else
f676971a 11740 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11741 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11742 return;
11743
802a0058
MM
11744 case 'v':
11745 /* High-order 16 bits of constant for use in signed operand. */
11746 if (! INT_P (x))
11747 output_operand_lossage ("invalid %%v value");
e2c953b6 11748 else
134c32f6
DE
11749 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11750 (INT_LOWPART (x) >> 16) & 0xffff);
11751 return;
802a0058 11752
9854d9ed
RK
11753 case 'U':
11754 /* Print `u' if this has an auto-increment or auto-decrement. */
11755 if (GET_CODE (x) == MEM
11756 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11757 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11758 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11759 putc ('u', file);
9854d9ed 11760 return;
9878760c 11761
e0cd0770
JC
11762 case 'V':
11763 /* Print the trap code for this operand. */
11764 switch (GET_CODE (x))
11765 {
11766 case EQ:
11767 fputs ("eq", file); /* 4 */
11768 break;
11769 case NE:
11770 fputs ("ne", file); /* 24 */
11771 break;
11772 case LT:
11773 fputs ("lt", file); /* 16 */
11774 break;
11775 case LE:
11776 fputs ("le", file); /* 20 */
11777 break;
11778 case GT:
11779 fputs ("gt", file); /* 8 */
11780 break;
11781 case GE:
11782 fputs ("ge", file); /* 12 */
11783 break;
11784 case LTU:
11785 fputs ("llt", file); /* 2 */
11786 break;
11787 case LEU:
11788 fputs ("lle", file); /* 6 */
11789 break;
11790 case GTU:
11791 fputs ("lgt", file); /* 1 */
11792 break;
11793 case GEU:
11794 fputs ("lge", file); /* 5 */
11795 break;
11796 default:
37409796 11797 gcc_unreachable ();
e0cd0770
JC
11798 }
11799 break;
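      /* Added note (illustrative; not in the original source): the numbers
         in the comments above are the TO-field masks, where 16 = less than,
         8 = greater than, 4 = equal, 2 = logically less than and
         1 = logically greater than; e.g. "ne" is 16 + 8 = 24 and "le" is
         16 + 4 = 20.  */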
11800
9854d9ed
RK
11801 case 'w':
11802 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11803 normally. */
11804 if (INT_P (x))
f676971a 11805 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11806 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11807 else
11808 print_operand (file, x, 0);
9878760c
RK
11809 return;
11810
9854d9ed 11811 case 'W':
e2c953b6 11812 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11813 val = (GET_CODE (x) == CONST_INT
11814 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11815
11816 if (val < 0)
11817 i = -1;
9854d9ed 11818 else
e2c953b6
DE
11819 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11820 if ((val <<= 1) < 0)
11821 break;
11822
11823#if HOST_BITS_PER_WIDE_INT == 32
11824 if (GET_CODE (x) == CONST_INT && i >= 0)
11825 i += 32; /* zero-extend high-part was all 0's */
11826 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11827 {
11828 val = CONST_DOUBLE_LOW (x);
11829
37409796
NS
11830 gcc_assert (val);
11831 if (val < 0)
e2c953b6
DE
11832 --i;
11833 else
11834 for ( ; i < 64; i++)
11835 if ((val <<= 1) < 0)
11836 break;
11837 }
11838#endif
11839
11840 fprintf (file, "%d", i + 1);
9854d9ed 11841 return;
9878760c 11842
9854d9ed
RK
11843 case 'X':
11844 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11845 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11846 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11847 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11848 putc ('x', file);
9854d9ed 11849 return;
9878760c 11850
9854d9ed
RK
11851 case 'Y':
11852 /* Like 'L', for third word of TImode */
11853 if (GET_CODE (x) == REG)
fb5c67a7 11854 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11855 else if (GET_CODE (x) == MEM)
9878760c 11856 {
9854d9ed
RK
11857 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11858 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11859 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11860 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11861 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11862 else
d7624dc0 11863 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11864 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11865 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11866 reg_names[SMALL_DATA_REG]);
9878760c
RK
11867 }
11868 return;
f676971a 11869
9878760c 11870 case 'z':
b4ac57ab
RS
11871 /* X is a SYMBOL_REF. Write out the name preceded by a
11872 period and without any trailing data in brackets. Used for function
4d30c363
MM
11873 names. If we are configured for System V (or the embedded ABI) on
11874 the PowerPC, do not emit the period, since those systems do not use
11875 TOCs and the like. */
37409796 11876 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11877
c4ad648e
AM
11878 /* Mark the decl as referenced so that cgraph will output the
11879 function. */
9bf6462a 11880 if (SYMBOL_REF_DECL (x))
c4ad648e 11881 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11882
85b776df 11883 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11884 if (TARGET_MACHO)
11885 {
11886 const char *name = XSTR (x, 0);
a031e781 11887#if TARGET_MACHO
3b48085e 11888 if (MACHOPIC_INDIRECT
11abc112
MM
11889 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11890 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11891#endif
11892 assemble_name (file, name);
11893 }
85b776df 11894 else if (!DOT_SYMBOLS)
9739c90c 11895 assemble_name (file, XSTR (x, 0));
85b776df
AM
11896 else
11897 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11898 return;
11899
9854d9ed
RK
11900 case 'Z':
11901 /* Like 'L', for last word of TImode. */
11902 if (GET_CODE (x) == REG)
fb5c67a7 11903 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11904 else if (GET_CODE (x) == MEM)
11905 {
11906 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11907 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11908 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11909 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11910 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11911 else
d7624dc0 11912 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11913 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11914 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11915 reg_names[SMALL_DATA_REG]);
9854d9ed 11916 }
5c23c401 11917 return;
0ac081f6 11918
a3170dc6 11919 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11920 case 'y':
11921 {
11922 rtx tmp;
11923
37409796 11924 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11925
11926 tmp = XEXP (x, 0);
11927
90d3ff1c 11928 /* Ugly hack because %y is overloaded. */
8ef65e3d 11929 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11930 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11931 || GET_MODE (x) == TFmode
11932 || GET_MODE (x) == TImode))
a3170dc6
AH
11933 {
11934 /* Handle [reg]. */
11935 if (GET_CODE (tmp) == REG)
11936 {
11937 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11938 break;
11939 }
11940 /* Handle [reg+UIMM]. */
11941 else if (GET_CODE (tmp) == PLUS &&
11942 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11943 {
11944 int x;
11945
37409796 11946 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11947
11948 x = INTVAL (XEXP (tmp, 1));
11949 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11950 break;
11951 }
11952
11953 /* Fall through. Must be [reg+reg]. */
11954 }
850e8d3d
DN
11955 if (TARGET_ALTIVEC
11956 && GET_CODE (tmp) == AND
11957 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11958 && INTVAL (XEXP (tmp, 1)) == -16)
11959 tmp = XEXP (tmp, 0);
0ac081f6 11960 if (GET_CODE (tmp) == REG)
c62f2db5 11961 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11962 else
0ac081f6 11963 {
37409796 11964 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11965 && REG_P (XEXP (tmp, 0))
11966 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11967
0ac081f6
AH
11968 if (REGNO (XEXP (tmp, 0)) == 0)
11969 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11970 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11971 else
11972 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11973 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11974 }
0ac081f6
AH
11975 break;
11976 }
f676971a 11977
9878760c
RK
11978 case 0:
11979 if (GET_CODE (x) == REG)
11980 fprintf (file, "%s", reg_names[REGNO (x)]);
11981 else if (GET_CODE (x) == MEM)
11982 {
11983 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11984 know the width from the mode. */
11985 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11986 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11987 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11988 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11989 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11990 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
11991 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11992 output_address (XEXP (XEXP (x, 0), 1));
9878760c 11993 else
a54d04b7 11994 output_address (XEXP (x, 0));
9878760c
RK
11995 }
11996 else
a54d04b7 11997 output_addr_const (file, x);
a85d226b 11998 return;
9878760c 11999
c4501e62
JJ
12000 case '&':
12001 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12002 return;
12003
9878760c
RK
12004 default:
12005 output_operand_lossage ("invalid %%xn code");
12006 }
12007}
12008\f
12009/* Print the address of an operand. */
12010
12011void
a2369ed3 12012print_operand_address (FILE *file, rtx x)
9878760c
RK
12013{
12014 if (GET_CODE (x) == REG)
4697a36c 12015 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12016 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12017 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12018 {
12019 output_addr_const (file, x);
ba5e43aa 12020 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12021 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12022 reg_names[SMALL_DATA_REG]);
37409796
NS
12023 else
12024 gcc_assert (!TARGET_TOC);
9878760c
RK
12025 }
12026 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12027 {
9024f4b8 12028 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12029 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12030 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12031 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12032 else
4697a36c
MM
12033 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12034 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12035 }
12036 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12037 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12038 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12039#if TARGET_ELF
12040 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12041 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12042 {
12043 output_addr_const (file, XEXP (x, 1));
12044 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12045 }
c859cda6
DJ
12046#endif
12047#if TARGET_MACHO
12048 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12049 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12050 {
12051 fprintf (file, "lo16(");
12052 output_addr_const (file, XEXP (x, 1));
12053 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12054 }
3cb999d8 12055#endif
4d588c14 12056 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12057 {
2bfcf297 12058 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12059 {
2bfcf297
DB
12060 rtx contains_minus = XEXP (x, 1);
12061 rtx minus, symref;
12062 const char *name;
f676971a 12063
9ebbca7d 12064 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12065 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12066 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12067 contains_minus = XEXP (contains_minus, 0);
12068
2bfcf297
DB
12069 minus = XEXP (contains_minus, 0);
12070 symref = XEXP (minus, 0);
12071 XEXP (contains_minus, 0) = symref;
12072 if (TARGET_ELF)
12073 {
12074 char *newname;
12075
12076 name = XSTR (symref, 0);
12077 newname = alloca (strlen (name) + sizeof ("@toc"));
12078 strcpy (newname, name);
12079 strcat (newname, "@toc");
12080 XSTR (symref, 0) = newname;
12081 }
12082 output_addr_const (file, XEXP (x, 1));
12083 if (TARGET_ELF)
12084 XSTR (symref, 0) = name;
9ebbca7d
GK
12085 XEXP (contains_minus, 0) = minus;
12086 }
12087 else
12088 output_addr_const (file, XEXP (x, 1));
12089
12090 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12091 }
9878760c 12092 else
37409796 12093 gcc_unreachable ();
9878760c
RK
12094}
12095\f
88cad84b 12096/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12097 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12098 is defined. It also needs to handle DI-mode objects on 64-bit
12099 targets. */
12100
12101static bool
a2369ed3 12102rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12103{
f4f4921e 12104#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12105 /* Special handling for SI values. */
84dcde01 12106 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12107 {
301d03af 12108 static int recurse = 0;
f676971a 12109
301d03af
RS
12110 /* For -mrelocatable, we mark all addresses that need to be fixed up
12111 in the .fixup section. */
12112 if (TARGET_RELOCATABLE
d6b5193b
RS
12113 && in_section != toc_section
12114 && in_section != text_section
4325ca90 12115 && !unlikely_text_section_p (in_section)
301d03af
RS
12116 && !recurse
12117 && GET_CODE (x) != CONST_INT
12118 && GET_CODE (x) != CONST_DOUBLE
12119 && CONSTANT_P (x))
12120 {
12121 char buf[256];
12122
12123 recurse = 1;
12124 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12125 fixuplabelno++;
12126 ASM_OUTPUT_LABEL (asm_out_file, buf);
12127 fprintf (asm_out_file, "\t.long\t(");
12128 output_addr_const (asm_out_file, x);
12129 fprintf (asm_out_file, ")@fixup\n");
12130 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12131 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12132 fprintf (asm_out_file, "\t.long\t");
12133 assemble_name (asm_out_file, buf);
12134 fprintf (asm_out_file, "\n\t.previous\n");
12135 recurse = 0;
12136 return true;
12137 }
12138 /* Remove initial .'s to turn a -mcall-aixdesc function
12139 address into the address of the descriptor, not the function
12140 itself. */
12141 else if (GET_CODE (x) == SYMBOL_REF
12142 && XSTR (x, 0)[0] == '.'
12143 && DEFAULT_ABI == ABI_AIX)
12144 {
12145 const char *name = XSTR (x, 0);
12146 while (*name == '.')
12147 name++;
12148
12149 fprintf (asm_out_file, "\t.long\t%s\n", name);
12150 return true;
12151 }
12152 }
f4f4921e 12153#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12154 return default_assemble_integer (x, size, aligned_p);
12155}
93638d7a
AM
12156
12157#ifdef HAVE_GAS_HIDDEN
12158/* Emit an assembler directive to set symbol visibility for DECL to
12159 VISIBILITY_TYPE. */
12160
5add3202 12161static void
a2369ed3 12162rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12163{
93638d7a
AM
12164 /* Functions need to have their entry point symbol visibility set as
12165 well as their descriptor symbol visibility. */
85b776df
AM
12166 if (DEFAULT_ABI == ABI_AIX
12167 && DOT_SYMBOLS
12168 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12169 {
25fdb4dc 12170 static const char * const visibility_types[] = {
c4ad648e 12171 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12172 };
12173
12174 const char *name, *type;
93638d7a
AM
12175
12176 name = ((* targetm.strip_name_encoding)
12177 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12178 type = visibility_types[vis];
93638d7a 12179
25fdb4dc
RH
12180 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12181 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12182 }
25fdb4dc
RH
12183 else
12184 default_assemble_visibility (decl, vis);
93638d7a
AM
12185}
12186#endif
301d03af 12187\f
39a10a29 12188enum rtx_code
a2369ed3 12189rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12190{
12191 /* Reversal of FP compares takes care -- an ordered compare
12192 becomes an unordered compare and vice versa. */
f676971a 12193 if (mode == CCFPmode
bc9ec0e0
GK
12194 && (!flag_finite_math_only
12195 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12196 || code == UNEQ || code == LTGT))
bab6226b 12197 return reverse_condition_maybe_unordered (code);
39a10a29 12198 else
bab6226b 12199 return reverse_condition (code);
39a10a29
GK
12200}
12201
39a10a29
GK
12202/* Generate a compare for CODE. Return a brand-new rtx that
12203 represents the result of the compare. */
a4f6c312 12204
39a10a29 12205static rtx
a2369ed3 12206rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12207{
12208 enum machine_mode comp_mode;
12209 rtx compare_result;
12210
12211 if (rs6000_compare_fp_p)
12212 comp_mode = CCFPmode;
12213 else if (code == GTU || code == LTU
c4ad648e 12214 || code == GEU || code == LEU)
39a10a29 12215 comp_mode = CCUNSmode;
60934f9c
NS
12216 else if ((code == EQ || code == NE)
12217 && GET_CODE (rs6000_compare_op0) == SUBREG
12218 && GET_CODE (rs6000_compare_op1) == SUBREG
12219 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12220 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12221 /* These are unsigned values, perhaps there will be a later
12222 ordering compare that can be shared with this one.
12223 Unfortunately we cannot detect the signedness of the operands
12224 for non-subregs. */
12225 comp_mode = CCUNSmode;
39a10a29
GK
12226 else
12227 comp_mode = CCmode;
12228
12229 /* First, the compare. */
12230 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12231
cef6b86c 12232 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12233 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12234 && rs6000_compare_fp_p)
a3170dc6 12235 {
64022b5d 12236 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12237 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12238
12239 if (op_mode == VOIDmode)
12240 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12241
cef6b86c
EB
12242 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12243 This explains the following mess. */
423c1189 12244
a3170dc6
AH
12245 switch (code)
12246 {
423c1189 12247 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12248 switch (op_mode)
12249 {
12250 case SFmode:
12251 cmp = flag_unsafe_math_optimizations
12252 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12253 rs6000_compare_op1)
12254 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12255 rs6000_compare_op1);
12256 break;
12257
12258 case DFmode:
12259 cmp = flag_unsafe_math_optimizations
12260 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12261 rs6000_compare_op1)
12262 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12263 rs6000_compare_op1);
12264 break;
12265
17caeff2
JM
12266 case TFmode:
12267 cmp = flag_unsafe_math_optimizations
12268 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12269 rs6000_compare_op1)
12270 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12271 rs6000_compare_op1);
12272 break;
12273
37409796
NS
12274 default:
12275 gcc_unreachable ();
12276 }
a3170dc6 12277 break;
bb8df8a6 12278
423c1189 12279 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12280 switch (op_mode)
12281 {
12282 case SFmode:
12283 cmp = flag_unsafe_math_optimizations
12284 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12285 rs6000_compare_op1)
12286 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12287 rs6000_compare_op1);
12288 break;
bb8df8a6 12289
37409796
NS
12290 case DFmode:
12291 cmp = flag_unsafe_math_optimizations
12292 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12293 rs6000_compare_op1)
12294 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12295 rs6000_compare_op1);
12296 break;
12297
17caeff2
JM
12298 case TFmode:
12299 cmp = flag_unsafe_math_optimizations
12300 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12301 rs6000_compare_op1)
12302 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12303 rs6000_compare_op1);
12304 break;
12305
37409796
NS
12306 default:
12307 gcc_unreachable ();
12308 }
a3170dc6 12309 break;
bb8df8a6 12310
423c1189 12311 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12312 switch (op_mode)
12313 {
12314 case SFmode:
12315 cmp = flag_unsafe_math_optimizations
12316 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12317 rs6000_compare_op1)
12318 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12319 rs6000_compare_op1);
12320 break;
bb8df8a6 12321
37409796
NS
12322 case DFmode:
12323 cmp = flag_unsafe_math_optimizations
12324 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12325 rs6000_compare_op1)
12326 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12327 rs6000_compare_op1);
12328 break;
12329
17caeff2
JM
12330 case TFmode:
12331 cmp = flag_unsafe_math_optimizations
12332 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12333 rs6000_compare_op1)
12334 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12335 rs6000_compare_op1);
12336 break;
12337
37409796
NS
12338 default:
12339 gcc_unreachable ();
12340 }
a3170dc6 12341 break;
4d4cbc0e 12342 default:
37409796 12343 gcc_unreachable ();
a3170dc6
AH
12344 }
12345
12346 /* Synthesize LE and GE from LT/GT || EQ. */
12347 if (code == LE || code == GE || code == LEU || code == GEU)
12348 {
a3170dc6
AH
12349 emit_insn (cmp);
12350
12351 switch (code)
12352 {
12353 case LE: code = LT; break;
12354 case GE: code = GT; break;
12355 case LEU: code = LT; break;
12356 case GEU: code = GT; break;
37409796 12357 default: gcc_unreachable ();
a3170dc6
AH
12358 }
12359
a3170dc6
AH
12360 compare_result2 = gen_reg_rtx (CCFPmode);
12361
12362 /* Do the EQ. */
37409796
NS
12363 switch (op_mode)
12364 {
12365 case SFmode:
12366 cmp = flag_unsafe_math_optimizations
12367 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12368 rs6000_compare_op1)
12369 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12370 rs6000_compare_op1);
12371 break;
12372
12373 case DFmode:
12374 cmp = flag_unsafe_math_optimizations
12375 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12376 rs6000_compare_op1)
12377 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12378 rs6000_compare_op1);
12379 break;
12380
17caeff2
JM
12381 case TFmode:
12382 cmp = flag_unsafe_math_optimizations
12383 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12384 rs6000_compare_op1)
12385 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12386 rs6000_compare_op1);
12387 break;
12388
37409796
NS
12389 default:
12390 gcc_unreachable ();
12391 }
a3170dc6
AH
12392 emit_insn (cmp);
12393
a3170dc6 12394 /* OR them together. */
64022b5d
AH
12395 or_result = gen_reg_rtx (CCFPmode);
12396 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12397 compare_result2);
a3170dc6
AH
12398 compare_result = or_result;
12399 code = EQ;
12400 }
12401 else
12402 {
a3170dc6 12403 if (code == NE || code == LTGT)
a3170dc6 12404 code = NE;
423c1189
AH
12405 else
12406 code = EQ;
a3170dc6
AH
12407 }
12408
12409 emit_insn (cmp);
12410 }
12411 else
de17c25f
DE
12412 {
12413 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12414 CLOBBERs to match cmptf_internal2 pattern. */
12415 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12416 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12417 && !TARGET_IEEEQUAD
de17c25f
DE
12418 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12419 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12420 gen_rtvec (9,
12421 gen_rtx_SET (VOIDmode,
12422 compare_result,
12423 gen_rtx_COMPARE (comp_mode,
12424 rs6000_compare_op0,
12425 rs6000_compare_op1)),
12426 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12427 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12428 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12429 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12430 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12431 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12432 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12433 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12434 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12435 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12436 {
12437 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12438 comp_mode = CCEQmode;
12439 compare_result = gen_reg_rtx (CCEQmode);
12440 if (TARGET_64BIT)
12441 emit_insn (gen_stack_protect_testdi (compare_result,
12442 rs6000_compare_op0, op1));
12443 else
12444 emit_insn (gen_stack_protect_testsi (compare_result,
12445 rs6000_compare_op0, op1));
12446 }
de17c25f
DE
12447 else
12448 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12449 gen_rtx_COMPARE (comp_mode,
12450 rs6000_compare_op0,
12451 rs6000_compare_op1)));
12452 }
f676971a 12453
ca5adc63 12454 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12455 under flag_finite_math_only we don't bother. */
39a10a29 12456 if (rs6000_compare_fp_p
e7108df9 12457 && !flag_finite_math_only
8ef65e3d 12458 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12459 && (code == LE || code == GE
12460 || code == UNEQ || code == LTGT
12461 || code == UNGT || code == UNLT))
12462 {
12463 enum rtx_code or1, or2;
12464 rtx or1_rtx, or2_rtx, compare2_rtx;
12465 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12466
39a10a29
GK
12467 switch (code)
12468 {
12469 case LE: or1 = LT; or2 = EQ; break;
12470 case GE: or1 = GT; or2 = EQ; break;
12471 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12472 case LTGT: or1 = LT; or2 = GT; break;
12473 case UNGT: or1 = UNORDERED; or2 = GT; break;
12474 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12475 default: gcc_unreachable ();
39a10a29
GK
12476 }
12477 validate_condition_mode (or1, comp_mode);
12478 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12479 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12480 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12481 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12482 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12483 const_true_rtx);
12484 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12485
12486 compare_result = or_result;
12487 code = EQ;
12488 }
12489
12490 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12491
1c563bed 12492 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12493}
12494
12495
12496/* Emit the RTL for an sCOND pattern. */
12497
12498void
a2369ed3 12499rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12500{
12501 rtx condition_rtx;
12502 enum machine_mode op_mode;
b7053a3f 12503 enum rtx_code cond_code;
39a10a29
GK
12504
12505 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12506 cond_code = GET_CODE (condition_rtx);
12507
8ef65e3d 12508 if (rs6000_compare_fp_p
423c1189
AH
12509 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12510 {
12511 rtx t;
12512
12513 PUT_MODE (condition_rtx, SImode);
12514 t = XEXP (condition_rtx, 0);
12515
37409796 12516 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12517
12518 if (cond_code == NE)
64022b5d 12519 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12520
64022b5d 12521 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12522 return;
12523 }
12524
b7053a3f
GK
12525 if (cond_code == NE
12526 || cond_code == GE || cond_code == LE
12527 || cond_code == GEU || cond_code == LEU
12528 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12529 {
12530 rtx not_result = gen_reg_rtx (CCEQmode);
12531 rtx not_op, rev_cond_rtx;
12532 enum machine_mode cc_mode;
f676971a 12533
b7053a3f
GK
12534 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12535
1c563bed 12536 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12537 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12538 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12539 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12540 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12541 }
39a10a29
GK
12542
12543 op_mode = GET_MODE (rs6000_compare_op0);
12544 if (op_mode == VOIDmode)
12545 op_mode = GET_MODE (rs6000_compare_op1);
12546
12547 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12548 {
12549 PUT_MODE (condition_rtx, DImode);
12550 convert_move (result, condition_rtx, 0);
12551 }
12552 else
12553 {
12554 PUT_MODE (condition_rtx, SImode);
12555 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12556 }
12557}
12558
39a10a29
GK
12559/* Emit a branch of kind CODE to location LOC. */
12560
12561void
a2369ed3 12562rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12563{
12564 rtx condition_rtx, loc_ref;
12565
12566 condition_rtx = rs6000_generate_compare (code);
12567 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12568 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12569 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12570 loc_ref, pc_rtx)));
12571}
12572
12a4e8c5
GK
12573/* Return the string to output a conditional branch to LABEL, which is
12574 the operand number of the label, or -1 if the branch is really a
f676971a 12575 conditional return.
12a4e8c5
GK
12576
12577 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12578 condition code register and its mode specifies what kind of
12579 comparison we made.
12580
a0ab749a 12581 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12582
12583 INSN is the insn. */
12584
12585char *
a2369ed3 12586output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12587{
12588 static char string[64];
12589 enum rtx_code code = GET_CODE (op);
12590 rtx cc_reg = XEXP (op, 0);
12591 enum machine_mode mode = GET_MODE (cc_reg);
12592 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12593 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12594 int really_reversed = reversed ^ need_longbranch;
12595 char *s = string;
12596 const char *ccode;
12597 const char *pred;
12598 rtx note;
12599
39a10a29
GK
12600 validate_condition_mode (code, mode);
12601
12602 /* Work out which way this really branches. We could use
12603 reverse_condition_maybe_unordered here always but this
12604 makes the resulting assembler clearer. */
12a4e8c5 12605 if (really_reversed)
de40e1df
DJ
12606 {
12607 /* Reversal of FP compares takes care -- an ordered compare
12608 becomes an unordered compare and vice versa. */
12609 if (mode == CCFPmode)
12610 code = reverse_condition_maybe_unordered (code);
12611 else
12612 code = reverse_condition (code);
12613 }
12a4e8c5 12614
8ef65e3d 12615 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12616 {
12617 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12618 to the GT bit. */
37409796
NS
12619 switch (code)
12620 {
12621 case EQ:
12622 /* Opposite of GT. */
12623 code = GT;
12624 break;
12625
12626 case NE:
12627 code = UNLE;
12628 break;
12629
12630 default:
12631 gcc_unreachable ();
12632 }
a3170dc6
AH
12633 }
12634
39a10a29 12635 switch (code)
12a4e8c5
GK
12636 {
12637 /* Not all of these are actually distinct opcodes, but
12638 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12639 case NE: case LTGT:
12640 ccode = "ne"; break;
12641 case EQ: case UNEQ:
12642 ccode = "eq"; break;
f676971a 12643 case GE: case GEU:
50a0b056 12644 ccode = "ge"; break;
f676971a 12645 case GT: case GTU: case UNGT:
50a0b056 12646 ccode = "gt"; break;
f676971a 12647 case LE: case LEU:
50a0b056 12648 ccode = "le"; break;
f676971a 12649 case LT: case LTU: case UNLT:
50a0b056 12650 ccode = "lt"; break;
12a4e8c5
GK
12651 case UNORDERED: ccode = "un"; break;
12652 case ORDERED: ccode = "nu"; break;
12653 case UNGE: ccode = "nl"; break;
12654 case UNLE: ccode = "ng"; break;
12655 default:
37409796 12656 gcc_unreachable ();
12a4e8c5 12657 }
f676971a
EC
12658
12659 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12660 The old mnemonics don't have a way to specify this information. */
f4857b9b 12661 pred = "";
12a4e8c5
GK
12662 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12663 if (note != NULL_RTX)
12664 {
12665 /* PROB is the difference from 50%. */
12666 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12667
12668 /* Only hint for highly probable/improbable branches on newer
12669 cpus as static prediction overrides processor dynamic
12670 prediction. For older cpus we may as well always hint, but
12671 assume not taken for branches that are very close to 50% as a
12672 mispredicted taken branch is more expensive than a
f676971a 12673 mispredicted not-taken branch. */
ec507f2d 12674 if (rs6000_always_hint
2c9e13f3
JH
12675 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12676 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12677 {
12678 if (abs (prob) > REG_BR_PROB_BASE / 20
12679 && ((prob > 0) ^ need_longbranch))
c4ad648e 12680 pred = "+";
f4857b9b
AM
12681 else
12682 pred = "-";
12683 }
12a4e8c5 12684 }
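  /* Added note (illustrative; not in the original source): with GCC's
     REG_BR_PROB_BASE of 10000, the outer test above fires only when the
     note says the branch is taken more than 98% or less than 2% of the
     time (abs (prob) > 4800), and the inner test against
     REG_BR_PROB_BASE / 20 (500) then chooses between "+" and "-".  */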
12a4e8c5
GK
12685
12686 if (label == NULL)
94a54f47 12687 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12688 else
94a54f47 12689 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12690
37c67319 12691 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12692 Assume they'd only be the first character.... */
37c67319
GK
12693 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12694 *s++ = '%';
94a54f47 12695 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12696
12697 if (label != NULL)
12698 {
12699 /* If the branch distance was too far, we may have to use an
12700 unconditional branch to go the distance. */
12701 if (need_longbranch)
44518ddd 12702 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12703 else
12704 s += sprintf (s, ",%s", label);
12705 }
12706
12707 return string;
12708}
50a0b056 12709
64022b5d 12710/* Return the string to flip the GT bit on a CR. */
423c1189 12711char *
64022b5d 12712output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12713{
12714 static char string[64];
12715 int a, b;
12716
37409796
NS
12717 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12718 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12719
64022b5d
AH
12720 /* GT bit. */
12721 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12722 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12723
12724 sprintf (string, "crnot %d,%d", a, b);
12725 return string;
12726}
12727
21213b4c
DP
12728/* Return the insn index for the vector compare instruction for the given
12729   CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid
12730   insn is available.  */
12731
12732static int
94ff898d 12733get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12734 enum machine_mode dest_mode,
12735 enum machine_mode op_mode)
12736{
12737 if (!TARGET_ALTIVEC)
12738 return INSN_NOT_AVAILABLE;
12739
12740 switch (code)
12741 {
12742 case EQ:
12743 if (dest_mode == V16QImode && op_mode == V16QImode)
12744 return UNSPEC_VCMPEQUB;
12745 if (dest_mode == V8HImode && op_mode == V8HImode)
12746 return UNSPEC_VCMPEQUH;
12747 if (dest_mode == V4SImode && op_mode == V4SImode)
12748 return UNSPEC_VCMPEQUW;
12749 if (dest_mode == V4SImode && op_mode == V4SFmode)
12750 return UNSPEC_VCMPEQFP;
12751 break;
12752 case GE:
12753 if (dest_mode == V4SImode && op_mode == V4SFmode)
12754 return UNSPEC_VCMPGEFP;
12755 case GT:
12756 if (dest_mode == V16QImode && op_mode == V16QImode)
12757 return UNSPEC_VCMPGTSB;
12758 if (dest_mode == V8HImode && op_mode == V8HImode)
12759 return UNSPEC_VCMPGTSH;
12760 if (dest_mode == V4SImode && op_mode == V4SImode)
12761 return UNSPEC_VCMPGTSW;
12762 if (dest_mode == V4SImode && op_mode == V4SFmode)
12763 return UNSPEC_VCMPGTFP;
12764 break;
12765 case GTU:
12766 if (dest_mode == V16QImode && op_mode == V16QImode)
12767 return UNSPEC_VCMPGTUB;
12768 if (dest_mode == V8HImode && op_mode == V8HImode)
12769 return UNSPEC_VCMPGTUH;
12770 if (dest_mode == V4SImode && op_mode == V4SImode)
12771 return UNSPEC_VCMPGTUW;
12772 break;
12773 default:
12774 break;
12775 }
12776 return INSN_NOT_AVAILABLE;
12777}
12778
12779/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12780 DMODE is expected destination mode. This is a recursive function. */
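/* Added note (illustrative; not in the original source): for the integer
   vector modes AltiVec only provides EQ, GT and GTU compares, so the
   function below synthesizes the rest, e.g. LT swaps the operands and
   uses GT, NE is emitted as ~(A == B), and GE/LE are emitted as a GT/LT
   result ORed with EQ.  */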
12781
12782static rtx
12783rs6000_emit_vector_compare (enum rtx_code rcode,
12784 rtx op0, rtx op1,
12785 enum machine_mode dmode)
12786{
12787 int vec_cmp_insn;
12788 rtx mask;
12789 enum machine_mode dest_mode;
12790 enum machine_mode op_mode = GET_MODE (op1);
12791
37409796
NS
12792 gcc_assert (TARGET_ALTIVEC);
12793 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12794
12795 /* Floating point vector compare instructions use V4SImode as the destination.
12796 Move destination to appropriate mode later. */
12797 if (dmode == V4SFmode)
12798 dest_mode = V4SImode;
12799 else
12800 dest_mode = dmode;
12801
12802 mask = gen_reg_rtx (dest_mode);
12803 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12804
12805 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12806 {
12807 bool swap_operands = false;
12808 bool try_again = false;
12809 switch (rcode)
12810 {
12811 case LT:
12812 rcode = GT;
12813 swap_operands = true;
12814 try_again = true;
12815 break;
12816 case LTU:
12817 rcode = GTU;
12818 swap_operands = true;
12819 try_again = true;
12820 break;
12821 case NE:
370df7db
JC
12822 case UNLE:
12823 case UNLT:
12824 case UNGE:
12825 case UNGT:
12826 /* Invert condition and try again.
12827 e.g., A != B becomes ~(A==B). */
21213b4c 12828 {
370df7db 12829 enum rtx_code rev_code;
21213b4c 12830 enum insn_code nor_code;
d1123cde 12831 rtx eq_rtx;
370df7db
JC
12832
12833 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12834 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12835 dest_mode);
94ff898d 12836
166cdb08 12837 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12838 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12839 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12840
12841 if (dmode != dest_mode)
12842 {
12843 rtx temp = gen_reg_rtx (dest_mode);
12844 convert_move (temp, mask, 0);
12845 return temp;
12846 }
12847 return mask;
12848 }
12849 break;
12850 case GE:
12851 case GEU:
12852 case LE:
12853 case LEU:
12854 /* Try GT/GTU/LT/LTU OR EQ */
12855 {
12856 rtx c_rtx, eq_rtx;
12857 enum insn_code ior_code;
12858 enum rtx_code new_code;
12859
37409796
NS
12860 switch (rcode)
12861 {
12862 case GE:
12863 new_code = GT;
12864 break;
12865
12866 case GEU:
12867 new_code = GTU;
12868 break;
12869
12870 case LE:
12871 new_code = LT;
12872 break;
12873
12874 case LEU:
12875 new_code = LTU;
12876 break;
12877
12878 default:
12879 gcc_unreachable ();
12880 }
21213b4c
DP
12881
12882 c_rtx = rs6000_emit_vector_compare (new_code,
12883 op0, op1, dest_mode);
12884 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12885 dest_mode);
12886
166cdb08 12887 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12888 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12889 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12890 if (dmode != dest_mode)
12891 {
12892 rtx temp = gen_reg_rtx (dest_mode);
12893 convert_move (temp, mask, 0);
12894 return temp;
12895 }
12896 return mask;
12897 }
12898 break;
12899 default:
37409796 12900 gcc_unreachable ();
21213b4c
DP
12901 }
12902
12903 if (try_again)
12904 {
12905 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12906 /* You only get two chances. */
12907 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12908 }
12909
12910 if (swap_operands)
12911 {
12912 rtx tmp;
12913 tmp = op0;
12914 op0 = op1;
12915 op1 = tmp;
12916 }
12917 }
12918
915167f5
GK
12919 emit_insn (gen_rtx_SET (VOIDmode, mask,
12920 gen_rtx_UNSPEC (dest_mode,
12921 gen_rtvec (2, op0, op1),
12922 vec_cmp_insn)));
21213b4c
DP
12923 if (dmode != dest_mode)
12924 {
12925 rtx temp = gen_reg_rtx (dest_mode);
12926 convert_move (temp, mask, 0);
12927 return temp;
12928 }
12929 return mask;
12930}
12931
12932/* Return the vector select instruction for MODE, or INSN_NOT_AVAILABLE if
12933   no valid insn exists for the given mode.  */
12934
12935static int
12936get_vsel_insn (enum machine_mode mode)
12937{
12938 switch (mode)
12939 {
12940 case V4SImode:
12941 return UNSPEC_VSEL4SI;
12942 break;
12943 case V4SFmode:
12944 return UNSPEC_VSEL4SF;
12945 break;
12946 case V8HImode:
12947 return UNSPEC_VSEL8HI;
12948 break;
12949 case V16QImode:
12950 return UNSPEC_VSEL16QI;
12951 break;
12952 default:
12953 return INSN_NOT_AVAILABLE;
12954 break;
12955 }
12956 return INSN_NOT_AVAILABLE;
12957}
12958
12959/* Emit vector select insn where DEST is destination using
12960 operands OP1, OP2 and MASK. */
12961
12962static void
12963rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12964{
12965 rtx t, temp;
12966 enum machine_mode dest_mode = GET_MODE (dest);
12967 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12968
12969 temp = gen_reg_rtx (dest_mode);
94ff898d 12970
bb8df8a6 12971 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 12972 select op2. */
915167f5
GK
12973 t = gen_rtx_SET (VOIDmode, temp,
12974 gen_rtx_UNSPEC (dest_mode,
12975 gen_rtvec (3, op2, op1, mask),
12976 vsel_insn_index));
21213b4c
DP
12977 emit_insn (t);
12978 emit_move_insn (dest, temp);
12979 return;
12980}
12981
94ff898d 12982/* Emit vector conditional expression.
21213b4c
DP
12983 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
12984 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
12985
12986int
12987rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12988 rtx cond, rtx cc_op0, rtx cc_op1)
12989{
12990 enum machine_mode dest_mode = GET_MODE (dest);
12991 enum rtx_code rcode = GET_CODE (cond);
12992 rtx mask;
12993
12994 if (!TARGET_ALTIVEC)
12995 return 0;
12996
12997 /* Get the vector mask for the given relational operations. */
12998 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12999
13000 rs6000_emit_vector_select (dest, op1, op2, mask);
13001
13002 return 1;
13003}
13004
50a0b056
GK
13005/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
13006 operands of the last comparison is nonzero/true, FALSE_COND if it
13007 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13008
50a0b056 13009int
a2369ed3 13010rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13011{
13012 enum rtx_code code = GET_CODE (op);
13013 rtx op0 = rs6000_compare_op0;
13014 rtx op1 = rs6000_compare_op1;
13015 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13016 enum machine_mode compare_mode = GET_MODE (op0);
13017 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13018 rtx temp;
add2402e 13019 bool is_against_zero;
50a0b056 13020
a3c9585f 13021 /* These modes should always match. */
a3170dc6
AH
13022 if (GET_MODE (op1) != compare_mode
13023 /* In the isel case however, we can use a compare immediate, so
13024 op1 may be a small constant. */
13025 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13026 return 0;
178c3eff 13027 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13028 return 0;
178c3eff 13029 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13030 return 0;
13031
50a0b056 13032 /* First, work out if the hardware can do this at all, or
a3c9585f 13033 if it's too slow.... */
50a0b056 13034 if (! rs6000_compare_fp_p)
a3170dc6
AH
13035 {
13036 if (TARGET_ISEL)
13037 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13038 return 0;
13039 }
8ef65e3d 13040 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13041 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13042 return 0;
50a0b056 13043
add2402e 13044 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13045
add2402e
GK
13046 /* A floating-point subtract might overflow, underflow, or produce
13047 an inexact result, thus changing the floating-point flags, so it
13048 can't be generated if we care about that. It's safe if one side
13049 of the construct is zero, since then no subtract will be
13050 generated. */
ebb109ad 13051 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13052 && flag_trapping_math && ! is_against_zero)
13053 return 0;
13054
50a0b056
GK
13055 /* Eliminate half of the comparisons by switching operands; this
13056 makes the remaining code simpler. */
13057 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13058 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13059 {
13060 code = reverse_condition_maybe_unordered (code);
13061 temp = true_cond;
13062 true_cond = false_cond;
13063 false_cond = temp;
13064 }
13065
13066 /* UNEQ and LTGT take four instructions for a comparison with zero, so
13067 it'll probably be faster to use a branch here too. */
bc9ec0e0 13068 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13069 return 0;
f676971a 13070
50a0b056
GK
13071 if (GET_CODE (op1) == CONST_DOUBLE)
13072 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13073
b6d08ca1 13074 /* We're going to try to implement comparisons by performing
50a0b056
GK
13075 a subtract, then comparing against zero. Unfortunately,
13076 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13077 know that the operand is finite and the comparison
50a0b056 13078 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13079 if (HONOR_INFINITIES (compare_mode)
50a0b056 13080 && code != GT && code != UNGE
045572c7 13081 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13082 /* Constructs of the form (a OP b ? a : b) are safe. */
13083 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13084 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13085 && ! rtx_equal_p (op1, true_cond))))
13086 return 0;
add2402e 13087
50a0b056
GK
13088 /* At this point we know we can use fsel. */
13089
13090 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13091 if (! is_against_zero)
13092 {
13093 temp = gen_reg_rtx (compare_mode);
13094 emit_insn (gen_rtx_SET (VOIDmode, temp,
13095 gen_rtx_MINUS (compare_mode, op0, op1)));
13096 op0 = temp;
13097 op1 = CONST0_RTX (compare_mode);
13098 }
50a0b056
GK
13099
13100 /* If we don't care about NaNs we can reduce some of the comparisons
13101 down to faster ones. */
bc9ec0e0 13102 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13103 switch (code)
13104 {
13105 case GT:
13106 code = LE;
13107 temp = true_cond;
13108 true_cond = false_cond;
13109 false_cond = temp;
13110 break;
13111 case UNGE:
13112 code = GE;
13113 break;
13114 case UNEQ:
13115 code = EQ;
13116 break;
13117 default:
13118 break;
13119 }
13120
13121 /* Now, reduce everything down to a GE. */
13122 switch (code)
13123 {
13124 case GE:
13125 break;
13126
13127 case LE:
3148ad6d
DJ
13128 temp = gen_reg_rtx (compare_mode);
13129 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13130 op0 = temp;
13131 break;
13132
13133 case ORDERED:
3148ad6d
DJ
13134 temp = gen_reg_rtx (compare_mode);
13135 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13136 op0 = temp;
13137 break;
13138
13139 case EQ:
3148ad6d 13140 temp = gen_reg_rtx (compare_mode);
f676971a 13141 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13142 gen_rtx_NEG (compare_mode,
13143 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13144 op0 = temp;
13145 break;
13146
13147 case UNGE:
bc9ec0e0 13148 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13149 temp = gen_reg_rtx (result_mode);
50a0b056 13150 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13151 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13152 gen_rtx_GE (VOIDmode,
13153 op0, op1),
13154 true_cond, false_cond)));
bc9ec0e0
GK
13155 false_cond = true_cond;
13156 true_cond = temp;
50a0b056 13157
3148ad6d
DJ
13158 temp = gen_reg_rtx (compare_mode);
13159 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13160 op0 = temp;
13161 break;
13162
13163 case GT:
bc9ec0e0 13164 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13165 temp = gen_reg_rtx (result_mode);
50a0b056 13166 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13167 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13168 gen_rtx_GE (VOIDmode,
13169 op0, op1),
13170 true_cond, false_cond)));
bc9ec0e0
GK
13171 true_cond = false_cond;
13172 false_cond = temp;
50a0b056 13173
3148ad6d
DJ
13174 temp = gen_reg_rtx (compare_mode);
13175 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13176 op0 = temp;
13177 break;
13178
13179 default:
37409796 13180 gcc_unreachable ();
50a0b056
GK
13181 }
13182
13183 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13184 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13185 gen_rtx_GE (VOIDmode,
13186 op0, op1),
13187 true_cond, false_cond)));
13188 return 1;
13189}
13190
a3170dc6
AH
13191/* Same as above, but for ints (isel). */
13192
13193static int
a2369ed3 13194rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13195{
13196 rtx condition_rtx, cr;
13197
13198 /* All isel implementations thus far are 32-bit. */
13199 if (GET_MODE (rs6000_compare_op0) != SImode)
13200 return 0;
13201
13202 /* We still have to do the compare, because isel doesn't do a
13203 compare, it just looks at the CRx bits set by a previous compare
13204 instruction. */
13205 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13206 cr = XEXP (condition_rtx, 0);
13207
13208 if (GET_MODE (cr) == CCmode)
13209 emit_insn (gen_isel_signed (dest, condition_rtx,
13210 true_cond, false_cond, cr));
13211 else
13212 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13213 true_cond, false_cond, cr));
13214
13215 return 1;
13216}
13217
13218const char *
a2369ed3 13219output_isel (rtx *operands)
a3170dc6
AH
13220{
13221 enum rtx_code code;
13222
13223 code = GET_CODE (operands[1]);
13224 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13225 {
13226 PUT_CODE (operands[1], reverse_condition (code));
13227 return "isel %0,%3,%2,%j1";
13228 }
13229 else
13230 return "isel %0,%2,%3,%j1";
13231}
13232
50a0b056 13233void
a2369ed3 13234rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13235{
13236 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13237 enum rtx_code c;
50a0b056 13238 rtx target;
5dc8d536
AH
13239
13240 if (code == SMAX || code == SMIN)
13241 c = GE;
13242 else
13243 c = GEU;
13244
50a0b056 13245 if (code == SMAX || code == UMAX)
f676971a 13246 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13247 op0, op1, mode, 0);
13248 else
f676971a 13249 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13250 op1, op0, mode, 0);
37409796 13251 gcc_assert (target);
50a0b056
GK
13252 if (target != dest)
13253 emit_move_insn (dest, target);
13254}
46c07df8 13255
915167f5
GK
13256/* Emit instructions to perform a load-reserved/store-conditional operation.
13257 The operation performed is an atomic
13258 (set M (CODE:MODE M OP))
13259 If not NULL, BEFORE is atomically set to M before the operation, and
13260 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13261 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13262 Either OP or M may be wrapped in a NOT operation. */
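/* Added note (illustrative; not in the original source): for CODE == PLUS
   this behaves like an atomic fetch-and-add, with BEFORE receiving the old
   contents of M and AFTER receiving M plus OP.  */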
13263
13264void
13265rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13266 rtx m, rtx op, rtx before_param, rtx after_param,
13267 bool sync_p)
13268{
13269 enum machine_mode used_mode;
13270 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13271 rtx used_m;
13272 rtvec vec;
13273 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13274 rtx shift = NULL_RTX;
bb8df8a6 13275
915167f5
GK
13276 if (sync_p)
13277 emit_insn (gen_memory_barrier ());
bb8df8a6 13278
915167f5
GK
13279 if (GET_CODE (m) == NOT)
13280 used_m = XEXP (m, 0);
13281 else
13282 used_m = m;
13283
13284 /* If this is smaller than SImode, we'll have to use SImode with
13285 adjustments. */
13286 if (mode == QImode || mode == HImode)
13287 {
13288 rtx newop, oldop;
13289
13290 if (MEM_ALIGN (used_m) >= 32)
13291 {
13292 int ishift = 0;
13293 if (BYTES_BIG_ENDIAN)
13294 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13295
915167f5 13296 shift = GEN_INT (ishift);
c75c6d11 13297 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13298 }
13299 else
13300 {
13301 rtx addrSI, aligned_addr;
a9c9d3fa 13302 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13303
c75c6d11
JJ
13304 addrSI = gen_lowpart_common (SImode,
13305 force_reg (Pmode, XEXP (used_m, 0)));
13306 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13307 shift = gen_reg_rtx (SImode);
13308
13309 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13310 GEN_INT (shift_mask)));
13311 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13312
13313 aligned_addr = expand_binop (Pmode, and_optab,
13314 XEXP (used_m, 0),
13315 GEN_INT (-4), NULL_RTX,
13316 1, OPTAB_LIB_WIDEN);
13317 used_m = change_address (used_m, SImode, aligned_addr);
13318 set_mem_align (used_m, 32);
915167f5 13319 }
c75c6d11
JJ
13320 /* It's safe to keep the old alias set of USED_M, because
13321 the operation is atomic and only affects the original
13322 USED_M. */
13323 if (GET_CODE (m) == NOT)
13324 m = gen_rtx_NOT (SImode, used_m);
13325 else
13326 m = used_m;
915167f5
GK
13327
13328 if (GET_CODE (op) == NOT)
13329 {
13330 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13331 oldop = gen_rtx_NOT (SImode, oldop);
13332 }
13333 else
13334 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13335
915167f5
GK
13336 switch (code)
13337 {
13338 case IOR:
13339 case XOR:
13340 newop = expand_binop (SImode, and_optab,
13341 oldop, GEN_INT (imask), NULL_RTX,
13342 1, OPTAB_LIB_WIDEN);
13343 emit_insn (gen_ashlsi3 (newop, newop, shift));
13344 break;
13345
13346 case AND:
13347 newop = expand_binop (SImode, ior_optab,
13348 oldop, GEN_INT (~imask), NULL_RTX,
13349 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13350 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13351 break;
13352
13353 case PLUS:
9f0076e5 13354 case MINUS:
915167f5
GK
13355 {
13356 rtx mask;
bb8df8a6 13357
915167f5
GK
13358 newop = expand_binop (SImode, and_optab,
13359 oldop, GEN_INT (imask), NULL_RTX,
13360 1, OPTAB_LIB_WIDEN);
13361 emit_insn (gen_ashlsi3 (newop, newop, shift));
13362
13363 mask = gen_reg_rtx (SImode);
13364 emit_move_insn (mask, GEN_INT (imask));
13365 emit_insn (gen_ashlsi3 (mask, mask, shift));
13366
9f0076e5
DE
13367 if (code == PLUS)
13368 newop = gen_rtx_PLUS (SImode, m, newop);
13369 else
13370 newop = gen_rtx_MINUS (SImode, m, newop);
13371 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13372 newop = gen_rtx_IOR (SImode, newop,
13373 gen_rtx_AND (SImode,
13374 gen_rtx_NOT (SImode, mask),
13375 m));
13376 break;
13377 }
13378
13379 default:
13380 gcc_unreachable ();
13381 }
13382
a9c9d3fa
GK
13383 if (GET_CODE (m) == NOT)
13384 {
13385 rtx mask, xorm;
13386
13387 mask = gen_reg_rtx (SImode);
13388 emit_move_insn (mask, GEN_INT (imask));
13389 emit_insn (gen_ashlsi3 (mask, mask, shift));
13390
13391 xorm = gen_rtx_XOR (SImode, used_m, mask);
13392 /* Depending on the value of 'op', the XOR or the operation might
13393 be simplified away. */
13394 newop = simplify_gen_binary (code, SImode, xorm, newop);
13395 }
915167f5
GK
13396 op = newop;
13397 used_mode = SImode;
13398 before = gen_reg_rtx (used_mode);
13399 after = gen_reg_rtx (used_mode);
13400 }
13401 else
13402 {
13403 used_mode = mode;
13404 before = before_param;
13405 after = after_param;
13406
13407 if (before == NULL_RTX)
13408 before = gen_reg_rtx (used_mode);
13409 if (after == NULL_RTX)
13410 after = gen_reg_rtx (used_mode);
13411 }
bb8df8a6 13412
9f0076e5
DE
13413 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13414 && used_mode != mode)
915167f5
GK
13415 the_op = op; /* Computed above. */
13416 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13417 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13418 else
13419 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13420
13421 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13422 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13423 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13424 gen_rtx_UNSPEC (used_mode,
13425 gen_rtvec (1, the_op),
13426 UNSPEC_SYNC_OP));
915167f5
GK
13427 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13428
9f0076e5 13429 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13430 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13431 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13432 else
13433 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13434 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13435
13436 /* Shift and mask the return values properly. */
13437 if (used_mode != mode && before_param)
13438 {
13439 emit_insn (gen_lshrsi3 (before, before, shift));
13440 convert_move (before_param, before, 1);
13441 }
13442
13443 if (used_mode != mode && after_param)
13444 {
13445 emit_insn (gen_lshrsi3 (after, after, shift));
13446 convert_move (after_param, after, 1);
13447 }
13448
13449 /* The previous sequence will end with a branch that's dependent on
13450 the conditional store, so placing an isync will ensure that no
13451 other instructions (especially load or store instructions)
13452 can start before the atomic operation completes. */
13453 if (sync_p)
13454 emit_insn (gen_isync ());
13455}
13456
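/* Illustrative sketch, not part of the compiler: the kind of user-level
   code whose expansion reaches rs6000_emit_sync.  A __sync builtin on a
   sub-word object exercises the QImode/HImode path above, where the
   operation is widened to SImode and the operand is shifted and masked
   into position within the containing aligned word.  The builtin is the
   generic GCC __sync primitive; the variable and function names are
   invented for the example.  */

static unsigned char example_flags;

static unsigned char
example_set_flag_bits (unsigned char bits)
{
  /* Atomically OR BITS into a one-byte field and return the old value.  */
  return __sync_fetch_and_or (&example_flags, bits);
}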
b52110d4
DE
13457/* A subroutine of the atomic operation splitters. Jump to LABEL if
13458 COND is true. Mark the jump as unlikely to be taken. */
13459
13460static void
13461emit_unlikely_jump (rtx cond, rtx label)
13462{
13463 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13464 rtx x;
13465
13466 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13467 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13468 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13469}
13470
13471/* A subroutine of the atomic operation splitters. Emit a load-locked
13472 instruction in MODE. */
13473
13474static void
13475emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13476{
13477 rtx (*fn) (rtx, rtx) = NULL;
13478 if (mode == SImode)
13479 fn = gen_load_locked_si;
13480 else if (mode == DImode)
13481 fn = gen_load_locked_di;
13482 emit_insn (fn (reg, mem));
13483}
13484
13485/* A subroutine of the atomic operation splitters. Emit a store-conditional
13486 instruction in MODE. */
13487
13488static void
13489emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13490{
13491 rtx (*fn) (rtx, rtx, rtx) = NULL;
13492 if (mode == SImode)
13493 fn = gen_store_conditional_si;
13494 else if (mode == DImode)
13495 fn = gen_store_conditional_di;
13496
9f0076e5 13497 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13498 if (PPC405_ERRATUM77)
13499 emit_insn (gen_memory_barrier ());
13500
13501 emit_insn (fn (res, mem, val));
13502}
13503
ea2c620c 13504/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13505 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13506 operand of the binary operator. BEFORE and AFTER are optional locations to
13507 return the value of MEM either before or after the operation. SCRATCH is
13508 a scratch register. */
13509
13510void
13511rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13512 rtx before, rtx after, rtx scratch)
13513{
13514 enum machine_mode mode = GET_MODE (mem);
13515 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13516
13517 emit_insn (gen_memory_barrier ());
13518
13519 label = gen_label_rtx ();
13520 emit_label (label);
13521 label = gen_rtx_LABEL_REF (VOIDmode, label);
13522
13523 if (before == NULL_RTX)
13524 before = scratch;
13525 emit_load_locked (mode, before, mem);
13526
13527 if (code == NOT)
13528 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13529 else if (code == AND)
13530 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13531 else
13532 x = gen_rtx_fmt_ee (code, mode, before, val);
13533
13534 if (after != NULL_RTX)
13535 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13536 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13537
13538 emit_store_conditional (mode, cond, mem, scratch);
13539
13540 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13541 emit_unlikely_jump (x, label);
13542
13543 emit_insn (gen_isync ());
13544}
13545
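/* Illustrative sketch only: a source-level fetch-and-add of the sort that
   rs6000_split_atomic_op splits into the load-locked/store-conditional
   loop above (load-locked, operate, store-conditional, branch back on
   failure, then isync).  The names are invented for the example.  */

static int example_counter;

static int
example_fetch_and_add (int delta)
{
  /* Returns the value of EXAMPLE_COUNTER as it was before the add.  */
  return __sync_fetch_and_add (&example_counter, delta);
}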
b52110d4
DE
13546/* Expand an atomic compare and swap operation. MEM is the memory on which
13547 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13548 value to be stored. SCRATCH is a scratch GPR. */
13549
13550void
13551rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13552 rtx scratch)
13553{
13554 enum machine_mode mode = GET_MODE (mem);
13555 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13556
13557 emit_insn (gen_memory_barrier ());
13558
13559 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13560 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13561 emit_label (XEXP (label1, 0));
13562
13563 emit_load_locked (mode, retval, mem);
13564
13565 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13566 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13567
13568 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13569 emit_unlikely_jump (x, label2);
13570
13571 emit_move_insn (scratch, newval);
13572 emit_store_conditional (mode, cond, mem, scratch);
13573
13574 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13575 emit_unlikely_jump (x, label1);
13576
13577 emit_insn (gen_isync ());
13578 emit_label (XEXP (label2, 0));
13579}
13580
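/* Illustrative sketch only: a source-level compare-and-swap.  The
   splitter above loads the current value with a load-locked, branches
   out if it does not match OLDVAL, and otherwise retries the
   store-conditional until it succeeds.  Names are invented for the
   example.  */

static long example_word;

static long
example_cas (long expected, long desired)
{
  /* Returns the previous contents; the swap took place iff the return
     value equals EXPECTED.  */
  return __sync_val_compare_and_swap (&example_word, expected, desired);
}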
13581/* Expand an atomic test and set operation. MEM is the memory on which
13582 to operate. VAL is the value to store. SCRATCH is a scratch GPR. */
13583
13584void
13585rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13586{
13587 enum machine_mode mode = GET_MODE (mem);
13588 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13589
13590 emit_insn (gen_memory_barrier ());
13591
13592 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13593 emit_label (XEXP (label, 0));
13594
13595 emit_load_locked (mode, retval, mem);
13596 emit_move_insn (scratch, val);
13597 emit_store_conditional (mode, cond, mem, scratch);
13598
13599 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13600 emit_unlikely_jump (x, label);
13601
13602 emit_insn (gen_isync ());
13603}
13604
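/* Illustrative sketch only: a minimal spinlock built on the primitive
   that rs6000_split_lock_test_and_set expands.  __sync_lock_test_and_set
   is documented as an acquire barrier and __sync_lock_release as a
   release barrier; the lock variable and helper names are invented for
   the example.  */

static volatile int example_lock;

static void
example_acquire (void)
{
  /* Spin until the previous value was 0, i.e. until we took the lock.  */
  while (__sync_lock_test_and_set (&example_lock, 1))
    ;
}

static void
example_release (void)
{
  __sync_lock_release (&example_lock);
}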
9fc75b97
DE
13605void
13606rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13607{
13608 enum machine_mode mode = GET_MODE (mem);
13609 rtx addrSI, align, wdst, shift, mask;
13610 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13611 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13612
13613 /* Shift amount for subword relative to aligned word. */
13614 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13615 shift = gen_reg_rtx (SImode);
13616 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13617 GEN_INT (shift_mask)));
13618 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13619
13620 /* Shift and mask old value into position within word. */
13621 oldval = convert_modes (SImode, mode, oldval, 1);
13622 oldval = expand_binop (SImode, and_optab,
13623 oldval, GEN_INT (imask), NULL_RTX,
13624 1, OPTAB_LIB_WIDEN);
13625 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13626
13627 /* Shift and mask new value into position within word. */
13628 newval = convert_modes (SImode, mode, newval, 1);
13629 newval = expand_binop (SImode, and_optab,
13630 newval, GEN_INT (imask), NULL_RTX,
13631 1, OPTAB_LIB_WIDEN);
13632 emit_insn (gen_ashlsi3 (newval, newval, shift));
13633
13634 /* Mask for insertion. */
13635 mask = gen_reg_rtx (SImode);
13636 emit_move_insn (mask, GEN_INT (imask));
13637 emit_insn (gen_ashlsi3 (mask, mask, shift));
13638
13639 /* Address of aligned word containing subword. */
13640 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13641 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13642 mem = change_address (mem, SImode, align);
13643 set_mem_align (mem, 32);
13644 MEM_VOLATILE_P (mem) = 1;
13645
13646 wdst = gen_reg_rtx (SImode);
13647 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13648 oldval, newval, mem));
13649
13650 emit_move_insn (dst, gen_lowpart (mode, wdst));
13651}
13652
13653void
13654rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13655 rtx oldval, rtx newval, rtx mem,
13656 rtx scratch)
13657{
13658 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13659
13660 emit_insn (gen_memory_barrier ());
13661 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13662 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13663 emit_label (XEXP (label1, 0));
13664
13665 emit_load_locked (SImode, scratch, mem);
13666
13667 /* Mask subword within loaded value for comparison with oldval.
13668 Use UNSPEC_AND to avoid a clobber. */
13669 emit_insn (gen_rtx_SET (SImode, dest,
13670 gen_rtx_UNSPEC (SImode,
13671 gen_rtvec (2, scratch, mask),
13672 UNSPEC_AND)));
13673
13674 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13675 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13676
13677 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13678 emit_unlikely_jump (x, label2);
13679
13680 /* Clear subword within loaded value for insertion of new value. */
13681 emit_insn (gen_rtx_SET (SImode, scratch,
13682 gen_rtx_AND (SImode,
13683 gen_rtx_NOT (SImode, mask), scratch)));
13684 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13685 emit_store_conditional (SImode, cond, mem, scratch);
13686
13687 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13688 emit_unlikely_jump (x, label1);
13689
13690 emit_insn (gen_isync ());
13691 emit_label (XEXP (label2, 0));
13692}
13693
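/* Illustrative sketch only: the sub-word shift that the rlwinm/xorsi3
   pair above computes for a big-endian access, restated in plain C.
   ADDR is the byte address and SIZE is 1 for QImode or 2 for HImode;
   the helper name is invented for the example.  */

static unsigned int
example_subword_shift (unsigned long addr, unsigned int size)
{
  unsigned int mask = (size == 1) ? 0x18 : 0x10;

  /* (addr & 3) * 8, limited to the positions legal for this size, then
     flipped so the shift counts from the least-significant end of the
     aligned word, as the big-endian layout requires.  */
  return ((addr << 3) & mask) ^ mask;
}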
13694
b52110d4 13695 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13696 multi-register moves. It will emit at most one instruction for
13697 each register that is accessed; that is, it won't emit li/lis pairs
13698 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13699 register. */
46c07df8 13700
46c07df8 13701void
a9baceb1 13702rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13703{
a9baceb1
GK
13704 /* The register number of the first register being moved. */
13705 int reg;
13706 /* The mode that is to be moved. */
13707 enum machine_mode mode;
13708 /* The mode that the move is being done in, and its size. */
13709 enum machine_mode reg_mode;
13710 int reg_mode_size;
13711 /* The number of registers that will be moved. */
13712 int nregs;
13713
13714 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13715 mode = GET_MODE (dst);
c8b622ff 13716 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13717 if (FP_REGNO_P (reg))
7393f7f8 13718 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13719 else if (ALTIVEC_REGNO_P (reg))
13720 reg_mode = V16QImode;
8521c414
JM
13721 else if (TARGET_E500_DOUBLE && mode == TFmode)
13722 reg_mode = DFmode;
a9baceb1
GK
13723 else
13724 reg_mode = word_mode;
13725 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13726
37409796 13727 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13728
a9baceb1
GK
13729 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13730 {
13731 /* Move register range backwards, if we might have destructive
13732 overlap. */
13733 int i;
13734 for (i = nregs - 1; i >= 0; i--)
f676971a 13735 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13736 simplify_gen_subreg (reg_mode, dst, mode,
13737 i * reg_mode_size),
13738 simplify_gen_subreg (reg_mode, src, mode,
13739 i * reg_mode_size)));
13740 }
46c07df8
HP
13741 else
13742 {
a9baceb1
GK
13743 int i;
13744 int j = -1;
13745 bool used_update = false;
46c07df8 13746
c1e55850 13747 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13748 {
13749 rtx breg;
3a1f863f 13750
a9baceb1
GK
13751 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13752 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13753 {
13754 rtx delta_rtx;
a9baceb1 13755 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13756 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13757 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13758 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13759 emit_insn (TARGET_32BIT
13760 ? gen_addsi3 (breg, breg, delta_rtx)
13761 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13762 src = replace_equiv_address (src, breg);
3a1f863f 13763 }
d04b6e6e 13764 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13765 {
13e2e16e 13766 rtx basereg;
c1e55850
GK
13767 basereg = gen_rtx_REG (Pmode, reg);
13768 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13769 src = replace_equiv_address (src, basereg);
c1e55850 13770 }
3a1f863f 13771
0423421f
AM
13772 breg = XEXP (src, 0);
13773 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13774 breg = XEXP (breg, 0);
13775
13776 /* If the base register we are using to address memory is
13777 also a destination reg, then change that register last. */
13778 if (REG_P (breg)
13779 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13780 && REGNO (breg) < REGNO (dst) + nregs)
13781 j = REGNO (breg) - REGNO (dst);
c4ad648e 13782 }
46c07df8 13783
a9baceb1 13784 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13785 {
13786 rtx breg;
13787
a9baceb1
GK
13788 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13789 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13790 {
13791 rtx delta_rtx;
a9baceb1 13792 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13793 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13794 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13795 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13796
13797 /* We have to update the breg before doing the store.
13798 Use store with update, if available. */
13799
13800 if (TARGET_UPDATE)
13801 {
a9baceb1 13802 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13803 emit_insn (TARGET_32BIT
13804 ? (TARGET_POWERPC64
13805 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13806 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13807 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13808 used_update = true;
3a1f863f
DE
13809 }
13810 else
a9baceb1
GK
13811 emit_insn (TARGET_32BIT
13812 ? gen_addsi3 (breg, breg, delta_rtx)
13813 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13814 dst = replace_equiv_address (dst, breg);
3a1f863f 13815 }
37409796 13816 else
d04b6e6e 13817 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13818 }
13819
46c07df8 13820 for (i = 0; i < nregs; i++)
f676971a 13821 {
3a1f863f
DE
13822 /* Calculate index to next subword. */
13823 ++j;
f676971a 13824 if (j == nregs)
3a1f863f 13825 j = 0;
46c07df8 13826
112cdef5 13827 /* If the compiler already emitted the move of the first word
a9baceb1 13828 by store with update, there is no need to do anything. */
3a1f863f 13829 if (j == 0 && used_update)
a9baceb1 13830 continue;
f676971a 13831
a9baceb1
GK
13832 emit_insn (gen_rtx_SET (VOIDmode,
13833 simplify_gen_subreg (reg_mode, dst, mode,
13834 j * reg_mode_size),
13835 simplify_gen_subreg (reg_mode, src, mode,
13836 j * reg_mode_size)));
3a1f863f 13837 }
46c07df8
HP
13838 }
13839}
13840
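/* Illustrative sketch only: the copy-direction rule used above, restated
   for ordinary memory.  When the source range starts below the
   destination range, copying the lowest word first would overwrite
   source words that are still needed, so the copy runs backwards;
   otherwise a forward loop is safe.  The helper is invented for the
   example.  */

static void
example_copy_words (unsigned int *dst, const unsigned int *src, int n)
{
  int i;

  if (src < dst)
    for (i = n - 1; i >= 0; i--)
      dst[i] = src[i];
  else
    for (i = 0; i < n; i++)
      dst[i] = src[i];
}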
12a4e8c5 13841\f
a4f6c312
SS
13842/* This page contains routines that are used to determine what the
13843 function prologue and epilogue code will do and write them out. */
9878760c 13844
a4f6c312
SS
13845/* Return the first fixed-point register that is required to be
13846 saved. 32 if none. */
9878760c
RK
13847
13848int
863d938c 13849first_reg_to_save (void)
9878760c
RK
13850{
13851 int first_reg;
13852
13853 /* Find lowest numbered live register. */
13854 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13855 if (df_regs_ever_live_p (first_reg)
a38d360d 13856 && (! call_used_regs[first_reg]
1db02437 13857 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13858 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13859 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13860 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13861 break;
13862
ee890fe2 13863#if TARGET_MACHO
93638d7a
AM
13864 if (flag_pic
13865 && current_function_uses_pic_offset_table
13866 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13867 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13868#endif
13869
9878760c
RK
13870 return first_reg;
13871}
13872
13873/* Similar, for FP regs. */
13874
13875int
863d938c 13876first_fp_reg_to_save (void)
9878760c
RK
13877{
13878 int first_reg;
13879
13880 /* Find lowest numbered live register. */
13881 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13882 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13883 break;
13884
13885 return first_reg;
13886}
00b960c7
AH
13887
13888/* Similar, for AltiVec regs. */
13889
13890static int
863d938c 13891first_altivec_reg_to_save (void)
00b960c7
AH
13892{
13893 int i;
13894
13895 /* Stack frame remains as is unless we are in AltiVec ABI. */
13896 if (! TARGET_ALTIVEC_ABI)
13897 return LAST_ALTIVEC_REGNO + 1;
13898
22fa69da 13899 /* On Darwin, the unwind routines are compiled without
982afe02 13900 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13901 altivec registers when necessary. */
13902 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13903 && ! TARGET_ALTIVEC)
13904 return FIRST_ALTIVEC_REGNO + 20;
13905
00b960c7
AH
13906 /* Find lowest numbered live register. */
13907 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13908 if (df_regs_ever_live_p (i))
00b960c7
AH
13909 break;
13910
13911 return i;
13912}
13913
13914/* Return a 32-bit mask of the AltiVec registers we need to set in
13915 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
13916 the 32-bit word is 0. */
13917
13918static unsigned int
863d938c 13919compute_vrsave_mask (void)
00b960c7
AH
13920{
13921 unsigned int i, mask = 0;
13922
22fa69da 13923 /* On Darwin, the unwind routines are compiled without
982afe02 13924 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13925 call-saved altivec registers when necessary. */
13926 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13927 && ! TARGET_ALTIVEC)
13928 mask |= 0xFFF;
13929
00b960c7
AH
13930 /* First, find out if we use _any_ altivec registers. */
13931 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13932 if (df_regs_ever_live_p (i))
00b960c7
AH
13933 mask |= ALTIVEC_REG_BIT (i);
13934
13935 if (mask == 0)
13936 return mask;
13937
00b960c7
AH
13938 /* Next, remove the argument registers from the set. These must
13939 be in the VRSAVE mask set by the caller, so we don't need to add
13940 them in again. More importantly, the mask we compute here is
13941 used to generate CLOBBERs in the set_vrsave insn, and we do not
13942 wish the argument registers to die. */
a6cf80f2 13943 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13944 mask &= ~ALTIVEC_REG_BIT (i);
13945
13946 /* Similarly, remove the return value from the set. */
13947 {
13948 bool yes = false;
13949 diddle_return_value (is_altivec_return_reg, &yes);
13950 if (yes)
13951 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13952 }
13953
13954 return mask;
13955}
13956
d62294f5 13957/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13958 size of prologues/epilogues by calling our own save/restore-the-world
13959 routines. */
d62294f5
FJ
13960
13961static void
f57fe068
AM
13962compute_save_world_info (rs6000_stack_t *info_ptr)
13963{
13964 info_ptr->world_save_p = 1;
13965 info_ptr->world_save_p
13966 = (WORLD_SAVE_P (info_ptr)
13967 && DEFAULT_ABI == ABI_DARWIN
13968 && ! (current_function_calls_setjmp && flag_exceptions)
13969 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13970 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13971 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13972 && info_ptr->cr_save_p);
f676971a 13973
d62294f5
FJ
13974 /* This will not work in conjunction with sibcalls. Make sure there
13975 are none. (This check is expensive, but seldom executed.) */
f57fe068 13976 if (WORLD_SAVE_P (info_ptr))
f676971a 13977 {
d62294f5
FJ
13978 rtx insn;
13979 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13980 if ( GET_CODE (insn) == CALL_INSN
13981 && SIBLING_CALL_P (insn))
13982 {
13983 info_ptr->world_save_p = 0;
13984 break;
13985 }
d62294f5 13986 }
f676971a 13987
f57fe068 13988 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13989 {
13990 /* Even if we're not touching VRsave, make sure there's room on the
13991 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13992 will attempt to save it. */
d62294f5
FJ
13993 info_ptr->vrsave_size = 4;
13994
13995 /* "Save" the VRsave register too if we're saving the world. */
13996 if (info_ptr->vrsave_mask == 0)
c4ad648e 13997 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13998
13999 /* Because the Darwin register save/restore routines only handle
c4ad648e 14000 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14001 check. */
37409796
NS
14002 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14003 && (info_ptr->first_altivec_reg_save
14004 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14005 }
f676971a 14006 return;
d62294f5
FJ
14007}
14008
14009
00b960c7 14010static void
a2369ed3 14011is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14012{
14013 bool *yes = (bool *) xyes;
14014 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14015 *yes = true;
14016}
14017
4697a36c
MM
14018\f
14019/* Calculate the stack information for the current function. This is
14020 complicated by having two separate calling sequences, the AIX calling
14021 sequence and the V.4 calling sequence.
14022
592696dd 14023 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14024 32-bit 64-bit
4697a36c 14025 SP----> +---------------------------------------+
a260abc9 14026 | back chain to caller | 0 0
4697a36c 14027 +---------------------------------------+
a260abc9 14028 | saved CR | 4 8 (8-11)
4697a36c 14029 +---------------------------------------+
a260abc9 14030 | saved LR | 8 16
4697a36c 14031 +---------------------------------------+
a260abc9 14032 | reserved for compilers | 12 24
4697a36c 14033 +---------------------------------------+
a260abc9 14034 | reserved for binders | 16 32
4697a36c 14035 +---------------------------------------+
a260abc9 14036 | saved TOC pointer | 20 40
4697a36c 14037 +---------------------------------------+
a260abc9 14038 | Parameter save area (P) | 24 48
4697a36c 14039 +---------------------------------------+
a260abc9 14040 | Alloca space (A) | 24+P etc.
802a0058 14041 +---------------------------------------+
a7df97e6 14042 | Local variable space (L) | 24+P+A
4697a36c 14043 +---------------------------------------+
a7df97e6 14044 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14045 +---------------------------------------+
00b960c7
AH
14046 | Save area for AltiVec registers (W) | 24+P+A+L+X
14047 +---------------------------------------+
14048 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14049 +---------------------------------------+
14050 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14051 +---------------------------------------+
00b960c7
AH
14052 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14053 +---------------------------------------+
14054 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14055 +---------------------------------------+
14056 old SP->| back chain to caller's caller |
14057 +---------------------------------------+
14058
5376a30c
KR
14059 The required alignment for AIX configurations is two words (i.e., 8
14060 or 16 bytes).
14061
14062
4697a36c
MM
14063 V.4 stack frames look like:
14064
14065 SP----> +---------------------------------------+
14066 | back chain to caller | 0
14067 +---------------------------------------+
5eb387b8 14068 | caller's saved LR | 4
4697a36c
MM
14069 +---------------------------------------+
14070 | Parameter save area (P) | 8
14071 +---------------------------------------+
a7df97e6 14072 | Alloca space (A) | 8+P
f676971a 14073 +---------------------------------------+
a7df97e6 14074 | Varargs save area (V) | 8+P+A
f676971a 14075 +---------------------------------------+
a7df97e6 14076 | Local variable space (L) | 8+P+A+V
f676971a 14077 +---------------------------------------+
a7df97e6 14078 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14079 +---------------------------------------+
00b960c7
AH
14080 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14081 +---------------------------------------+
14082 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14083 +---------------------------------------+
14084 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14085 +---------------------------------------+
c4ad648e
AM
14086 | SPE: area for 64-bit GP registers |
14087 +---------------------------------------+
14088 | SPE alignment padding |
14089 +---------------------------------------+
00b960c7 14090 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14091 +---------------------------------------+
00b960c7 14092 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14093 +---------------------------------------+
00b960c7 14094 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14095 +---------------------------------------+
14096 old SP->| back chain to caller's caller |
14097 +---------------------------------------+
b6c9286a 14098
5376a30c
KR
14099 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14100 given. (But note below and in sysv4.h that we require only 8 and
14101 may round up the size of our stack frame anyways. The historical
14102 reason is early versions of powerpc-linux which didn't properly
14103 align the stack at program startup. A happy side-effect is that
14104 -mno-eabi libraries can be used with -meabi programs.)
14105
50d440bc 14106 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14107 the stack alignment requirements may differ. If -mno-eabi is not
14108 given, the required stack alignment is 8 bytes; if -mno-eabi is
14109 given, the required alignment is 16 bytes. (But see V.4 comment
14110 above.) */
4697a36c 14111
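/* Illustrative sketch only: the round-up-to-alignment step that the
   layouts above depend on.  The real code uses the RS6000_ALIGN macro
   defined elsewhere; this helper merely restates the usual power-of-two
   rounding, with a name invented for the example.  */

static int
example_align_up (int size, int align)
{
  /* ALIGN must be a power of two (8 or 16 for these ABIs).  */
  return (size + align - 1) & ~(align - 1);
}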
61b2fbe7
MM
14112#ifndef ABI_STACK_BOUNDARY
14113#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14114#endif
14115
d1d0c603 14116static rs6000_stack_t *
863d938c 14117rs6000_stack_info (void)
4697a36c 14118{
022123e6 14119 static rs6000_stack_t info;
4697a36c 14120 rs6000_stack_t *info_ptr = &info;
327e5343 14121 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14122 int ehrd_size;
64045029 14123 int save_align;
8070c91a 14124 int first_gp;
44688022 14125 HOST_WIDE_INT non_fixed_size;
4697a36c 14126
022123e6 14127 memset (&info, 0, sizeof (info));
4697a36c 14128
c19de7aa
AH
14129 if (TARGET_SPE)
14130 {
14131 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14132 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14133 cfun->machine->insn_chain_scanned_p
14134 = spe_func_has_64bit_regs_p () + 1;
14135 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14136 }
14137
a4f6c312 14138 /* Select which calling sequence. */
178274da 14139 info_ptr->abi = DEFAULT_ABI;
9878760c 14140
a4f6c312 14141 /* Calculate which registers need to be saved & save area size. */
4697a36c 14142 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14143 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14144 even if it currently looks like we won't. Reload may need it to
14145 get at a constant; if so, it will have already created a constant
14146 pool entry for it. */
2bfcf297 14147 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14148 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14149 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14150 && current_function_uses_const_pool
1db02437 14151 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14152 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14153 else
8070c91a
DJ
14154 first_gp = info_ptr->first_gp_reg_save;
14155
14156 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14157
a3170dc6
AH
14158 /* For the SPE, we have an additional upper 32-bits on each GPR.
14159 Ideally we should save the entire 64-bits only when the upper
14160 half is used in SIMD instructions. Since we only record
14161 registers live (not the size they are used in), this proves
14162 difficult because we'd have to traverse the instruction chain at
14163 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14164 so we opt to save all the GPRs in 64-bits whenever even one register
14165 gets used in 64-bits. Otherwise, all the registers in the frame
14166 get saved in 32-bits.
a3170dc6 14167
c19de7aa 14168 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14169 traditional GP save area will be empty. */
c19de7aa 14170 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14171 info_ptr->gp_size = 0;
14172
4697a36c
MM
14173 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14174 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14175
00b960c7
AH
14176 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14177 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14178 - info_ptr->first_altivec_reg_save);
14179
592696dd 14180 /* Does this function call anything? */
71f123ca
FS
14181 info_ptr->calls_p = (! current_function_is_leaf
14182 || cfun->machine->ra_needs_full_frame);
b6c9286a 14183
a4f6c312 14184 /* Determine if we need to save the link register. */
022123e6
AM
14185 if ((DEFAULT_ABI == ABI_AIX
14186 && current_function_profile
14187 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14188#ifdef TARGET_RELOCATABLE
14189 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14190#endif
14191 || (info_ptr->first_fp_reg_save != 64
14192 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14193 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14194 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14195 || info_ptr->calls_p
14196 || rs6000_ra_ever_killed ())
4697a36c
MM
14197 {
14198 info_ptr->lr_save_p = 1;
1de43f85 14199 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14200 }
14201
9ebbca7d 14202 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14203 if (df_regs_ever_live_p (CR2_REGNO)
14204 || df_regs_ever_live_p (CR3_REGNO)
14205 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14206 {
14207 info_ptr->cr_save_p = 1;
178274da 14208 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14209 info_ptr->cr_size = reg_size;
14210 }
14211
83720594
RH
14212 /* If the current function calls __builtin_eh_return, then we need
14213 to allocate stack space for registers that will hold data for
14214 the exception handler. */
14215 if (current_function_calls_eh_return)
14216 {
14217 unsigned int i;
14218 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14219 continue;
a3170dc6
AH
14220
14221 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14222 ehrd_size = i * (TARGET_SPE_ABI
14223 && info_ptr->spe_64bit_regs_used != 0
14224 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14225 }
14226 else
14227 ehrd_size = 0;
14228
592696dd 14229 /* Determine various sizes. */
4697a36c
MM
14230 info_ptr->reg_size = reg_size;
14231 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14232 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14233 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14234 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14235 if (FRAME_GROWS_DOWNWARD)
14236 info_ptr->vars_size
5b667039
JJ
14237 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14238 + info_ptr->parm_size,
7d5175e1 14239 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14240 - (info_ptr->fixed_size + info_ptr->vars_size
14241 + info_ptr->parm_size);
00b960c7 14242
c19de7aa 14243 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14244 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14245 else
14246 info_ptr->spe_gp_size = 0;
14247
4d774ff8
HP
14248 if (TARGET_ALTIVEC_ABI)
14249 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14250 else
4d774ff8
HP
14251 info_ptr->vrsave_mask = 0;
14252
14253 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14254 info_ptr->vrsave_size = 4;
14255 else
14256 info_ptr->vrsave_size = 0;
b6c9286a 14257
d62294f5
FJ
14258 compute_save_world_info (info_ptr);
14259
592696dd 14260 /* Calculate the offsets. */
178274da 14261 switch (DEFAULT_ABI)
4697a36c 14262 {
b6c9286a 14263 case ABI_NONE:
24d304eb 14264 default:
37409796 14265 gcc_unreachable ();
b6c9286a
MM
14266
14267 case ABI_AIX:
ee890fe2 14268 case ABI_DARWIN:
b6c9286a
MM
14269 info_ptr->fp_save_offset = - info_ptr->fp_size;
14270 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14271
14272 if (TARGET_ALTIVEC_ABI)
14273 {
14274 info_ptr->vrsave_save_offset
14275 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14276
982afe02 14277 /* Align stack so vector save area is on a quadword boundary.
9278121c 14278 The padding goes above the vectors. */
00b960c7
AH
14279 if (info_ptr->altivec_size != 0)
14280 info_ptr->altivec_padding_size
9278121c 14281 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14282 else
14283 info_ptr->altivec_padding_size = 0;
14284
14285 info_ptr->altivec_save_offset
14286 = info_ptr->vrsave_save_offset
14287 - info_ptr->altivec_padding_size
14288 - info_ptr->altivec_size;
9278121c
GK
14289 gcc_assert (info_ptr->altivec_size == 0
14290 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14291
14292 /* Adjust for AltiVec case. */
14293 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14294 }
14295 else
14296 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14297 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14298 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14299 break;
14300
14301 case ABI_V4:
b6c9286a
MM
14302 info_ptr->fp_save_offset = - info_ptr->fp_size;
14303 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14304 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14305
c19de7aa 14306 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14307 {
14308 /* Align stack so SPE GPR save area is aligned on a
14309 double-word boundary. */
14310 if (info_ptr->spe_gp_size != 0)
14311 info_ptr->spe_padding_size
14312 = 8 - (-info_ptr->cr_save_offset % 8);
14313 else
14314 info_ptr->spe_padding_size = 0;
14315
14316 info_ptr->spe_gp_save_offset
14317 = info_ptr->cr_save_offset
14318 - info_ptr->spe_padding_size
14319 - info_ptr->spe_gp_size;
14320
14321 /* Adjust for SPE case. */
022123e6 14322 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14323 }
a3170dc6 14324 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14325 {
14326 info_ptr->vrsave_save_offset
14327 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14328
14329 /* Align stack so vector save area is on a quadword boundary. */
14330 if (info_ptr->altivec_size != 0)
14331 info_ptr->altivec_padding_size
14332 = 16 - (-info_ptr->vrsave_save_offset % 16);
14333 else
14334 info_ptr->altivec_padding_size = 0;
14335
14336 info_ptr->altivec_save_offset
14337 = info_ptr->vrsave_save_offset
14338 - info_ptr->altivec_padding_size
14339 - info_ptr->altivec_size;
14340
14341 /* Adjust for AltiVec case. */
022123e6 14342 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14343 }
14344 else
022123e6
AM
14345 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14346 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14347 info_ptr->lr_save_offset = reg_size;
14348 break;
4697a36c
MM
14349 }
14350
64045029 14351 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14352 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14353 + info_ptr->gp_size
14354 + info_ptr->altivec_size
14355 + info_ptr->altivec_padding_size
a3170dc6
AH
14356 + info_ptr->spe_gp_size
14357 + info_ptr->spe_padding_size
00b960c7
AH
14358 + ehrd_size
14359 + info_ptr->cr_size
022123e6 14360 + info_ptr->vrsave_size,
64045029 14361 save_align);
00b960c7 14362
44688022 14363 non_fixed_size = (info_ptr->vars_size
ff381587 14364 + info_ptr->parm_size
5b667039 14365 + info_ptr->save_size);
ff381587 14366
44688022
AM
14367 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14368 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14369
14370 /* Determine if we need to allocate any stack frame:
14371
a4f6c312
SS
14372 For AIX we need to push the stack if a frame pointer is needed
14373 (because the stack might be dynamically adjusted), if we are
14374 debugging, if we make calls, or if the sum of fp_save, gp_save,
14375 and local variables is more than the space needed to save all
14376 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14377 + 18*8 = 288 (GPR13 reserved).
ff381587 14378
a4f6c312
SS
14379 For V.4 we don't have the stack cushion that AIX uses, but assume
14380 that the debugger can handle stackless frames. */
ff381587
MM
14381
14382 if (info_ptr->calls_p)
14383 info_ptr->push_p = 1;
14384
178274da 14385 else if (DEFAULT_ABI == ABI_V4)
44688022 14386 info_ptr->push_p = non_fixed_size != 0;
ff381587 14387
178274da
AM
14388 else if (frame_pointer_needed)
14389 info_ptr->push_p = 1;
14390
14391 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14392 info_ptr->push_p = 1;
14393
ff381587 14394 else
44688022 14395 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14396
a4f6c312 14397 /* Zero offsets if we're not saving those registers. */
8dda1a21 14398 if (info_ptr->fp_size == 0)
4697a36c
MM
14399 info_ptr->fp_save_offset = 0;
14400
8dda1a21 14401 if (info_ptr->gp_size == 0)
4697a36c
MM
14402 info_ptr->gp_save_offset = 0;
14403
00b960c7
AH
14404 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14405 info_ptr->altivec_save_offset = 0;
14406
14407 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14408 info_ptr->vrsave_save_offset = 0;
14409
c19de7aa
AH
14410 if (! TARGET_SPE_ABI
14411 || info_ptr->spe_64bit_regs_used == 0
14412 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14413 info_ptr->spe_gp_save_offset = 0;
14414
c81fc13e 14415 if (! info_ptr->lr_save_p)
4697a36c
MM
14416 info_ptr->lr_save_offset = 0;
14417
c81fc13e 14418 if (! info_ptr->cr_save_p)
4697a36c
MM
14419 info_ptr->cr_save_offset = 0;
14420
14421 return info_ptr;
14422}
14423
c19de7aa
AH
14424/* Return true if the current function uses any GPRs in 64-bit SIMD
14425 mode. */
14426
14427static bool
863d938c 14428spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14429{
14430 rtx insns, insn;
14431
14432 /* Functions that save and restore all the call-saved registers will
14433 need to save/restore the registers in 64-bits. */
14434 if (current_function_calls_eh_return
14435 || current_function_calls_setjmp
14436 || current_function_has_nonlocal_goto)
14437 return true;
14438
14439 insns = get_insns ();
14440
14441 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14442 {
14443 if (INSN_P (insn))
14444 {
14445 rtx i;
14446
b5a5beb9
AH
14447 /* FIXME: This should be implemented with attributes...
14448
14449 (set_attr "spe64" "true")....then,
14450 if (get_spe64(insn)) return true;
14451
14452 It's the only reliable way to do the stuff below. */
14453
c19de7aa 14454 i = PATTERN (insn);
f82f556d
AH
14455 if (GET_CODE (i) == SET)
14456 {
14457 enum machine_mode mode = GET_MODE (SET_SRC (i));
14458
14459 if (SPE_VECTOR_MODE (mode))
14460 return true;
17caeff2 14461 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14462 return true;
14463 }
c19de7aa
AH
14464 }
14465 }
14466
14467 return false;
14468}
14469
d1d0c603 14470static void
a2369ed3 14471debug_stack_info (rs6000_stack_t *info)
9878760c 14472{
d330fd93 14473 const char *abi_string;
24d304eb 14474
c81fc13e 14475 if (! info)
4697a36c
MM
14476 info = rs6000_stack_info ();
14477
14478 fprintf (stderr, "\nStack information for function %s:\n",
14479 ((current_function_decl && DECL_NAME (current_function_decl))
14480 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14481 : "<unknown>"));
14482
24d304eb
RK
14483 switch (info->abi)
14484 {
b6c9286a
MM
14485 default: abi_string = "Unknown"; break;
14486 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14487 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14488 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14489 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14490 }
14491
14492 fprintf (stderr, "\tABI = %5s\n", abi_string);
14493
00b960c7
AH
14494 if (TARGET_ALTIVEC_ABI)
14495 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14496
a3170dc6
AH
14497 if (TARGET_SPE_ABI)
14498 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14499
4697a36c
MM
14500 if (info->first_gp_reg_save != 32)
14501 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14502
14503 if (info->first_fp_reg_save != 64)
14504 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14505
00b960c7
AH
14506 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14507 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14508 info->first_altivec_reg_save);
14509
4697a36c
MM
14510 if (info->lr_save_p)
14511 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14512
4697a36c
MM
14513 if (info->cr_save_p)
14514 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14515
00b960c7
AH
14516 if (info->vrsave_mask)
14517 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14518
4697a36c
MM
14519 if (info->push_p)
14520 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14521
14522 if (info->calls_p)
14523 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14524
4697a36c
MM
14525 if (info->gp_save_offset)
14526 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14527
14528 if (info->fp_save_offset)
14529 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14530
00b960c7
AH
14531 if (info->altivec_save_offset)
14532 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14533 info->altivec_save_offset);
14534
a3170dc6
AH
14535 if (info->spe_gp_save_offset)
14536 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14537 info->spe_gp_save_offset);
14538
00b960c7
AH
14539 if (info->vrsave_save_offset)
14540 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14541 info->vrsave_save_offset);
14542
4697a36c
MM
14543 if (info->lr_save_offset)
14544 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14545
14546 if (info->cr_save_offset)
14547 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14548
14549 if (info->varargs_save_offset)
14550 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14551
14552 if (info->total_size)
d1d0c603
JJ
14553 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14554 info->total_size);
4697a36c 14555
4697a36c 14556 if (info->vars_size)
d1d0c603
JJ
14557 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14558 info->vars_size);
4697a36c
MM
14559
14560 if (info->parm_size)
14561 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14562
14563 if (info->fixed_size)
14564 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14565
14566 if (info->gp_size)
14567 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14568
a3170dc6
AH
14569 if (info->spe_gp_size)
14570 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14571
4697a36c
MM
14572 if (info->fp_size)
14573 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14574
00b960c7
AH
14575 if (info->altivec_size)
14576 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14577
14578 if (info->vrsave_size)
14579 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14580
14581 if (info->altivec_padding_size)
14582 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14583 info->altivec_padding_size);
14584
a3170dc6
AH
14585 if (info->spe_padding_size)
14586 fprintf (stderr, "\tspe_padding_size = %5d\n",
14587 info->spe_padding_size);
14588
4697a36c
MM
14589 if (info->cr_size)
14590 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14591
14592 if (info->save_size)
14593 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14594
14595 if (info->reg_size != 4)
14596 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14597
14598 fprintf (stderr, "\n");
9878760c 14599}
71f123ca
FS
14600
14601rtx
a2369ed3 14602rs6000_return_addr (int count, rtx frame)
71f123ca 14603{
a4f6c312
SS
14604 /* Currently we don't optimize very well between prolog and body
14605 code, and for PIC code the output can actually be quite bad, so
14606 don't try to be too clever here. */
f1384257 14607 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14608 {
14609 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14610
14611 return
14612 gen_rtx_MEM
14613 (Pmode,
14614 memory_address
14615 (Pmode,
14616 plus_constant (copy_to_reg
14617 (gen_rtx_MEM (Pmode,
14618 memory_address (Pmode, frame))),
14619 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14620 }
14621
8c29550d 14622 cfun->machine->ra_need_lr = 1;
1de43f85 14623 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14624}
14625
5e1bf043
DJ
14626/* Say whether a function is a candidate for sibcall handling or not.
14627 We do not allow indirect calls to be optimized into sibling calls.
14628 Also, we can't do it if there are any vector parameters; there's
14629 nowhere to put the VRsave code so it works; note that functions with
14630 vector parameters are required to have a prototype, so the argument
14631 type info must be available here. (The tail recursion case can work
14632 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14633static bool
a2369ed3 14634rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14635{
14636 tree type;
4977bab6 14637 if (decl)
5e1bf043
DJ
14638 {
14639 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14640 {
4977bab6 14641 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14642 type; type = TREE_CHAIN (type))
14643 {
c15b529f 14644 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14645 return false;
5e1bf043 14646 }
c4ad648e 14647 }
5e1bf043 14648 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14649 || ((*targetm.binds_local_p) (decl)
14650 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14651 {
4977bab6 14652 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14653
14654 if (!lookup_attribute ("longcall", attr_list)
14655 || lookup_attribute ("shortcall", attr_list))
4977bab6 14656 return true;
2bcc50d0 14657 }
5e1bf043 14658 }
4977bab6 14659 return false;
5e1bf043
DJ
14660}
14661
e7e64a25
AS
14662/* Return NULL if INSN is valid within a low-overhead loop.
14663 Otherwise return a description of why doloop cannot be applied.
9419649c
DE
14664 PowerPC uses the COUNT register for branch on table instructions. */
14665
e7e64a25 14666static const char *
3101faab 14667rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14668{
14669 if (CALL_P (insn))
e7e64a25 14670 return "Function call in the loop.";
9419649c
DE
14671
14672 if (JUMP_P (insn)
14673 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14674 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14675 return "Computed branch in the loop.";
9419649c 14676
e7e64a25 14677 return NULL;
9419649c
DE
14678}
14679
71f123ca 14680static int
863d938c 14681rs6000_ra_ever_killed (void)
71f123ca
FS
14682{
14683 rtx top;
5e1bf043
DJ
14684 rtx reg;
14685 rtx insn;
71f123ca 14686
dd292d0a 14687 if (current_function_is_thunk)
71f123ca 14688 return 0;
eb0424da 14689
36f7e964
AH
14690 /* regs_ever_live has LR marked as used if any sibcalls are present,
14691 but this should not force saving and restoring in the
14692 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14693 clobbers LR, so that is inappropriate. */
36f7e964 14694
5e1bf043
DJ
14695 /* Also, the prologue can generate a store into LR that
14696 doesn't really count, like this:
36f7e964 14697
5e1bf043
DJ
14698 move LR->R0
14699 bcl to set PIC register
14700 move LR->R31
14701 move R0->LR
36f7e964
AH
14702
14703 When we're called from the epilogue, we need to avoid counting
14704 this as a store. */
f676971a 14705
71f123ca
FS
14706 push_topmost_sequence ();
14707 top = get_insns ();
14708 pop_topmost_sequence ();
1de43f85 14709 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14710
5e1bf043
DJ
14711 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14712 {
14713 if (INSN_P (insn))
14714 {
022123e6
AM
14715 if (CALL_P (insn))
14716 {
14717 if (!SIBLING_CALL_P (insn))
14718 return 1;
14719 }
1de43f85 14720 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14721 return 1;
36f7e964
AH
14722 else if (set_of (reg, insn) != NULL_RTX
14723 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14724 return 1;
14725 }
14726 }
14727 return 0;
71f123ca 14728}
4697a36c 14729\f
9ebbca7d 14730/* Emit instructions needed to load the TOC register.
c7ca610e 14731 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14732 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14733
14734void
a2369ed3 14735rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14736{
6fb5fa3c 14737 rtx dest;
1db02437 14738 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14739
7f970b70 14740 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14741 {
7f970b70 14742 char buf[30];
e65a3857 14743 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14744
14745 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14746 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14747 if (flag_pic == 2)
14748 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14749 else
14750 got = rs6000_got_sym ();
14751 tmp1 = tmp2 = dest;
14752 if (!fromprolog)
14753 {
14754 tmp1 = gen_reg_rtx (Pmode);
14755 tmp2 = gen_reg_rtx (Pmode);
14756 }
6fb5fa3c
DB
14757 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14758 emit_move_insn (tmp1,
1de43f85 14759 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14760 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14761 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14762 }
14763 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14764 {
6fb5fa3c 14765 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14766 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14767 }
14768 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14769 {
14770 char buf[30];
20b71b17
AM
14771 rtx temp0 = (fromprolog
14772 ? gen_rtx_REG (Pmode, 0)
14773 : gen_reg_rtx (Pmode));
20b71b17 14774
20b71b17
AM
14775 if (fromprolog)
14776 {
ccbca5e4 14777 rtx symF, symL;
38c1f2d7 14778
20b71b17
AM
14779 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14780 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14781
20b71b17
AM
14782 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14783 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14784
6fb5fa3c
DB
14785 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14786 emit_move_insn (dest,
1de43f85 14787 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14788 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14789 }
14790 else
20b71b17
AM
14791 {
14792 rtx tocsym;
20b71b17
AM
14793
14794 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14795 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14796 emit_move_insn (dest,
1de43f85 14797 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14798 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14799 }
6fb5fa3c 14800 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14801 }
20b71b17
AM
14802 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14803 {
14804 /* This is for AIX code running in non-PIC ELF32. */
14805 char buf[30];
14806 rtx realsym;
14807 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14808 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14809
6fb5fa3c
DB
14810 emit_insn (gen_elf_high (dest, realsym));
14811 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14812 }
37409796 14813 else
9ebbca7d 14814 {
37409796 14815 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14816
9ebbca7d 14817 if (TARGET_32BIT)
6fb5fa3c 14818 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14819 else
6fb5fa3c 14820 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14821 }
14822}
14823
d1d0c603
JJ
14824/* Emit instructions to restore the link register after determining where
14825 its value has been stored. */
14826
14827void
14828rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14829{
14830 rs6000_stack_t *info = rs6000_stack_info ();
14831 rtx operands[2];
14832
14833 operands[0] = source;
14834 operands[1] = scratch;
14835
14836 if (info->lr_save_p)
14837 {
14838 rtx frame_rtx = stack_pointer_rtx;
14839 HOST_WIDE_INT sp_offset = 0;
14840 rtx tmp;
14841
14842 if (frame_pointer_needed
14843 || current_function_calls_alloca
14844 || info->total_size > 32767)
14845 {
0be76840 14846 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14847 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14848 frame_rtx = operands[1];
14849 }
14850 else if (info->push_p)
14851 sp_offset = info->total_size;
14852
14853 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14854 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14855 emit_move_insn (tmp, operands[0]);
14856 }
14857 else
1de43f85 14858 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14859}
14860
4862826d 14861static GTY(()) alias_set_type set = -1;
f103e34d 14862
4862826d 14863alias_set_type
863d938c 14864get_TOC_alias_set (void)
9ebbca7d 14865{
f103e34d
GK
14866 if (set == -1)
14867 set = new_alias_set ();
14868 return set;
f676971a 14869}
9ebbca7d 14870
c1207243 14871/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14872 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14873 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14874#if TARGET_ELF
3c9eb5f4 14875static int
f676971a 14876uses_TOC (void)
9ebbca7d 14877{
c4501e62 14878 rtx insn;
38c1f2d7 14879
c4501e62
JJ
14880 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14881 if (INSN_P (insn))
14882 {
14883 rtx pat = PATTERN (insn);
14884 int i;
9ebbca7d 14885
f676971a 14886 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14887 for (i = 0; i < XVECLEN (pat, 0); i++)
14888 {
14889 rtx sub = XVECEXP (pat, 0, i);
14890 if (GET_CODE (sub) == USE)
14891 {
14892 sub = XEXP (sub, 0);
14893 if (GET_CODE (sub) == UNSPEC
14894 && XINT (sub, 1) == UNSPEC_TOC)
14895 return 1;
14896 }
14897 }
14898 }
14899 return 0;
9ebbca7d 14900}
c954844a 14901#endif
38c1f2d7 14902
9ebbca7d 14903rtx
f676971a 14904create_TOC_reference (rtx symbol)
9ebbca7d 14905{
b3a13419 14906 if (!can_create_pseudo_p ())
6fb5fa3c 14907 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14908 return gen_rtx_PLUS (Pmode,
a8a05998 14909 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14910 gen_rtx_CONST (Pmode,
14911 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14912 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14913}
38c1f2d7 14914
fc4767bb
JJ
14915/* If _Unwind_* has been called from within the same module,
 14916 the TOC register is not guaranteed to be saved to 40(1) on function
 14917 entry. Save it there in that case. */
c7ca610e 14918
9ebbca7d 14919void
863d938c 14920rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14921{
14922 rtx mem;
14923 rtx stack_top = gen_reg_rtx (Pmode);
14924 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14925 rtx opcode = gen_reg_rtx (SImode);
14926 rtx tocompare = gen_reg_rtx (SImode);
14927 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14928
8308679f 14929 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14930 emit_move_insn (stack_top, mem);
14931
8308679f
DE
14932 mem = gen_frame_mem (Pmode,
14933 gen_rtx_PLUS (Pmode, stack_top,
14934 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14935 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14936 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14937 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14938 : 0xE8410028, SImode));
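  /* These constants appear to decode to "lwz r2,20(r1)" (32-bit) and
     "ld r2,40(r1)" (64-bit), i.e. the conventional TOC reload a caller
     emits after a call.  If the instruction at the saved return address
     is already that reload, no extra TOC save is needed.  */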
9ebbca7d 14939
fc4767bb 14940 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14941 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14942 no_toc_save_needed);
9ebbca7d 14943
8308679f
DE
14944 mem = gen_frame_mem (Pmode,
14945 gen_rtx_PLUS (Pmode, stack_top,
14946 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14947 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14948 emit_label (no_toc_save_needed);
9ebbca7d 14949}
38c1f2d7 14950\f
0be76840
DE
14951/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14952 and the change to the stack pointer. */
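/* (The BLKmode frame MEM presumably conflicts with every frame access, so
   the tie acts as a barrier between frame stores/loads and the stack
   pointer update.)  */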
ba4828e0 14953
9ebbca7d 14954static void
863d938c 14955rs6000_emit_stack_tie (void)
9ebbca7d 14956{
0be76840
DE
14957 rtx mem = gen_frame_mem (BLKmode,
14958 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14959
9ebbca7d
GK
14960 emit_insn (gen_stack_tie (mem));
14961}
38c1f2d7 14962
9ebbca7d
GK
14963/* Emit the correct code for allocating stack space, as insns.
14964 If COPY_R12, make sure a copy of the old frame is left in r12.
14965 The generated code may use hard register 0 as a temporary. */
14966
14967static void
a2369ed3 14968rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14969{
9ebbca7d
GK
14970 rtx insn;
14971 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14972 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14973 rtx todec = gen_int_mode (-size, Pmode);
14974
14975 if (INTVAL (todec) != -size)
14976 {
d4ee4d25 14977 warning (0, "stack frame too large");
61168ff1
RS
14978 emit_insn (gen_trap ());
14979 return;
14980 }
a157febd
GK
14981
14982 if (current_function_limit_stack)
14983 {
14984 if (REG_P (stack_limit_rtx)
f676971a 14985 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14986 && REGNO (stack_limit_rtx) <= 31)
14987 {
5b71a4e7 14988 emit_insn (TARGET_32BIT
9ebbca7d
GK
14989 ? gen_addsi3 (tmp_reg,
14990 stack_limit_rtx,
14991 GEN_INT (size))
14992 : gen_adddi3 (tmp_reg,
14993 stack_limit_rtx,
14994 GEN_INT (size)));
5b71a4e7 14995
9ebbca7d
GK
14996 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14997 const0_rtx));
a157febd
GK
14998 }
14999 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15000 && TARGET_32BIT
f607bc57 15001 && DEFAULT_ABI == ABI_V4)
a157febd 15002 {
9ebbca7d 15003 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15004 gen_rtx_PLUS (Pmode,
15005 stack_limit_rtx,
9ebbca7d 15006 GEN_INT (size)));
5b71a4e7 15007
9ebbca7d
GK
15008 emit_insn (gen_elf_high (tmp_reg, toload));
15009 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15010 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15011 const0_rtx));
a157febd
GK
15012 }
15013 else
d4ee4d25 15014 warning (0, "stack limit expression is not supported");
a157febd
GK
15015 }
15016
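  /* Keep a copy of the old stack pointer in r12 when the caller asked for
     it, and also whenever store-with-update is unavailable, since the old
     value is then needed below as the back chain stored at the new stack top.  */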
9ebbca7d
GK
15017 if (copy_r12 || ! TARGET_UPDATE)
15018 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
15019
38c1f2d7
MM
15020 if (TARGET_UPDATE)
15021 {
9ebbca7d 15022 if (size > 32767)
38c1f2d7 15023 {
9ebbca7d 15024 /* Need a note here so that try_split doesn't get confused. */
9390387d 15025 if (get_last_insn () == NULL_RTX)
2e040219 15026 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15027 insn = emit_move_insn (tmp_reg, todec);
15028 try_split (PATTERN (insn), insn, 0);
15029 todec = tmp_reg;
38c1f2d7 15030 }
5b71a4e7
DE
15031
15032 insn = emit_insn (TARGET_32BIT
15033 ? gen_movsi_update (stack_reg, stack_reg,
15034 todec, stack_reg)
c4ad648e 15035 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15036 todec, stack_reg));
38c1f2d7
MM
15037 }
15038 else
15039 {
5b71a4e7
DE
15040 insn = emit_insn (TARGET_32BIT
15041 ? gen_addsi3 (stack_reg, stack_reg, todec)
15042 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
15043 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
15044 gen_rtx_REG (Pmode, 12));
15045 }
f676971a 15046
9ebbca7d 15047 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15048 REG_NOTES (insn) =
9ebbca7d 15049 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15050 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15051 gen_rtx_PLUS (Pmode, stack_reg,
15052 GEN_INT (-size))),
15053 REG_NOTES (insn));
15054}
15055
a4f6c312
SS
15056/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15057 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15058 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15059 deduce these equivalences by itself so it wasn't necessary to hold
15060 its hand so much. */
9ebbca7d
GK
15061
15062static void
f676971a 15063rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15064 rtx reg2, rtx rreg)
9ebbca7d
GK
15065{
15066 rtx real, temp;
15067
e56c4463
JL
15068 /* copy_rtx will not make unique copies of registers, so we need to
15069 ensure we don't have unwanted sharing here. */
15070 if (reg == reg2)
15071 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15072
15073 if (reg == rreg)
15074 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15075
9ebbca7d
GK
15076 real = copy_rtx (PATTERN (insn));
15077
89e7058f
AH
15078 if (reg2 != NULL_RTX)
15079 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15080
15081 real = replace_rtx (real, reg,
9ebbca7d
GK
15082 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15083 STACK_POINTER_REGNUM),
15084 GEN_INT (val)));
f676971a 15085
9ebbca7d
GK
15086 /* We expect that 'real' is either a SET or a PARALLEL containing
15087 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15088 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15089
15090 if (GET_CODE (real) == SET)
15091 {
15092 rtx set = real;
f676971a 15093
9ebbca7d
GK
15094 temp = simplify_rtx (SET_SRC (set));
15095 if (temp)
15096 SET_SRC (set) = temp;
15097 temp = simplify_rtx (SET_DEST (set));
15098 if (temp)
15099 SET_DEST (set) = temp;
15100 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15101 {
9ebbca7d
GK
15102 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15103 if (temp)
15104 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15105 }
38c1f2d7 15106 }
37409796 15107 else
9ebbca7d
GK
15108 {
15109 int i;
37409796
NS
15110
15111 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15112 for (i = 0; i < XVECLEN (real, 0); i++)
15113 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15114 {
15115 rtx set = XVECEXP (real, 0, i);
f676971a 15116
9ebbca7d
GK
15117 temp = simplify_rtx (SET_SRC (set));
15118 if (temp)
15119 SET_SRC (set) = temp;
15120 temp = simplify_rtx (SET_DEST (set));
15121 if (temp)
15122 SET_DEST (set) = temp;
15123 if (GET_CODE (SET_DEST (set)) == MEM)
15124 {
15125 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15126 if (temp)
15127 XEXP (SET_DEST (set), 0) = temp;
15128 }
15129 RTX_FRAME_RELATED_P (set) = 1;
15130 }
15131 }
c19de7aa
AH
15132
15133 if (TARGET_SPE)
15134 real = spe_synthesize_frame_save (real);
15135
9ebbca7d
GK
15136 RTX_FRAME_RELATED_P (insn) = 1;
15137 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15138 real,
15139 REG_NOTES (insn));
38c1f2d7
MM
15140}
15141
c19de7aa
AH
15142/* Given an SPE frame note, return a PARALLEL of SETs with the
15143 original note, plus a synthetic register save. */
15144
15145static rtx
a2369ed3 15146spe_synthesize_frame_save (rtx real)
c19de7aa
AH
15147{
15148 rtx synth, offset, reg, real2;
15149
15150 if (GET_CODE (real) != SET
15151 || GET_MODE (SET_SRC (real)) != V2SImode)
15152 return real;
15153
15154 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
 15155 frame-related note. The parallel contains a set of the register
41f3a930 15156 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
15157 This is so we can differentiate between 64-bit and 32-bit saves.
15158 Words cannot describe this nastiness. */
15159
37409796
NS
15160 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
15161 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
15162 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
15163
15164 /* Transform:
15165 (set (mem (plus (reg x) (const y)))
15166 (reg z))
15167 into:
15168 (set (mem (plus (reg x) (const y+4)))
41f3a930 15169 (reg z+1200))
c19de7aa
AH
15170 */
15171
15172 real2 = copy_rtx (real);
15173 PUT_MODE (SET_DEST (real2), SImode);
15174 reg = SET_SRC (real2);
15175 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
15176 synth = copy_rtx (real2);
15177
15178 if (BYTES_BIG_ENDIAN)
15179 {
15180 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
15181 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
15182 }
15183
15184 reg = SET_SRC (synth);
41f3a930 15185
c19de7aa 15186 synth = replace_rtx (synth, reg,
41f3a930 15187 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
15188
15189 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
15190 synth = replace_rtx (synth, offset,
15191 GEN_INT (INTVAL (offset)
15192 + (BYTES_BIG_ENDIAN ? 0 : 4)));
15193
15194 RTX_FRAME_RELATED_P (synth) = 1;
15195 RTX_FRAME_RELATED_P (real2) = 1;
15196 if (BYTES_BIG_ENDIAN)
15197 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
15198 else
15199 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
15200
15201 return real;
15202}
15203
00b960c7
AH
15204/* Returns an insn that has a vrsave set operation with the
15205 appropriate CLOBBERs. */
15206
15207static rtx
a2369ed3 15208generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15209{
15210 int nclobs, i;
15211 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15212 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15213
a004eb82
AH
15214 clobs[0]
15215 = gen_rtx_SET (VOIDmode,
15216 vrsave,
15217 gen_rtx_UNSPEC_VOLATILE (SImode,
15218 gen_rtvec (2, reg, vrsave),
3aca4bff 15219 UNSPECV_SET_VRSAVE));
00b960c7
AH
15220
15221 nclobs = 1;
15222
9aa86737
AH
15223 /* We need to clobber the registers in the mask so the scheduler
15224 does not move sets to VRSAVE before sets of AltiVec registers.
15225
15226 However, if the function receives nonlocal gotos, reload will set
15227 all call saved registers live. We will end up with:
15228
15229 (set (reg 999) (mem))
15230 (parallel [ (set (reg vrsave) (unspec blah))
15231 (clobber (reg 999))])
15232
15233 The clobber will cause the store into reg 999 to be dead, and
15234 flow will attempt to delete an epilogue insn. In this case, we
15235 need an unspec use/set of the register. */
00b960c7
AH
15236
15237 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15238 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15239 {
15240 if (!epiloguep || call_used_regs [i])
15241 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15242 gen_rtx_REG (V4SImode, i));
15243 else
15244 {
15245 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15246
15247 clobs[nclobs++]
a004eb82
AH
15248 = gen_rtx_SET (VOIDmode,
15249 reg,
15250 gen_rtx_UNSPEC (V4SImode,
15251 gen_rtvec (1, reg), 27));
9aa86737
AH
15252 }
15253 }
00b960c7
AH
15254
15255 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15256
15257 for (i = 0; i < nclobs; ++i)
15258 XVECEXP (insn, 0, i) = clobs[i];
15259
15260 return insn;
15261}
15262
89e7058f
AH
15263/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15264 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15265
15266static void
f676971a 15267emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15268 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15269{
15270 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15271 rtx replacea, replaceb;
15272
15273 int_rtx = GEN_INT (offset);
15274
15275 /* Some cases that need register indexed addressing. */
15276 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 15277 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15278 || (TARGET_SPE_ABI
15279 && SPE_VECTOR_MODE (mode)
15280 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15281 {
15282 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15283 flow path of instructions in the prologue. */
89e7058f
AH
15284 offset_rtx = gen_rtx_REG (Pmode, 11);
15285 emit_move_insn (offset_rtx, int_rtx);
15286
15287 replacea = offset_rtx;
15288 replaceb = int_rtx;
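      /* Record both forms so that rs6000_frame_related can rewrite the
	 register-indexed address back into a constant offset when building
	 the unwind note.  */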
15289 }
15290 else
15291 {
15292 offset_rtx = int_rtx;
15293 replacea = NULL_RTX;
15294 replaceb = NULL_RTX;
15295 }
15296
15297 reg = gen_rtx_REG (mode, regno);
15298 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15299 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15300
15301 insn = emit_move_insn (mem, reg);
15302
15303 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15304}
15305
a3170dc6
AH
15306/* Emit an offset memory reference suitable for a frame store, while
15307 converting to a valid addressing mode. */
15308
15309static rtx
a2369ed3 15310gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15311{
15312 rtx int_rtx, offset_rtx;
15313
15314 int_rtx = GEN_INT (offset);
15315
4d4cbc0e
AH
15316 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
15317 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15318 {
15319 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15320 emit_move_insn (offset_rtx, int_rtx);
15321 }
15322 else
15323 offset_rtx = int_rtx;
15324
0be76840 15325 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15326}
15327
6d0a8091
DJ
15328/* Look for user-defined global regs. We should not save and restore these,
15329 and cannot use stmw/lmw if there are any in their range. */
15330
15331static bool
15332no_global_regs_above (int first_greg)
15333{
15334 int i;
15335 for (i = 0; i < 32 - first_greg; i++)
15336 if (global_regs[first_greg + i])
15337 return false;
15338 return true;
15339}
15340
699c914a
MS
15341#ifndef TARGET_FIX_AND_CONTINUE
15342#define TARGET_FIX_AND_CONTINUE 0
15343#endif
15344
52ff33d0
NF
15345/* Determine whether the gp register REG really needs to be saved and restored. */
15346
15347static bool
15348rs6000_reg_live_or_pic_offset_p (int reg)
15349{
6fb5fa3c 15350 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15351 && (!call_used_regs[reg]
15352 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15353 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15354 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15355 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15356 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15357}
15358
9ebbca7d
GK
15359/* Emit function prologue as insns. */
15360
9878760c 15361void
863d938c 15362rs6000_emit_prologue (void)
9878760c 15363{
4697a36c 15364 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15365 enum machine_mode reg_mode = Pmode;
327e5343 15366 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15367 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15368 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15369 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15370 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15371 rtx insn;
15372 int saving_FPRs_inline;
15373 int using_store_multiple;
15374 HOST_WIDE_INT sp_offset = 0;
f676971a 15375
699c914a
MS
15376 if (TARGET_FIX_AND_CONTINUE)
15377 {
15378 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15379 address by modifying the first 5 instructions of the function
699c914a
MS
15380 to branch to the overriding function. This is necessary to
15381 permit function pointers that point to the old function to
15382 actually forward to the new function. */
15383 emit_insn (gen_nop ());
15384 emit_insn (gen_nop ());
de2ab0ca 15385 emit_insn (gen_nop ());
699c914a
MS
15386 emit_insn (gen_nop ());
15387 emit_insn (gen_nop ());
15388 }
15389
15390 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15391 {
15392 reg_mode = V2SImode;
15393 reg_size = 8;
15394 }
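  /* SPE widens the GPRs to 64 bits, so when any 64-bit SPE registers are
     in use the GPR saves are done as 8-byte V2SImode stores.  */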
a3170dc6 15395
9ebbca7d 15396 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15397 && (!TARGET_SPE_ABI
15398 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15399 && info->first_gp_reg_save < 31
15400 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15401 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15402 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15403 || current_function_calls_eh_return
8c29550d 15404 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15405
15406 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15407 if (! WORLD_SAVE_P (info)
15408 && info->push_p
acd0b319
AM
15409 && (DEFAULT_ABI == ABI_V4
15410 || current_function_calls_eh_return))
9ebbca7d
GK
15411 {
15412 if (info->total_size < 32767)
15413 sp_offset = info->total_size;
15414 else
15415 frame_reg_rtx = frame_ptr_rtx;
f676971a 15416 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15417 (frame_reg_rtx != sp_reg_rtx
15418 && (info->cr_save_p
15419 || info->lr_save_p
15420 || info->first_fp_reg_save < 64
15421 || info->first_gp_reg_save < 32
15422 )));
15423 if (frame_reg_rtx != sp_reg_rtx)
15424 rs6000_emit_stack_tie ();
15425 }
15426
d62294f5 15427 /* Handle world saves specially here. */
f57fe068 15428 if (WORLD_SAVE_P (info))
d62294f5
FJ
15429 {
15430 int i, j, sz;
15431 rtx treg;
15432 rtvec p;
22fa69da 15433 rtx reg0;
d62294f5
FJ
15434
15435 /* save_world expects lr in r0. */
22fa69da 15436 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15437 if (info->lr_save_p)
c4ad648e 15438 {
22fa69da 15439 insn = emit_move_insn (reg0,
1de43f85 15440 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15441 RTX_FRAME_RELATED_P (insn) = 1;
15442 }
d62294f5
FJ
15443
15444 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15445 assumptions about the offsets of various bits of the stack
992d08b1 15446 frame. */
37409796
NS
15447 gcc_assert (info->gp_save_offset == -220
15448 && info->fp_save_offset == -144
15449 && info->lr_save_offset == 8
15450 && info->cr_save_offset == 4
15451 && info->push_p
15452 && info->lr_save_p
15453 && (!current_function_calls_eh_return
15454 || info->ehrd_offset == -432)
15455 && info->vrsave_save_offset == -224
22fa69da 15456 && info->altivec_save_offset == -416);
d62294f5
FJ
15457
15458 treg = gen_rtx_REG (SImode, 11);
15459 emit_move_insn (treg, GEN_INT (-info->total_size));
15460
15461 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15462 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15463
15464 /* Preserve CR2 for save_world prologues */
22fa69da 15465 sz = 5;
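      /* Five fixed elements in the PARALLEL below: the LR clobber, the
	 save_world USE, the CR2 store, the LR stack-slot store and the
	 stack pointer update (the assert above guarantees lr_save_p).  */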
d62294f5
FJ
15466 sz += 32 - info->first_gp_reg_save;
15467 sz += 64 - info->first_fp_reg_save;
15468 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15469 p = rtvec_alloc (sz);
15470 j = 0;
15471 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15472 gen_rtx_REG (SImode,
1de43f85 15473 LR_REGNO));
d62294f5 15474 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15475 gen_rtx_SYMBOL_REF (Pmode,
15476 "*save_world"));
d62294f5 15477 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15478 properly. */
15479 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15480 {
15481 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15482 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15483 GEN_INT (info->fp_save_offset
15484 + sp_offset + 8 * i));
0be76840 15485 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15486
15487 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15488 }
d62294f5 15489 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15490 {
15491 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15492 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15493 GEN_INT (info->altivec_save_offset
15494 + sp_offset + 16 * i));
0be76840 15495 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15496
15497 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15498 }
d62294f5 15499 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15500 {
15501 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15502 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15503 GEN_INT (info->gp_save_offset
15504 + sp_offset + reg_size * i));
0be76840 15505 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15506
15507 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15508 }
15509
15510 {
15511 /* CR register traditionally saved as CR2. */
15512 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15513 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15514 GEN_INT (info->cr_save_offset
15515 + sp_offset));
0be76840 15516 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15517
15518 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15519 }
22fa69da
GK
15520 /* Explain about use of R0. */
15521 if (info->lr_save_p)
15522 {
15523 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15524 GEN_INT (info->lr_save_offset
15525 + sp_offset));
15526 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15527
22fa69da
GK
15528 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15529 }
15530 /* Explain what happens to the stack pointer. */
15531 {
15532 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15533 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15534 }
d62294f5
FJ
15535
15536 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15537 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15538 treg, GEN_INT (-info->total_size));
15539 sp_offset = info->total_size;
d62294f5
FJ
15540 }
15541
9ebbca7d 15542 /* If we use the link register, get it into r0. */
f57fe068 15543 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15544 {
52ff33d0
NF
15545 rtx addr, reg, mem;
15546
f8a57be8 15547 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15548 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15549 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15550
15551 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15552 GEN_INT (info->lr_save_offset + sp_offset));
15553 reg = gen_rtx_REG (Pmode, 0);
15554 mem = gen_rtx_MEM (Pmode, addr);
15555 /* This deliberately does not use the frame alias set (gen_frame_mem),
 15556 because of __builtin_return_address. */
15557
15558 insn = emit_move_insn (mem, reg);
15559 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15560 NULL_RTX, NULL_RTX);
f8a57be8 15561 }
9ebbca7d
GK
15562
15563 /* If we need to save CR, put it into r12. */
f57fe068 15564 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15565 {
f8a57be8 15566 rtx set;
f676971a 15567
9ebbca7d 15568 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15569 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15570 RTX_FRAME_RELATED_P (insn) = 1;
15571 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15572 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15573 But that's OK. All we have to do is specify that _one_ condition
15574 code register is saved in this stack slot. The thrower's epilogue
15575 will then restore all the call-saved registers.
15576 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15577 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15578 gen_rtx_REG (SImode, CR2_REGNO));
15579 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15580 set,
15581 REG_NOTES (insn));
9ebbca7d
GK
15582 }
15583
a4f6c312
SS
15584 /* Do any required saving of fpr's. If only one or two to save, do
 15585 it ourselves. Otherwise, call an out-of-line save routine. */
f57fe068 15586 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15587 {
15588 int i;
15589 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15590 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15591 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15592 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15593 info->first_fp_reg_save + i,
15594 info->fp_save_offset + sp_offset + 8 * i,
15595 info->total_size);
9ebbca7d 15596 }
f57fe068 15597 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15598 {
15599 int i;
15600 char rname[30];
520a57c8 15601 const char *alloc_rname;
9ebbca7d
GK
15602 rtvec p;
15603 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15604
15605 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15606 gen_rtx_REG (Pmode,
1de43f85 15607 LR_REGNO));
9ebbca7d
GK
15608 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15609 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15610 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15611 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15612 gen_rtx_SYMBOL_REF (Pmode,
15613 alloc_rname));
15614 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15615 {
15616 rtx addr, reg, mem;
15617 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15618 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15619 GEN_INT (info->fp_save_offset
9ebbca7d 15620 + sp_offset + 8*i));
0be76840 15621 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15622
15623 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15624 }
15625 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15626 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15627 NULL_RTX, NULL_RTX);
15628 }
b6c9286a 15629
9ebbca7d
GK
15630 /* Save GPRs. This is done as a PARALLEL if we are using
15631 the store-multiple instructions. */
f57fe068 15632 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15633 {
308c142a 15634 rtvec p;
9ebbca7d
GK
15635 int i;
15636 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15637 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15638 {
15639 rtx addr, reg, mem;
15640 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15641 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15642 GEN_INT (info->gp_save_offset
15643 + sp_offset
9ebbca7d 15644 + reg_size * i));
0be76840 15645 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15646
15647 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15648 }
15649 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15650 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15651 NULL_RTX, NULL_RTX);
b6c9286a 15652 }
52ff33d0
NF
15653 else if (!WORLD_SAVE_P (info)
15654 && TARGET_SPE_ABI
15655 && info->spe_64bit_regs_used != 0
15656 && info->first_gp_reg_save != 32)
15657 {
15658 int i;
15659 rtx spe_save_area_ptr;
15660 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15661 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15662 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15663
15664 /* Determine whether we can address all of the registers that need
15665 to be saved with an offset from the stack pointer that fits in
15666 the small const field for SPE memory instructions. */
15667 int spe_regs_addressable_via_sp
15668 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15669 + (32 - info->first_gp_reg_save - 1) * reg_size);
15670 int spe_offset;
15671
15672 if (spe_regs_addressable_via_sp)
15673 {
15674 spe_save_area_ptr = sp_reg_rtx;
15675 spe_offset = info->spe_gp_save_offset + sp_offset;
15676 }
15677 else
15678 {
15679 /* Make r11 point to the start of the SPE save area. We need
15680 to be careful here if r11 is holding the static chain. If
15681 it is, then temporarily save it in r0. We would use r0 as
15682 our base register here, but using r0 as a base register in
15683 loads and stores means something different from what we
15684 would like. */
15685 if (using_static_chain_p)
15686 {
15687 rtx r0 = gen_rtx_REG (Pmode, 0);
15688
15689 gcc_assert (info->first_gp_reg_save > 11);
15690
15691 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15692 }
15693
15694 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15695 emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
15696 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15697
15698 spe_offset = 0;
15699 }
15700
15701 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15702 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15703 {
15704 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15705 rtx offset, addr, mem;
15706
15707 /* We're doing all this to ensure that the offset fits into
15708 the immediate offset of 'evstdd'. */
15709 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15710
15711 offset = GEN_INT (reg_size * i + spe_offset);
15712 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15713 mem = gen_rtx_MEM (V2SImode, addr);
15714
15715 insn = emit_move_insn (mem, reg);
15716
15717 rs6000_frame_related (insn, spe_save_area_ptr,
15718 info->spe_gp_save_offset
15719 + sp_offset + reg_size * i,
15720 offset, const0_rtx);
15721 }
15722
15723 /* Move the static chain pointer back. */
15724 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15725 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15726 }
f57fe068 15727 else if (!WORLD_SAVE_P (info))
b6c9286a 15728 {
9ebbca7d
GK
15729 int i;
15730 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15731 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15732 {
15733 rtx addr, reg, mem;
15734 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15735
52ff33d0
NF
15736 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15737 GEN_INT (info->gp_save_offset
15738 + sp_offset
15739 + reg_size * i));
15740 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15741
52ff33d0
NF
15742 insn = emit_move_insn (mem, reg);
15743 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15744 NULL_RTX, NULL_RTX);
15745 }
9ebbca7d
GK
15746 }
15747
83720594
RH
15748 /* ??? There's no need to emit actual instructions here, but it's the
15749 easiest way to get the frame unwind information emitted. */
22fa69da 15750 if (current_function_calls_eh_return)
83720594 15751 {
78e1b90d
DE
15752 unsigned int i, regno;
15753
fc4767bb
JJ
15754 /* In the AIX ABI we need to pretend we save r2 here. */
15755 if (TARGET_AIX)
15756 {
15757 rtx addr, reg, mem;
15758
15759 reg = gen_rtx_REG (reg_mode, 2);
15760 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15761 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15762 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15763
15764 insn = emit_move_insn (mem, reg);
f676971a 15765 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15766 NULL_RTX, NULL_RTX);
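	  /* Replacing the pattern with a blockage keeps the frame-related
	     note (r2 saved in its usual slot) without emitting an actual,
	     redundant store -- hence "pretend" above.  */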
15767 PATTERN (insn) = gen_blockage ();
15768 }
15769
83720594
RH
15770 for (i = 0; ; ++i)
15771 {
83720594
RH
15772 regno = EH_RETURN_DATA_REGNO (i);
15773 if (regno == INVALID_REGNUM)
15774 break;
15775
89e7058f
AH
15776 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15777 info->ehrd_offset + sp_offset
15778 + reg_size * (int) i,
15779 info->total_size);
83720594
RH
15780 }
15781 }
15782
9ebbca7d 15783 /* Save CR if we use any that must be preserved. */
f57fe068 15784 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15785 {
15786 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15787 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15788 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15789 /* See the large comment above about why CR2_REGNO is used. */
15790 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15791
9ebbca7d
GK
15792 /* If r12 was used to hold the original sp, copy cr into r0 now
15793 that it's free. */
15794 if (REGNO (frame_reg_rtx) == 12)
15795 {
f8a57be8
GK
15796 rtx set;
15797
9ebbca7d 15798 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15799 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15800 RTX_FRAME_RELATED_P (insn) = 1;
15801 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15802 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15803 set,
15804 REG_NOTES (insn));
f676971a 15805
9ebbca7d
GK
15806 }
15807 insn = emit_move_insn (mem, cr_save_rtx);
15808
f676971a 15809 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15810 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15811 }
15812
f676971a 15813 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15814 for which it was done previously. */
f57fe068 15815 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15816 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15817 {
bcb2d701 15818 if (info->total_size < 32767)
2b2c2fe5 15819 sp_offset = info->total_size;
bcb2d701
EC
15820 else
15821 frame_reg_rtx = frame_ptr_rtx;
15822 rs6000_emit_allocate_stack (info->total_size,
15823 (frame_reg_rtx != sp_reg_rtx
15824 && ((info->altivec_size != 0)
15825 || (info->vrsave_mask != 0)
15826 )));
15827 if (frame_reg_rtx != sp_reg_rtx)
15828 rs6000_emit_stack_tie ();
2b2c2fe5 15829 }
9ebbca7d
GK
15830
15831 /* Set frame pointer, if needed. */
15832 if (frame_pointer_needed)
15833 {
7d5175e1 15834 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15835 sp_reg_rtx);
15836 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15837 }
9878760c 15838
2b2c2fe5
EC
15839 /* Save AltiVec registers if needed. Save here because the red zone does
15840 not include AltiVec registers. */
15841 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15842 {
15843 int i;
15844
15845 /* There should be a non-inline version of this, for when we
15846 are saving lots of vector registers. */
15847 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15848 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15849 {
15850 rtx areg, savereg, mem;
15851 int offset;
15852
15853 offset = info->altivec_save_offset + sp_offset
15854 + 16 * (i - info->first_altivec_reg_save);
15855
15856 savereg = gen_rtx_REG (V4SImode, i);
15857
15858 areg = gen_rtx_REG (Pmode, 0);
15859 emit_move_insn (areg, GEN_INT (offset));
15860
15861 /* AltiVec addressing mode is [reg+reg]. */
15862 mem = gen_frame_mem (V4SImode,
15863 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15864
15865 insn = emit_move_insn (mem, savereg);
15866
15867 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15868 areg, GEN_INT (offset));
15869 }
15870 }
15871
15872 /* VRSAVE is a bit vector representing which AltiVec registers
15873 are used. The OS uses this to determine which vector
15874 registers to save on a context switch. We need to save
15875 VRSAVE on the stack frame, add whatever AltiVec registers we
15876 used in this function, and do the corresponding magic in the
15877 epilogue. */
15878
15879 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15880 && info->vrsave_mask != 0)
15881 {
15882 rtx reg, mem, vrsave;
15883 int offset;
15884
15885 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15886 as frame_reg_rtx and r11 as the static chain pointer for
15887 nested functions. */
15888 reg = gen_rtx_REG (SImode, 0);
15889 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15890 if (TARGET_MACHO)
15891 emit_insn (gen_get_vrsave_internal (reg));
15892 else
15893 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15894
15895 if (!WORLD_SAVE_P (info))
15896 {
15897 /* Save VRSAVE. */
15898 offset = info->vrsave_save_offset + sp_offset;
15899 mem = gen_frame_mem (SImode,
15900 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15901 GEN_INT (offset)));
15902 insn = emit_move_insn (mem, reg);
15903 }
15904
15905 /* Include the registers in the mask. */
15906 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15907
15908 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15909 }
15910
1db02437 15911 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15912 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15913 || (DEFAULT_ABI == ABI_V4
15914 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15915 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15916 {
15917 /* If emit_load_toc_table will use the link register, we need to save
15918 it. We use R12 for this purpose because emit_load_toc_table
15919 can use register 0. This allows us to use a plain 'blr' to return
15920 from the procedure more often. */
15921 int save_LR_around_toc_setup = (TARGET_ELF
15922 && DEFAULT_ABI != ABI_AIX
15923 && flag_pic
15924 && ! info->lr_save_p
15925 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15926 if (save_LR_around_toc_setup)
15927 {
1de43f85 15928 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15929
c4ad648e 15930 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15931 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15932
c4ad648e 15933 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15934
c4ad648e 15935 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15936 RTX_FRAME_RELATED_P (insn) = 1;
15937 }
15938 else
15939 rs6000_emit_load_toc_table (TRUE);
15940 }
ee890fe2 15941
fcce224d 15942#if TARGET_MACHO
ee890fe2
SS
15943 if (DEFAULT_ABI == ABI_DARWIN
15944 && flag_pic && current_function_uses_pic_offset_table)
15945 {
1de43f85 15946 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15947 rtx src = machopic_function_base_sym ();
ee890fe2 15948
6d0a8091
DJ
15949 /* Save and restore LR locally around this call (in R0). */
15950 if (!info->lr_save_p)
6fb5fa3c 15951 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15952
6fb5fa3c 15953 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15954
6fb5fa3c
DB
15955 emit_move_insn (gen_rtx_REG (Pmode,
15956 RS6000_PIC_OFFSET_TABLE_REGNUM),
15957 lr);
6d0a8091
DJ
15958
15959 if (!info->lr_save_p)
6fb5fa3c 15960 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15961 }
fcce224d 15962#endif
9ebbca7d
GK
15963}
15964
9ebbca7d 15965/* Write function prologue. */
a4f6c312 15966
08c148a8 15967static void
f676971a 15968rs6000_output_function_prologue (FILE *file,
a2369ed3 15969 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15970{
15971 rs6000_stack_t *info = rs6000_stack_info ();
15972
4697a36c
MM
15973 if (TARGET_DEBUG_STACK)
15974 debug_stack_info (info);
9878760c 15975
a4f6c312
SS
15976 /* Write .extern for any function we will call to save and restore
15977 fp values. */
15978 if (info->first_fp_reg_save < 64
15979 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15980 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15981 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15982 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15983 RESTORE_FP_SUFFIX);
9878760c 15984
c764f757
RK
15985 /* Write .extern for AIX common mode routines, if needed. */
15986 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15987 {
f6709c70
JW
15988 fputs ("\t.extern __mulh\n", file);
15989 fputs ("\t.extern __mull\n", file);
15990 fputs ("\t.extern __divss\n", file);
15991 fputs ("\t.extern __divus\n", file);
15992 fputs ("\t.extern __quoss\n", file);
15993 fputs ("\t.extern __quous\n", file);
c764f757
RK
15994 common_mode_defined = 1;
15995 }
9878760c 15996
9ebbca7d 15997 if (! HAVE_prologue)
979721f8 15998 {
9ebbca7d 15999 start_sequence ();
9dda4cc8 16000
a4f6c312
SS
16001 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16002 the "toplevel" insn chain. */
2e040219 16003 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16004 rs6000_emit_prologue ();
2e040219 16005 emit_note (NOTE_INSN_DELETED);
178c3eff 16006
a3c9585f 16007 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16008 {
16009 rtx insn;
16010 unsigned addr = 0;
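	/* Every rs6000 instruction is 4 bytes, so dummy addresses spaced
	   4 apart are good enough for final () here.  */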
16011 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16012 {
16013 INSN_ADDRESSES_NEW (insn, addr);
16014 addr += 4;
16015 }
16016 }
9dda4cc8 16017
9ebbca7d 16018 if (TARGET_DEBUG_STACK)
a4f6c312 16019 debug_rtx_list (get_insns (), 100);
c9d691e9 16020 final (get_insns (), file, FALSE);
9ebbca7d 16021 end_sequence ();
979721f8
MM
16022 }
16023
9ebbca7d
GK
16024 rs6000_pic_labelno++;
16025}
f676971a 16026
9ebbca7d 16027/* Emit function epilogue as insns.
9878760c 16028
9ebbca7d
GK
16029 At present, dwarf2out_frame_debug_expr doesn't understand
16030 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16031 anywhere in the epilogue. Most of the insns below would in any case
16032 need special notes to explain where r11 is in relation to the stack. */
9878760c 16033
9ebbca7d 16034void
a2369ed3 16035rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16036{
16037 rs6000_stack_t *info;
16038 int restoring_FPRs_inline;
16039 int using_load_multiple;
d296e02e 16040 int using_mtcr_multiple;
9ebbca7d
GK
16041 int use_backchain_to_restore_sp;
16042 int sp_offset = 0;
16043 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16044 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16045 enum machine_mode reg_mode = Pmode;
327e5343 16046 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16047 int i;
16048
c19de7aa
AH
16049 info = rs6000_stack_info ();
16050
16051 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16052 {
16053 reg_mode = V2SImode;
16054 reg_size = 8;
16055 }
16056
9ebbca7d 16057 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
16058 && (!TARGET_SPE_ABI
16059 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
16060 && info->first_gp_reg_save < 31
16061 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 16062 restoring_FPRs_inline = (sibcall
83720594 16063 || current_function_calls_eh_return
9ebbca7d
GK
16064 || info->first_fp_reg_save == 64
16065 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 16066 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
16067 || current_function_calls_alloca
16068 || info->total_size > 32767);
d296e02e 16069 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16070 || rs6000_cpu == PROCESSOR_PPC603
16071 || rs6000_cpu == PROCESSOR_PPC750
16072 || optimize_size);
16073
f57fe068 16074 if (WORLD_SAVE_P (info))
d62294f5
FJ
16075 {
16076 int i, j;
16077 char rname[30];
16078 const char *alloc_rname;
16079 rtvec p;
16080
16081 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16082 stack slot (which is not likely to be our caller).
16083 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16084 rest_world is similar, except any R10 parameter is ignored.
16085 The exception-handling stuff that was here in 2.95 is no
16086 longer necessary. */
d62294f5
FJ
16087
16088 p = rtvec_alloc (9
16089 + 1
f676971a 16090 + 32 - info->first_gp_reg_save
c4ad648e
AM
16091 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16092 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16093
c4ad648e
AM
16094 strcpy (rname, ((current_function_calls_eh_return) ?
16095 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16096 alloc_rname = ggc_strdup (rname);
16097
16098 j = 0;
16099 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16100 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16101 gen_rtx_REG (Pmode,
1de43f85 16102 LR_REGNO));
d62294f5 16103 RTVEC_ELT (p, j++)
c4ad648e 16104 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16105 /* The instruction pattern requires a clobber here;
c4ad648e 16106 it is shared with the restVEC helper. */
d62294f5 16107 RTVEC_ELT (p, j++)
c4ad648e 16108 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16109
16110 {
c4ad648e
AM
16111 /* CR register traditionally saved as CR2. */
16112 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16113 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16114 GEN_INT (info->cr_save_offset));
0be76840 16115 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16116
16117 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16118 }
16119
16120 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16121 {
16122 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16123 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16124 GEN_INT (info->gp_save_offset
16125 + reg_size * i));
0be76840 16126 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16127
16128 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16129 }
d62294f5 16130 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16131 {
16132 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16133 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16134 GEN_INT (info->altivec_save_offset
16135 + 16 * i));
0be76840 16136 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16137
16138 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16139 }
d62294f5 16140 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16141 {
16142 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16143 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16144 GEN_INT (info->fp_save_offset
16145 + 8 * i));
0be76840 16146 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16147
16148 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16149 }
d62294f5 16150 RTVEC_ELT (p, j++)
c4ad648e 16151 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16152 RTVEC_ELT (p, j++)
c4ad648e 16153 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16154 RTVEC_ELT (p, j++)
c4ad648e 16155 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16156 RTVEC_ELT (p, j++)
c4ad648e 16157 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16158 RTVEC_ELT (p, j++)
c4ad648e 16159 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16160 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16161
16162 return;
16163 }
16164
45b194f8
AM
16165 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16166 if (info->push_p)
2b2c2fe5 16167 sp_offset = info->total_size;
f676971a 16168
9aa86737
AH
16169 /* Restore AltiVec registers if needed. */
16170 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16171 {
16172 int i;
16173
16174 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16175 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16176 {
16177 rtx addr, areg, mem;
16178
16179 areg = gen_rtx_REG (Pmode, 0);
16180 emit_move_insn
16181 (areg, GEN_INT (info->altivec_save_offset
16182 + sp_offset
16183 + 16 * (i - info->first_altivec_reg_save)));
16184
16185 /* AltiVec addressing mode is [reg+reg]. */
16186 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16187 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16188
16189 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16190 }
16191 }
16192
2b2c2fe5
EC
16193 /* If we have a frame pointer, a call to alloca, or a large stack
16194 frame, restore the old stack pointer using the backchain. Otherwise,
16195 we know what size to update it with. */
16196 if (use_backchain_to_restore_sp)
16197 {
16198 /* Under V.4, don't reset the stack pointer until after we're done
16199 loading the saved registers. */
16200 if (DEFAULT_ABI == ABI_V4)
16201 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16202
16203 emit_move_insn (frame_reg_rtx,
16204 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16205 sp_offset = 0;
2b2c2fe5 16206 }
45b194f8
AM
16207 else if (info->push_p
16208 && DEFAULT_ABI != ABI_V4
16209 && !current_function_calls_eh_return)
2b2c2fe5 16210 {
45b194f8
AM
16211 emit_insn (TARGET_32BIT
16212 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16213 GEN_INT (info->total_size))
16214 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16215 GEN_INT (info->total_size)));
16216 sp_offset = 0;
2b2c2fe5
EC
16217 }
16218
554c2941
AM
16219 /* Restore VRSAVE if needed. */
16220 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16221 && info->vrsave_mask != 0)
16222 {
16223 rtx addr, mem, reg;
16224
16225 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16226 GEN_INT (info->vrsave_save_offset + sp_offset));
16227 mem = gen_frame_mem (SImode, addr);
16228 reg = gen_rtx_REG (SImode, 12);
16229 emit_move_insn (reg, mem);
16230
16231 emit_insn (generate_set_vrsave (reg, info, 1));
16232 }
16233
9ebbca7d
GK
16234 /* Get the old lr if we saved it. */
16235 if (info->lr_save_p)
b6c9286a 16236 {
a3170dc6
AH
16237 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16238 info->lr_save_offset + sp_offset);
ba4828e0 16239
9ebbca7d 16240 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16241 }
f676971a 16242
9ebbca7d
GK
16243 /* Get the old cr if we saved it. */
16244 if (info->cr_save_p)
16245 {
16246 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16247 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16248 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16249
9ebbca7d
GK
16250 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16251 }
f676971a 16252
9ebbca7d 16253 /* Set LR here to try to overlap restores below. */
4697a36c 16254 if (info->lr_save_p)
1de43f85 16255 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16256 gen_rtx_REG (Pmode, 0));
f676971a 16257
83720594
RH
16258 /* Load exception handler data registers, if needed. */
16259 if (current_function_calls_eh_return)
16260 {
78e1b90d
DE
16261 unsigned int i, regno;
16262
fc4767bb
JJ
16263 if (TARGET_AIX)
16264 {
16265 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16266 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16267 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16268
16269 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16270 }
16271
83720594
RH
16272 for (i = 0; ; ++i)
16273 {
a3170dc6 16274 rtx mem;
83720594
RH
16275
16276 regno = EH_RETURN_DATA_REGNO (i);
16277 if (regno == INVALID_REGNUM)
16278 break;
16279
a3170dc6
AH
16280 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16281 info->ehrd_offset + sp_offset
16282 + reg_size * (int) i);
83720594
RH
16283
16284 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16285 }
16286 }
f676971a 16287
9ebbca7d
GK
16288 /* Restore GPRs. This is done as a PARALLEL if we are using
16289 the load-multiple instructions. */
16290 if (using_load_multiple)
979721f8 16291 {
9ebbca7d
GK
16292 rtvec p;
16293 p = rtvec_alloc (32 - info->first_gp_reg_save);
16294 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16295 {
f676971a
EC
16296 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16297 GEN_INT (info->gp_save_offset
16298 + sp_offset
9ebbca7d 16299 + reg_size * i));
0be76840 16300 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16301
f676971a 16302 RTVEC_ELT (p, i) =
9ebbca7d
GK
16303 gen_rtx_SET (VOIDmode,
16304 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16305 mem);
979721f8 16306 }
9ebbca7d 16307 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16308 }
52ff33d0
NF
16309 else if (TARGET_SPE_ABI
16310 && info->spe_64bit_regs_used != 0
16311 && info->first_gp_reg_save != 32)
16312 {
52ff33d0
NF
16313 /* Determine whether we can address all of the registers that need
16314 to be saved with an offset from the stack pointer that fits in
16315 the small const field for SPE memory instructions. */
16316 int spe_regs_addressable_via_sp
16317 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16318 + (32 - info->first_gp_reg_save - 1) * reg_size);
16319 int spe_offset;
16320
16321 if (spe_regs_addressable_via_sp)
45b194f8 16322 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16323 else
16324 {
45b194f8 16325 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16326 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16327 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16328 There's no need to worry here because the static chain is passed
16329 anew to every function. */
45b194f8
AM
16330 if (frame_reg_rtx == sp_reg_rtx)
16331 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16332 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16333 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16334 /* Keep the invariant that frame_reg_rtx + sp_offset points
16335 at the top of the stack frame. */
16336 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16337
16338 spe_offset = 0;
16339 }
16340
16341 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16342 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16343 {
16344 rtx offset, addr, mem;
16345
16346 /* We're doing all this to ensure that the immediate offset
16347 fits into the immediate field of 'evldd'. */
16348 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16349
16350 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16351 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16352 mem = gen_rtx_MEM (V2SImode, addr);
16353
16354 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16355 mem);
16356 }
16357 }
9ebbca7d
GK
16358 else
16359 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16360 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16361 {
f676971a
EC
16362 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16363 GEN_INT (info->gp_save_offset
16364 + sp_offset
9ebbca7d 16365 + reg_size * i));
0be76840 16366 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16367
f676971a 16368 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16369 info->first_gp_reg_save + i), mem);
9ebbca7d 16370 }
9878760c 16371
9ebbca7d
GK
16372 /* Restore fpr's if we need to do it without calling a function. */
16373 if (restoring_FPRs_inline)
16374 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16375 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16376 && ! call_used_regs[info->first_fp_reg_save+i]))
16377 {
16378 rtx addr, mem;
16379 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16380 GEN_INT (info->fp_save_offset
16381 + sp_offset
a4f6c312 16382 + 8 * i));
0be76840 16383 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16384
f676971a 16385 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16386 info->first_fp_reg_save + i),
16387 mem);
16388 }
8d30c4ee 16389
9ebbca7d
GK
16390 /* If we saved cr, restore it here. Just those that were used. */
16391 if (info->cr_save_p)
979721f8 16392 {
9ebbca7d 16393 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16394 int count = 0;
f676971a 16395
d296e02e 16396 if (using_mtcr_multiple)
979721f8 16397 {
9ebbca7d 16398 for (i = 0; i < 8; i++)
6fb5fa3c 16399 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16400 count++;
37409796 16401 gcc_assert (count);
e35b9579
GK
16402 }
16403
d296e02e 16404 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16405 {
16406 rtvec p;
16407 int ndx;
f676971a 16408
e35b9579 16409 p = rtvec_alloc (count);
9ebbca7d 16410
e35b9579 16411 ndx = 0;
9ebbca7d 16412 for (i = 0; i < 8; i++)
6fb5fa3c 16413 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16414 {
16415 rtvec r = rtvec_alloc (2);
16416 RTVEC_ELT (r, 0) = r12_rtx;
16417 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
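		  /* 1 << (7-i) is the mtcrf field-select bit for CR field i;
		     CR0's bit is the most significant one.  */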
e35b9579 16418 RTVEC_ELT (p, ndx) =
f676971a 16419 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16420 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16421 ndx++;
9ebbca7d
GK
16422 }
16423 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16424 gcc_assert (ndx == count);
979721f8
MM
16425 }
16426 else
9ebbca7d 16427 for (i = 0; i < 8; i++)
6fb5fa3c 16428 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16429 {
f676971a 16430 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16431 CR0_REGNO+i),
16432 r12_rtx));
979721f8 16433 }
979721f8
MM
16434 }
16435
9ebbca7d 16436 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16437 have been done. */
16438 if (frame_reg_rtx != sp_reg_rtx)
16439 {
16440 /* This blockage is needed so that sched doesn't decide to move
16441 the sp change before the register restores. */
16442 rs6000_emit_stack_tie ();
45b194f8
AM
16443 if (sp_offset != 0)
16444 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16445 GEN_INT (sp_offset)));
52ff33d0
NF
16446 else
16447 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16448 }
16449 else if (sp_offset != 0)
16450 emit_insn (TARGET_32BIT
16451 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16452 GEN_INT (sp_offset))
16453 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16454 GEN_INT (sp_offset)));
b6c9286a 16455
83720594
RH
16456 if (current_function_calls_eh_return)
16457 {
16458 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16459 emit_insn (TARGET_32BIT
83720594
RH
16460 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16461 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16462 }
16463
9ebbca7d
GK
16464 if (!sibcall)
16465 {
16466 rtvec p;
16467 if (! restoring_FPRs_inline)
16468 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16469 else
16470 p = rtvec_alloc (2);
b6c9286a 16471
e35b9579 16472 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16473 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16474 gen_rtx_REG (Pmode,
1de43f85 16475 LR_REGNO));
9ebbca7d
GK
16476
16477 /* If we have to restore more than two FP registers, branch to the
16478 restore function. It will return to our caller. */
16479 if (! restoring_FPRs_inline)
16480 {
16481 int i;
16482 char rname[30];
520a57c8 16483 const char *alloc_rname;
979721f8 16484
f676971a 16485 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16486 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16487 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16488 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16489 gen_rtx_SYMBOL_REF (Pmode,
16490 alloc_rname));
b6c9286a 16491
9ebbca7d
GK
16492 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16493 {
16494 rtx addr, mem;
16495 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16496 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16497 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16498
f676971a 16499 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16500 gen_rtx_SET (VOIDmode,
16501 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16502 mem);
b6c9286a
MM
16503 }
16504 }
f676971a 16505
9ebbca7d 16506 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16507 }
9878760c
RK
16508}
16509
16510/* Write function epilogue. */
16511
08c148a8 16512static void
f676971a 16513rs6000_output_function_epilogue (FILE *file,
a2369ed3 16514 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16515{
9ebbca7d 16516 if (! HAVE_epilogue)
9878760c 16517 {
9ebbca7d
GK
16518 rtx insn = get_last_insn ();
16519 /* If the last insn was a BARRIER, we don't have to write anything except
16520 the trace table. */
16521 if (GET_CODE (insn) == NOTE)
16522 insn = prev_nonnote_insn (insn);
16523 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16524 {
9ebbca7d
GK
16525 /* This is slightly ugly, but at least we don't have two
16526 copies of the epilogue-emitting code. */
16527 start_sequence ();
16528
16529 /* A NOTE_INSN_DELETED is supposed to be at the start
16530 and end of the "toplevel" insn chain. */
2e040219 16531 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16532 rs6000_emit_epilogue (FALSE);
2e040219 16533 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16534
a3c9585f 16535 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16536 {
16537 rtx insn;
16538 unsigned addr = 0;
16539 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16540 {
16541 INSN_ADDRESSES_NEW (insn, addr);
16542 addr += 4;
16543 }
16544 }
16545
9ebbca7d 16546 if (TARGET_DEBUG_STACK)
a4f6c312 16547 debug_rtx_list (get_insns (), 100);
c9d691e9 16548 final (get_insns (), file, FALSE);
9ebbca7d 16549 end_sequence ();
4697a36c 16550 }
9878760c 16551 }
b4ac57ab 16552
efdba735
SH
16553#if TARGET_MACHO
16554 macho_branch_islands ();
0e5da0be
GK
16555 /* Mach-O doesn't support labels at the end of objects, so if
16556 it looks like we might want one, insert a NOP. */
16557 {
16558 rtx insn = get_last_insn ();
16559 while (insn
16560 && NOTE_P (insn)
a38e7aa5 16561 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16562 insn = PREV_INSN (insn);
f676971a
EC
16563 if (insn
16564 && (LABEL_P (insn)
0e5da0be 16565 || (NOTE_P (insn)
a38e7aa5 16566 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16567 fputs ("\tnop\n", file);
16568 }
16569#endif
16570
9b30bae2 16571 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16572 on its format.
16573
16574 We don't output a traceback table if -finhibit-size-directive was
16575 used. The documentation for -finhibit-size-directive reads
16576 ``don't output a @code{.size} assembler directive, or anything
16577 else that would cause trouble if the function is split in the
16578 middle, and the two halves are placed at locations far apart in
16579 memory.'' The traceback table has this property, since it
16580 includes the offset from the start of the function to the
4d30c363
MM
16581 traceback table itself.
16582
 16583	   System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 16584 different traceback table. */
57ac7be9 16585 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16586 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16587 {
69c75916 16588 const char *fname = NULL;
3ac88239 16589 const char *language_string = lang_hooks.name;
6041bf2f 16590 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16591 int i;
57ac7be9 16592 int optional_tbtab;
8097c268 16593 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16594
16595 if (rs6000_traceback == traceback_full)
16596 optional_tbtab = 1;
16597 else if (rs6000_traceback == traceback_part)
16598 optional_tbtab = 0;
16599 else
16600 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16601
69c75916
AM
16602 if (optional_tbtab)
16603 {
16604 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16605 while (*fname == '.') /* V.4 encodes . in the name */
16606 fname++;
16607
16608 /* Need label immediately before tbtab, so we can compute
16609 its offset from the function start. */
16610 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16611 ASM_OUTPUT_LABEL (file, fname);
16612 }
314fc5a9
ILT
16613
16614 /* The .tbtab pseudo-op can only be used for the first eight
16615 expressions, since it can't handle the possibly variable
16616 length fields that follow. However, if you omit the optional
16617 fields, the assembler outputs zeros for all optional fields
 16618	     fields, the assembler outputs zeros for all optional fields
 16618	     anyway, giving each variable length field its minimum length
 16619	     (as defined in sys/debug.h). Thus we cannot use the .tbtab
16620 pseudo-op at all. */
16621
16622 /* An all-zero word flags the start of the tbtab, for debuggers
16623 that have to find it by searching forward from the entry
16624 point or from the current pc. */
19d2d16f 16625 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16626
16627 /* Tbtab format type. Use format type 0. */
19d2d16f 16628 fputs ("\t.byte 0,", file);
314fc5a9 16629
5fc921c1
DE
16630 /* Language type. Unfortunately, there does not seem to be any
16631 official way to discover the language being compiled, so we
16632 use language_string.
16633 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16634 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16635 a number, so for now use 9. */
5fc921c1 16636 if (! strcmp (language_string, "GNU C"))
314fc5a9 16637 i = 0;
6de9cd9a
DN
16638 else if (! strcmp (language_string, "GNU F77")
16639 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16640 i = 1;
8b83775b 16641 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16642 i = 2;
5fc921c1
DE
16643 else if (! strcmp (language_string, "GNU Ada"))
16644 i = 3;
56438901
AM
16645 else if (! strcmp (language_string, "GNU C++")
16646 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16647 i = 9;
9517ead8
AG
16648 else if (! strcmp (language_string, "GNU Java"))
16649 i = 13;
5fc921c1
DE
16650 else if (! strcmp (language_string, "GNU Objective-C"))
16651 i = 14;
314fc5a9 16652 else
37409796 16653 gcc_unreachable ();
314fc5a9
ILT
16654 fprintf (file, "%d,", i);
16655
16656 /* 8 single bit fields: global linkage (not set for C extern linkage,
16657 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16658 from start of procedure stored in tbtab, internal function, function
16659 has controlled storage, function has no toc, function uses fp,
16660 function logs/aborts fp operations. */
16661 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16662 fprintf (file, "%d,",
16663 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16664
16665 /* 6 bitfields: function is interrupt handler, name present in
16666 proc table, function calls alloca, on condition directives
16667 (controls stack walks, 3 bits), saves condition reg, saves
16668 link reg. */
16669 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16670 set up as a frame pointer, even when there is no alloca call. */
16671 fprintf (file, "%d,",
6041bf2f
DE
16672 ((optional_tbtab << 6)
16673 | ((optional_tbtab & frame_pointer_needed) << 5)
16674 | (info->cr_save_p << 1)
16675 | (info->lr_save_p)));
314fc5a9 16676
6041bf2f 16677 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16678 (6 bits). */
16679 fprintf (file, "%d,",
4697a36c 16680 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16681
16682 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16683 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16684
6041bf2f
DE
16685 if (optional_tbtab)
16686 {
16687 /* Compute the parameter info from the function decl argument
16688 list. */
16689 tree decl;
16690 int next_parm_info_bit = 31;
314fc5a9 16691
6041bf2f
DE
16692 for (decl = DECL_ARGUMENTS (current_function_decl);
16693 decl; decl = TREE_CHAIN (decl))
16694 {
16695 rtx parameter = DECL_INCOMING_RTL (decl);
16696 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16697
6041bf2f
DE
16698 if (GET_CODE (parameter) == REG)
16699 {
ebb109ad 16700 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16701 {
16702 int bits;
16703
16704 float_parms++;
16705
37409796
NS
16706 switch (mode)
16707 {
16708 case SFmode:
16709 bits = 0x2;
16710 break;
16711
16712 case DFmode:
7393f7f8 16713 case DDmode:
37409796 16714 case TFmode:
7393f7f8 16715 case TDmode:
37409796
NS
16716 bits = 0x3;
16717 break;
16718
16719 default:
16720 gcc_unreachable ();
16721 }
6041bf2f
DE
16722
16723 /* If only one bit will fit, don't or in this entry. */
16724 if (next_parm_info_bit > 0)
16725 parm_info |= (bits << (next_parm_info_bit - 1));
16726 next_parm_info_bit -= 2;
16727 }
16728 else
16729 {
16730 fixed_parms += ((GET_MODE_SIZE (mode)
16731 + (UNITS_PER_WORD - 1))
16732 / UNITS_PER_WORD);
16733 next_parm_info_bit -= 1;
16734 }
16735 }
16736 }
16737 }
314fc5a9
ILT
16738
16739 /* Number of fixed point parameters. */
16740 /* This is actually the number of words of fixed point parameters; thus
16741 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16742 fprintf (file, "%d,", fixed_parms);
16743
16744 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16745 all on stack. */
16746 /* This is actually the number of fp registers that hold parameters;
16747 and thus the maximum value is 13. */
16748 /* Set parameters on stack bit if parameters are not in their original
16749 registers, regardless of whether they are on the stack? Xlc
16750 seems to set the bit when not optimizing. */
16751 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16752
6041bf2f
DE
16753 if (! optional_tbtab)
16754 return;
16755
314fc5a9
ILT
16756 /* Optional fields follow. Some are variable length. */
16757
16758 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16759 11 double float. */
16760 /* There is an entry for each parameter in a register, in the order that
16761 they occur in the parameter list. Any intervening arguments on the
16762 stack are ignored. If the list overflows a long (max possible length
16763 34 bits) then completely leave off all elements that don't fit. */
16764 /* Only emit this long if there was at least one parameter. */
16765 if (fixed_parms || float_parms)
16766 fprintf (file, "\t.long %d\n", parm_info);
16767
16768 /* Offset from start of code to tb table. */
19d2d16f 16769 fputs ("\t.long ", file);
314fc5a9 16770 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16771 if (TARGET_AIX)
16772 RS6000_OUTPUT_BASENAME (file, fname);
16773 else
16774 assemble_name (file, fname);
16775 putc ('-', file);
16776 rs6000_output_function_entry (file, fname);
19d2d16f 16777 putc ('\n', file);
314fc5a9
ILT
16778
16779 /* Interrupt handler mask. */
16780 /* Omit this long, since we never set the interrupt handler bit
16781 above. */
16782
16783 /* Number of CTL (controlled storage) anchors. */
16784 /* Omit this long, since the has_ctl bit is never set above. */
16785
16786 /* Displacement into stack of each CTL anchor. */
16787 /* Omit this list of longs, because there are no CTL anchors. */
16788
16789 /* Length of function name. */
69c75916
AM
16790 if (*fname == '*')
16791 ++fname;
296b8152 16792 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16793
16794 /* Function name. */
16795 assemble_string (fname, strlen (fname));
16796
16797 /* Register for alloca automatic storage; this is always reg 31.
16798 Only emit this if the alloca bit was set above. */
16799 if (frame_pointer_needed)
19d2d16f 16800 fputs ("\t.byte 31\n", file);
b1765bde
DE
16801
16802 fputs ("\t.align 2\n", file);
9b30bae2 16803 }
9878760c 16804}
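
/* Illustrative sketch, not part of the compiler itself: how the
   traceback-table parm_info word built in the loop above is packed.
   Two bits are used per floating-point register parameter (10 for
   SFmode, 11 for DFmode and larger) and one zero bit per word of
   fixed-point parameter, allocated from bit 31 downward.  The
   parameter list (int, double, float) is a made-up example.  */

static int
example_pack_parm_info (void)
{
  int parm_info = 0;
  int next_parm_info_bit = 31;

  /* int: one word of fixed-point parameter, leaves a zero bit.  */
  next_parm_info_bit -= 1;

  /* double: 11 in the next two bits.  */
  parm_info |= 0x3 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  /* float: 10 in the next two bits.  */
  parm_info |= 0x2 << (next_parm_info_bit - 1);
  next_parm_info_bit -= 2;

  return parm_info;		/* 0x70000000 for this example.  */
}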
17167fd8 16805\f
a4f6c312
SS
16806/* A C compound statement that outputs the assembler code for a thunk
16807 function, used to implement C++ virtual function calls with
16808 multiple inheritance. The thunk acts as a wrapper around a virtual
16809 function, adjusting the implicit object parameter before handing
16810 control off to the real function.
16811
16812 First, emit code to add the integer DELTA to the location that
16813 contains the incoming first argument. Assume that this argument
16814 contains a pointer, and is the one used to pass the `this' pointer
16815 in C++. This is the incoming argument *before* the function
16816 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16817 values of all other incoming arguments.
17167fd8
MM
16818
16819 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16820 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16821 not touch the return address. Hence returning from FUNCTION will
16822 return to whoever called the current `thunk'.
17167fd8 16823
a4f6c312
SS
16824 The effect must be as if FUNCTION had been called directly with the
16825 adjusted first argument. This macro is responsible for emitting
16826 all of the code for a thunk function; output_function_prologue()
16827 and output_function_epilogue() are not invoked.
17167fd8 16828
a4f6c312
SS
16829 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16830 been extracted from it.) It might possibly be useful on some
16831 targets, but probably not.
17167fd8 16832
a4f6c312
SS
16833 If you do not define this macro, the target-independent code in the
16834 C++ frontend will generate a less efficient heavyweight thunk that
16835 calls FUNCTION instead of jumping to it. The generic approach does
16836 not support varargs. */
17167fd8 16837
3961e8fe 16838static void
f676971a
EC
16839rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16840 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16841 tree function)
17167fd8 16842{
5b71a4e7 16843 rtx this, insn, funexp;
17167fd8 16844
5b71a4e7 16845 reload_completed = 1;
fe3ad572 16846 epilogue_completed = 1;
56a7189a 16847
5b71a4e7 16848 /* Mark the end of the (empty) prologue. */
2e040219 16849 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16850
5b71a4e7
DE
16851 /* Find the "this" pointer. If the function returns a structure,
16852 the structure return pointer is in r3. */
61f71b34 16853 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16854 this = gen_rtx_REG (Pmode, 4);
56a7189a 16855 else
5b71a4e7 16856 this = gen_rtx_REG (Pmode, 3);
17167fd8 16857
5b71a4e7
DE
16858 /* Apply the constant offset, if required. */
16859 if (delta)
16860 {
16861 rtx delta_rtx = GEN_INT (delta);
16862 emit_insn (TARGET_32BIT
16863 ? gen_addsi3 (this, this, delta_rtx)
16864 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16865 }
16866
5b71a4e7
DE
16867 /* Apply the offset from the vtable, if required. */
16868 if (vcall_offset)
17167fd8 16869 {
5b71a4e7
DE
16870 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16871 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16872
5b71a4e7 16873 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16874 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16875 {
16876 emit_insn (TARGET_32BIT
16877 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16878 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16879 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16880 }
16881 else
16882 {
16883 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16884
16885 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16886 }
5b71a4e7
DE
16887 emit_insn (TARGET_32BIT
16888 ? gen_addsi3 (this, this, tmp)
16889 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16890 }
16891
5b71a4e7
DE
16892 /* Generate a tail call to the target function. */
16893 if (!TREE_USED (function))
16894 {
16895 assemble_external (function);
16896 TREE_USED (function) = 1;
16897 }
16898 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16899 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16900
16901#if TARGET_MACHO
ab82a49f 16902 if (MACHOPIC_INDIRECT)
5b71a4e7 16903 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16904#endif
5b71a4e7
DE
16905
16906 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16907 generate sibcall RTL explicitly. */
5b71a4e7
DE
16908 insn = emit_call_insn (
16909 gen_rtx_PARALLEL (VOIDmode,
16910 gen_rtvec (4,
16911 gen_rtx_CALL (VOIDmode,
16912 funexp, const0_rtx),
16913 gen_rtx_USE (VOIDmode, const0_rtx),
16914 gen_rtx_USE (VOIDmode,
16915 gen_rtx_REG (SImode,
1de43f85 16916 LR_REGNO)),
5b71a4e7
DE
16917 gen_rtx_RETURN (VOIDmode))));
16918 SIBLING_CALL_P (insn) = 1;
16919 emit_barrier ();
16920
16921 /* Run just enough of rest_of_compilation to get the insns emitted.
16922 There's not really enough bulk here to make other passes such as
16923 instruction scheduling worth while. Note that use_thunk calls
16924 assemble_start_function and assemble_end_function. */
16925 insn = get_insns ();
55e092c4 16926 insn_locators_alloc ();
5b71a4e7
DE
16927 shorten_branches (insn);
16928 final_start_function (insn, file, 1);
c9d691e9 16929 final (insn, file, 1);
5b71a4e7
DE
16930 final_end_function ();
16931
16932 reload_completed = 0;
fe3ad572 16933 epilogue_completed = 0;
9ebbca7d 16934}
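
/* Illustrative sketch, not taken from the compiler: the pointer
   adjustment performed by the thunk emitted above, written as plain C
   and assuming the usual vtable layout.  DELTA and VCALL_OFFSET
   correspond to the arguments of rs6000_output_mi_thunk; the function
   name is hypothetical.  */

static void *
example_thunk_adjust (void *this_ptr, long delta, long vcall_offset)
{
  char *p = (char *) this_ptr + delta;	/* constant adjustment */

  if (vcall_offset)
    {
      /* *p is the vtable pointer; the additional adjustment is a
	 value stored in the vtable at VCALL_OFFSET.  */
      long extra = *(long *) (*(char **) p + vcall_offset);
      p += extra;
    }

  return p;	/* Control then jumps directly to FUNCTION.  */
}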
9ebbca7d
GK
16935\f
16936/* A quick summary of the various types of 'constant-pool tables'
16937 under PowerPC:
16938
f676971a 16939 Target Flags Name One table per
9ebbca7d
GK
16940 AIX (none) AIX TOC object file
16941 AIX -mfull-toc AIX TOC object file
16942 AIX -mminimal-toc AIX minimal TOC translation unit
16943 SVR4/EABI (none) SVR4 SDATA object file
16944 SVR4/EABI -fpic SVR4 pic object file
16945 SVR4/EABI -fPIC SVR4 PIC translation unit
16946 SVR4/EABI -mrelocatable EABI TOC function
16947 SVR4/EABI -maix AIX TOC object file
f676971a 16948 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16949 AIX minimal TOC translation unit
16950
16951 Name Reg. Set by entries contains:
16952 made by addrs? fp? sum?
16953
16954 AIX TOC 2 crt0 as Y option option
16955 AIX minimal TOC 30 prolog gcc Y Y option
16956 SVR4 SDATA 13 crt0 gcc N Y N
16957 SVR4 pic 30 prolog ld Y not yet N
16958 SVR4 PIC 30 prolog gcc Y option option
16959 EABI TOC 30 prolog gcc Y option option
16960
16961*/
16962
9ebbca7d
GK
16963/* Hash functions for the hash table. */
16964
16965static unsigned
a2369ed3 16966rs6000_hash_constant (rtx k)
9ebbca7d 16967{
46b33600
RH
16968 enum rtx_code code = GET_CODE (k);
16969 enum machine_mode mode = GET_MODE (k);
16970 unsigned result = (code << 3) ^ mode;
16971 const char *format;
16972 int flen, fidx;
f676971a 16973
46b33600
RH
16974 format = GET_RTX_FORMAT (code);
16975 flen = strlen (format);
16976 fidx = 0;
9ebbca7d 16977
46b33600
RH
16978 switch (code)
16979 {
16980 case LABEL_REF:
16981 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16982
16983 case CONST_DOUBLE:
16984 if (mode != VOIDmode)
16985 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16986 flen = 2;
16987 break;
16988
16989 case CODE_LABEL:
16990 fidx = 3;
16991 break;
16992
16993 default:
16994 break;
16995 }
9ebbca7d
GK
16996
16997 for (; fidx < flen; fidx++)
16998 switch (format[fidx])
16999 {
17000 case 's':
17001 {
17002 unsigned i, len;
17003 const char *str = XSTR (k, fidx);
17004 len = strlen (str);
17005 result = result * 613 + len;
17006 for (i = 0; i < len; i++)
17007 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17008 break;
17009 }
9ebbca7d
GK
17010 case 'u':
17011 case 'e':
17012 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17013 break;
17014 case 'i':
17015 case 'n':
17016 result = result * 613 + (unsigned) XINT (k, fidx);
17017 break;
17018 case 'w':
17019 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17020 result = result * 613 + (unsigned) XWINT (k, fidx);
17021 else
17022 {
17023 size_t i;
9390387d 17024 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17025 result = result * 613 + (unsigned) (XWINT (k, fidx)
17026 >> CHAR_BIT * i);
17027 }
17028 break;
09501938
DE
17029 case '0':
17030 break;
9ebbca7d 17031 default:
37409796 17032 gcc_unreachable ();
9ebbca7d 17033 }
46b33600 17034
9ebbca7d
GK
17035 return result;
17036}
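
/* Standalone sketch, not used by the compiler: the string case handled
   above is an ordinary multiplicative hash with factor 613 (1231 is
   used when mixing in sub-expressions).  strlen comes from <string.h>,
   which this file already gets via system.h.  */

static unsigned
example_hash_string (const char *str, unsigned result)
{
  unsigned i, len = strlen (str);

  result = result * 613 + len;
  for (i = 0; i < len; i++)
    result = result * 613 + (unsigned) str[i];
  return result;
}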
17037
17038static unsigned
a2369ed3 17039toc_hash_function (const void *hash_entry)
9ebbca7d 17040{
f676971a 17041 const struct toc_hash_struct *thc =
a9098fd0
GK
17042 (const struct toc_hash_struct *) hash_entry;
17043 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17044}
17045
17046/* Compare H1 and H2 for equivalence. */
17047
17048static int
a2369ed3 17049toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17050{
17051 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17052 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17053
a9098fd0
GK
17054 if (((const struct toc_hash_struct *) h1)->key_mode
17055 != ((const struct toc_hash_struct *) h2)->key_mode)
17056 return 0;
17057
5692c7bc 17058 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17059}
17060
28e510bd
MM
17061/* These are the names given by the C++ front-end to vtables, and
17062 vtable-like objects. Ideally, this logic should not be here;
17063 instead, there should be some programmatic way of inquiring as
17064 to whether or not an object is a vtable. */
17065
17066#define VTABLE_NAME_P(NAME) \
9390387d 17067 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17068 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17069 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17070 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17071 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
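
/* For reference: with the Itanium C++ ABI mangling used by g++, these
   prefixes match names such as "_ZTV7Derived" (vtable), "_ZTT7Derived"
   (VTT), "_ZTI7Derived" (typeinfo) and "_ZTC"-prefixed construction
   vtables; "_vt." matches the older g++ mangling.  The class name
   "Derived" is only an example.  */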
28e510bd
MM
17072
17073void
a2369ed3 17074rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17075{
17076 /* Currently C++ toc references to vtables can be emitted before it
17077 is decided whether the vtable is public or private. If this is
17078 the case, then the linker will eventually complain that there is
f676971a 17079 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17080 we emit the TOC reference to reference the symbol and not the
17081 section. */
17082 const char *name = XSTR (x, 0);
54ee9799 17083
f676971a 17084 if (VTABLE_NAME_P (name))
54ee9799
DE
17085 {
17086 RS6000_OUTPUT_BASENAME (file, name);
17087 }
17088 else
17089 assemble_name (file, name);
28e510bd
MM
17090}
17091
a4f6c312
SS
17092/* Output a TOC entry. We derive the entry name from what is being
17093 written. */
9878760c
RK
17094
17095void
a2369ed3 17096output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17097{
17098 char buf[256];
3cce094d 17099 const char *name = buf;
ec940faa 17100 const char *real_name;
9878760c 17101 rtx base = x;
16fdeb48 17102 HOST_WIDE_INT offset = 0;
9878760c 17103
37409796 17104 gcc_assert (!TARGET_NO_TOC);
4697a36c 17105
9ebbca7d
GK
17106 /* When the linker won't eliminate them, don't output duplicate
17107 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17108 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17109 CODE_LABELs. */
17110 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17111 {
17112 struct toc_hash_struct *h;
17113 void * * found;
f676971a 17114
17211ab5 17115 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17116 time because GGC is not initialized at that point. */
17211ab5 17117 if (toc_hash_table == NULL)
f676971a 17118 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17119 toc_hash_eq, NULL);
17120
9ebbca7d
GK
17121 h = ggc_alloc (sizeof (*h));
17122 h->key = x;
a9098fd0 17123 h->key_mode = mode;
9ebbca7d 17124 h->labelno = labelno;
f676971a 17125
9ebbca7d
GK
17126 found = htab_find_slot (toc_hash_table, h, 1);
17127 if (*found == NULL)
17128 *found = h;
f676971a 17129 else /* This is indeed a duplicate.
9ebbca7d
GK
17130 Set this label equal to that label. */
17131 {
17132 fputs ("\t.set ", file);
17133 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17134 fprintf (file, "%d,", labelno);
17135 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17136 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17137 found)->labelno));
17138 return;
17139 }
17140 }
17141
17142 /* If we're going to put a double constant in the TOC, make sure it's
17143 aligned properly when strict alignment is on. */
ff1720ed
RK
17144 if (GET_CODE (x) == CONST_DOUBLE
17145 && STRICT_ALIGNMENT
a9098fd0 17146 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17147 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17148 ASM_OUTPUT_ALIGN (file, 3);
17149 }
17150
4977bab6 17151 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17152
37c37a57
RK
17153 /* Handle FP constants specially. Note that if we have a minimal
17154 TOC, things we put here aren't actually in the TOC, so we can allow
17155 FP constants. */
00b79d54
BE
17156 if (GET_CODE (x) == CONST_DOUBLE &&
17157 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17158 {
17159 REAL_VALUE_TYPE rv;
17160 long k[4];
17161
17162 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17163 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17164 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17165 else
17166 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17167
17168 if (TARGET_64BIT)
17169 {
17170 if (TARGET_MINIMAL_TOC)
17171 fputs (DOUBLE_INT_ASM_OP, file);
17172 else
17173 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17174 k[0] & 0xffffffff, k[1] & 0xffffffff,
17175 k[2] & 0xffffffff, k[3] & 0xffffffff);
17176 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17177 k[0] & 0xffffffff, k[1] & 0xffffffff,
17178 k[2] & 0xffffffff, k[3] & 0xffffffff);
17179 return;
17180 }
17181 else
17182 {
17183 if (TARGET_MINIMAL_TOC)
17184 fputs ("\t.long ", file);
17185 else
17186 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17187 k[0] & 0xffffffff, k[1] & 0xffffffff,
17188 k[2] & 0xffffffff, k[3] & 0xffffffff);
17189 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17190 k[0] & 0xffffffff, k[1] & 0xffffffff,
17191 k[2] & 0xffffffff, k[3] & 0xffffffff);
17192 return;
17193 }
17194 }
00b79d54
BE
17195 else if (GET_CODE (x) == CONST_DOUBLE &&
17196 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17197 {
042259f2
DE
17198 REAL_VALUE_TYPE rv;
17199 long k[2];
0adc764e 17200
042259f2 17201 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17202
17203 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17204 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17205 else
17206 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17207
13ded975
DE
17208 if (TARGET_64BIT)
17209 {
17210 if (TARGET_MINIMAL_TOC)
2bfcf297 17211 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17212 else
2f0552b6
AM
17213 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17214 k[0] & 0xffffffff, k[1] & 0xffffffff);
17215 fprintf (file, "0x%lx%08lx\n",
17216 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17217 return;
17218 }
1875cc88 17219 else
13ded975
DE
17220 {
17221 if (TARGET_MINIMAL_TOC)
2bfcf297 17222 fputs ("\t.long ", file);
13ded975 17223 else
2f0552b6
AM
17224 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17225 k[0] & 0xffffffff, k[1] & 0xffffffff);
17226 fprintf (file, "0x%lx,0x%lx\n",
17227 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17228 return;
17229 }
9878760c 17230 }
00b79d54
BE
17231 else if (GET_CODE (x) == CONST_DOUBLE &&
17232 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17233 {
042259f2
DE
17234 REAL_VALUE_TYPE rv;
17235 long l;
9878760c 17236
042259f2 17237 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17238 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17239 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17240 else
17241 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17242
31bfaa0b
DE
17243 if (TARGET_64BIT)
17244 {
17245 if (TARGET_MINIMAL_TOC)
2bfcf297 17246 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17247 else
2f0552b6
AM
17248 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17249 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17250 return;
17251 }
042259f2 17252 else
31bfaa0b
DE
17253 {
17254 if (TARGET_MINIMAL_TOC)
2bfcf297 17255 fputs ("\t.long ", file);
31bfaa0b 17256 else
2f0552b6
AM
17257 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17258 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17259 return;
17260 }
042259f2 17261 }
f176e826 17262 else if (GET_MODE (x) == VOIDmode
a9098fd0 17263 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17264 {
e2c953b6 17265 unsigned HOST_WIDE_INT low;
042259f2
DE
17266 HOST_WIDE_INT high;
17267
17268 if (GET_CODE (x) == CONST_DOUBLE)
17269 {
17270 low = CONST_DOUBLE_LOW (x);
17271 high = CONST_DOUBLE_HIGH (x);
17272 }
17273 else
17274#if HOST_BITS_PER_WIDE_INT == 32
17275 {
17276 low = INTVAL (x);
0858c623 17277 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17278 }
17279#else
17280 {
c4ad648e
AM
17281 low = INTVAL (x) & 0xffffffff;
17282 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17283 }
17284#endif
9878760c 17285
a9098fd0
GK
17286 /* TOC entries are always Pmode-sized, but since this
 17287	     is a big-endian machine, if we're putting smaller
17288 integer constants in the TOC we have to pad them.
17289 (This is still a win over putting the constants in
17290 a separate constant pool, because then we'd have
02a4ec28
FS
17291 to have both a TOC entry _and_ the actual constant.)
17292
17293 For a 32-bit target, CONST_INT values are loaded and shifted
17294 entirely within `low' and can be stored in one TOC entry. */
17295
37409796
NS
17296 /* It would be easy to make this work, but it doesn't now. */
17297 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17298
17299 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17300 {
17301#if HOST_BITS_PER_WIDE_INT == 32
17302 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17303 POINTER_SIZE, &low, &high, 0);
17304#else
17305 low |= high << 32;
17306 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17307 high = (HOST_WIDE_INT) low >> 32;
17308 low &= 0xffffffff;
17309#endif
17310 }
a9098fd0 17311
13ded975
DE
17312 if (TARGET_64BIT)
17313 {
17314 if (TARGET_MINIMAL_TOC)
2bfcf297 17315 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17316 else
2f0552b6
AM
17317 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17318 (long) high & 0xffffffff, (long) low & 0xffffffff);
17319 fprintf (file, "0x%lx%08lx\n",
17320 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17321 return;
17322 }
1875cc88 17323 else
13ded975 17324 {
02a4ec28
FS
17325 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17326 {
17327 if (TARGET_MINIMAL_TOC)
2bfcf297 17328 fputs ("\t.long ", file);
02a4ec28 17329 else
2bfcf297 17330 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17331 (long) high & 0xffffffff, (long) low & 0xffffffff);
17332 fprintf (file, "0x%lx,0x%lx\n",
17333 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17334 }
13ded975 17335 else
02a4ec28
FS
17336 {
17337 if (TARGET_MINIMAL_TOC)
2bfcf297 17338 fputs ("\t.long ", file);
02a4ec28 17339 else
2f0552b6
AM
17340 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17341 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17342 }
13ded975
DE
17343 return;
17344 }
9878760c
RK
17345 }
17346
17347 if (GET_CODE (x) == CONST)
17348 {
37409796 17349 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17350
9878760c
RK
17351 base = XEXP (XEXP (x, 0), 0);
17352 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17353 }
f676971a 17354
37409796
NS
17355 switch (GET_CODE (base))
17356 {
17357 case SYMBOL_REF:
17358 name = XSTR (base, 0);
17359 break;
17360
17361 case LABEL_REF:
17362 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17363 CODE_LABEL_NUMBER (XEXP (base, 0)));
17364 break;
17365
17366 case CODE_LABEL:
17367 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17368 break;
17369
17370 default:
17371 gcc_unreachable ();
17372 }
9878760c 17373
772c5265 17374 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17375 if (TARGET_MINIMAL_TOC)
2bfcf297 17376 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17377 else
17378 {
b6c9286a 17379 fprintf (file, "\t.tc %s", real_name);
9878760c 17380
1875cc88 17381 if (offset < 0)
16fdeb48 17382 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17383 else if (offset)
16fdeb48 17384 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17385
19d2d16f 17386 fputs ("[TC],", file);
1875cc88 17387 }
581bc4de
MM
17388
17389 /* Currently C++ toc references to vtables can be emitted before it
17390 is decided whether the vtable is public or private. If this is
17391 the case, then the linker will eventually complain that there is
17392 a TOC reference to an unknown section. Thus, for vtables only,
17393 we emit the TOC reference to reference the symbol and not the
17394 section. */
28e510bd 17395 if (VTABLE_NAME_P (name))
581bc4de 17396 {
54ee9799 17397 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17398 if (offset < 0)
16fdeb48 17399 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17400 else if (offset > 0)
16fdeb48 17401 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17402 }
17403 else
17404 output_addr_const (file, x);
19d2d16f 17405 putc ('\n', file);
9878760c
RK
17406}
17407\f
17408/* Output an assembler pseudo-op to write an ASCII string of N characters
17409 starting at P to FILE.
17410
17411 On the RS/6000, we have to do this using the .byte operation and
17412 write out special characters outside the quoted string.
17413 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17414 so we must artificially break them up early. */
9878760c
RK
17415
17416void
a2369ed3 17417output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17418{
17419 char c;
17420 int i, count_string;
d330fd93
KG
17421 const char *for_string = "\t.byte \"";
17422 const char *for_decimal = "\t.byte ";
17423 const char *to_close = NULL;
9878760c
RK
17424
17425 count_string = 0;
17426 for (i = 0; i < n; i++)
17427 {
17428 c = *p++;
17429 if (c >= ' ' && c < 0177)
17430 {
17431 if (for_string)
17432 fputs (for_string, file);
17433 putc (c, file);
17434
17435 /* Write two quotes to get one. */
17436 if (c == '"')
17437 {
17438 putc (c, file);
17439 ++count_string;
17440 }
17441
17442 for_string = NULL;
17443 for_decimal = "\"\n\t.byte ";
17444 to_close = "\"\n";
17445 ++count_string;
17446
17447 if (count_string >= 512)
17448 {
17449 fputs (to_close, file);
17450
17451 for_string = "\t.byte \"";
17452 for_decimal = "\t.byte ";
17453 to_close = NULL;
17454 count_string = 0;
17455 }
17456 }
17457 else
17458 {
17459 if (for_decimal)
17460 fputs (for_decimal, file);
17461 fprintf (file, "%d", c);
17462
17463 for_string = "\n\t.byte \"";
17464 for_decimal = ", ";
17465 to_close = "\n";
17466 count_string = 0;
17467 }
17468 }
17469
17470 /* Now close the string if we have written one. Then end the line. */
17471 if (to_close)
9ebbca7d 17472 fputs (to_close, file);
9878760c
RK
17473}
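
/* For example, output_ascii (file, "Hi\n", 3) produces

	.byte "Hi"
	.byte 10

   printable characters go into a quoted string, everything else is
   emitted as a decimal .byte value, and the quoted string is restarted
   every 512 characters to stay within the assembler's limits.  */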
17474\f
17475/* Generate a unique section name for FILENAME for a section type
17476 represented by SECTION_DESC. Output goes into BUF.
17477
17478 SECTION_DESC can be any string, as long as it is different for each
17479 possible section type.
17480
17481 We name the section in the same manner as xlc. The name begins with an
17482 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17483 names) with the last period replaced by the string SECTION_DESC. If
17484 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17485 the name. */
9878760c
RK
17486
17487void
f676971a 17488rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17489 const char *section_desc)
9878760c 17490{
9ebbca7d 17491 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17492 char *p;
17493 int len;
9878760c
RK
17494
17495 after_last_slash = filename;
17496 for (q = filename; *q; q++)
11e5fe42
RK
17497 {
17498 if (*q == '/')
17499 after_last_slash = q + 1;
17500 else if (*q == '.')
17501 last_period = q;
17502 }
9878760c 17503
11e5fe42 17504 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17505 *buf = (char *) xmalloc (len);
9878760c
RK
17506
17507 p = *buf;
17508 *p++ = '_';
17509
17510 for (q = after_last_slash; *q; q++)
17511 {
11e5fe42 17512 if (q == last_period)
c4ad648e 17513 {
9878760c
RK
17514 strcpy (p, section_desc);
17515 p += strlen (section_desc);
e3981aab 17516 break;
c4ad648e 17517 }
9878760c 17518
e9a780ec 17519 else if (ISALNUM (*q))
c4ad648e 17520 *p++ = *q;
9878760c
RK
17521 }
17522
11e5fe42 17523 if (last_period == 0)
9878760c
RK
17524 strcpy (p, section_desc);
17525 else
17526 *p = '\0';
17527}
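
/* For example, with FILENAME "src/my-file.c" and a SECTION_DESC of
   "_ro_" (a hypothetical descriptor, used here only for illustration),
   the buffer comes back as "_myfile_ro_": leading directories are
   stripped, the '-' is dropped because only alphanumeric characters
   are copied, and the last period is replaced by SECTION_DESC.  */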
e165f3f0 17528\f
a4f6c312 17529/* Emit profile function. */
411707f4 17530
411707f4 17531void
a2369ed3 17532output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17533{
858081ad
AH
17534 /* Non-standard profiling for kernels, which just saves LR then calls
17535 _mcount without worrying about arg saves. The idea is to change
17536 the function prologue as little as possible as it isn't easy to
17537 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17538 if (TARGET_PROFILE_KERNEL)
17539 return;
17540
8480e480
CC
17541 if (DEFAULT_ABI == ABI_AIX)
17542 {
9739c90c
JJ
17543#ifndef NO_PROFILE_COUNTERS
17544# define NO_PROFILE_COUNTERS 0
17545#endif
f676971a 17546 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17547 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17548 else
17549 {
17550 char buf[30];
17551 const char *label_name;
17552 rtx fun;
411707f4 17553
9739c90c
JJ
17554 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17555 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17556 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17557
9739c90c
JJ
17558 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17559 fun, Pmode);
17560 }
8480e480 17561 }
ee890fe2
SS
17562 else if (DEFAULT_ABI == ABI_DARWIN)
17563 {
d5fa86ba 17564 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17565 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17566
17567 /* Be conservative and always set this, at least for now. */
17568 current_function_uses_pic_offset_table = 1;
17569
17570#if TARGET_MACHO
17571 /* For PIC code, set up a stub and collect the caller's address
17572 from r0, which is where the prologue puts it. */
11abc112
MM
17573 if (MACHOPIC_INDIRECT
17574 && current_function_uses_pic_offset_table)
17575 caller_addr_regno = 0;
ee890fe2
SS
17576#endif
17577 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17578 0, VOIDmode, 1,
17579 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17580 }
411707f4
CC
17581}
17582
a4f6c312 17583/* Write function profiler code. */
e165f3f0
RK
17584
17585void
a2369ed3 17586output_function_profiler (FILE *file, int labelno)
e165f3f0 17587{
3daf36a4 17588 char buf[100];
e165f3f0 17589
38c1f2d7 17590 switch (DEFAULT_ABI)
3daf36a4 17591 {
38c1f2d7 17592 default:
37409796 17593 gcc_unreachable ();
38c1f2d7
MM
17594
17595 case ABI_V4:
09eeeacb
AM
17596 if (!TARGET_32BIT)
17597 {
d4ee4d25 17598 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17599 return;
17600 }
ffcfcb5f 17601 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17602 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17603 if (NO_PROFILE_COUNTERS)
17604 {
17605 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17606 reg_names[0], reg_names[1]);
17607 }
17608 else if (TARGET_SECURE_PLT && flag_pic)
17609 {
17610 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17611 reg_names[0], reg_names[1]);
17612 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17613 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17614 reg_names[12], reg_names[12]);
17615 assemble_name (file, buf);
17616 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17617 assemble_name (file, buf);
17618 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17619 }
17620 else if (flag_pic == 1)
38c1f2d7 17621 {
dfdfa60f 17622 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17623 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17624 reg_names[0], reg_names[1]);
17167fd8 17625 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17626 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17627 assemble_name (file, buf);
17167fd8 17628 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17629 }
9ebbca7d 17630 else if (flag_pic > 1)
38c1f2d7 17631 {
71625f3d
AM
17632 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17633 reg_names[0], reg_names[1]);
9ebbca7d 17634 /* Now, we need to get the address of the label. */
71625f3d 17635 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17636 assemble_name (file, buf);
9ebbca7d
GK
17637 fputs ("-.\n1:", file);
17638 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17639 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17640 reg_names[0], reg_names[11]);
17641 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17642 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17643 }
38c1f2d7
MM
17644 else
17645 {
17167fd8 17646 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17647 assemble_name (file, buf);
dfdfa60f 17648 fputs ("@ha\n", file);
71625f3d
AM
17649 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17650 reg_names[0], reg_names[1]);
a260abc9 17651 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17652 assemble_name (file, buf);
17167fd8 17653 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17654 }
17655
50d440bc 17656 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17657 fprintf (file, "\tbl %s%s\n",
17658 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17659 break;
17660
17661 case ABI_AIX:
ee890fe2 17662 case ABI_DARWIN:
ffcfcb5f
AM
17663 if (!TARGET_PROFILE_KERNEL)
17664 {
a3c9585f 17665 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17666 }
17667 else
17668 {
37409796 17669 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17670
17671 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17672 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17673
6de9cd9a 17674 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17675 {
17676 asm_fprintf (file, "\tstd %s,24(%s)\n",
17677 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17678 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17679 asm_fprintf (file, "\tld %s,24(%s)\n",
17680 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17681 }
17682 else
17683 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17684 }
38c1f2d7
MM
17685 break;
17686 }
e165f3f0 17687}
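
/* Roughly, for ABI_V4 without -fpic and with profile counters enabled,
   the sequence emitted above looks like

	mflr 0
	lis 12,.LP0@ha
	stw 0,4(1)
	la 0,.LP0@l(12)
	bl _mcount

   where the counter label ".LP0" and the mcount symbol are placeholders
   that depend on the target headers: LR is saved in the caller's frame
   and r0 carries the address of the per-function counter.  */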
a251ffd0 17688
b54cf83a 17689\f
44cd321e
PS
17690
17691/* The following variable value is the last issued insn. */
17692
17693static rtx last_scheduled_insn;
17694
17695/* The following variable helps to balance issuing of load and
 17696   store instructions.  */
17697
17698static int load_store_pendulum;
17699
b54cf83a
DE
17700/* Power4 load update and store update instructions are cracked into a
17701 load or store and an integer insn which are executed in the same cycle.
17702 Branches have their own dispatch slot which does not count against the
17703 GCC issue rate, but it changes the program flow so there are no other
17704 instructions to issue in this cycle. */
17705
17706static int
f676971a
EC
17707rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17708 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17709 rtx insn, int more)
b54cf83a 17710{
44cd321e 17711 last_scheduled_insn = insn;
b54cf83a
DE
17712 if (GET_CODE (PATTERN (insn)) == USE
17713 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17714 {
17715 cached_can_issue_more = more;
17716 return cached_can_issue_more;
17717 }
17718
17719 if (insn_terminates_group_p (insn, current_group))
17720 {
17721 cached_can_issue_more = 0;
17722 return cached_can_issue_more;
17723 }
b54cf83a 17724
d296e02e
AP
 17725  /* If the insn has no reservation but we reach here anyway, do not
	    change the number of available issue slots.  */
17726 if (recog_memoized (insn) < 0)
17727 return more;
17728
ec507f2d 17729 if (rs6000_sched_groups)
b54cf83a 17730 {
cbe26ab8 17731 if (is_microcoded_insn (insn))
44cd321e 17732 cached_can_issue_more = 0;
cbe26ab8 17733 else if (is_cracked_insn (insn))
44cd321e
PS
17734 cached_can_issue_more = more > 2 ? more - 2 : 0;
17735 else
17736 cached_can_issue_more = more - 1;
17737
17738 return cached_can_issue_more;
b54cf83a 17739 }
165b263e 17740
d296e02e
AP
17741 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17742 return 0;
17743
44cd321e
PS
17744 cached_can_issue_more = more - 1;
17745 return cached_can_issue_more;
b54cf83a
DE
17746}
17747
a251ffd0
TG
17748/* Adjust the cost of a scheduling dependency. Return the new cost of
17749 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17750
c237e94a 17751static int
0a4f0294 17752rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17753{
44cd321e 17754 enum attr_type attr_type;
a251ffd0 17755
44cd321e 17756 if (! recog_memoized (insn))
a251ffd0
TG
17757 return 0;
17758
44cd321e 17759 switch (REG_NOTE_KIND (link))
a251ffd0 17760 {
44cd321e
PS
17761 case REG_DEP_TRUE:
17762 {
17763 /* Data dependency; DEP_INSN writes a register that INSN reads
17764 some cycles later. */
17765
17766 /* Separate a load from a narrower, dependent store. */
17767 if (rs6000_sched_groups
17768 && GET_CODE (PATTERN (insn)) == SET
17769 && GET_CODE (PATTERN (dep_insn)) == SET
17770 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17771 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17772 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17773 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17774 return cost + 14;
17775
17776 attr_type = get_attr_type (insn);
17777
17778 switch (attr_type)
17779 {
17780 case TYPE_JMPREG:
17781 /* Tell the first scheduling pass about the latency between
17782 a mtctr and bctr (and mtlr and br/blr). The first
17783 scheduling pass will not know about this latency since
17784 the mtctr instruction, which has the latency associated
17785 to it, will be generated by reload. */
17786 return TARGET_POWER ? 5 : 4;
17787 case TYPE_BRANCH:
17788 /* Leave some extra cycles between a compare and its
17789 dependent branch, to inhibit expensive mispredicts. */
17790 if ((rs6000_cpu_attr == CPU_PPC603
17791 || rs6000_cpu_attr == CPU_PPC604
17792 || rs6000_cpu_attr == CPU_PPC604E
17793 || rs6000_cpu_attr == CPU_PPC620
17794 || rs6000_cpu_attr == CPU_PPC630
17795 || rs6000_cpu_attr == CPU_PPC750
17796 || rs6000_cpu_attr == CPU_PPC7400
17797 || rs6000_cpu_attr == CPU_PPC7450
17798 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17799 || rs6000_cpu_attr == CPU_POWER5
17800 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17801 && recog_memoized (dep_insn)
17802 && (INSN_CODE (dep_insn) >= 0))
982afe02 17803
44cd321e
PS
17804 switch (get_attr_type (dep_insn))
17805 {
17806 case TYPE_CMP:
17807 case TYPE_COMPARE:
17808 case TYPE_DELAYED_COMPARE:
17809 case TYPE_IMUL_COMPARE:
17810 case TYPE_LMUL_COMPARE:
17811 case TYPE_FPCOMPARE:
17812 case TYPE_CR_LOGICAL:
17813 case TYPE_DELAYED_CR:
17814 return cost + 2;
17815 default:
17816 break;
17817 }
17818 break;
17819
17820 case TYPE_STORE:
17821 case TYPE_STORE_U:
17822 case TYPE_STORE_UX:
17823 case TYPE_FPSTORE:
17824 case TYPE_FPSTORE_U:
17825 case TYPE_FPSTORE_UX:
17826 if ((rs6000_cpu == PROCESSOR_POWER6)
17827 && recog_memoized (dep_insn)
17828 && (INSN_CODE (dep_insn) >= 0))
17829 {
17830
17831 if (GET_CODE (PATTERN (insn)) != SET)
17832 /* If this happens, we have to extend this to schedule
17833 optimally. Return default for now. */
17834 return cost;
17835
17836 /* Adjust the cost for the case where the value written
17837 by a fixed point operation is used as the address
17838 gen value on a store. */
17839 switch (get_attr_type (dep_insn))
17840 {
17841 case TYPE_LOAD:
17842 case TYPE_LOAD_U:
17843 case TYPE_LOAD_UX:
17844 case TYPE_CNTLZ:
17845 {
17846 if (! store_data_bypass_p (dep_insn, insn))
17847 return 4;
17848 break;
17849 }
17850 case TYPE_LOAD_EXT:
17851 case TYPE_LOAD_EXT_U:
17852 case TYPE_LOAD_EXT_UX:
17853 case TYPE_VAR_SHIFT_ROTATE:
17854 case TYPE_VAR_DELAYED_COMPARE:
17855 {
17856 if (! store_data_bypass_p (dep_insn, insn))
17857 return 6;
17858 break;
17859 }
17860 case TYPE_INTEGER:
17861 case TYPE_COMPARE:
17862 case TYPE_FAST_COMPARE:
17863 case TYPE_EXTS:
17864 case TYPE_SHIFT:
17865 case TYPE_INSERT_WORD:
17866 case TYPE_INSERT_DWORD:
17867 case TYPE_FPLOAD_U:
17868 case TYPE_FPLOAD_UX:
17869 case TYPE_STORE_U:
17870 case TYPE_STORE_UX:
17871 case TYPE_FPSTORE_U:
17872 case TYPE_FPSTORE_UX:
17873 {
17874 if (! store_data_bypass_p (dep_insn, insn))
17875 return 3;
17876 break;
17877 }
17878 case TYPE_IMUL:
17879 case TYPE_IMUL2:
17880 case TYPE_IMUL3:
17881 case TYPE_LMUL:
17882 case TYPE_IMUL_COMPARE:
17883 case TYPE_LMUL_COMPARE:
17884 {
17885 if (! store_data_bypass_p (dep_insn, insn))
17886 return 17;
17887 break;
17888 }
17889 case TYPE_IDIV:
17890 {
17891 if (! store_data_bypass_p (dep_insn, insn))
17892 return 45;
17893 break;
17894 }
17895 case TYPE_LDIV:
17896 {
17897 if (! store_data_bypass_p (dep_insn, insn))
17898 return 57;
17899 break;
17900 }
17901 default:
17902 break;
17903 }
17904 }
17905 break;
17906
17907 case TYPE_LOAD:
17908 case TYPE_LOAD_U:
17909 case TYPE_LOAD_UX:
17910 case TYPE_LOAD_EXT:
17911 case TYPE_LOAD_EXT_U:
17912 case TYPE_LOAD_EXT_UX:
17913 if ((rs6000_cpu == PROCESSOR_POWER6)
17914 && recog_memoized (dep_insn)
17915 && (INSN_CODE (dep_insn) >= 0))
17916 {
17917
17918 /* Adjust the cost for the case where the value written
17919 by a fixed point instruction is used within the address
17920 gen portion of a subsequent load(u)(x) */
17921 switch (get_attr_type (dep_insn))
17922 {
17923 case TYPE_LOAD:
17924 case TYPE_LOAD_U:
17925 case TYPE_LOAD_UX:
17926 case TYPE_CNTLZ:
17927 {
17928 if (set_to_load_agen (dep_insn, insn))
17929 return 4;
17930 break;
17931 }
17932 case TYPE_LOAD_EXT:
17933 case TYPE_LOAD_EXT_U:
17934 case TYPE_LOAD_EXT_UX:
17935 case TYPE_VAR_SHIFT_ROTATE:
17936 case TYPE_VAR_DELAYED_COMPARE:
17937 {
17938 if (set_to_load_agen (dep_insn, insn))
17939 return 6;
17940 break;
17941 }
17942 case TYPE_INTEGER:
17943 case TYPE_COMPARE:
17944 case TYPE_FAST_COMPARE:
17945 case TYPE_EXTS:
17946 case TYPE_SHIFT:
17947 case TYPE_INSERT_WORD:
17948 case TYPE_INSERT_DWORD:
17949 case TYPE_FPLOAD_U:
17950 case TYPE_FPLOAD_UX:
17951 case TYPE_STORE_U:
17952 case TYPE_STORE_UX:
17953 case TYPE_FPSTORE_U:
17954 case TYPE_FPSTORE_UX:
17955 {
17956 if (set_to_load_agen (dep_insn, insn))
17957 return 3;
17958 break;
17959 }
17960 case TYPE_IMUL:
17961 case TYPE_IMUL2:
17962 case TYPE_IMUL3:
17963 case TYPE_LMUL:
17964 case TYPE_IMUL_COMPARE:
17965 case TYPE_LMUL_COMPARE:
17966 {
17967 if (set_to_load_agen (dep_insn, insn))
17968 return 17;
17969 break;
17970 }
17971 case TYPE_IDIV:
17972 {
17973 if (set_to_load_agen (dep_insn, insn))
17974 return 45;
17975 break;
17976 }
17977 case TYPE_LDIV:
17978 {
17979 if (set_to_load_agen (dep_insn, insn))
17980 return 57;
17981 break;
17982 }
17983 default:
17984 break;
17985 }
17986 }
17987 break;
17988
17989 case TYPE_FPLOAD:
17990 if ((rs6000_cpu == PROCESSOR_POWER6)
17991 && recog_memoized (dep_insn)
17992 && (INSN_CODE (dep_insn) >= 0)
17993 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17994 return 2;
17995
17996 default:
17997 break;
17998 }
c9dbf840 17999
a251ffd0 18000 /* Fall out to return default cost. */
44cd321e
PS
18001 }
18002 break;
18003
18004 case REG_DEP_OUTPUT:
18005 /* Output dependency; DEP_INSN writes a register that INSN writes some
18006 cycles later. */
18007 if ((rs6000_cpu == PROCESSOR_POWER6)
18008 && recog_memoized (dep_insn)
18009 && (INSN_CODE (dep_insn) >= 0))
18010 {
18011 attr_type = get_attr_type (insn);
18012
18013 switch (attr_type)
18014 {
18015 case TYPE_FP:
18016 if (get_attr_type (dep_insn) == TYPE_FP)
18017 return 1;
18018 break;
18019 case TYPE_FPLOAD:
18020 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18021 return 2;
18022 break;
18023 default:
18024 break;
18025 }
18026 }
18027 case REG_DEP_ANTI:
18028 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18029 cycles later. */
18030 return 0;
18031
18032 default:
18033 gcc_unreachable ();
a251ffd0
TG
18034 }
18035
18036 return cost;
18037}
b6c9286a 18038
cbe26ab8 18039/* The function returns true if INSN is microcoded.
839a4992 18040 Return false otherwise. */
cbe26ab8
DN
18041
18042static bool
18043is_microcoded_insn (rtx insn)
18044{
18045 if (!insn || !INSN_P (insn)
18046 || GET_CODE (PATTERN (insn)) == USE
18047 || GET_CODE (PATTERN (insn)) == CLOBBER)
18048 return false;
18049
d296e02e
AP
18050 if (rs6000_cpu_attr == CPU_CELL)
18051 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18052
ec507f2d 18053 if (rs6000_sched_groups)
cbe26ab8
DN
18054 {
18055 enum attr_type type = get_attr_type (insn);
18056 if (type == TYPE_LOAD_EXT_U
18057 || type == TYPE_LOAD_EXT_UX
18058 || type == TYPE_LOAD_UX
18059 || type == TYPE_STORE_UX
18060 || type == TYPE_MFCR)
c4ad648e 18061 return true;
cbe26ab8
DN
18062 }
18063
18064 return false;
18065}
18066
cbe26ab8
DN
18067/* The function returns true if INSN is cracked into 2 instructions
18068 by the processor (and therefore occupies 2 issue slots). */
18069
18070static bool
18071is_cracked_insn (rtx insn)
18072{
18073 if (!insn || !INSN_P (insn)
18074 || GET_CODE (PATTERN (insn)) == USE
18075 || GET_CODE (PATTERN (insn)) == CLOBBER)
18076 return false;
18077
ec507f2d 18078 if (rs6000_sched_groups)
cbe26ab8
DN
18079 {
18080 enum attr_type type = get_attr_type (insn);
18081 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18082 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18083 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18084 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18085 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18086 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18087 || type == TYPE_IDIV || type == TYPE_LDIV
18088 || type == TYPE_INSERT_WORD)
18089 return true;
cbe26ab8
DN
18090 }
18091
18092 return false;
18093}
18094
18095/* The function returns true if INSN can be issued only from
a3c9585f 18096 the branch slot. */
cbe26ab8
DN
18097
18098static bool
18099is_branch_slot_insn (rtx insn)
18100{
18101 if (!insn || !INSN_P (insn)
18102 || GET_CODE (PATTERN (insn)) == USE
18103 || GET_CODE (PATTERN (insn)) == CLOBBER)
18104 return false;
18105
ec507f2d 18106 if (rs6000_sched_groups)
cbe26ab8
DN
18107 {
18108 enum attr_type type = get_attr_type (insn);
18109 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18110 return true;
cbe26ab8
DN
18111 return false;
18112 }
18113
18114 return false;
18115}
79ae11c4 18116
44cd321e
PS
 18117/* The function returns true if out_insn sets a value that is
 18118   used in the address generation computation of in_insn.  */
18119static bool
18120set_to_load_agen (rtx out_insn, rtx in_insn)
18121{
18122 rtx out_set, in_set;
18123
18124 /* For performance reasons, only handle the simple case where
18125 both loads are a single_set. */
18126 out_set = single_set (out_insn);
18127 if (out_set)
18128 {
18129 in_set = single_set (in_insn);
18130 if (in_set)
18131 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18132 }
18133
18134 return false;
18135}
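
/* For instance, if OUT_INSN is "addi 9,3,16" (a single_set of r9) and
   IN_INSN is "lwz 4,0(9)", the destination r9 appears inside the
   load's address, so the function returns true: the load's address
   generation depends on the preceding integer op.  */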
18136
 18137/* Return true if the target storage location of the store in insn1
 18138   is adjacent to the target storage location of the store in insn2.  */
18140
18141static bool
18142adjacent_mem_locations (rtx insn1, rtx insn2)
18143{
18144
e3a0e200
PB
18145 rtx a = get_store_dest (PATTERN (insn1));
18146 rtx b = get_store_dest (PATTERN (insn2));
18147
44cd321e
PS
18148 if ((GET_CODE (XEXP (a, 0)) == REG
18149 || (GET_CODE (XEXP (a, 0)) == PLUS
18150 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18151 && (GET_CODE (XEXP (b, 0)) == REG
18152 || (GET_CODE (XEXP (b, 0)) == PLUS
18153 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18154 {
18155 HOST_WIDE_INT val0 = 0, val1 = 0;
18156 rtx reg0, reg1;
18157 int val_diff;
18158
18159 if (GET_CODE (XEXP (a, 0)) == PLUS)
18160 {
18161 reg0 = XEXP (XEXP (a, 0), 0);
18162 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18163 }
18164 else
18165 reg0 = XEXP (a, 0);
18166
18167 if (GET_CODE (XEXP (b, 0)) == PLUS)
18168 {
18169 reg1 = XEXP (XEXP (b, 0), 0);
18170 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18171 }
18172 else
18173 reg1 = XEXP (b, 0);
18174
18175 val_diff = val1 - val0;
18176
18177 return ((REGNO (reg0) == REGNO (reg1))
18178 && (val_diff == INTVAL (MEM_SIZE (a))
18179 || val_diff == -INTVAL (MEM_SIZE (b))));
18180 }
18181
18182 return false;
18183}
18184
a4f6c312 18185/* Adjust the integer scheduling priority INSN_PRIORITY (INSN).
79ae11c4
DN
 18186   Increase the priority to execute INSN earlier, reduce the priority to
 18187   execute INSN later.  Do not define this hook if you do not need to
a4f6c312
SS
 18188   adjust the scheduling priorities of insns.  */
bef84347 18190
c237e94a 18191static int
a2369ed3 18192rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18193{
a4f6c312
SS
18194 /* On machines (like the 750) which have asymmetric integer units,
18195 where one integer unit can do multiply and divides and the other
18196 can't, reduce the priority of multiply/divide so it is scheduled
18197 before other integer operations. */
bef84347
VM
18198
18199#if 0
2c3c49de 18200 if (! INSN_P (insn))
bef84347
VM
18201 return priority;
18202
18203 if (GET_CODE (PATTERN (insn)) == USE)
18204 return priority;
18205
18206 switch (rs6000_cpu_attr) {
18207 case CPU_PPC750:
18208 switch (get_attr_type (insn))
18209 {
18210 default:
18211 break;
18212
18213 case TYPE_IMUL:
18214 case TYPE_IDIV:
3cb999d8
DE
18215 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18216 priority, priority);
bef84347
VM
18217 if (priority >= 0 && priority < 0x01000000)
18218 priority >>= 3;
18219 break;
18220 }
18221 }
18222#endif
18223
44cd321e 18224 if (insn_must_be_first_in_group (insn)
79ae11c4 18225 && reload_completed
f676971a 18226 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18227 && rs6000_sched_restricted_insns_priority)
18228 {
18229
c4ad648e
AM
18230 /* Prioritize insns that can be dispatched only in the first
18231 dispatch slot. */
79ae11c4 18232 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18233 /* Attach highest priority to insn. This means that in
18234 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18235 precede 'priority' (critical path) considerations. */
f676971a 18236 return current_sched_info->sched_max_insns_priority;
79ae11c4 18237 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18238 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18239 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18240 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18241 return (priority + 1);
18242 }
79ae11c4 18243
44cd321e
PS
18244 if (rs6000_cpu == PROCESSOR_POWER6
18245 && ((load_store_pendulum == -2 && is_load_insn (insn))
18246 || (load_store_pendulum == 2 && is_store_insn (insn))))
18247 /* Attach highest priority to insn if the scheduler has just issued two
18248 stores and this instruction is a load, or two loads and this instruction
18249 is a store. Power6 wants loads and stores scheduled alternately
 18250     when possible.  */
18251 return current_sched_info->sched_max_insns_priority;
18252
bef84347
VM
18253 return priority;
18254}
18255
d296e02e
AP
18256/* Return true if the instruction is nonpipelined on the Cell. */
18257static bool
18258is_nonpipeline_insn (rtx insn)
18259{
18260 enum attr_type type;
18261 if (!insn || !INSN_P (insn)
18262 || GET_CODE (PATTERN (insn)) == USE
18263 || GET_CODE (PATTERN (insn)) == CLOBBER)
18264 return false;
18265
18266 type = get_attr_type (insn);
18267 if (type == TYPE_IMUL
18268 || type == TYPE_IMUL2
18269 || type == TYPE_IMUL3
18270 || type == TYPE_LMUL
18271 || type == TYPE_IDIV
18272 || type == TYPE_LDIV
18273 || type == TYPE_SDIV
18274 || type == TYPE_DDIV
18275 || type == TYPE_SSQRT
18276 || type == TYPE_DSQRT
18277 || type == TYPE_MFCR
18278 || type == TYPE_MFCRF
18279 || type == TYPE_MFJMPR)
18280 {
18281 return true;
18282 }
18283 return false;
18284}
18285
18286
a4f6c312
SS
18287/* Return how many instructions the machine can issue per cycle. */
18288
c237e94a 18289static int
863d938c 18290rs6000_issue_rate (void)
b6c9286a 18291{
3317bab1
DE
18292 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18293 if (!reload_completed)
18294 return 1;
18295
b6c9286a 18296 switch (rs6000_cpu_attr) {
3cb999d8
DE
18297 case CPU_RIOS1: /* ? */
18298 case CPU_RS64A:
18299 case CPU_PPC601: /* ? */
ed947a96 18300 case CPU_PPC7450:
3cb999d8 18301 return 3;
b54cf83a 18302 case CPU_PPC440:
b6c9286a 18303 case CPU_PPC603:
bef84347 18304 case CPU_PPC750:
ed947a96 18305 case CPU_PPC7400:
be12c2b0 18306 case CPU_PPC8540:
d296e02e 18307 case CPU_CELL:
f676971a 18308 return 2;
3cb999d8 18309 case CPU_RIOS2:
b6c9286a 18310 case CPU_PPC604:
19684119 18311 case CPU_PPC604E:
b6c9286a 18312 case CPU_PPC620:
3cb999d8 18313 case CPU_PPC630:
b6c9286a 18314 return 4;
cbe26ab8 18315 case CPU_POWER4:
ec507f2d 18316 case CPU_POWER5:
44cd321e 18317 case CPU_POWER6:
cbe26ab8 18318 return 5;
b6c9286a
MM
18319 default:
18320 return 1;
18321 }
18322}
18323
be12c2b0
VM
18324/* Return how many instructions to look ahead for better insn
18325 scheduling. */
18326
18327static int
863d938c 18328rs6000_use_sched_lookahead (void)
be12c2b0
VM
18329{
18330 if (rs6000_cpu_attr == CPU_PPC8540)
18331 return 4;
d296e02e
AP
18332 if (rs6000_cpu_attr == CPU_CELL)
18333 return (reload_completed ? 8 : 0);
be12c2b0
VM
18334 return 0;
18335}
18336
d296e02e
AP
 18337/* We are choosing an insn from the ready queue.  Return nonzero if INSN can be chosen. */
18338static int
18339rs6000_use_sched_lookahead_guard (rtx insn)
18340{
18341 if (rs6000_cpu_attr != CPU_CELL)
18342 return 1;
18343
18344 if (insn == NULL_RTX || !INSN_P (insn))
18345 abort ();
982afe02 18346
d296e02e
AP
18347 if (!reload_completed
18348 || is_nonpipeline_insn (insn)
18349 || is_microcoded_insn (insn))
18350 return 0;
18351
18352 return 1;
18353}
18354
569fa502
DN
 18355/* Determine if PAT refers to memory.  */
18356
18357static bool
18358is_mem_ref (rtx pat)
18359{
18360 const char * fmt;
18361 int i, j;
18362 bool ret = false;
18363
18364 if (GET_CODE (pat) == MEM)
18365 return true;
18366
18367 /* Recursively process the pattern. */
18368 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18369
18370 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18371 {
18372 if (fmt[i] == 'e')
18373 ret |= is_mem_ref (XEXP (pat, i));
18374 else if (fmt[i] == 'E')
18375 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18376 ret |= is_mem_ref (XVECEXP (pat, i, j));
18377 }
18378
18379 return ret;
18380}
18381
18382/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18383
569fa502
DN
18384static bool
18385is_load_insn1 (rtx pat)
18386{
18387 if (!pat || pat == NULL_RTX)
18388 return false;
18389
18390 if (GET_CODE (pat) == SET)
18391 return is_mem_ref (SET_SRC (pat));
18392
18393 if (GET_CODE (pat) == PARALLEL)
18394 {
18395 int i;
18396
18397 for (i = 0; i < XVECLEN (pat, 0); i++)
18398 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18399 return true;
18400 }
18401
18402 return false;
18403}
18404
18405/* Determine if INSN loads from memory. */
18406
18407static bool
18408is_load_insn (rtx insn)
18409{
18410 if (!insn || !INSN_P (insn))
18411 return false;
18412
18413 if (GET_CODE (insn) == CALL_INSN)
18414 return false;
18415
18416 return is_load_insn1 (PATTERN (insn));
18417}
18418
18419/* Determine if PAT is a PATTERN of a store insn. */
18420
18421static bool
18422is_store_insn1 (rtx pat)
18423{
18424 if (!pat || pat == NULL_RTX)
18425 return false;
18426
18427 if (GET_CODE (pat) == SET)
18428 return is_mem_ref (SET_DEST (pat));
18429
18430 if (GET_CODE (pat) == PARALLEL)
18431 {
18432 int i;
18433
18434 for (i = 0; i < XVECLEN (pat, 0); i++)
18435 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18436 return true;
18437 }
18438
18439 return false;
18440}
18441
18442/* Determine if INSN stores to memory. */
18443
18444static bool
18445is_store_insn (rtx insn)
18446{
18447 if (!insn || !INSN_P (insn))
18448 return false;
18449
18450 return is_store_insn1 (PATTERN (insn));
18451}
18452
e3a0e200
PB
18453/* Return the dest of a store insn. */
18454
18455static rtx
18456get_store_dest (rtx pat)
18457{
18458 gcc_assert (is_store_insn1 (pat));
18459
18460 if (GET_CODE (pat) == SET)
18461 return SET_DEST (pat);
18462 else if (GET_CODE (pat) == PARALLEL)
18463 {
18464 int i;
18465
18466 for (i = 0; i < XVECLEN (pat, 0); i++)
18467 {
18468 rtx inner_pat = XVECEXP (pat, 0, i);
18469 if (GET_CODE (inner_pat) == SET
18470 && is_mem_ref (SET_DEST (inner_pat)))
 18471	    return SET_DEST (inner_pat);
18472 }
18473 }
18474 /* We shouldn't get here, because we should have either a simple
18475 store insn or a store with update which are covered above. */
18476 gcc_unreachable();
18477}
18478
569fa502
DN
18479/* Returns whether the dependence between INSN and NEXT is considered
18480 costly by the given target. */
18481
18482static bool
b198261f 18483rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18484{
b198261f
MK
18485 rtx insn;
18486 rtx next;
18487
aabcd309 18488  /* If the flag is not enabled, no dependence is considered costly;
f676971a 18489 allow all dependent insns in the same group.
569fa502
DN
18490 This is the most aggressive option. */
18491 if (rs6000_sched_costly_dep == no_dep_costly)
18492 return false;
18493
f676971a 18494  /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
18495 do not allow dependent instructions in the same group.
18496 This is the most conservative option. */
18497 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18498 return true;
569fa502 18499
b198261f
MK
18500 insn = DEP_PRO (dep);
18501 next = DEP_CON (dep);
18502
f676971a
EC
18503 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18504 && is_load_insn (next)
569fa502
DN
18505 && is_store_insn (insn))
18506 /* Prevent load after store in the same group. */
18507 return true;
18508
18509 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18510 && is_load_insn (next)
569fa502 18511 && is_store_insn (insn)
e2f6ff94 18512 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18513 /* Prevent load after store in the same group if it is a true
18514 dependence. */
569fa502 18515 return true;
f676971a
EC
18516
18517 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18518 and will not be scheduled in the same group. */
18519 if (rs6000_sched_costly_dep <= max_dep_latency
18520 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18521 return true;
18522
18523 return false;
18524}
18525
f676971a 18526/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18527 skipping any "non-active" insns - insns that will not actually occupy
18528 an issue slot. Return NULL_RTX if such an insn is not found. */
18529
18530static rtx
18531get_next_active_insn (rtx insn, rtx tail)
18532{
f489aff8 18533 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18534 return NULL_RTX;
18535
f489aff8 18536 while (1)
cbe26ab8 18537 {
f489aff8
AM
18538 insn = NEXT_INSN (insn);
18539 if (insn == NULL_RTX || insn == tail)
18540 return NULL_RTX;
cbe26ab8 18541
f489aff8
AM
18542 if (CALL_P (insn)
18543 || JUMP_P (insn)
18544 || (NONJUMP_INSN_P (insn)
18545 && GET_CODE (PATTERN (insn)) != USE
18546 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18547 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18548 break;
18549 }
18550 return insn;
cbe26ab8
DN
18551}
18552
44cd321e
PS
18553/* We are about to begin issuing insns for this clock cycle. */
18554
18555static int
18556rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18557 rtx *ready ATTRIBUTE_UNUSED,
18558 int *pn_ready ATTRIBUTE_UNUSED,
18559 int clock_var ATTRIBUTE_UNUSED)
18560{
d296e02e
AP
18561 int n_ready = *pn_ready;
18562
44cd321e
PS
18563 if (sched_verbose)
18564 fprintf (dump, "// rs6000_sched_reorder :\n");
18565
d296e02e
AP
 18566  /* Reorder the ready list, if the second to last ready insn
 18567     is a nonpipeline insn.  */
18568 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18569 {
18570 if (is_nonpipeline_insn (ready[n_ready - 1])
18571 && (recog_memoized (ready[n_ready - 2]) > 0))
18572 /* Simply swap first two insns. */
18573 {
18574 rtx tmp = ready[n_ready - 1];
18575 ready[n_ready - 1] = ready[n_ready - 2];
18576 ready[n_ready - 2] = tmp;
18577 }
18578 }
18579
44cd321e
PS
18580 if (rs6000_cpu == PROCESSOR_POWER6)
18581 load_store_pendulum = 0;
18582
18583 return rs6000_issue_rate ();
18584}
18585
18586/* Like rs6000_sched_reorder, but called after issuing each insn. */
18587
18588static int
18589rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18590 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18591{
18592 if (sched_verbose)
18593 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18594
18595 /* For Power6, we need to handle some special cases to try and keep the
18596 store queue from overflowing and triggering expensive flushes.
18597
18598 This code monitors how load and store instructions are being issued
18599 and skews the ready list one way or the other to increase the likelihood
18600 that a desired instruction is issued at the proper time.
18601
18602 A couple of things are done. First, we maintain a "load_store_pendulum"
18603 to track the current state of load/store issue.
18604
18605 - If the pendulum is at zero, then no loads or stores have been
18606 issued in the current cycle so we do nothing.
18607
18608 - If the pendulum is 1, then a single load has been issued in this
18609 cycle and we attempt to locate another load in the ready list to
18610 issue with it.
18611
2f8e468b 18612 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18613 issued in this cycle, so we increase the priority of the first load
 18614     issued in this cycle, so we increase the priority of the first load
     in the ready list to increase its likelihood of being chosen first
18615 in the next cycle.
18616
18617 - If the pendulum is -1, then a single store has been issued in this
18618 cycle and we attempt to locate another store in the ready list to
18619 issue with it, preferring a store to an adjacent memory location to
18620 facilitate store pairing in the store queue.
18621
18622 - If the pendulum is 2, then two loads have already been
18623 issued in this cycle, so we increase the priority of the first store
 18624       in the ready list to increase its likelihood of being chosen first
18625 in the next cycle.
18626
18627 - If the pendulum < -2 or > 2, then do nothing.
18628
18629 Note: This code covers the most common scenarios. There exist non
18630 load/store instructions which make use of the LSU and which
18631 would need to be accounted for to strictly model the behavior
18632 of the machine. Those instructions are currently unaccounted
18633 for to help minimize compile time overhead of this code.
18634 */
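  /* An illustrative trace (for exposition only): the pendulum starts each
     cycle at 0; issuing a store swings it to -1, so the ready list is
     scanned for a second store to pair with it; a second store swings it
     to -2, at which point the first load on the ready list gets a priority
     boost so that the next cycle is more likely to begin with a load.  */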
18635 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18636 {
18637 int pos;
18638 int i;
18639 rtx tmp;
18640
18641 if (is_store_insn (last_scheduled_insn))
18642 /* Issuing a store, swing the load_store_pendulum to the left */
18643 load_store_pendulum--;
18644 else if (is_load_insn (last_scheduled_insn))
18645 /* Issuing a load, swing the load_store_pendulum to the right */
18646 load_store_pendulum++;
18647 else
18648 return cached_can_issue_more;
18649
18650 /* If the pendulum is balanced, or there is only one instruction on
18651 the ready list, then all is well, so return. */
18652 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18653 return cached_can_issue_more;
18654
18655 if (load_store_pendulum == 1)
18656 {
18657 /* A load has been issued in this cycle. Scan the ready list
18658 for another load to issue with it */
18659 pos = *pn_ready-1;
18660
18661 while (pos >= 0)
18662 {
18663 if (is_load_insn (ready[pos]))
18664 {
 18665                  /* Found a load.  Move it to the head of the ready list,
 18666                     and adjust its priority so that it is more likely to
 18667                     stay there.  */
18668 tmp = ready[pos];
18669 for (i=pos; i<*pn_ready-1; i++)
18670 ready[i] = ready[i + 1];
18671 ready[*pn_ready-1] = tmp;
 18672                  if (INSN_PRIORITY_KNOWN (tmp))
18673 INSN_PRIORITY (tmp)++;
18674 break;
18675 }
18676 pos--;
18677 }
18678 }
18679 else if (load_store_pendulum == -2)
18680 {
18681 /* Two stores have been issued in this cycle. Increase the
18682 priority of the first load in the ready list to favor it for
18683 issuing in the next cycle. */
18684 pos = *pn_ready-1;
18685
18686 while (pos >= 0)
18687 {
18688 if (is_load_insn (ready[pos])
18689 && INSN_PRIORITY_KNOWN (ready[pos]))
18690 {
18691 INSN_PRIORITY (ready[pos])++;
18692
18693 /* Adjust the pendulum to account for the fact that a load
18694 was found and increased in priority. This is to prevent
18695 increasing the priority of multiple loads */
18696 load_store_pendulum--;
18697
18698 break;
18699 }
18700 pos--;
18701 }
18702 }
18703 else if (load_store_pendulum == -1)
18704 {
18705 /* A store has been issued in this cycle. Scan the ready list for
18706 another store to issue with it, preferring a store to an adjacent
18707 memory location */
18708 int first_store_pos = -1;
18709
18710 pos = *pn_ready-1;
18711
18712 while (pos >= 0)
18713 {
18714 if (is_store_insn (ready[pos]))
18715 {
18716 /* Maintain the index of the first store found on the
18717 list */
18718 if (first_store_pos == -1)
18719 first_store_pos = pos;
18720
18721 if (is_store_insn (last_scheduled_insn)
18722 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18723 {
18724 /* Found an adjacent store. Move it to the head of the
 18725                     ready list, and adjust its priority so that it is
 18726                     more likely to stay there.  */
18727 tmp = ready[pos];
18728 for (i=pos; i<*pn_ready-1; i++)
18729 ready[i] = ready[i + 1];
18730 ready[*pn_ready-1] = tmp;
 18731                  if (INSN_PRIORITY_KNOWN (tmp))
18732 INSN_PRIORITY (tmp)++;
18733 first_store_pos = -1;
18734
18735 break;
 18736                    }
18737 }
18738 pos--;
18739 }
18740
18741 if (first_store_pos >= 0)
18742 {
18743 /* An adjacent store wasn't found, but a non-adjacent store was,
18744 so move the non-adjacent store to the front of the ready
18745 list, and adjust its priority so that it is more likely to
18746 stay there. */
18747 tmp = ready[first_store_pos];
18748 for (i=first_store_pos; i<*pn_ready-1; i++)
18749 ready[i] = ready[i + 1];
18750 ready[*pn_ready-1] = tmp;
 18751            if (INSN_PRIORITY_KNOWN (tmp))
18752 INSN_PRIORITY (tmp)++;
18753 }
18754 }
18755 else if (load_store_pendulum == 2)
18756 {
18757 /* Two loads have been issued in this cycle. Increase the priority
18758 of the first store in the ready list to favor it for issuing in
18759 the next cycle. */
18760 pos = *pn_ready-1;
18761
18762 while (pos >= 0)
18763 {
18764 if (is_store_insn (ready[pos])
18765 && INSN_PRIORITY_KNOWN (ready[pos]))
18766 {
18767 INSN_PRIORITY (ready[pos])++;
18768
18769 /* Adjust the pendulum to account for the fact that a store
18770 was found and increased in priority. This is to prevent
18771 increasing the priority of multiple stores */
18772 load_store_pendulum++;
18773
18774 break;
18775 }
18776 pos--;
18777 }
18778 }
18779 }
18780
18781 return cached_can_issue_more;
18782}
18783
839a4992 18784/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18785 of group WHICH_GROUP.
18786
 18787   If WHICH_GROUP == current_group, this function will return true if INSN
 18788   causes the termination of the current group (i.e., the dispatch group to
 18789   which INSN belongs).  This means that INSN will be the last insn in the
 18790   group it belongs to.
 18791
 18792   If WHICH_GROUP == previous_group, this function will return true if INSN
 18793   causes the termination of the previous group (i.e., the dispatch group that
 18794   precedes the group to which INSN belongs).  This means that INSN will be
 18795   the first insn in the group it belongs to.  */
18796
18797static bool
18798insn_terminates_group_p (rtx insn, enum group_termination which_group)
18799{
44cd321e 18800 bool first, last;
cbe26ab8
DN
18801
18802 if (! insn)
18803 return false;
569fa502 18804
44cd321e
PS
18805 first = insn_must_be_first_in_group (insn);
18806 last = insn_must_be_last_in_group (insn);
cbe26ab8 18807
44cd321e 18808 if (first && last)
cbe26ab8
DN
18809 return true;
18810
18811 if (which_group == current_group)
44cd321e 18812 return last;
cbe26ab8 18813 else if (which_group == previous_group)
44cd321e
PS
18814 return first;
18815
18816 return false;
18817}
18818
18819
18820static bool
18821insn_must_be_first_in_group (rtx insn)
18822{
18823 enum attr_type type;
18824
18825 if (!insn
18826 || insn == NULL_RTX
18827 || GET_CODE (insn) == NOTE
18828 || GET_CODE (PATTERN (insn)) == USE
18829 || GET_CODE (PATTERN (insn)) == CLOBBER)
18830 return false;
18831
18832 switch (rs6000_cpu)
cbe26ab8 18833 {
44cd321e
PS
18834 case PROCESSOR_POWER5:
18835 if (is_cracked_insn (insn))
18836 return true;
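      /* Fall through: the POWER4 checks below also apply to POWER5.  */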
18837 case PROCESSOR_POWER4:
18838 if (is_microcoded_insn (insn))
18839 return true;
18840
18841 if (!rs6000_sched_groups)
18842 return false;
18843
18844 type = get_attr_type (insn);
18845
18846 switch (type)
18847 {
18848 case TYPE_MFCR:
18849 case TYPE_MFCRF:
18850 case TYPE_MTCR:
18851 case TYPE_DELAYED_CR:
18852 case TYPE_CR_LOGICAL:
18853 case TYPE_MTJMPR:
18854 case TYPE_MFJMPR:
18855 case TYPE_IDIV:
18856 case TYPE_LDIV:
18857 case TYPE_LOAD_L:
18858 case TYPE_STORE_C:
18859 case TYPE_ISYNC:
18860 case TYPE_SYNC:
18861 return true;
18862 default:
18863 break;
18864 }
18865 break;
18866 case PROCESSOR_POWER6:
18867 type = get_attr_type (insn);
18868
18869 switch (type)
18870 {
18871 case TYPE_INSERT_DWORD:
18872 case TYPE_EXTS:
18873 case TYPE_CNTLZ:
18874 case TYPE_SHIFT:
18875 case TYPE_VAR_SHIFT_ROTATE:
18876 case TYPE_TRAP:
18877 case TYPE_IMUL:
18878 case TYPE_IMUL2:
18879 case TYPE_IMUL3:
18880 case TYPE_LMUL:
18881 case TYPE_IDIV:
18882 case TYPE_INSERT_WORD:
18883 case TYPE_DELAYED_COMPARE:
18884 case TYPE_IMUL_COMPARE:
18885 case TYPE_LMUL_COMPARE:
18886 case TYPE_FPCOMPARE:
18887 case TYPE_MFCR:
18888 case TYPE_MTCR:
18889 case TYPE_MFJMPR:
18890 case TYPE_MTJMPR:
18891 case TYPE_ISYNC:
18892 case TYPE_SYNC:
18893 case TYPE_LOAD_L:
18894 case TYPE_STORE_C:
18895 case TYPE_LOAD_U:
18896 case TYPE_LOAD_UX:
18897 case TYPE_LOAD_EXT_UX:
18898 case TYPE_STORE_U:
18899 case TYPE_STORE_UX:
18900 case TYPE_FPLOAD_U:
18901 case TYPE_FPLOAD_UX:
18902 case TYPE_FPSTORE_U:
18903 case TYPE_FPSTORE_UX:
18904 return true;
18905 default:
18906 break;
18907 }
18908 break;
18909 default:
18910 break;
18911 }
18912
18913 return false;
18914}
18915
18916static bool
18917insn_must_be_last_in_group (rtx insn)
18918{
18919 enum attr_type type;
18920
18921 if (!insn
18922 || insn == NULL_RTX
18923 || GET_CODE (insn) == NOTE
18924 || GET_CODE (PATTERN (insn)) == USE
18925 || GET_CODE (PATTERN (insn)) == CLOBBER)
18926 return false;
18927
18928 switch (rs6000_cpu) {
18929 case PROCESSOR_POWER4:
18930 case PROCESSOR_POWER5:
18931 if (is_microcoded_insn (insn))
18932 return true;
18933
18934 if (is_branch_slot_insn (insn))
18935 return true;
18936
18937 break;
18938 case PROCESSOR_POWER6:
18939 type = get_attr_type (insn);
18940
18941 switch (type)
18942 {
18943 case TYPE_EXTS:
18944 case TYPE_CNTLZ:
18945 case TYPE_SHIFT:
18946 case TYPE_VAR_SHIFT_ROTATE:
18947 case TYPE_TRAP:
18948 case TYPE_IMUL:
18949 case TYPE_IMUL2:
18950 case TYPE_IMUL3:
18951 case TYPE_LMUL:
18952 case TYPE_IDIV:
18953 case TYPE_DELAYED_COMPARE:
18954 case TYPE_IMUL_COMPARE:
18955 case TYPE_LMUL_COMPARE:
18956 case TYPE_FPCOMPARE:
18957 case TYPE_MFCR:
18958 case TYPE_MTCR:
18959 case TYPE_MFJMPR:
18960 case TYPE_MTJMPR:
18961 case TYPE_ISYNC:
18962 case TYPE_SYNC:
18963 case TYPE_LOAD_L:
18964 case TYPE_STORE_C:
18965 return true;
18966 default:
18967 break;
cbe26ab8 18968 }
44cd321e
PS
18969 break;
18970 default:
18971 break;
18972 }
cbe26ab8
DN
18973
18974 return false;
18975}
18976
839a4992 18977/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18978 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18979
18980static bool
18981is_costly_group (rtx *group_insns, rtx next_insn)
18982{
18983 int i;
cbe26ab8
DN
18984 int issue_rate = rs6000_issue_rate ();
18985
18986 for (i = 0; i < issue_rate; i++)
18987 {
e2f6ff94
MK
18988 sd_iterator_def sd_it;
18989 dep_t dep;
cbe26ab8 18990 rtx insn = group_insns[i];
b198261f 18991
cbe26ab8 18992 if (!insn)
c4ad648e 18993 continue;
b198261f 18994
e2f6ff94 18995 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 18996 {
b198261f
MK
18997 rtx next = DEP_CON (dep);
18998
18999 if (next == next_insn
19000 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19001 return true;
c4ad648e 19002 }
cbe26ab8
DN
19003 }
19004
19005 return false;
19006}
19007
f676971a 19008/* Helper for the function redefine_groups.
cbe26ab8
DN
19009 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19010 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19011 to keep it "far" (in a separate group) from GROUP_INSNS, following
19012 one of the following schemes, depending on the value of the flag
 19013   -minsert-sched-nops = X:
19014 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19015 in order to force NEXT_INSN into a separate group.
f676971a
EC
19016 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19017 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19018 insertion (has a group just ended, how many vacant issue slots remain in the
19019 last group, and how many dispatch groups were encountered so far). */
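/* For example (illustrative): with the POWER4/POWER5 issue rate of 5, if the
   current group already holds three insns and NEXT_INSN is not a branch,
   scheme (1) emits a single nop, since leaving only the branch slot vacant
   is already enough to push NEXT_INSN into a new group, whereas scheme (2)
   with X == 2 emits exactly two nops regardless.  */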
19020
f676971a 19021static int
c4ad648e
AM
19022force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19023 rtx next_insn, bool *group_end, int can_issue_more,
19024 int *group_count)
cbe26ab8
DN
19025{
19026 rtx nop;
19027 bool force;
19028 int issue_rate = rs6000_issue_rate ();
19029 bool end = *group_end;
19030 int i;
19031
19032 if (next_insn == NULL_RTX)
19033 return can_issue_more;
19034
19035 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19036 return can_issue_more;
19037
19038 force = is_costly_group (group_insns, next_insn);
19039 if (!force)
19040 return can_issue_more;
19041
19042 if (sched_verbose > 6)
19043 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19044 *group_count ,can_issue_more);
cbe26ab8
DN
19045
19046 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19047 {
19048 if (*group_end)
c4ad648e 19049 can_issue_more = 0;
cbe26ab8
DN
19050
19051 /* Since only a branch can be issued in the last issue_slot, it is
19052 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19053 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19054 in this case the last nop will start a new group and the branch
19055 will be forced to the new group. */
cbe26ab8 19056 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19057 can_issue_more--;
cbe26ab8
DN
19058
19059 while (can_issue_more > 0)
c4ad648e 19060 {
9390387d 19061 nop = gen_nop ();
c4ad648e
AM
19062 emit_insn_before (nop, next_insn);
19063 can_issue_more--;
19064 }
cbe26ab8
DN
19065
19066 *group_end = true;
19067 return 0;
f676971a 19068 }
cbe26ab8
DN
19069
19070 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19071 {
19072 int n_nops = rs6000_sched_insert_nops;
19073
f676971a 19074 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19075 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19076 if (can_issue_more == 0)
c4ad648e 19077 can_issue_more = issue_rate;
cbe26ab8
DN
19078 can_issue_more--;
19079 if (can_issue_more == 0)
c4ad648e
AM
19080 {
19081 can_issue_more = issue_rate - 1;
19082 (*group_count)++;
19083 end = true;
19084 for (i = 0; i < issue_rate; i++)
19085 {
19086 group_insns[i] = 0;
19087 }
19088 }
cbe26ab8
DN
19089
19090 while (n_nops > 0)
c4ad648e
AM
19091 {
19092 nop = gen_nop ();
19093 emit_insn_before (nop, next_insn);
19094 if (can_issue_more == issue_rate - 1) /* new group begins */
19095 end = false;
19096 can_issue_more--;
19097 if (can_issue_more == 0)
19098 {
19099 can_issue_more = issue_rate - 1;
19100 (*group_count)++;
19101 end = true;
19102 for (i = 0; i < issue_rate; i++)
19103 {
19104 group_insns[i] = 0;
19105 }
19106 }
19107 n_nops--;
19108 }
cbe26ab8
DN
19109
19110 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19111 can_issue_more++;
cbe26ab8 19112
c4ad648e
AM
19113 /* Is next_insn going to start a new group? */
19114 *group_end
19115 = (end
cbe26ab8
DN
19116 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19117 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19118 || (can_issue_more < issue_rate &&
c4ad648e 19119 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19120 if (*group_end && end)
c4ad648e 19121 (*group_count)--;
cbe26ab8
DN
19122
19123 if (sched_verbose > 6)
c4ad648e
AM
19124 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19125 *group_count, can_issue_more);
f676971a
EC
19126 return can_issue_more;
19127 }
cbe26ab8
DN
19128
19129 return can_issue_more;
19130}
19131
19132/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19133 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19134 form in practice. It tries to achieve this synchronization by forcing the
19135 estimated processor grouping on the compiler (as opposed to the function
 19136   'pad_groups' which tries to force the scheduler's grouping on the processor).
19137
19138 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19139 examines the (estimated) dispatch groups that will be formed by the processor
19140 dispatcher. It marks these group boundaries to reflect the estimated
19141 processor grouping, overriding the grouping that the scheduler had marked.
19142 Depending on the value of the flag '-minsert-sched-nops' this function can
19143 force certain insns into separate groups or force a certain distance between
19144 them by inserting nops, for example, if there exists a "costly dependence"
19145 between the insns.
19146
19147 The function estimates the group boundaries that the processor will form as
0fa2e4df 19148 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19149 each insn. A subsequent insn will start a new group if one of the following
19150 4 cases applies:
19151 - no more vacant issue slots remain in the current dispatch group.
19152 - only the last issue slot, which is the branch slot, is vacant, but the next
19153 insn is not a branch.
 19154   - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19155 which means that a cracked insn (which occupies two issue slots) can't be
19156 issued in this group.
f676971a 19157 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19158 start a new group. */
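/* For example (illustrative): on POWER4/POWER5 a dispatch group has five
   slots and only a branch can occupy the last one, so once four non-branch
   insns have been placed in the current group, a following non-branch insn
   is estimated to start a new group (the second case above).  */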
19159
19160static int
19161redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19162{
19163 rtx insn, next_insn;
19164 int issue_rate;
19165 int can_issue_more;
19166 int slot, i;
19167 bool group_end;
19168 int group_count = 0;
19169 rtx *group_insns;
19170
19171 /* Initialize. */
19172 issue_rate = rs6000_issue_rate ();
19173 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19174 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19175 {
19176 group_insns[i] = 0;
19177 }
19178 can_issue_more = issue_rate;
19179 slot = 0;
19180 insn = get_next_active_insn (prev_head_insn, tail);
19181 group_end = false;
19182
19183 while (insn != NULL_RTX)
19184 {
19185 slot = (issue_rate - can_issue_more);
19186 group_insns[slot] = insn;
19187 can_issue_more =
c4ad648e 19188 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19189 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19190 can_issue_more = 0;
cbe26ab8
DN
19191
19192 next_insn = get_next_active_insn (insn, tail);
19193 if (next_insn == NULL_RTX)
c4ad648e 19194 return group_count + 1;
cbe26ab8 19195
c4ad648e
AM
19196 /* Is next_insn going to start a new group? */
19197 group_end
19198 = (can_issue_more == 0
19199 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19200 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19201 || (can_issue_more < issue_rate &&
19202 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19203
f676971a 19204 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19205 next_insn, &group_end, can_issue_more,
19206 &group_count);
cbe26ab8
DN
19207
19208 if (group_end)
c4ad648e
AM
19209 {
19210 group_count++;
19211 can_issue_more = 0;
19212 for (i = 0; i < issue_rate; i++)
19213 {
19214 group_insns[i] = 0;
19215 }
19216 }
cbe26ab8
DN
19217
19218 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19219 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19220 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19221 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19222
19223 insn = next_insn;
19224 if (can_issue_more == 0)
c4ad648e
AM
19225 can_issue_more = issue_rate;
19226 } /* while */
cbe26ab8
DN
19227
19228 return group_count;
19229}
19230
19231/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19232 dispatch group boundaries that the scheduler had marked. Pad with nops
19233 any dispatch groups which have vacant issue slots, in order to force the
19234 scheduler's grouping on the processor dispatcher. The function
19235 returns the number of dispatch groups found. */
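/* For example (illustrative): if the scheduler marked a group boundary while
   two issue slots were still vacant and the next insn is not a branch, a
   single nop is emitted; the branch slot is left empty, which by itself
   terminates the group on the dispatcher.  */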
19236
19237static int
19238pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19239{
19240 rtx insn, next_insn;
19241 rtx nop;
19242 int issue_rate;
19243 int can_issue_more;
19244 int group_end;
19245 int group_count = 0;
19246
19247 /* Initialize issue_rate. */
19248 issue_rate = rs6000_issue_rate ();
19249 can_issue_more = issue_rate;
19250
19251 insn = get_next_active_insn (prev_head_insn, tail);
19252 next_insn = get_next_active_insn (insn, tail);
19253
19254 while (insn != NULL_RTX)
19255 {
19256 can_issue_more =
19257 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19258
19259 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19260
19261 if (next_insn == NULL_RTX)
c4ad648e 19262 break;
cbe26ab8
DN
19263
19264 if (group_end)
c4ad648e
AM
19265 {
19266 /* If the scheduler had marked group termination at this location
 19267 	     (between insn and next_insn), and neither insn nor next_insn will
19268 force group termination, pad the group with nops to force group
19269 termination. */
19270 if (can_issue_more
19271 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19272 && !insn_terminates_group_p (insn, current_group)
19273 && !insn_terminates_group_p (next_insn, previous_group))
19274 {
9390387d 19275 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19276 can_issue_more--;
19277
19278 while (can_issue_more)
19279 {
19280 nop = gen_nop ();
19281 emit_insn_before (nop, next_insn);
19282 can_issue_more--;
19283 }
19284 }
19285
19286 can_issue_more = issue_rate;
19287 group_count++;
19288 }
cbe26ab8
DN
19289
19290 insn = next_insn;
19291 next_insn = get_next_active_insn (insn, tail);
19292 }
19293
19294 return group_count;
19295}
19296
44cd321e
PS
19297/* We're beginning a new block. Initialize data structures as necessary. */
19298
19299static void
19300rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19301 int sched_verbose ATTRIBUTE_UNUSED,
19302 int max_ready ATTRIBUTE_UNUSED)
982afe02 19303{
44cd321e
PS
19304 last_scheduled_insn = NULL_RTX;
19305 load_store_pendulum = 0;
19306}
19307
cbe26ab8
DN
 19308/* The following function is called at the end of scheduling BB.
 19309   After reload, it inserts nops to enforce insn group bundling.  */
19310
19311static void
38f391a5 19312rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19313{
19314 int n_groups;
19315
19316 if (sched_verbose)
19317 fprintf (dump, "=== Finishing schedule.\n");
19318
ec507f2d 19319 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19320 {
19321 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19322 return;
cbe26ab8
DN
19323
19324 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19325 n_groups = pad_groups (dump, sched_verbose,
19326 current_sched_info->prev_head,
19327 current_sched_info->next_tail);
cbe26ab8 19328 else
c4ad648e
AM
19329 n_groups = redefine_groups (dump, sched_verbose,
19330 current_sched_info->prev_head,
19331 current_sched_info->next_tail);
cbe26ab8
DN
19332
19333 if (sched_verbose >= 6)
19334 {
19335 fprintf (dump, "ngroups = %d\n", n_groups);
19336 print_rtl (dump, current_sched_info->prev_head);
19337 fprintf (dump, "Done finish_sched\n");
19338 }
19339 }
19340}
b6c9286a 19341\f
b6c9286a
MM
19342/* Length in units of the trampoline for entering a nested function. */
19343
19344int
863d938c 19345rs6000_trampoline_size (void)
b6c9286a
MM
19346{
19347 int ret = 0;
19348
19349 switch (DEFAULT_ABI)
19350 {
19351 default:
37409796 19352 gcc_unreachable ();
b6c9286a
MM
19353
19354 case ABI_AIX:
8f802bfb 19355 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19356 break;
19357
4dabc42d 19358 case ABI_DARWIN:
b6c9286a 19359 case ABI_V4:
03a7e1a5 19360 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19361 break;
b6c9286a
MM
19362 }
19363
19364 return ret;
19365}
19366
19367/* Emit RTL insns to initialize the variable parts of a trampoline.
19368 FNADDR is an RTX for the address of the function's pure code.
19369 CXT is an RTX for the static chain value for the function. */
19370
19371void
a2369ed3 19372rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19373{
8bd04c56 19374 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19375 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19376
19377 switch (DEFAULT_ABI)
19378 {
19379 default:
37409796 19380 gcc_unreachable ();
b6c9286a 19381
8bd04c56 19382/* Macros to shorten the code expansions below. */
9613eaff 19383#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19384#define MEM_PLUS(addr,offset) \
9613eaff 19385 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19386
b6c9286a
MM
19387 /* Under AIX, just build the 3 word function descriptor */
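   /* Illustrative layout: three regsize-byte words at ADDR holding
      { entry point, TOC pointer, static chain }, i.e. 12 bytes when
      TARGET_32BIT (regsize == 4) and 24 bytes otherwise, matching
      rs6000_trampoline_size above.  */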
19388 case ABI_AIX:
8bd04c56 19389 {
9613eaff
SH
19390 rtx fn_reg = gen_reg_rtx (Pmode);
19391 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19392 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19393 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19394 emit_move_insn (MEM_DEREF (addr), fn_reg);
19395 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19396 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19397 }
b6c9286a
MM
19398 break;
19399
4dabc42d
TC
19400 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19401 case ABI_DARWIN:
b6c9286a 19402 case ABI_V4:
9613eaff 19403 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19404 FALSE, VOIDmode, 4,
9613eaff 19405 addr, Pmode,
eaf1bcf1 19406 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19407 fnaddr, Pmode,
19408 ctx_reg, Pmode);
b6c9286a 19409 break;
b6c9286a
MM
19410 }
19411
19412 return;
19413}
7509c759
MM
19414
19415\f
91d231cb 19416/* Table of valid machine attributes. */
a4f6c312 19417
91d231cb 19418const struct attribute_spec rs6000_attribute_table[] =
7509c759 19419{
91d231cb 19420 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19421 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19422 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19423 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19424 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19425 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19426#ifdef SUBTARGET_ATTRIBUTE_TABLE
19427 SUBTARGET_ATTRIBUTE_TABLE,
19428#endif
a5c76ee6 19429 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19430};
7509c759 19431
8bb418a3
ZL
19432/* Handle the "altivec" attribute. The attribute may have
19433 arguments as follows:
f676971a 19434
8bb418a3
ZL
19435 __attribute__((altivec(vector__)))
19436 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19437 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19438
19439 and may appear more than once (e.g., 'vector bool char') in a
19440 given declaration. */
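/* For example (illustrative): 'vector unsigned int' reaches this handler as
   altivec(vector__) applied to 'unsigned int' (SImode) and is rewritten to
   unsigned_V4SI_type_node, while the bool__ form applied to 'unsigned char'
   (QImode) selects bool_V16QI_type_node.  */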
19441
19442static tree
f90ac3f0
UP
19443rs6000_handle_altivec_attribute (tree *node,
19444 tree name ATTRIBUTE_UNUSED,
19445 tree args,
8bb418a3
ZL
19446 int flags ATTRIBUTE_UNUSED,
19447 bool *no_add_attrs)
19448{
19449 tree type = *node, result = NULL_TREE;
19450 enum machine_mode mode;
19451 int unsigned_p;
19452 char altivec_type
19453 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19454 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19455 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19456 : '?');
8bb418a3
ZL
19457
19458 while (POINTER_TYPE_P (type)
19459 || TREE_CODE (type) == FUNCTION_TYPE
19460 || TREE_CODE (type) == METHOD_TYPE
19461 || TREE_CODE (type) == ARRAY_TYPE)
19462 type = TREE_TYPE (type);
19463
19464 mode = TYPE_MODE (type);
19465
f90ac3f0
UP
19466 /* Check for invalid AltiVec type qualifiers. */
19467 if (type == long_unsigned_type_node || type == long_integer_type_node)
19468 {
19469 if (TARGET_64BIT)
19470 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19471 else if (rs6000_warn_altivec_long)
d4ee4d25 19472 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19473 }
19474 else if (type == long_long_unsigned_type_node
19475 || type == long_long_integer_type_node)
19476 error ("use of %<long long%> in AltiVec types is invalid");
19477 else if (type == double_type_node)
19478 error ("use of %<double%> in AltiVec types is invalid");
19479 else if (type == long_double_type_node)
19480 error ("use of %<long double%> in AltiVec types is invalid");
19481 else if (type == boolean_type_node)
19482 error ("use of boolean types in AltiVec types is invalid");
19483 else if (TREE_CODE (type) == COMPLEX_TYPE)
19484 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19485 else if (DECIMAL_FLOAT_MODE_P (mode))
19486 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19487
19488 switch (altivec_type)
19489 {
19490 case 'v':
8df83eae 19491 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19492 switch (mode)
19493 {
c4ad648e
AM
19494 case SImode:
19495 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19496 break;
19497 case HImode:
19498 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19499 break;
19500 case QImode:
19501 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19502 break;
19503 case SFmode: result = V4SF_type_node; break;
19504 /* If the user says 'vector int bool', we may be handed the 'bool'
19505 attribute _before_ the 'vector' attribute, and so select the
19506 proper type in the 'b' case below. */
19507 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19508 result = type;
19509 default: break;
8bb418a3
ZL
19510 }
19511 break;
19512 case 'b':
19513 switch (mode)
19514 {
c4ad648e
AM
19515 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19516 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19517 case QImode: case V16QImode: result = bool_V16QI_type_node;
19518 default: break;
8bb418a3
ZL
19519 }
19520 break;
19521 case 'p':
19522 switch (mode)
19523 {
c4ad648e
AM
19524 case V8HImode: result = pixel_V8HI_type_node;
19525 default: break;
8bb418a3
ZL
19526 }
19527 default: break;
19528 }
19529
7958a2a6
FJ
19530 if (result && result != type && TYPE_READONLY (type))
19531 result = build_qualified_type (result, TYPE_QUAL_CONST);
19532
8bb418a3
ZL
19533 *no_add_attrs = true; /* No need to hang on to the attribute. */
19534
f90ac3f0 19535 if (result)
8bb418a3
ZL
19536 *node = reconstruct_complex_type (*node, result);
19537
19538 return NULL_TREE;
19539}
19540
f18eca82
ZL
19541/* AltiVec defines four built-in scalar types that serve as vector
19542 elements; we must teach the compiler how to mangle them. */
19543
19544static const char *
3101faab 19545rs6000_mangle_type (const_tree type)
f18eca82 19546{
608063c3
JB
19547 type = TYPE_MAIN_VARIANT (type);
19548
19549 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19550 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19551 return NULL;
19552
f18eca82
ZL
19553 if (type == bool_char_type_node) return "U6__boolc";
19554 if (type == bool_short_type_node) return "U6__bools";
19555 if (type == pixel_type_node) return "u7__pixel";
19556 if (type == bool_int_type_node) return "U6__booli";
19557
337bde91
DE
19558 /* Mangle IBM extended float long double as `g' (__float128) on
19559 powerpc*-linux where long-double-64 previously was the default. */
19560 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19561 && TARGET_ELF
19562 && TARGET_LONG_DOUBLE_128
19563 && !TARGET_IEEEQUAD)
19564 return "g";
19565
f18eca82
ZL
19566 /* For all other types, use normal C++ mangling. */
19567 return NULL;
19568}
19569
a5c76ee6
ZW
19570/* Handle a "longcall" or "shortcall" attribute; arguments as in
19571 struct attribute_spec.handler. */
a4f6c312 19572
91d231cb 19573static tree
f676971a
EC
19574rs6000_handle_longcall_attribute (tree *node, tree name,
19575 tree args ATTRIBUTE_UNUSED,
19576 int flags ATTRIBUTE_UNUSED,
a2369ed3 19577 bool *no_add_attrs)
91d231cb
JM
19578{
19579 if (TREE_CODE (*node) != FUNCTION_TYPE
19580 && TREE_CODE (*node) != FIELD_DECL
19581 && TREE_CODE (*node) != TYPE_DECL)
19582 {
5c498b10 19583 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19584 IDENTIFIER_POINTER (name));
19585 *no_add_attrs = true;
19586 }
6a4cee5f 19587
91d231cb 19588 return NULL_TREE;
7509c759
MM
19589}
19590
a5c76ee6
ZW
19591/* Set longcall attributes on all functions declared when
19592 rs6000_default_long_calls is true. */
19593static void
a2369ed3 19594rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19595{
19596 if (rs6000_default_long_calls
19597 && (TREE_CODE (type) == FUNCTION_TYPE
19598 || TREE_CODE (type) == METHOD_TYPE))
19599 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19600 NULL_TREE,
19601 TYPE_ATTRIBUTES (type));
16d6f994
EC
19602
19603#if TARGET_MACHO
19604 darwin_set_default_type_attributes (type);
19605#endif
a5c76ee6
ZW
19606}
19607
3cb999d8
DE
19608/* Return a reference suitable for calling a function with the
19609 longcall attribute. */
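/* For example (illustrative): a longcall to '.foo' has its leading dot
   stripped and the resulting address forced into a register, so the call
   is made indirectly rather than through a direct branch whose displacement
   might not reach the callee.  */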
a4f6c312 19610
9390387d 19611rtx
a2369ed3 19612rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19613{
d330fd93 19614 const char *call_name;
6a4cee5f
MM
19615 tree node;
19616
19617 if (GET_CODE (call_ref) != SYMBOL_REF)
19618 return call_ref;
19619
19620 /* System V adds '.' to the internal name, so skip them. */
19621 call_name = XSTR (call_ref, 0);
19622 if (*call_name == '.')
19623 {
19624 while (*call_name == '.')
19625 call_name++;
19626
19627 node = get_identifier (call_name);
39403d82 19628 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19629 }
19630
19631 return force_reg (Pmode, call_ref);
19632}
7509c759 19633\f
77ccdfed
EC
19634#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19635#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19636#endif
19637
19638/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19639 struct attribute_spec.handler. */
19640static tree
19641rs6000_handle_struct_attribute (tree *node, tree name,
19642 tree args ATTRIBUTE_UNUSED,
19643 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19644{
19645 tree *type = NULL;
19646 if (DECL_P (*node))
19647 {
19648 if (TREE_CODE (*node) == TYPE_DECL)
19649 type = &TREE_TYPE (*node);
19650 }
19651 else
19652 type = node;
19653
19654 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19655 || TREE_CODE (*type) == UNION_TYPE)))
19656 {
19657 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19658 *no_add_attrs = true;
19659 }
19660
19661 else if ((is_attribute_p ("ms_struct", name)
19662 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19663 || ((is_attribute_p ("gcc_struct", name)
19664 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19665 {
19666 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19667 IDENTIFIER_POINTER (name));
19668 *no_add_attrs = true;
19669 }
19670
19671 return NULL_TREE;
19672}
19673
19674static bool
3101faab 19675rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19676{
19677 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19678 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19679 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19680}
19681\f
b64a1b53
RH
19682#ifdef USING_ELFOS_H
19683
d6b5193b 19684/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19685
d6b5193b
RS
19686static void
19687rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19688{
19689 if (DEFAULT_ABI == ABI_AIX
19690 && TARGET_MINIMAL_TOC
19691 && !TARGET_RELOCATABLE)
19692 {
19693 if (!toc_initialized)
19694 {
19695 toc_initialized = 1;
19696 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19697 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19698 fprintf (asm_out_file, "\t.tc ");
19699 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19700 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19701 fprintf (asm_out_file, "\n");
19702
19703 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19704 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19705 fprintf (asm_out_file, " = .+32768\n");
19706 }
19707 else
19708 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19709 }
19710 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19711 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19712 else
19713 {
19714 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19715 if (!toc_initialized)
19716 {
19717 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19718 fprintf (asm_out_file, " = .+32768\n");
19719 toc_initialized = 1;
19720 }
19721 }
19722}
19723
19724/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19725
b64a1b53 19726static void
d6b5193b
RS
19727rs6000_elf_asm_init_sections (void)
19728{
19729 toc_section
19730 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19731
19732 sdata2_section
19733 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19734 SDATA2_SECTION_ASM_OP);
19735}
19736
19737/* Implement TARGET_SELECT_RTX_SECTION. */
19738
19739static section *
f676971a 19740rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19741 unsigned HOST_WIDE_INT align)
7509c759 19742{
a9098fd0 19743 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19744 return toc_section;
7509c759 19745 else
d6b5193b 19746 return default_elf_select_rtx_section (mode, x, align);
7509c759 19747}
d9407988 19748\f
d1908feb
JJ
19749/* For a SYMBOL_REF, set generic flags and then perform some
19750 target-specific processing.
19751
d1908feb
JJ
19752 When the AIX ABI is requested on a non-AIX system, replace the
19753 function name with the real name (with a leading .) rather than the
19754 function descriptor name. This saves a lot of overriding code to
19755 read the prefixes. */
d9407988 19756
fb49053f 19757static void
a2369ed3 19758rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19759{
d1908feb 19760 default_encode_section_info (decl, rtl, first);
b2003250 19761
d1908feb
JJ
19762 if (first
19763 && TREE_CODE (decl) == FUNCTION_DECL
19764 && !TARGET_AIX
19765 && DEFAULT_ABI == ABI_AIX)
d9407988 19766 {
c6a2438a 19767 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19768 size_t len = strlen (XSTR (sym_ref, 0));
19769 char *str = alloca (len + 2);
19770 str[0] = '.';
19771 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19772 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19773 }
d9407988
MM
19774}
19775
21d9bb3f
PB
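/* Return true if SECTION either equals TEMPLATE or is a subsection of it;
   e.g. (illustrative) ".sdata2" and ".sdata2.foo" both match the template
   ".sdata2", whereas ".sdata23" does not.  */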
19776static inline bool
19777compare_section_name (const char *section, const char *template)
19778{
19779 int len;
19780
19781 len = strlen (template);
19782 return (strncmp (section, template, len) == 0
19783 && (section[len] == 0 || section[len] == '.'));
19784}
19785
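/* Return true if DECL should be placed in one of the small data sections;
   roughly (a summary of the checks below), explicitly named small-data
   sections qualify, and so do other objects no larger than g_switch_value,
   while strings and functions never do.  */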
c1b7d95a 19786bool
3101faab 19787rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19788{
19789 if (rs6000_sdata == SDATA_NONE)
19790 return false;
19791
7482ad25
AF
19792 /* We want to merge strings, so we never consider them small data. */
19793 if (TREE_CODE (decl) == STRING_CST)
19794 return false;
19795
19796 /* Functions are never in the small data area. */
19797 if (TREE_CODE (decl) == FUNCTION_DECL)
19798 return false;
19799
0e5dbd9b
DE
19800 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19801 {
19802 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
19803 if (compare_section_name (section, ".sdata")
19804 || compare_section_name (section, ".sdata2")
19805 || compare_section_name (section, ".gnu.linkonce.s")
19806 || compare_section_name (section, ".sbss")
19807 || compare_section_name (section, ".sbss2")
19808 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
19809 || strcmp (section, ".PPC.EMB.sdata0") == 0
19810 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19811 return true;
19812 }
19813 else
19814 {
19815 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19816
19817 if (size > 0
307b599c 19818 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19819 /* If it's not public, and we're not going to reference it there,
19820 there's no need to put it in the small data section. */
0e5dbd9b
DE
19821 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19822 return true;
19823 }
19824
19825 return false;
19826}
19827
b91da81f 19828#endif /* USING_ELFOS_H */
aacd3885
RS
19829\f
19830/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19831
aacd3885 19832static bool
3101faab 19833rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
19834{
19835 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19836}
a6c2a102 19837\f
000034eb 19838/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19839 ADDR can be effectively incremented by incrementing REG.
19840
19841 r0 is special and we must not select it as an address
19842 register by this routine since our caller will try to
19843 increment the returned register via an "la" instruction. */
000034eb 19844
9390387d 19845rtx
a2369ed3 19846find_addr_reg (rtx addr)
000034eb
DE
19847{
19848 while (GET_CODE (addr) == PLUS)
19849 {
02441cd6
JL
19850 if (GET_CODE (XEXP (addr, 0)) == REG
19851 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19852 addr = XEXP (addr, 0);
02441cd6
JL
19853 else if (GET_CODE (XEXP (addr, 1)) == REG
19854 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19855 addr = XEXP (addr, 1);
19856 else if (CONSTANT_P (XEXP (addr, 0)))
19857 addr = XEXP (addr, 1);
19858 else if (CONSTANT_P (XEXP (addr, 1)))
19859 addr = XEXP (addr, 0);
19860 else
37409796 19861 gcc_unreachable ();
000034eb 19862 }
37409796
NS
19863 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19864 return addr;
000034eb
DE
19865}
19866
a6c2a102 19867void
a2369ed3 19868rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19869{
19870 fatal_insn ("bad address", op);
19871}
c8023011 19872
ee890fe2
SS
19873#if TARGET_MACHO
19874
efdba735 19875static tree branch_island_list = 0;
ee890fe2 19876
efdba735
SH
19877/* Remember to generate a branch island for far calls to the given
19878 function. */
ee890fe2 19879
f676971a 19880static void
c4ad648e
AM
19881add_compiler_branch_island (tree label_name, tree function_name,
19882 int line_number)
ee890fe2 19883{
efdba735 19884 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19885 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19886 TREE_CHAIN (branch_island) = branch_island_list;
19887 branch_island_list = branch_island;
ee890fe2
SS
19888}
19889
efdba735
SH
19890#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19891#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19892#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19893 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19894
efdba735
SH
19895/* Generate far-jump branch islands for everything on the
19896 branch_island_list. Invoked immediately after the last instruction
19897 of the epilogue has been emitted; the branch-islands must be
19898 appended to, and contiguous with, the function body. Mach-O stubs
19899 are generated in machopic_output_stub(). */
ee890fe2 19900
efdba735
SH
19901static void
19902macho_branch_islands (void)
19903{
19904 char tmp_buf[512];
19905 tree branch_island;
19906
19907 for (branch_island = branch_island_list;
19908 branch_island;
19909 branch_island = TREE_CHAIN (branch_island))
19910 {
19911 const char *label =
19912 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19913 const char *name =
11abc112 19914 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19915 char name_buf[512];
19916 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19917 if (name[0] == '*' || name[0] == '&')
19918 strcpy (name_buf, name+1);
19919 else
19920 {
19921 name_buf[0] = '_';
19922 strcpy (name_buf+1, name);
19923 }
19924 strcpy (tmp_buf, "\n");
19925 strcat (tmp_buf, label);
ee890fe2 19926#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19927 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19928 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19929#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19930 if (flag_pic)
19931 {
19932 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19933 strcat (tmp_buf, label);
19934 strcat (tmp_buf, "_pic\n");
19935 strcat (tmp_buf, label);
19936 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19937
efdba735
SH
19938 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19939 strcat (tmp_buf, name_buf);
19940 strcat (tmp_buf, " - ");
19941 strcat (tmp_buf, label);
19942 strcat (tmp_buf, "_pic)\n");
f676971a 19943
efdba735 19944 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19945
efdba735
SH
19946 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19947 strcat (tmp_buf, name_buf);
19948 strcat (tmp_buf, " - ");
19949 strcat (tmp_buf, label);
19950 strcat (tmp_buf, "_pic)\n");
f676971a 19951
efdba735
SH
19952 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19953 }
19954 else
19955 {
19956 strcat (tmp_buf, ":\nlis r12,hi16(");
19957 strcat (tmp_buf, name_buf);
19958 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19959 strcat (tmp_buf, name_buf);
19960 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19961 }
19962 output_asm_insn (tmp_buf, 0);
ee890fe2 19963#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19964 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19965 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19966#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19967 }
ee890fe2 19968
efdba735 19969 branch_island_list = 0;
ee890fe2
SS
19970}
19971
19972/* NO_PREVIOUS_DEF checks whether the function name is already in the
19973 linked list of branch islands. */
19974
efdba735 19975static int
a2369ed3 19976no_previous_def (tree function_name)
ee890fe2 19977{
efdba735
SH
19978 tree branch_island;
19979 for (branch_island = branch_island_list;
19980 branch_island;
19981 branch_island = TREE_CHAIN (branch_island))
19982 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19983 return 0;
19984 return 1;
19985}
19986
19987/* GET_PREV_LABEL gets the label name from the previous definition of
19988 the function. */
19989
efdba735 19990static tree
a2369ed3 19991get_prev_label (tree function_name)
ee890fe2 19992{
efdba735
SH
19993 tree branch_island;
19994 for (branch_island = branch_island_list;
19995 branch_island;
19996 branch_island = TREE_CHAIN (branch_island))
19997 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19998 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19999 return 0;
20000}
20001
75b1b789
MS
20002#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20003#define DARWIN_LINKER_GENERATES_ISLANDS 0
20004#endif
20005
20006/* KEXTs still need branch islands. */
20007#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20008 || flag_mkernel || flag_apple_kext)
20009
ee890fe2 20010/* INSN is either a function call or a millicode call. It may have an
f676971a 20011 unconditional jump in its delay slot.
ee890fe2
SS
20012
20013 CALL_DEST is the routine we are calling. */
20014
20015char *
c4ad648e
AM
20016output_call (rtx insn, rtx *operands, int dest_operand_number,
20017 int cookie_operand_number)
ee890fe2
SS
20018{
20019 static char buf[256];
75b1b789
MS
20020 if (DARWIN_GENERATE_ISLANDS
20021 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20022 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20023 {
20024 tree labelname;
efdba735 20025 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20026
ee890fe2
SS
20027 if (no_previous_def (funname))
20028 {
ee890fe2
SS
20029 rtx label_rtx = gen_label_rtx ();
20030 char *label_buf, temp_buf[256];
20031 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20032 CODE_LABEL_NUMBER (label_rtx));
20033 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20034 labelname = get_identifier (label_buf);
a38e7aa5 20035 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20036 }
20037 else
20038 labelname = get_prev_label (funname);
20039
efdba735
SH
20040 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20041 instruction will reach 'foo', otherwise link as 'bl L42'".
20042 "L42" should be a 'branch island', that will do a far jump to
20043 'foo'. Branch islands are generated in
20044 macho_branch_islands(). */
ee890fe2 20045 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20046 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20047 }
20048 else
efdba735
SH
20049 sprintf (buf, "bl %%z%d", dest_operand_number);
20050 return buf;
ee890fe2
SS
20051}
20052
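/* Illustration only (the exact text is produced by the routines above,
   with real label and symbol names): a far call to a function foo that
   takes the CALL_LONG path comes out as roughly

	jbsr _foo,L42

   and macho_branch_islands () later appends the island, which in the
   non-PIC case reads

   L42:
	lis r12,hi16(_foo)
	ori r12,r12,lo16(_foo)
	mtctr r12
	bctr

   "L42" and "_foo" are placeholders; the real names come from
   ASM_GENERATE_INTERNAL_LABEL and the call operand.  */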
ee890fe2
SS
20053/* Generate PIC and indirect symbol stubs. */
20054
20055void
a2369ed3 20056machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20057{
20058 unsigned int length;
a4f6c312
SS
20059 char *symbol_name, *lazy_ptr_name;
20060 char *local_label_0;
ee890fe2
SS
20061 static int label = 0;
20062
df56a27f 20063 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20064 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20065
ee890fe2 20066
ee890fe2
SS
20067 length = strlen (symb);
20068 symbol_name = alloca (length + 32);
20069 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20070
20071 lazy_ptr_name = alloca (length + 32);
20072 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20073
ee890fe2 20074 if (flag_pic == 2)
56c779bc 20075 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20076 else
56c779bc 20077 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20078
20079 if (flag_pic == 2)
20080 {
d974312d
DJ
20081 fprintf (file, "\t.align 5\n");
20082
20083 fprintf (file, "%s:\n", stub);
20084 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20085
876455fa 20086 label++;
89da1f32 20087 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 20088 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20089
ee890fe2
SS
20090 fprintf (file, "\tmflr r0\n");
20091 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20092 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20093 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20094 lazy_ptr_name, local_label_0);
20095 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20096 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20097 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20098 lazy_ptr_name, local_label_0);
20099 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20100 fprintf (file, "\tbctr\n");
20101 }
20102 else
d974312d
DJ
20103 {
20104 fprintf (file, "\t.align 4\n");
20105
20106 fprintf (file, "%s:\n", stub);
20107 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20108
20109 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20110 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20111 (TARGET_64BIT ? "ldu" : "lwzu"),
20112 lazy_ptr_name);
d974312d
DJ
20113 fprintf (file, "\tmtctr r12\n");
20114 fprintf (file, "\tbctr\n");
20115 }
f676971a 20116
56c779bc 20117 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20118 fprintf (file, "%s:\n", lazy_ptr_name);
20119 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20120 fprintf (file, "%sdyld_stub_binding_helper\n",
20121 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20122}
20123
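/* Illustration only: with -fPIC (flag_pic == 2) on a 32-bit target the
   fprintf calls above produce a stub of roughly this shape, where the
   stub, lazy-pointer and "$spb" label names are placeholders derived
   from the symbol, and the section switches are omitted:

	.align 5
   L_foo$stub:
	.indirect_symbol _foo
	mflr r0
	bcl 20,31,"L...$spb"
   "L...$spb":
	mflr r11
	addis r11,r11,ha16(L_foo$lazy_ptr-"L...$spb")
	mtlr r0
	lwzu r12,lo16(L_foo$lazy_ptr-"L...$spb")(r11)
	mtctr r12
	bctr
   L_foo$lazy_ptr:
	.indirect_symbol _foo
	.long dyld_stub_binding_helper  */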
20124/* Legitimize PIC addresses. If the address is already
20125 position-independent, we return ORIG. Newly generated
20126 position-independent addresses go into a reg. This is REG if non
20127 zero, otherwise we allocate register(s) as necessary. */
20128
4fbbe694 20129#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
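/* The biased unsigned compare above is equivalent to
   -0x8000 <= INTVAL (X) && INTVAL (X) <= 0x7fff, i.e. X fits in a
   signed 16-bit immediate field.  */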
ee890fe2
SS
20130
20131rtx
f676971a 20132rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20133 rtx reg)
ee890fe2
SS
20134{
20135 rtx base, offset;
20136
20137 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20138 reg = gen_reg_rtx (Pmode);
20139
20140 if (GET_CODE (orig) == CONST)
20141 {
37409796
NS
20142 rtx reg_temp;
20143
ee890fe2
SS
20144 if (GET_CODE (XEXP (orig, 0)) == PLUS
20145 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20146 return orig;
20147
37409796 20148 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20149
37409796
NS
20150 /* Use a different reg for the intermediate value, as
20151 it will be marked UNCHANGING. */
b3a13419 20152 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20153 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20154 Pmode, reg_temp);
20155 offset =
20156 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20157 Pmode, reg);
bb8df8a6 20158
ee890fe2
SS
20159 if (GET_CODE (offset) == CONST_INT)
20160 {
20161 if (SMALL_INT (offset))
ed8908e7 20162 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20163 else if (! reload_in_progress && ! reload_completed)
20164 offset = force_reg (Pmode, offset);
20165 else
c859cda6
DJ
20166 {
20167 rtx mem = force_const_mem (Pmode, orig);
20168 return machopic_legitimize_pic_address (mem, Pmode, reg);
20169 }
ee890fe2 20170 }
f1c25d3b 20171 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20172 }
20173
20174 /* Fall back on generic machopic code. */
20175 return machopic_legitimize_pic_address (orig, mode, reg);
20176}
20177
c4e18b1c
GK
20178/* Output a .machine directive for the Darwin assembler, and call
20179 the generic start_file routine. */
20180
20181static void
20182rs6000_darwin_file_start (void)
20183{
94ff898d 20184 static const struct
c4e18b1c
GK
20185 {
20186 const char *arg;
20187 const char *name;
20188 int if_set;
20189 } mapping[] = {
55dbfb48 20190 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20191 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20192 { "power4", "ppc970", 0 },
20193 { "G5", "ppc970", 0 },
20194 { "7450", "ppc7450", 0 },
20195 { "7400", "ppc7400", MASK_ALTIVEC },
20196 { "G4", "ppc7400", 0 },
20197 { "750", "ppc750", 0 },
20198 { "740", "ppc750", 0 },
20199 { "G3", "ppc750", 0 },
20200 { "604e", "ppc604e", 0 },
20201 { "604", "ppc604", 0 },
20202 { "603e", "ppc603", 0 },
20203 { "603", "ppc603", 0 },
20204 { "601", "ppc601", 0 },
20205 { NULL, "ppc", 0 } };
20206 const char *cpu_id = "";
20207 size_t i;
94ff898d 20208
9390387d 20209 rs6000_file_start ();
192d0f89 20210 darwin_file_start ();
c4e18b1c
GK
20211
20212 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20213 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20214 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20215 && rs6000_select[i].string[0] != '\0')
20216 cpu_id = rs6000_select[i].string;
20217
20218 /* Look through the mapping array. Pick the first name that either
20219 matches the argument, has a bit set in IF_SET that is also set
20220 in the target flags, or has a NULL name. */
20221
20222 i = 0;
20223 while (mapping[i].arg != NULL
20224 && strcmp (mapping[i].arg, cpu_id) != 0
20225 && (mapping[i].if_set & target_flags) == 0)
20226 i++;
20227
20228 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20229}
20230
ee890fe2 20231#endif /* TARGET_MACHO */
7c262518
RH
20232
20233#if TARGET_ELF
9b580a0b
RH
20234static int
20235rs6000_elf_reloc_rw_mask (void)
7c262518 20236{
9b580a0b
RH
20237 if (flag_pic)
20238 return 3;
20239 else if (DEFAULT_ABI == ABI_AIX)
20240 return 2;
20241 else
20242 return 0;
7c262518 20243}
d9f6800d
RH
20244
20245/* Record an element in the table of global constructors. SYMBOL is
20246 a SYMBOL_REF of the function to be called; PRIORITY is a number
20247 between 0 and MAX_INIT_PRIORITY.
20248
20249 This differs from default_named_section_asm_out_constructor in
20250 that we have special handling for -mrelocatable. */
20251
20252static void
a2369ed3 20253rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20254{
20255 const char *section = ".ctors";
20256 char buf[16];
20257
20258 if (priority != DEFAULT_INIT_PRIORITY)
20259 {
20260 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20261 /* Invert the numbering so the linker puts us in the proper
20262 order; constructors are run from right to left, and the
20263 linker sorts in increasing order. */
20264 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20265 section = buf;
20266 }
20267
d6b5193b 20268 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20269 assemble_align (POINTER_SIZE);
d9f6800d
RH
20270
20271 if (TARGET_RELOCATABLE)
20272 {
20273 fputs ("\t.long (", asm_out_file);
20274 output_addr_const (asm_out_file, symbol);
20275 fputs (")@fixup\n", asm_out_file);
20276 }
20277 else
c8af3574 20278 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20279}
20280
20281static void
a2369ed3 20282rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20283{
20284 const char *section = ".dtors";
20285 char buf[16];
20286
20287 if (priority != DEFAULT_INIT_PRIORITY)
20288 {
20289 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20290 /* Invert the numbering so the linker puts us in the proper
20291 order; constructors are run from right to left, and the
20292 linker sorts in increasing order. */
20293 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20294 section = buf;
20295 }
20296
d6b5193b 20297 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20298 assemble_align (POINTER_SIZE);
d9f6800d
RH
20299
20300 if (TARGET_RELOCATABLE)
20301 {
20302 fputs ("\t.long (", asm_out_file);
20303 output_addr_const (asm_out_file, symbol);
20304 fputs (")@fixup\n", asm_out_file);
20305 }
20306 else
c8af3574 20307 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20308}
9739c90c
JJ
20309
20310void
a2369ed3 20311rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20312{
20313 if (TARGET_64BIT)
20314 {
20315 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20316 ASM_OUTPUT_LABEL (file, name);
20317 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20318 rs6000_output_function_entry (file, name);
20319 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20320 if (DOT_SYMBOLS)
9739c90c 20321 {
85b776df 20322 fputs ("\t.size\t", file);
9739c90c 20323 assemble_name (file, name);
85b776df
AM
20324 fputs (",24\n\t.type\t.", file);
20325 assemble_name (file, name);
20326 fputs (",@function\n", file);
20327 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20328 {
20329 fputs ("\t.globl\t.", file);
20330 assemble_name (file, name);
20331 putc ('\n', file);
20332 }
9739c90c 20333 }
85b776df
AM
20334 else
20335 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20336 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20337 rs6000_output_function_entry (file, name);
20338 fputs (":\n", file);
9739c90c
JJ
20339 return;
20340 }
20341
20342 if (TARGET_RELOCATABLE
7f970b70 20343 && !TARGET_SECURE_PLT
9739c90c 20344 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20345 && uses_TOC ())
9739c90c
JJ
20346 {
20347 char buf[256];
20348
20349 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20350
20351 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20352 fprintf (file, "\t.long ");
20353 assemble_name (file, buf);
20354 putc ('-', file);
20355 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20356 assemble_name (file, buf);
20357 putc ('\n', file);
20358 }
20359
20360 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20361 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20362
20363 if (DEFAULT_ABI == ABI_AIX)
20364 {
20365 const char *desc_name, *orig_name;
20366
20367 orig_name = (*targetm.strip_name_encoding) (name);
20368 desc_name = orig_name;
20369 while (*desc_name == '.')
20370 desc_name++;
20371
20372 if (TREE_PUBLIC (decl))
20373 fprintf (file, "\t.globl %s\n", desc_name);
20374
20375 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20376 fprintf (file, "%s:\n", desc_name);
20377 fprintf (file, "\t.long %s\n", orig_name);
20378 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20379 if (DEFAULT_ABI == ABI_AIX)
20380 fputs ("\t.long 0\n", file);
20381 fprintf (file, "\t.previous\n");
20382 }
20383 ASM_OUTPUT_LABEL (file, name);
20384}
1334b570
AM
20385
20386static void
20387rs6000_elf_end_indicate_exec_stack (void)
20388{
20389 if (TARGET_32BIT)
20390 file_end_indicate_exec_stack ();
20391}
7c262518
RH
20392#endif
20393
cbaaba19 20394#if TARGET_XCOFF
0d5817b2
DE
20395static void
20396rs6000_xcoff_asm_output_anchor (rtx symbol)
20397{
20398 char buffer[100];
20399
20400 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20401 SYMBOL_REF_BLOCK_OFFSET (symbol));
20402 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20403}
20404
7c262518 20405static void
a2369ed3 20406rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20407{
20408 fputs (GLOBAL_ASM_OP, stream);
20409 RS6000_OUTPUT_BASENAME (stream, name);
20410 putc ('\n', stream);
20411}
20412
d6b5193b
RS
20413/* A get_unnamed_decl callback, used for read-only sections. PTR
20414 points to the section string variable. */
20415
20416static void
20417rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20418{
890f9edf
OH
20419 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20420 *(const char *const *) directive,
20421 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20422}
20423
20424/* Likewise for read-write sections. */
20425
20426static void
20427rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20428{
890f9edf
OH
20429 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20430 *(const char *const *) directive,
20431 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20432}
20433
20434/* A get_unnamed_section callback, used for switching to toc_section. */
20435
20436static void
20437rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20438{
20439 if (TARGET_MINIMAL_TOC)
20440 {
20441 /* toc_section is always selected at least once from
20442 rs6000_xcoff_file_start, so this is guaranteed to
20443 always be defined once and only once in each file. */
20444 if (!toc_initialized)
20445 {
20446 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20447 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20448 toc_initialized = 1;
20449 }
20450 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20451 (TARGET_32BIT ? "" : ",3"));
20452 }
20453 else
20454 fputs ("\t.toc\n", asm_out_file);
20455}
20456
20457/* Implement TARGET_ASM_INIT_SECTIONS. */
20458
20459static void
20460rs6000_xcoff_asm_init_sections (void)
20461{
20462 read_only_data_section
20463 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20464 &xcoff_read_only_section_name);
20465
20466 private_data_section
20467 = get_unnamed_section (SECTION_WRITE,
20468 rs6000_xcoff_output_readwrite_section_asm_op,
20469 &xcoff_private_data_section_name);
20470
20471 read_only_private_data_section
20472 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20473 &xcoff_private_data_section_name);
20474
20475 toc_section
20476 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20477
20478 readonly_data_section = read_only_data_section;
20479 exception_section = data_section;
20480}
20481
9b580a0b
RH
20482static int
20483rs6000_xcoff_reloc_rw_mask (void)
20484{
20485 return 3;
20486}
20487
b275d088 20488static void
c18a5b6c
MM
20489rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20490 tree decl ATTRIBUTE_UNUSED)
7c262518 20491{
0e5dbd9b
DE
20492 int smclass;
20493 static const char * const suffix[3] = { "PR", "RO", "RW" };
20494
20495 if (flags & SECTION_CODE)
20496 smclass = 0;
20497 else if (flags & SECTION_WRITE)
20498 smclass = 2;
20499 else
20500 smclass = 1;
20501
5b5198f7 20502 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20503 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20504 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20505}
ae46c4e0 20506
d6b5193b 20507static section *
f676971a 20508rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20509 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20510{
9b580a0b 20511 if (decl_readonly_section (decl, reloc))
ae46c4e0 20512 {
0e5dbd9b 20513 if (TREE_PUBLIC (decl))
d6b5193b 20514 return read_only_data_section;
ae46c4e0 20515 else
d6b5193b 20516 return read_only_private_data_section;
ae46c4e0
RH
20517 }
20518 else
20519 {
0e5dbd9b 20520 if (TREE_PUBLIC (decl))
d6b5193b 20521 return data_section;
ae46c4e0 20522 else
d6b5193b 20523 return private_data_section;
ae46c4e0
RH
20524 }
20525}
20526
20527static void
a2369ed3 20528rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20529{
20530 const char *name;
ae46c4e0 20531
5b5198f7
DE
20532 /* Use select_section for private and uninitialized data. */
20533 if (!TREE_PUBLIC (decl)
20534 || DECL_COMMON (decl)
0e5dbd9b
DE
20535 || DECL_INITIAL (decl) == NULL_TREE
20536 || DECL_INITIAL (decl) == error_mark_node
20537 || (flag_zero_initialized_in_bss
20538 && initializer_zerop (DECL_INITIAL (decl))))
20539 return;
20540
20541 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20542 name = (*targetm.strip_name_encoding) (name);
20543 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20544}
b64a1b53 20545
fb49053f
RH
20546/* Select section for constant in constant pool.
20547
20548 On RS/6000, all constants are in the private read-only data area.
20549 However, if this is being placed in the TOC it must be output as a
20550 toc entry. */
20551
d6b5193b 20552static section *
f676971a 20553rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20554 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20555{
20556 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20557 return toc_section;
b64a1b53 20558 else
d6b5193b 20559 return read_only_private_data_section;
b64a1b53 20560}
772c5265
RH
20561
20562/* Remove any trailing [DS] or the like from the symbol name. */
20563
20564static const char *
a2369ed3 20565rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20566{
20567 size_t len;
20568 if (*name == '*')
20569 name++;
20570 len = strlen (name);
20571 if (name[len - 1] == ']')
20572 return ggc_alloc_string (name, len - 4);
20573 else
20574 return name;
20575}
20576
5add3202
DE
20577/* Section attributes. AIX is always PIC. */
20578
20579static unsigned int
a2369ed3 20580rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20581{
5b5198f7 20582 unsigned int align;
9b580a0b 20583 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20584
20585 /* Align to at least UNIT size. */
20586 if (flags & SECTION_CODE)
20587 align = MIN_UNITS_PER_WORD;
20588 else
20589 /* Increase alignment of large objects if not already stricter. */
20590 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20591 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20592 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20593
20594 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20595}
a5fe455b 20596
1bc7c5b6
ZW
20597/* Output at beginning of assembler file.
20598
20599 Initialize the section names for the RS/6000 at this point.
20600
20601 Specify filename, including full path, to assembler.
20602
20603 We want to go into the TOC section so at least one .toc will be emitted.
20604 Also, in order to output proper .bs/.es pairs, we need at least one static
20605 [RW] section emitted.
20606
20607 Finally, declare mcount when profiling to make the assembler happy. */
20608
20609static void
863d938c 20610rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20611{
20612 rs6000_gen_section_name (&xcoff_bss_section_name,
20613 main_input_filename, ".bss_");
20614 rs6000_gen_section_name (&xcoff_private_data_section_name,
20615 main_input_filename, ".rw_");
20616 rs6000_gen_section_name (&xcoff_read_only_section_name,
20617 main_input_filename, ".ro_");
20618
20619 fputs ("\t.file\t", asm_out_file);
20620 output_quoted_string (asm_out_file, main_input_filename);
20621 fputc ('\n', asm_out_file);
1bc7c5b6 20622 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20623 switch_to_section (private_data_section);
20624 switch_to_section (text_section);
1bc7c5b6
ZW
20625 if (profile_flag)
20626 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20627 rs6000_file_start ();
20628}
20629
a5fe455b
ZW
20630/* Output at end of assembler file.
20631 On the RS/6000, referencing data should automatically pull in text. */
20632
20633static void
863d938c 20634rs6000_xcoff_file_end (void)
a5fe455b 20635{
d6b5193b 20636 switch_to_section (text_section);
a5fe455b 20637 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20638 switch_to_section (data_section);
a5fe455b
ZW
20639 fputs (TARGET_32BIT
20640 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20641 asm_out_file);
20642}
f1384257 20643#endif /* TARGET_XCOFF */
0e5dbd9b 20644
3c50106f
RH
20645/* Compute a (partial) cost for rtx X. Return true if the complete
20646 cost has been computed, and false if subexpressions should be
20647 scanned. In either case, *TOTAL contains the cost result. */
20648
20649static bool
1494c534 20650rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20651{
f0517163
RS
20652 enum machine_mode mode = GET_MODE (x);
20653
3c50106f
RH
20654 switch (code)
20655 {
30a555d9 20656 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20657 case CONST_INT:
066cd967
DE
20658 if (((outer_code == SET
20659 || outer_code == PLUS
20660 || outer_code == MINUS)
279bb624
DE
20661 && (satisfies_constraint_I (x)
20662 || satisfies_constraint_L (x)))
066cd967 20663 || (outer_code == AND
279bb624
DE
20664 && (satisfies_constraint_K (x)
20665 || (mode == SImode
20666 ? satisfies_constraint_L (x)
20667 : satisfies_constraint_J (x))
1990cd79
AM
20668 || mask_operand (x, mode)
20669 || (mode == DImode
20670 && mask64_operand (x, DImode))))
22e54023 20671 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20672 && (satisfies_constraint_K (x)
20673 || (mode == SImode
20674 ? satisfies_constraint_L (x)
20675 : satisfies_constraint_J (x))))
066cd967
DE
20676 || outer_code == ASHIFT
20677 || outer_code == ASHIFTRT
20678 || outer_code == LSHIFTRT
20679 || outer_code == ROTATE
20680 || outer_code == ROTATERT
d5861a7a 20681 || outer_code == ZERO_EXTRACT
066cd967 20682 || (outer_code == MULT
279bb624 20683 && satisfies_constraint_I (x))
22e54023
DE
20684 || ((outer_code == DIV || outer_code == UDIV
20685 || outer_code == MOD || outer_code == UMOD)
20686 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20687 || (outer_code == COMPARE
279bb624
DE
20688 && (satisfies_constraint_I (x)
20689 || satisfies_constraint_K (x)))
22e54023 20690 || (outer_code == EQ
279bb624
DE
20691 && (satisfies_constraint_I (x)
20692 || satisfies_constraint_K (x)
20693 || (mode == SImode
20694 ? satisfies_constraint_L (x)
20695 : satisfies_constraint_J (x))))
22e54023 20696 || (outer_code == GTU
279bb624 20697 && satisfies_constraint_I (x))
22e54023 20698 || (outer_code == LTU
279bb624 20699 && satisfies_constraint_P (x)))
066cd967
DE
20700 {
20701 *total = 0;
20702 return true;
20703 }
20704 else if ((outer_code == PLUS
4ae234b0 20705 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20706 || (outer_code == MINUS
4ae234b0 20707 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20708 || ((outer_code == SET
20709 || outer_code == IOR
20710 || outer_code == XOR)
20711 && (INTVAL (x)
20712 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20713 {
20714 *total = COSTS_N_INSNS (1);
20715 return true;
20716 }
20717 /* FALLTHRU */
20718
20719 case CONST_DOUBLE:
f6fe3a22 20720 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20721 {
f6fe3a22
DE
20722 if ((outer_code == IOR || outer_code == XOR)
20723 && CONST_DOUBLE_HIGH (x) == 0
20724 && (CONST_DOUBLE_LOW (x)
20725 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20726 {
20727 *total = 0;
20728 return true;
20729 }
20730 else if ((outer_code == AND && and64_2_operand (x, DImode))
20731 || ((outer_code == SET
20732 || outer_code == IOR
20733 || outer_code == XOR)
20734 && CONST_DOUBLE_HIGH (x) == 0))
20735 {
20736 *total = COSTS_N_INSNS (1);
20737 return true;
20738 }
066cd967
DE
20739 }
20740 /* FALLTHRU */
20741
3c50106f 20742 case CONST:
066cd967 20743 case HIGH:
3c50106f 20744 case SYMBOL_REF:
066cd967
DE
20745 case MEM:
20746 /* When optimizing for size, MEM should be slightly more expensive
20747 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20748 L1 cache latency is about two instructions. */
066cd967 20749 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20750 return true;
20751
30a555d9
DE
20752 case LABEL_REF:
20753 *total = 0;
20754 return true;
20755
3c50106f 20756 case PLUS:
f0517163 20757 if (mode == DFmode)
066cd967
DE
20758 {
20759 if (GET_CODE (XEXP (x, 0)) == MULT)
20760 {
20761 /* FNMA accounted in outer NEG. */
20762 if (outer_code == NEG)
20763 *total = rs6000_cost->dmul - rs6000_cost->fp;
20764 else
20765 *total = rs6000_cost->dmul;
20766 }
20767 else
20768 *total = rs6000_cost->fp;
20769 }
f0517163 20770 else if (mode == SFmode)
066cd967
DE
20771 {
20772 /* FNMA accounted in outer NEG. */
20773 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20774 *total = 0;
20775 else
20776 *total = rs6000_cost->fp;
20777 }
f0517163 20778 else
066cd967
DE
20779 *total = COSTS_N_INSNS (1);
20780 return false;
3c50106f 20781
52190329 20782 case MINUS:
f0517163 20783 if (mode == DFmode)
066cd967 20784 {
762c919f
JM
20785 if (GET_CODE (XEXP (x, 0)) == MULT
20786 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20787 {
20788 /* FNMA accounted in outer NEG. */
20789 if (outer_code == NEG)
762c919f 20790 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20791 else
20792 *total = rs6000_cost->dmul;
20793 }
20794 else
20795 *total = rs6000_cost->fp;
20796 }
f0517163 20797 else if (mode == SFmode)
066cd967
DE
20798 {
20799 /* FNMA accounted in outer NEG. */
20800 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20801 *total = 0;
20802 else
20803 *total = rs6000_cost->fp;
20804 }
f0517163 20805 else
c4ad648e 20806 *total = COSTS_N_INSNS (1);
066cd967 20807 return false;
3c50106f
RH
20808
20809 case MULT:
c9dbf840 20810 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20811 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20812 {
8b897cfa
RS
20813 if (INTVAL (XEXP (x, 1)) >= -256
20814 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20815 *total = rs6000_cost->mulsi_const9;
8b897cfa 20816 else
06a67bdd 20817 *total = rs6000_cost->mulsi_const;
3c50106f 20818 }
066cd967
DE
20819 /* FMA accounted in outer PLUS/MINUS. */
20820 else if ((mode == DFmode || mode == SFmode)
20821 && (outer_code == PLUS || outer_code == MINUS))
20822 *total = 0;
f0517163 20823 else if (mode == DFmode)
06a67bdd 20824 *total = rs6000_cost->dmul;
f0517163 20825 else if (mode == SFmode)
06a67bdd 20826 *total = rs6000_cost->fp;
f0517163 20827 else if (mode == DImode)
06a67bdd 20828 *total = rs6000_cost->muldi;
8b897cfa 20829 else
06a67bdd 20830 *total = rs6000_cost->mulsi;
066cd967 20831 return false;
3c50106f
RH
20832
20833 case DIV:
20834 case MOD:
f0517163
RS
20835 if (FLOAT_MODE_P (mode))
20836 {
06a67bdd
RS
20837 *total = mode == DFmode ? rs6000_cost->ddiv
20838 : rs6000_cost->sdiv;
066cd967 20839 return false;
f0517163 20840 }
5efb1046 20841 /* FALLTHRU */
3c50106f
RH
20842
20843 case UDIV:
20844 case UMOD:
627b6fe2
DJ
20845 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20846 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20847 {
20848 if (code == DIV || code == MOD)
20849 /* Shift, addze */
20850 *total = COSTS_N_INSNS (2);
20851 else
20852 /* Shift */
20853 *total = COSTS_N_INSNS (1);
20854 }
c4ad648e 20855 else
627b6fe2
DJ
20856 {
20857 if (GET_MODE (XEXP (x, 1)) == DImode)
20858 *total = rs6000_cost->divdi;
20859 else
20860 *total = rs6000_cost->divsi;
20861 }
20862 /* Add in shift and subtract for MOD. */
20863 if (code == MOD || code == UMOD)
20864 *total += COSTS_N_INSNS (2);
066cd967 20865 return false;
3c50106f 20866
32f56aad 20867 case CTZ:
3c50106f
RH
20868 case FFS:
20869 *total = COSTS_N_INSNS (4);
066cd967 20870 return false;
3c50106f 20871
32f56aad
DE
20872 case POPCOUNT:
20873 *total = COSTS_N_INSNS (6);
20874 return false;
20875
06a67bdd 20876 case NOT:
066cd967
DE
20877 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20878 {
20879 *total = 0;
20880 return false;
20881 }
20882 /* FALLTHRU */
20883
20884 case AND:
32f56aad 20885 case CLZ:
066cd967
DE
20886 case IOR:
20887 case XOR:
d5861a7a
DE
20888 case ZERO_EXTRACT:
20889 *total = COSTS_N_INSNS (1);
20890 return false;
20891
066cd967
DE
20892 case ASHIFT:
20893 case ASHIFTRT:
20894 case LSHIFTRT:
20895 case ROTATE:
20896 case ROTATERT:
d5861a7a 20897 /* Handle mul_highpart. */
066cd967
DE
20898 if (outer_code == TRUNCATE
20899 && GET_CODE (XEXP (x, 0)) == MULT)
20900 {
20901 if (mode == DImode)
20902 *total = rs6000_cost->muldi;
20903 else
20904 *total = rs6000_cost->mulsi;
20905 return true;
20906 }
d5861a7a
DE
20907 else if (outer_code == AND)
20908 *total = 0;
20909 else
20910 *total = COSTS_N_INSNS (1);
20911 return false;
20912
20913 case SIGN_EXTEND:
20914 case ZERO_EXTEND:
20915 if (GET_CODE (XEXP (x, 0)) == MEM)
20916 *total = 0;
20917 else
20918 *total = COSTS_N_INSNS (1);
066cd967 20919 return false;
06a67bdd 20920
066cd967
DE
20921 case COMPARE:
20922 case NEG:
20923 case ABS:
20924 if (!FLOAT_MODE_P (mode))
20925 {
20926 *total = COSTS_N_INSNS (1);
20927 return false;
20928 }
20929 /* FALLTHRU */
20930
20931 case FLOAT:
20932 case UNSIGNED_FLOAT:
20933 case FIX:
20934 case UNSIGNED_FIX:
06a67bdd
RS
20935 case FLOAT_TRUNCATE:
20936 *total = rs6000_cost->fp;
066cd967 20937 return false;
06a67bdd 20938
a2af5043
DJ
20939 case FLOAT_EXTEND:
20940 if (mode == DFmode)
20941 *total = 0;
20942 else
20943 *total = rs6000_cost->fp;
20944 return false;
20945
06a67bdd
RS
20946 case UNSPEC:
20947 switch (XINT (x, 1))
20948 {
20949 case UNSPEC_FRSP:
20950 *total = rs6000_cost->fp;
20951 return true;
20952
20953 default:
20954 break;
20955 }
20956 break;
20957
20958 case CALL:
20959 case IF_THEN_ELSE:
20960 if (optimize_size)
20961 {
20962 *total = COSTS_N_INSNS (1);
20963 return true;
20964 }
066cd967
DE
20965 else if (FLOAT_MODE_P (mode)
20966 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20967 {
20968 *total = rs6000_cost->fp;
20969 return false;
20970 }
06a67bdd
RS
20971 break;
20972
c0600ecd
DE
20973 case EQ:
20974 case GTU:
20975 case LTU:
22e54023
DE
20976 /* Carry bit requires mode == Pmode.
20977 NEG or PLUS already counted so only add one. */
20978 if (mode == Pmode
20979 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20980 {
22e54023
DE
20981 *total = COSTS_N_INSNS (1);
20982 return true;
20983 }
20984 if (outer_code == SET)
20985 {
20986 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20987 {
22e54023 20988 *total = COSTS_N_INSNS (2);
c0600ecd 20989 return true;
c0600ecd 20990 }
22e54023
DE
20991 else if (mode == Pmode)
20992 {
20993 *total = COSTS_N_INSNS (3);
20994 return false;
20995 }
20996 }
20997 /* FALLTHRU */
20998
20999 case GT:
21000 case LT:
21001 case UNORDERED:
21002 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21003 {
21004 *total = COSTS_N_INSNS (2);
21005 return true;
c0600ecd 21006 }
22e54023
DE
21007 /* CC COMPARE. */
21008 if (outer_code == COMPARE)
21009 {
21010 *total = 0;
21011 return true;
21012 }
21013 break;
c0600ecd 21014
3c50106f 21015 default:
06a67bdd 21016 break;
3c50106f 21017 }
06a67bdd
RS
21018
21019 return false;
3c50106f
RH
21020}
21021
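/* For instance, under the CONST_INT case above an addend that
   satisfies constraint 'I' (a signed 16-bit immediate) inside a PLUS
   is costed at 0, since it folds into the addi itself, while a SET of
   a constant that fits in 32 bits but satisfies neither 'I' nor 'L'
   is charged COSTS_N_INSNS (1) for the extra instruction needed to
   build it.  */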
34bb030a
DE
21022/* A C expression returning the cost of moving data from a register of class
21023 CLASS1 to one of CLASS2. */
21024
21025int
f676971a 21026rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21027 enum reg_class from, enum reg_class to)
34bb030a
DE
21028{
21029 /* Moves from/to GENERAL_REGS. */
21030 if (reg_classes_intersect_p (to, GENERAL_REGS)
21031 || reg_classes_intersect_p (from, GENERAL_REGS))
21032 {
21033 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21034 from = to;
21035
21036 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21037 return (rs6000_memory_move_cost (mode, from, 0)
21038 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21039
c4ad648e
AM
21040 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21041 shift. */
34bb030a
DE
21042 else if (from == CR_REGS)
21043 return 4;
21044
21045 else
c4ad648e 21046 /* A move will cost one instruction per GPR moved. */
c8b622ff 21047 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21048 }
21049
c4ad648e 21050 /* Moving between two similar registers is just one instruction. */
34bb030a 21051 else if (reg_classes_intersect_p (to, from))
7393f7f8 21052 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21053
c4ad648e 21054 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21055 else
f676971a 21056 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21057 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21058}
21059
21060/* A C expression returning the cost of moving data of MODE from a register to
21061 or from memory. */
21062
21063int
f676971a 21064rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21065 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21066{
21067 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21068 return 4 * hard_regno_nregs[0][mode];
34bb030a 21069 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21070 return 4 * hard_regno_nregs[32][mode];
34bb030a 21071 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21072 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21073 else
21074 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21075}
21076
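/* A concrete reading of the two cost functions above: an SImode move
   between two GPRs costs 2 (one instruction per register moved), a
   CR-to-GPR move costs 4 because of the extra shift, and a move
   between a floating-point register and a GPR is priced as a store
   plus a load (one rs6000_memory_move_cost for each side), since the
   model assumes no cheap direct path between the register files.  */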
9c78b944
DE
21077/* Returns a code for a target-specific builtin that implements
21078 reciprocal of the function, or NULL_TREE if not available. */
21079
21080static tree
21081rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21082 bool sqrt ATTRIBUTE_UNUSED)
21083{
21084 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21085 && flag_finite_math_only && !flag_trapping_math
21086 && flag_unsafe_math_optimizations))
21087 return NULL_TREE;
21088
21089 if (md_fn)
21090 return NULL_TREE;
21091 else
21092 switch (fn)
21093 {
21094 case BUILT_IN_SQRTF:
21095 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21096
21097 default:
21098 return NULL_TREE;
21099 }
21100}
21101
ef765ea9
DE
21102/* Newton-Raphson approximation of single-precision floating point divide n/d.
21103 Assumes no trapping math and finite arguments. */
21104
21105void
9c78b944 21106rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21107{
21108 rtx x0, e0, e1, y1, u0, v0, one;
21109
21110 x0 = gen_reg_rtx (SFmode);
21111 e0 = gen_reg_rtx (SFmode);
21112 e1 = gen_reg_rtx (SFmode);
21113 y1 = gen_reg_rtx (SFmode);
21114 u0 = gen_reg_rtx (SFmode);
21115 v0 = gen_reg_rtx (SFmode);
21116 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21117
21118 /* x0 = 1./d estimate */
21119 emit_insn (gen_rtx_SET (VOIDmode, x0,
21120 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21121 UNSPEC_FRES)));
21122 /* e0 = 1. - d * x0 */
21123 emit_insn (gen_rtx_SET (VOIDmode, e0,
21124 gen_rtx_MINUS (SFmode, one,
21125 gen_rtx_MULT (SFmode, d, x0))));
21126 /* e1 = e0 + e0 * e0 */
21127 emit_insn (gen_rtx_SET (VOIDmode, e1,
21128 gen_rtx_PLUS (SFmode,
21129 gen_rtx_MULT (SFmode, e0, e0), e0)));
21130 /* y1 = x0 + e1 * x0 */
21131 emit_insn (gen_rtx_SET (VOIDmode, y1,
21132 gen_rtx_PLUS (SFmode,
21133 gen_rtx_MULT (SFmode, e1, x0), x0)));
21134 /* u0 = n * y1 */
21135 emit_insn (gen_rtx_SET (VOIDmode, u0,
21136 gen_rtx_MULT (SFmode, n, y1)));
21137 /* v0 = n - d * u0 */
21138 emit_insn (gen_rtx_SET (VOIDmode, v0,
21139 gen_rtx_MINUS (SFmode, n,
21140 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21141 /* dst = u0 + v0 * y1 */
21142 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21143 gen_rtx_PLUS (SFmode,
21144 gen_rtx_MULT (SFmode, v0, y1), u0)));
21145}
21146
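/* Illustration only, not part of GCC (fenced off from the build with
   #if 0): the emitted refinement sequence above transcribed into plain
   scalar C.  The hardware fres estimate is modeled here by an exact
   divide, so this sketch shows the data flow of the Newton-Raphson
   refinement rather than its final accuracy.  */

#if 0
static float
swdivsf_model (float n, float d)
{
  float x0 = 1.0f / d;		/* fres: estimate of 1/d */
  float e0 = 1.0f - d * x0;	/* residual of the estimate */
  float e1 = e0 + e0 * e0;	/* second-order correction term */
  float y1 = x0 + e1 * x0;	/* refined reciprocal */
  float u0 = n * y1;		/* first quotient approximation */
  float v0 = n - d * u0;	/* quotient residual */
  return u0 + v0 * y1;		/* corrected quotient */
}
#endif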
21147/* Newton-Raphson approximation of double-precision floating point divide n/d.
21148 Assumes no trapping math and finite arguments. */
21149
21150void
9c78b944 21151rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21152{
21153 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21154
21155 x0 = gen_reg_rtx (DFmode);
21156 e0 = gen_reg_rtx (DFmode);
21157 e1 = gen_reg_rtx (DFmode);
21158 e2 = gen_reg_rtx (DFmode);
21159 y1 = gen_reg_rtx (DFmode);
21160 y2 = gen_reg_rtx (DFmode);
21161 y3 = gen_reg_rtx (DFmode);
21162 u0 = gen_reg_rtx (DFmode);
21163 v0 = gen_reg_rtx (DFmode);
21164 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21165
21166 /* x0 = 1./d estimate */
21167 emit_insn (gen_rtx_SET (VOIDmode, x0,
21168 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21169 UNSPEC_FRES)));
21170 /* e0 = 1. - d * x0 */
21171 emit_insn (gen_rtx_SET (VOIDmode, e0,
21172 gen_rtx_MINUS (DFmode, one,
21173 gen_rtx_MULT (DFmode, d, x0))));
21174 /* y1 = x0 + e0 * x0 */
21175 emit_insn (gen_rtx_SET (VOIDmode, y1,
21176 gen_rtx_PLUS (DFmode,
21177 gen_rtx_MULT (DFmode, e0, x0), x0)));
21178 /* e1 = e0 * e0 */
21179 emit_insn (gen_rtx_SET (VOIDmode, e1,
21180 gen_rtx_MULT (DFmode, e0, e0)));
21181 /* y2 = y1 + e1 * y1 */
21182 emit_insn (gen_rtx_SET (VOIDmode, y2,
21183 gen_rtx_PLUS (DFmode,
21184 gen_rtx_MULT (DFmode, e1, y1), y1)));
21185 /* e2 = e1 * e1 */
21186 emit_insn (gen_rtx_SET (VOIDmode, e2,
21187 gen_rtx_MULT (DFmode, e1, e1)));
21188 /* y3 = y2 + e2 * y2 */
21189 emit_insn (gen_rtx_SET (VOIDmode, y3,
21190 gen_rtx_PLUS (DFmode,
21191 gen_rtx_MULT (DFmode, e2, y2), y2)));
21192 /* u0 = n * y3 */
21193 emit_insn (gen_rtx_SET (VOIDmode, u0,
21194 gen_rtx_MULT (DFmode, n, y3)));
21195 /* v0 = n - d * u0 */
21196 emit_insn (gen_rtx_SET (VOIDmode, v0,
21197 gen_rtx_MINUS (DFmode, n,
21198 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21199 /* dst = u0 + v0 * y3 */
21200 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21201 gen_rtx_PLUS (DFmode,
21202 gen_rtx_MULT (DFmode, v0, y3), u0)));
21203}
21204
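/* Why the DFmode version needs the extra steps: each refinement
   y(i+1) = y(i) + e(i)*y(i) roughly squares the relative error of the
   reciprocal, so assuming the usual 1/256 accuracy of the hardware
   estimate the error is roughly 2**-16 after y1, 2**-32 after y2 and
   2**-64 after y3, comfortably below the 2**-53 needed for double
   precision before the final n - d*u0 correction.  (Informal accuracy
   sketch only.)  */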
565ef4ba 21205
9c78b944
DE
21206/* Newton-Raphson approximation of single-precision floating point rsqrt.
21207 Assumes no trapping math and finite arguments. */
21208
21209void
21210rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21211{
21212 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21213 half, one, halfthree, c1, cond, label;
21214
21215 x0 = gen_reg_rtx (SFmode);
21216 x1 = gen_reg_rtx (SFmode);
21217 x2 = gen_reg_rtx (SFmode);
21218 y1 = gen_reg_rtx (SFmode);
21219 u0 = gen_reg_rtx (SFmode);
21220 u1 = gen_reg_rtx (SFmode);
21221 u2 = gen_reg_rtx (SFmode);
21222 v0 = gen_reg_rtx (SFmode);
21223 v1 = gen_reg_rtx (SFmode);
21224 v2 = gen_reg_rtx (SFmode);
21225 t0 = gen_reg_rtx (SFmode);
21226 halfthree = gen_reg_rtx (SFmode);
21227 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21228 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21229
21230 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21231 emit_insn (gen_rtx_SET (VOIDmode, t0,
21232 gen_rtx_MULT (SFmode, src, src)));
21233
21234 emit_insn (gen_rtx_SET (VOIDmode, cond,
21235 gen_rtx_COMPARE (CCFPmode, t0, src)));
21236 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21237 emit_unlikely_jump (c1, label);
21238
21239 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21240 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21241
21242 /* halfthree = 1.5 = 1.0 + 0.5 */
21243 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21244 gen_rtx_PLUS (SFmode, one, half)));
21245
21246 /* x0 = rsqrt estimate */
21247 emit_insn (gen_rtx_SET (VOIDmode, x0,
21248 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21249 UNSPEC_RSQRT)));
21250
21251 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21252 emit_insn (gen_rtx_SET (VOIDmode, y1,
21253 gen_rtx_MINUS (SFmode,
21254 gen_rtx_MULT (SFmode, src, halfthree),
21255 src)));
21256
21257 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21258 emit_insn (gen_rtx_SET (VOIDmode, u0,
21259 gen_rtx_MULT (SFmode, x0, x0)));
21260 emit_insn (gen_rtx_SET (VOIDmode, v0,
21261 gen_rtx_MINUS (SFmode,
21262 halfthree,
21263 gen_rtx_MULT (SFmode, y1, u0))));
21264 emit_insn (gen_rtx_SET (VOIDmode, x1,
21265 gen_rtx_MULT (SFmode, x0, v0)));
21266
21267 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21268 emit_insn (gen_rtx_SET (VOIDmode, u1,
21269 gen_rtx_MULT (SFmode, x1, x1)));
21270 emit_insn (gen_rtx_SET (VOIDmode, v1,
21271 gen_rtx_MINUS (SFmode,
21272 halfthree,
21273 gen_rtx_MULT (SFmode, y1, u1))));
21274 emit_insn (gen_rtx_SET (VOIDmode, x2,
21275 gen_rtx_MULT (SFmode, x1, v1)));
21276
21277 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21278 emit_insn (gen_rtx_SET (VOIDmode, u2,
21279 gen_rtx_MULT (SFmode, x2, x2)));
21280 emit_insn (gen_rtx_SET (VOIDmode, v2,
21281 gen_rtx_MINUS (SFmode,
21282 halfthree,
21283 gen_rtx_MULT (SFmode, y1, u2))));
21284 emit_insn (gen_rtx_SET (VOIDmode, dst,
21285 gen_rtx_MULT (SFmode, x2, v2)));
21286
21287 emit_label (XEXP (label, 0));
21288}
21289
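/* The three refinement blocks above apply the standard Newton step for
   1/sqrt(a), x(i+1) = x(i) * (1.5 - 0.5*a*x(i)*x(i)), with 0.5*a
   precomputed once into y1 (as 1.5*src - src, per the comment above)
   and the step repeated three times starting from the hardware
   estimate x0.  */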
565ef4ba
RS
21290/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21291 target, and SRC is the argument operand. */
21292
21293void
21294rs6000_emit_popcount (rtx dst, rtx src)
21295{
21296 enum machine_mode mode = GET_MODE (dst);
21297 rtx tmp1, tmp2;
21298
21299 tmp1 = gen_reg_rtx (mode);
21300
21301 if (mode == SImode)
21302 {
21303 emit_insn (gen_popcntbsi2 (tmp1, src));
21304 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21305 NULL_RTX, 0);
21306 tmp2 = force_reg (SImode, tmp2);
21307 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21308 }
21309 else
21310 {
21311 emit_insn (gen_popcntbdi2 (tmp1, src));
21312 tmp2 = expand_mult (DImode, tmp1,
21313 GEN_INT ((HOST_WIDE_INT)
21314 0x01010101 << 32 | 0x01010101),
21315 NULL_RTX, 0);
21316 tmp2 = force_reg (DImode, tmp2);
21317 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21318 }
21319}
21320
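/* Illustration only, not part of GCC (fenced off from the build with
   #if 0): the SImode sequence above in plain C, with the popcntb
   instruction modeled by an explicit per-byte loop.  Multiplying the
   per-byte counts by 0x01010101 accumulates their sum (at most 32)
   into the most significant byte, which the final shift extracts.  */

#if 0
static unsigned int
popcount_model_si (unsigned int x)
{
  unsigned int per_byte = 0;
  unsigned int i;

  /* Model of popcntbsi2: population count of each byte, kept in place.  */
  for (i = 0; i < 4; i++)
    {
      unsigned int byte = (x >> (8 * i)) & 0xff;
      unsigned int count = 0;
      while (byte)
	{
	  count += byte & 1;
	  byte >>= 1;
	}
      per_byte |= count << (8 * i);
    }

  /* The multiply-and-shift reduction emitted by rs6000_emit_popcount.  */
  return (per_byte * 0x01010101u) >> 24;
}
#endif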
21321
21322/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21323 target, and SRC is the argument operand. */
21324
21325void
21326rs6000_emit_parity (rtx dst, rtx src)
21327{
21328 enum machine_mode mode = GET_MODE (dst);
21329 rtx tmp;
21330
21331 tmp = gen_reg_rtx (mode);
21332 if (mode == SImode)
21333 {
21334 /* Is mult+shift >= shift+xor+shift+xor? */
21335 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21336 {
21337 rtx tmp1, tmp2, tmp3, tmp4;
21338
21339 tmp1 = gen_reg_rtx (SImode);
21340 emit_insn (gen_popcntbsi2 (tmp1, src));
21341
21342 tmp2 = gen_reg_rtx (SImode);
21343 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21344 tmp3 = gen_reg_rtx (SImode);
21345 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21346
21347 tmp4 = gen_reg_rtx (SImode);
21348 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21349 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21350 }
21351 else
21352 rs6000_emit_popcount (tmp, src);
21353 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21354 }
21355 else
21356 {
21357 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21358 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21359 {
21360 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21361
21362 tmp1 = gen_reg_rtx (DImode);
21363 emit_insn (gen_popcntbdi2 (tmp1, src));
21364
21365 tmp2 = gen_reg_rtx (DImode);
21366 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21367 tmp3 = gen_reg_rtx (DImode);
21368 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21369
21370 tmp4 = gen_reg_rtx (DImode);
21371 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21372 tmp5 = gen_reg_rtx (DImode);
21373 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21374
21375 tmp6 = gen_reg_rtx (DImode);
21376 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21377 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21378 }
21379 else
21380 rs6000_emit_popcount (tmp, src);
21381 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21382 }
21383}
21384
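/* Informally, the xor-folding variant above preserves the parity at
   every step: the low bit of the xor of the per-byte counts equals the
   sum of those counts modulo 2, so halving the width with shift-and-xor
   steps and finally ANDing with 1 yields the parity of SRC.  The
   multiply variant simply reuses the full popcount reduction and keeps
   its low bit, which is why the choice between the two is made on the
   cost of a multiply versus the extra shift/xor pairs.  */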
ded9bf77
AH
21385/* Return an RTX representing where to find the function value of a
21386 function returning MODE. */
21387static rtx
21388rs6000_complex_function_value (enum machine_mode mode)
21389{
21390 unsigned int regno;
21391 rtx r1, r2;
21392 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21393 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21394
18f63bfa
AH
21395 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21396 regno = FP_ARG_RETURN;
354ed18f
AH
21397 else
21398 {
18f63bfa 21399 regno = GP_ARG_RETURN;
ded9bf77 21400
18f63bfa
AH
21401 /* 32-bit is OK since it'll go in r3/r4. */
21402 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21403 return gen_rtx_REG (mode, regno);
21404 }
21405
18f63bfa
AH
21406 if (inner_bytes >= 8)
21407 return gen_rtx_REG (mode, regno);
21408
ded9bf77
AH
21409 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21410 const0_rtx);
21411 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21412 GEN_INT (inner_bytes));
ded9bf77
AH
21413 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21414}
21415
a6ebc39a
AH
21416/* Define how to find the value returned by a function.
21417 VALTYPE is the data type of the value (as a tree).
21418 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21419 otherwise, FUNC is 0.
21420
21421 On the SPE, both FPs and vectors are returned in r3.
21422
21423 On RS/6000 an integer value is in r3 and a floating-point value is in
21424 fp1, unless -msoft-float. */
21425
21426rtx
586de218 21427rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21428{
21429 enum machine_mode mode;
2a8fa26c 21430 unsigned int regno;
a6ebc39a 21431
594a51fe
SS
21432 /* Special handling for structs in darwin64. */
21433 if (rs6000_darwin64_abi
21434 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21435 && TREE_CODE (valtype) == RECORD_TYPE
21436 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21437 {
21438 CUMULATIVE_ARGS valcum;
21439 rtx valret;
21440
0b5383eb 21441 valcum.words = 0;
594a51fe
SS
21442 valcum.fregno = FP_ARG_MIN_REG;
21443 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21444 /* Do a trial code generation as if this were going to be passed as
21445 an argument; if any part goes in memory, we return NULL. */
21446 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21447 if (valret)
21448 return valret;
21449 /* Otherwise fall through to standard ABI rules. */
21450 }
21451
0e67400a
FJ
21452 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21453 {
21454 /* Long long return values need to be split under the 32-bit ABI with -mpowerpc64. */
21455 return gen_rtx_PARALLEL (DImode,
21456 gen_rtvec (2,
21457 gen_rtx_EXPR_LIST (VOIDmode,
21458 gen_rtx_REG (SImode, GP_ARG_RETURN),
21459 const0_rtx),
21460 gen_rtx_EXPR_LIST (VOIDmode,
21461 gen_rtx_REG (SImode,
21462 GP_ARG_RETURN + 1),
21463 GEN_INT (4))));
21464 }
0f086e42
FJ
21465 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21466 {
21467 return gen_rtx_PARALLEL (DCmode,
21468 gen_rtvec (4,
21469 gen_rtx_EXPR_LIST (VOIDmode,
21470 gen_rtx_REG (SImode, GP_ARG_RETURN),
21471 const0_rtx),
21472 gen_rtx_EXPR_LIST (VOIDmode,
21473 gen_rtx_REG (SImode,
21474 GP_ARG_RETURN + 1),
21475 GEN_INT (4)),
21476 gen_rtx_EXPR_LIST (VOIDmode,
21477 gen_rtx_REG (SImode,
21478 GP_ARG_RETURN + 2),
21479 GEN_INT (8)),
21480 gen_rtx_EXPR_LIST (VOIDmode,
21481 gen_rtx_REG (SImode,
21482 GP_ARG_RETURN + 3),
21483 GEN_INT (12))));
21484 }
602ea4d3 21485
7348aa7f
FXC
21486 mode = TYPE_MODE (valtype);
21487 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21488 || POINTER_TYPE_P (valtype))
b78d48dd 21489 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21490
00b79d54 21491 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21492 {
21493 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21494 {
21495 switch (mode)
21496 {
21497 default:
21498 gcc_unreachable ();
21499 case SDmode:
21500 regno = GP_ARG_RETURN;
21501 break;
21502 case DDmode:
21503 regno = FP_ARG_RETURN;
21504 break;
21505 case TDmode:
21506 /* Use f2:f3 specified by the ABI. */
21507 regno = FP_ARG_RETURN + 1;
21508 break;
21509 }
21510 }
21511 else
21512 regno = GP_ARG_RETURN;
21513 }
00b79d54 21514 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21515 regno = FP_ARG_RETURN;
ded9bf77 21516 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21517 && targetm.calls.split_complex_arg)
ded9bf77 21518 return rs6000_complex_function_value (mode);
44688022 21519 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21520 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21521 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21522 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21523 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21524 && (mode == DFmode || mode == DCmode
21525 || mode == TFmode || mode == TCmode))
18f63bfa 21526 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21527 else
21528 regno = GP_ARG_RETURN;
21529
21530 return gen_rtx_REG (mode, regno);
21531}
21532
ded9bf77
AH
21533/* Define how to find the value returned by a library function
21534 assuming the value has mode MODE. */
21535rtx
21536rs6000_libcall_value (enum machine_mode mode)
21537{
21538 unsigned int regno;
21539
2e6c9641
FJ
21540 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21541 {
21542 /* Long long return values need to be split under the 32-bit ABI with -mpowerpc64. */
21543 return gen_rtx_PARALLEL (DImode,
21544 gen_rtvec (2,
21545 gen_rtx_EXPR_LIST (VOIDmode,
21546 gen_rtx_REG (SImode, GP_ARG_RETURN),
21547 const0_rtx),
21548 gen_rtx_EXPR_LIST (VOIDmode,
21549 gen_rtx_REG (SImode,
21550 GP_ARG_RETURN + 1),
21551 GEN_INT (4))));
21552 }
21553
00b79d54 21554 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21555 {
21556 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21557 {
21558 switch (mode)
21559 {
21560 default:
21561 gcc_unreachable ();
21562 case SDmode:
21563 regno = GP_ARG_RETURN;
21564 break;
21565 case DDmode:
21566 regno = FP_ARG_RETURN;
21567 break;
21568 case TDmode:
21569 /* Use f2:f3 specified by the ABI. */
21570 regno = FP_ARG_RETURN + 1;
21571 break;
21572 }
21573 }
21574 else
21575 regno = GP_ARG_RETURN;
21576 }
00b79d54 21577 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21578 && TARGET_HARD_FLOAT && TARGET_FPRS)
21579 regno = FP_ARG_RETURN;
44688022
AM
21580 else if (ALTIVEC_VECTOR_MODE (mode)
21581 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21582 regno = ALTIVEC_ARG_RETURN;
42ba5130 21583 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21584 return rs6000_complex_function_value (mode);
18f63bfa 21585 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21586 && (mode == DFmode || mode == DCmode
21587 || mode == TFmode || mode == TCmode))
18f63bfa 21588 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21589 else
21590 regno = GP_ARG_RETURN;
21591
21592 return gen_rtx_REG (mode, regno);
21593}
21594
d1d0c603
JJ
 21595/* Define the offset between two registers, FROM (to be eliminated) and
 21596   its replacement TO, at the start of a routine.  */
21597HOST_WIDE_INT
21598rs6000_initial_elimination_offset (int from, int to)
21599{
21600 rs6000_stack_t *info = rs6000_stack_info ();
21601 HOST_WIDE_INT offset;
21602
7d5175e1 21603 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21604 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21605 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21606 {
21607 offset = info->push_p ? 0 : -info->total_size;
21608 if (FRAME_GROWS_DOWNWARD)
5b667039 21609 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21610 }
21611 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21612 offset = FRAME_GROWS_DOWNWARD
5b667039 21613 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21614 : 0;
21615 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21616 offset = info->total_size;
21617 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21618 offset = info->push_p ? info->total_size : 0;
21619 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21620 offset = 0;
21621 else
37409796 21622 gcc_unreachable ();
d1d0c603
JJ
21623
21624 return offset;
21625}
21626
58646b77 21627/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21628
c8e4f0e9 21629static bool
3101faab 21630rs6000_is_opaque_type (const_tree type)
62e1dfcf 21631{
58646b77 21632 return (type == opaque_V2SI_type_node
2abe3e28 21633 || type == opaque_V2SF_type_node
58646b77
PB
21634 || type == opaque_p_V2SI_type_node
21635 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21636}
21637
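/* Target hook for dwarf_register_span.  For an SPE vector register (or an
   E500 double held in DFmode) describe the value as two SImode pieces, the
   normal 32-bit register plus its high part numbered 1200 + regno, so that
   the debug and unwind tables can refer to both halves.  */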
96714395 21638static rtx
a2369ed3 21639rs6000_dwarf_register_span (rtx reg)
96714395
AH
21640{
21641 unsigned regno;
21642
4d4cbc0e
AH
21643 if (TARGET_SPE
21644 && (SPE_VECTOR_MODE (GET_MODE (reg))
21645 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
21646 ;
21647 else
96714395
AH
21648 return NULL_RTX;
21649
21650 regno = REGNO (reg);
21651
21652 /* The duality of the SPE register size wreaks all kinds of havoc.
21653 This is a way of distinguishing r0 in 32-bits from r0 in
21654 64-bits. */
21655 return
21656 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21657 BYTES_BIG_ENDIAN
21658 ? gen_rtvec (2,
21659 gen_rtx_REG (SImode, regno + 1200),
21660 gen_rtx_REG (SImode, regno))
21661 : gen_rtvec (2,
21662 gen_rtx_REG (SImode, regno),
21663 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21664}
21665
37ea0b7e
JM
 21666/* Fill in sizes for SPE register high parts in the table used by the unwinder.  */
21667
21668static void
21669rs6000_init_dwarf_reg_sizes_extra (tree address)
21670{
21671 if (TARGET_SPE)
21672 {
21673 int i;
21674 enum machine_mode mode = TYPE_MODE (char_type_node);
21675 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21676 rtx mem = gen_rtx_MEM (BLKmode, addr);
21677 rtx value = gen_int_mode (4, mode);
21678
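      /* Each SPE high-part register (DWARF numbers 1201 through 1231) is
	 4 bytes wide; record that size at its slot in the unwinder's
	 register-size table.  */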
21679 for (i = 1201; i < 1232; i++)
21680 {
21681 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21682 HOST_WIDE_INT offset
21683 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21684
21685 emit_move_insn (adjust_address (mem, mode, offset), value);
21686 }
21687 }
21688}
21689
93c9d1ba
AM
21690/* Map internal gcc register numbers to DWARF2 register numbers. */
21691
21692unsigned int
21693rs6000_dbx_register_number (unsigned int regno)
21694{
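  /* GPRs 0-31 and FPRs 32-63 already use their DWARF2 numbers, and the
     remapping below is only wanted when emitting DWARF2 debug info.  */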
21695 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21696 return regno;
21697 if (regno == MQ_REGNO)
21698 return 100;
1de43f85 21699 if (regno == LR_REGNO)
93c9d1ba 21700 return 108;
1de43f85 21701 if (regno == CTR_REGNO)
93c9d1ba
AM
21702 return 109;
21703 if (CR_REGNO_P (regno))
21704 return regno - CR0_REGNO + 86;
21705 if (regno == XER_REGNO)
21706 return 101;
21707 if (ALTIVEC_REGNO_P (regno))
21708 return regno - FIRST_ALTIVEC_REGNO + 1124;
21709 if (regno == VRSAVE_REGNO)
21710 return 356;
21711 if (regno == VSCR_REGNO)
21712 return 67;
21713 if (regno == SPE_ACC_REGNO)
21714 return 99;
21715 if (regno == SPEFSCR_REGNO)
21716 return 612;
21717 /* SPE high reg number. We get these values of regno from
21718 rs6000_dwarf_register_span. */
37409796
NS
21719 gcc_assert (regno >= 1200 && regno < 1232);
21720 return regno;
93c9d1ba
AM
21721}
21722
93f90be6 21723/* Target hook for eh_return_filter_mode.  */
f676971a 21724static enum machine_mode
93f90be6
FJ
21725rs6000_eh_return_filter_mode (void)
21726{
21727 return TARGET_32BIT ? SImode : word_mode;
21728}
21729
00b79d54
BE
21730/* Target hook for scalar_mode_supported_p. */
21731static bool
21732rs6000_scalar_mode_supported_p (enum machine_mode mode)
21733{
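  /* Accept the decimal float modes in addition to whatever
     default_scalar_mode_supported_p allows.  */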
21734 if (DECIMAL_FLOAT_MODE_P (mode))
21735 return true;
21736 else
21737 return default_scalar_mode_supported_p (mode);
21738}
21739
f676971a
EC
21740/* Target hook for vector_mode_supported_p. */
21741static bool
21742rs6000_vector_mode_supported_p (enum machine_mode mode)
21743{
21744
96038623
DE
21745 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21746 return true;
21747
f676971a
EC
21748 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21749 return true;
21750
21751 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21752 return true;
21753
21754 else
21755 return false;
21756}
21757
bb8df8a6
EC
21758/* Target hook for invalid_arg_for_unprototyped_fn. */
21759static const char *
3101faab 21760invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21761{
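  /* Complain if an AltiVec vector argument reaches an unprototyped
     function, except under the Darwin64 ABI or when the callee is a
     machine-specific built-in.  */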
21762 return (!rs6000_darwin64_abi
21763 && typelist == 0
21764 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21765 && (funcdecl == NULL_TREE
21766 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21767 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21768 ? N_("AltiVec argument passed to unprototyped function")
21769 : NULL;
21770}
21771
3aebbe5f
JJ
 21772/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
 21773   setup by using the hidden __stack_chk_fail_local function instead of
 21774   calling __stack_chk_fail directly.  Otherwise it is better to call
 21775   __stack_chk_fail directly.  */
21776
21777static tree
21778rs6000_stack_protect_fail (void)
21779{
21780 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21781 ? default_hidden_stack_protect_fail ()
21782 : default_external_stack_protect_fail ();
21783}
21784
17211ab5 21785#include "gt-rs6000.h"