/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi=spe/nospe was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
\f
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;      /* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;      /* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  6,                    /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  8,                    /* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,
  64,                   /* l1 cache */
  2048,                 /* l2 cache */
  16,                   /* prefetch streams */
};

\f
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static bool rs6000_reg_live_or_pic_offset_p (int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
\f
/* Default register names.  */
char rs6000_reg_names[][8] =
{
   "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
   "8",  "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
   "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
   "8",  "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "mq", "lr", "ctr","ap",
   "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
  "xer",
  /* AltiVec registers.  */
   "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
   "8",  "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",  "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",  "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",  "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",  "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",   "lr",  "ctr",   "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
\f
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
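/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 (the
   VRSAVE bit for %v0), and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 2) is
   0x20000000 (the bit for %v2).  */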
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
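/* The minimum is spelled "-0x7fffffff - 1" because the literal 0x80000000
   does not fit in a signed int; negating it directly would negate an
   unsigned constant rather than produce the intended negative value.  */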
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

struct gcc_target targetm = TARGET_INITIALIZER;
\f

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
        && (mode != TDmode || (regno % 2) == 0)
        && mode != SDmode
        && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	   && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
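/* The table is indexed as rs6000_hard_regno_mode_ok_p[mode][regno] and is
   filled in once, so later register/mode validity tests reduce to a single
   array lookup instead of re-running the checks in
   rs6000_hard_regno_mode_ok.  */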
1268static void
1269rs6000_init_hard_regno_mode_ok (void)
1270{
1271 int r, m;
1272
1273 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1274 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1275 if (rs6000_hard_regno_mode_ok (r, m))
1276 rs6000_hard_regno_mode_ok_p[m][r] = true;
1277}
1278
e4cad568
GK
1279#if TARGET_MACHO
1280/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1281
1282static void
1283darwin_rs6000_override_options (void)
1284{
1285 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1286 off. */
1287 rs6000_altivec_abi = 1;
1288 TARGET_ALTIVEC_VRSAVE = 1;
1289 if (DEFAULT_ABI == ABI_DARWIN)
1290 {
1291 if (MACHO_DYNAMIC_NO_PIC_P)
1292 {
1293 if (flag_pic)
1294 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1295 flag_pic = 0;
1296 }
1297 else if (flag_pic == 1)
1298 {
1299 flag_pic = 2;
1300 }
1301 }
1302 if (TARGET_64BIT && ! TARGET_POWERPC64)
1303 {
1304 target_flags |= MASK_POWERPC64;
1305 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1306 }
1307 if (flag_mkernel)
1308 {
1309 rs6000_default_long_calls = 1;
1310 target_flags |= MASK_SOFT_FLOAT;
1311 }
1312
1313 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1314 Altivec. */
1315 if (!flag_mkernel && !flag_apple_kext
1316 && TARGET_64BIT
1317 && ! (target_flags_explicit & MASK_ALTIVEC))
1318 target_flags |= MASK_ALTIVEC;
1319
1320 /* Unless the user (not the configurer) has explicitly overridden
1321 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1322 G4 unless targetting the kernel. */
1323 if (!flag_mkernel
1324 && !flag_apple_kext
1325 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1326 && ! (target_flags_explicit & MASK_ALTIVEC)
1327 && ! rs6000_select[1].string)
1328 {
1329 target_flags |= MASK_ALTIVEC;
1330 }
1331}
1332#endif
1333
c1e55850
GK
1334/* If not otherwise specified by a target, make 'long double' equivalent to
1335 'double'. */
1336
1337#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1338#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1339#endif
1340
5248c961
RK
1341/* Override command line options. Mostly we process the processor
1342 type and sometimes adjust other TARGET_ options. */
1343
1344void
d779d0dc 1345rs6000_override_options (const char *default_cpu)
5248c961 1346{
c4d38ccb 1347 size_t i, j;
8e3f41e7 1348 struct rs6000_cpu_select *ptr;
66188a7e 1349 int set_masks;
5248c961 1350
66188a7e 1351 /* Simplifications for entries below. */
85638c0d 1352
66188a7e
GK
1353 enum {
1354 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1355 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1356 };
85638c0d 1357
66188a7e
GK
1358 /* This table occasionally claims that a processor does not support
1359 a particular feature even though it does, but the feature is slower
1360 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1361 complete description of the processor's support.
66188a7e
GK
1362
1363 Please keep this list in order, and don't forget to update the
1364 documentation in invoke.texi when adding a new processor or
1365 flag. */
5248c961
RK
1366 static struct ptt
1367 {
8b60264b
KG
1368 const char *const name; /* Canonical processor name. */
1369 const enum processor_type processor; /* Processor type enum value. */
1370 const int target_enable; /* Target flags to enable. */
8b60264b 1371 } const processor_target_table[]
66188a7e 1372 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1373 {"403", PROCESSOR_PPC403,
66188a7e 1374 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1375 {"405", PROCESSOR_PPC405,
716019c0
JM
1376 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1377 {"405fp", PROCESSOR_PPC405,
1378 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1379 {"440", PROCESSOR_PPC440,
716019c0
JM
1380 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1381 {"440fp", PROCESSOR_PPC440,
1382 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1383 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1384 {"601", PROCESSOR_PPC601,
66188a7e
GK
1385 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1386 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1387 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1388 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1389 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1390 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1391 {"620", PROCESSOR_PPC620,
1392 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1393 {"630", PROCESSOR_PPC630,
1394 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1395 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1396 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1397 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1398 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1399 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1400 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1401 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1402 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1403 /* 8548 has a dummy entry for now. */
a45bce6e 1404 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1405 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1406 {"970", PROCESSOR_POWER4,
66188a7e 1407 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1408 {"cell", PROCESSOR_CELL,
1409 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1410 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1411 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1412 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1413 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1414 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1415 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1416 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1417 {"power2", PROCESSOR_POWER,
1418 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1419 {"power3", PROCESSOR_PPC630,
1420 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1421 {"power4", PROCESSOR_POWER4,
fc091c8e 1422 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1423 {"power5", PROCESSOR_POWER5,
432218ba
DE
1424 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1425 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1426 {"power5+", PROCESSOR_POWER5,
1427 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1428 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1429 {"power6", PROCESSOR_POWER6,
e118597e 1430 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1431 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1432 {"power6x", PROCESSOR_POWER6,
1433 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1434 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1435 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1436 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1437 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1438 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1439 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1440 {"rios2", PROCESSOR_RIOS2,
1441 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1442 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1443 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1444 {"rs64", PROCESSOR_RS64A,
1445 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1446 };
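   /* Illustrative note (not part of the original source): each entry above
      supplies both a scheduling model and a set of target flags.  For
      example, -mcpu=power5 selects PROCESSOR_POWER5 and asks for
      POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT | MASK_MFCRF
      | MASK_POPCNTB; the requested flags are then filtered through
      set_masks below so that explicit -m options and OS restrictions
      are not overridden.  */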
5248c961 1447
ca7558fc 1448 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1449
66188a7e
GK
1450 /* Some OSs don't support saving the high part of 64-bit registers on
1451 context switch. Other OSs don't support saving Altivec registers.
1452 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1453 settings; if the user wants either, the user must explicitly specify
1454 them and we won't interfere with the user's specification. */
1455
1456 enum {
1457 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1458 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1459 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1460 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1461 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1462 };
0d1fbc8c
AH
1463
1464 rs6000_init_hard_regno_mode_ok ();
1465
c4ad648e 1466 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1467#ifdef OS_MISSING_POWERPC64
1468 if (OS_MISSING_POWERPC64)
1469 set_masks &= ~MASK_POWERPC64;
1470#endif
1471#ifdef OS_MISSING_ALTIVEC
1472 if (OS_MISSING_ALTIVEC)
1473 set_masks &= ~MASK_ALTIVEC;
1474#endif
1475
768875a8
AM
 1476 /* Don't let the processor default override flags that were given explicitly. */
1477 set_masks &= ~target_flags_explicit;
957211c3 1478
a4f6c312 1479 /* Identify the processor type. */
8e3f41e7 1480 rs6000_select[0].string = default_cpu;
3cb999d8 1481 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1482
b6a1cbae 1483 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1484 {
8e3f41e7
MM
1485 ptr = &rs6000_select[i];
1486 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1487 {
8e3f41e7
MM
1488 for (j = 0; j < ptt_size; j++)
1489 if (! strcmp (ptr->string, processor_target_table[j].name))
1490 {
1491 if (ptr->set_tune_p)
1492 rs6000_cpu = processor_target_table[j].processor;
1493
1494 if (ptr->set_arch_p)
1495 {
66188a7e
GK
1496 target_flags &= ~set_masks;
1497 target_flags |= (processor_target_table[j].target_enable
1498 & set_masks);
8e3f41e7
MM
1499 }
1500 break;
1501 }
1502
4406229e 1503 if (j == ptt_size)
8e3f41e7 1504 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1505 }
1506 }
8a61d227 1507
993f19a8 1508 if (TARGET_E500)
a3170dc6
AH
1509 rs6000_isel = 1;
1510
dff9f1b6
DE
1511 /* If we are optimizing big endian systems for space, use the load/store
1512 multiple and string instructions. */
ef792183 1513 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1514 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1515
a4f6c312
SS
 1516 /* Don't allow -mmultiple or -mstring on little endian systems
 1517 unless the cpu is a 750, because the hardware doesn't support the
 1518 instructions used in little endian mode and they cause an alignment
 1519 trap. The 750 does not cause an alignment trap (except when the
 1520 target is unaligned). */
bef84347 1521
b21fb038 1522 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1523 {
1524 if (TARGET_MULTIPLE)
1525 {
1526 target_flags &= ~MASK_MULTIPLE;
b21fb038 1527 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1528 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1529 }
1530
1531 if (TARGET_STRING)
1532 {
1533 target_flags &= ~MASK_STRING;
b21fb038 1534 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1535 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1536 }
1537 }
3933e0e1 1538
38c1f2d7
MM
1539 /* Set debug flags */
1540 if (rs6000_debug_name)
1541 {
bfc79d3b 1542 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1543 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1544 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1545 rs6000_debug_stack = 1;
bfc79d3b 1546 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1547 rs6000_debug_arg = 1;
1548 else
c725bd79 1549 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1550 }
1551
57ac7be9
AM
1552 if (rs6000_traceback_name)
1553 {
1554 if (! strncmp (rs6000_traceback_name, "full", 4))
1555 rs6000_traceback = traceback_full;
1556 else if (! strncmp (rs6000_traceback_name, "part", 4))
1557 rs6000_traceback = traceback_part;
1558 else if (! strncmp (rs6000_traceback_name, "no", 2))
1559 rs6000_traceback = traceback_none;
1560 else
9e637a26 1561 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1562 rs6000_traceback_name);
1563 }
1564
78f5898b
AH
1565 if (!rs6000_explicit_options.long_double)
1566 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1567
602ea4d3 1568#ifndef POWERPC_LINUX
d3603e8c 1569 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1570 rs6000_ieeequad = 1;
1571#endif
1572
6d0ef01e
HP
1573 /* Set Altivec ABI as default for powerpc64 linux. */
1574 if (TARGET_ELF && TARGET_64BIT)
1575 {
1576 rs6000_altivec_abi = 1;
78f5898b 1577 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1578 }
1579
594a51fe
SS
1580 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1581 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1582 {
1583 rs6000_darwin64_abi = 1;
9c7956fd 1584#if TARGET_MACHO
6ac49599 1585 darwin_one_byte_bool = 1;
9c7956fd 1586#endif
d9168963
SS
1587 /* Default to natural alignment, for better performance. */
1588 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1589 }
1590
194c524a
DE
1591 /* Place FP constants in the constant pool instead of TOC
 1592 if section anchors are enabled. */
1593 if (flag_section_anchors)
1594 TARGET_NO_FP_IN_TOC = 1;
1595
c4501e62
JJ
1596 /* Handle -mtls-size option. */
1597 rs6000_parse_tls_size_option ();
1598
a7ae18e2
AH
1599#ifdef SUBTARGET_OVERRIDE_OPTIONS
1600 SUBTARGET_OVERRIDE_OPTIONS;
1601#endif
1602#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1603 SUBSUBTARGET_OVERRIDE_OPTIONS;
1604#endif
4d4cbc0e
AH
1605#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1606 SUB3TARGET_OVERRIDE_OPTIONS;
1607#endif
a7ae18e2 1608
5da702b1
AH
1609 if (TARGET_E500)
1610 {
1611 /* The e500 does not have string instructions, and we set
1612 MASK_STRING above when optimizing for size. */
1613 if ((target_flags & MASK_STRING) != 0)
1614 target_flags = target_flags & ~MASK_STRING;
1615 }
1616 else if (rs6000_select[1].string != NULL)
1617 {
1618 /* For the powerpc-eabispe configuration, we set all these by
1619 default, so let's unset them if we manually set another
1620 CPU that is not the E500. */
78f5898b 1621 if (!rs6000_explicit_options.abi)
5da702b1 1622 rs6000_spe_abi = 0;
78f5898b 1623 if (!rs6000_explicit_options.spe)
5da702b1 1624 rs6000_spe = 0;
78f5898b 1625 if (!rs6000_explicit_options.float_gprs)
5da702b1 1626 rs6000_float_gprs = 0;
78f5898b 1627 if (!rs6000_explicit_options.isel)
5da702b1
AH
1628 rs6000_isel = 0;
1629 }
b5044283 1630
eca0d5e8
JM
1631 /* Detect invalid option combinations with E500. */
1632 CHECK_E500_OPTIONS;
1633
ec507f2d 1634 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1635 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1636 && rs6000_cpu != PROCESSOR_POWER6
1637 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1638 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1639 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1640 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1641 || rs6000_cpu == PROCESSOR_POWER5
1642 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1643
ec507f2d
DE
1644 rs6000_sched_restricted_insns_priority
1645 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1646
569fa502 1647 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1648 rs6000_sched_costly_dep
1649 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1650
569fa502
DN
1651 if (rs6000_sched_costly_dep_str)
1652 {
f676971a 1653 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1654 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1655 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1656 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1657 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1658 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1659 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1660 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1661 else
c4ad648e 1662 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1663 }
1664
1665 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1666 rs6000_sched_insert_nops
1667 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1668
cbe26ab8
DN
1669 if (rs6000_sched_insert_nops_str)
1670 {
1671 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1672 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1673 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1674 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1675 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1676 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1677 else
c4ad648e 1678 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1679 }
1680
c81bebd7 1681#ifdef TARGET_REGNAMES
a4f6c312
SS
1682 /* If the user desires alternate register names, copy in the
1683 alternate names now. */
c81bebd7 1684 if (TARGET_REGNAMES)
4e135bdd 1685 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1686#endif
1687
df01da37 1688 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1689 If -maix-struct-return or -msvr4-struct-return was explicitly
1690 used, don't override with the ABI default. */
df01da37
DE
1691 if (!rs6000_explicit_options.aix_struct_ret)
1692 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1693
602ea4d3 1694 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1695 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1696
f676971a 1697 if (TARGET_TOC)
9ebbca7d 1698 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1699
301d03af
RS
1700 /* We can only guarantee the availability of DI pseudo-ops when
1701 assembling for 64-bit targets. */
ae6c1efd 1702 if (!TARGET_64BIT)
301d03af
RS
1703 {
1704 targetm.asm_out.aligned_op.di = NULL;
1705 targetm.asm_out.unaligned_op.di = NULL;
1706 }
1707
1494c534
DE
1708 /* Set branch target alignment, if not optimizing for size. */
1709 if (!optimize_size)
1710 {
d296e02e
AP
 1711 /* Cell wants to be aligned to 8 bytes for dual issue. */
1712 if (rs6000_cpu == PROCESSOR_CELL)
1713 {
1714 if (align_functions <= 0)
1715 align_functions = 8;
1716 if (align_jumps <= 0)
1717 align_jumps = 8;
1718 if (align_loops <= 0)
1719 align_loops = 8;
1720 }
44cd321e 1721 if (rs6000_align_branch_targets)
1494c534
DE
1722 {
1723 if (align_functions <= 0)
1724 align_functions = 16;
1725 if (align_jumps <= 0)
1726 align_jumps = 16;
1727 if (align_loops <= 0)
1728 align_loops = 16;
1729 }
1730 if (align_jumps_max_skip <= 0)
1731 align_jumps_max_skip = 15;
1732 if (align_loops_max_skip <= 0)
1733 align_loops_max_skip = 15;
1734 }
2792d578 1735
71f123ca
FS
1736 /* Arrange to save and restore machine status around nested functions. */
1737 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1738
1739 /* We should always be splitting complex arguments, but we can't break
1740 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1741 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1742 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1743
1744 /* Initialize rs6000_cost with the appropriate target costs. */
1745 if (optimize_size)
1746 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1747 else
1748 switch (rs6000_cpu)
1749 {
1750 case PROCESSOR_RIOS1:
1751 rs6000_cost = &rios1_cost;
1752 break;
1753
1754 case PROCESSOR_RIOS2:
1755 rs6000_cost = &rios2_cost;
1756 break;
1757
1758 case PROCESSOR_RS64A:
1759 rs6000_cost = &rs64a_cost;
1760 break;
1761
1762 case PROCESSOR_MPCCORE:
1763 rs6000_cost = &mpccore_cost;
1764 break;
1765
1766 case PROCESSOR_PPC403:
1767 rs6000_cost = &ppc403_cost;
1768 break;
1769
1770 case PROCESSOR_PPC405:
1771 rs6000_cost = &ppc405_cost;
1772 break;
1773
1774 case PROCESSOR_PPC440:
1775 rs6000_cost = &ppc440_cost;
1776 break;
1777
1778 case PROCESSOR_PPC601:
1779 rs6000_cost = &ppc601_cost;
1780 break;
1781
1782 case PROCESSOR_PPC603:
1783 rs6000_cost = &ppc603_cost;
1784 break;
1785
1786 case PROCESSOR_PPC604:
1787 rs6000_cost = &ppc604_cost;
1788 break;
1789
1790 case PROCESSOR_PPC604e:
1791 rs6000_cost = &ppc604e_cost;
1792 break;
1793
1794 case PROCESSOR_PPC620:
8b897cfa
RS
1795 rs6000_cost = &ppc620_cost;
1796 break;
1797
f0517163
RS
1798 case PROCESSOR_PPC630:
1799 rs6000_cost = &ppc630_cost;
1800 break;
1801
982afe02 1802 case PROCESSOR_CELL:
d296e02e
AP
1803 rs6000_cost = &ppccell_cost;
1804 break;
1805
8b897cfa
RS
1806 case PROCESSOR_PPC750:
1807 case PROCESSOR_PPC7400:
1808 rs6000_cost = &ppc750_cost;
1809 break;
1810
1811 case PROCESSOR_PPC7450:
1812 rs6000_cost = &ppc7450_cost;
1813 break;
1814
1815 case PROCESSOR_PPC8540:
1816 rs6000_cost = &ppc8540_cost;
1817 break;
1818
1819 case PROCESSOR_POWER4:
1820 case PROCESSOR_POWER5:
1821 rs6000_cost = &power4_cost;
1822 break;
1823
44cd321e
PS
1824 case PROCESSOR_POWER6:
1825 rs6000_cost = &power6_cost;
1826 break;
1827
8b897cfa 1828 default:
37409796 1829 gcc_unreachable ();
8b897cfa 1830 }
0b11da67
DE
1831
1832 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1833 set_param_value ("simultaneous-prefetches",
1834 rs6000_cost->simultaneous_prefetches);
1835 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1836 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1837 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1838 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1839 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1840 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
5248c961 1841}
5accd822 1842
7ccf35ed
DN
1843/* Implement targetm.vectorize.builtin_mask_for_load. */
1844static tree
1845rs6000_builtin_mask_for_load (void)
1846{
1847 if (TARGET_ALTIVEC)
1848 return altivec_builtin_mask_for_load;
1849 else
1850 return 0;
1851}
1852
f57d17f1
TM
1853/* Implement targetm.vectorize.builtin_conversion. */
1854static tree
1855rs6000_builtin_conversion (enum tree_code code, tree type)
1856{
1857 if (!TARGET_ALTIVEC)
1858 return NULL_TREE;
982afe02 1859
f57d17f1
TM
1860 switch (code)
1861 {
1862 case FLOAT_EXPR:
1863 switch (TYPE_MODE (type))
1864 {
1865 case V4SImode:
982afe02 1866 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1867 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1868 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1869 default:
1870 return NULL_TREE;
1871 }
1872 default:
1873 return NULL_TREE;
1874 }
1875}
1876
89d67cca
DN
1877/* Implement targetm.vectorize.builtin_mul_widen_even. */
1878static tree
1879rs6000_builtin_mul_widen_even (tree type)
1880{
1881 if (!TARGET_ALTIVEC)
1882 return NULL_TREE;
1883
1884 switch (TYPE_MODE (type))
1885 {
1886 case V8HImode:
982afe02 1887 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1888 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1889 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1890
1891 case V16QImode:
1892 return TYPE_UNSIGNED (type) ?
1893 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1894 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1895 default:
1896 return NULL_TREE;
1897 }
1898}
1899
1900/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1901static tree
1902rs6000_builtin_mul_widen_odd (tree type)
1903{
1904 if (!TARGET_ALTIVEC)
1905 return NULL_TREE;
1906
1907 switch (TYPE_MODE (type))
1908 {
1909 case V8HImode:
1910 return TYPE_UNSIGNED (type) ?
1911 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1912 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1913
1914 case V16QImode:
1915 return TYPE_UNSIGNED (type) ?
1916 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1917 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1918 default:
1919 return NULL_TREE;
1920 }
1921}
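/* Illustrative note (not part of the original source): the vectorizer uses
   the "even" and "odd" hooks above as a pair when widening a multiply; a
   V8HImode x V8HImode product, for instance, becomes two V4SImode results,
   one built with vmule[su]h from the even-numbered elements and one built
   with vmulo[su]h from the odd-numbered elements.  */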
1922
5b900a4c
DN
1923
 1924/* Return true iff a data reference of TYPE can reach vector alignment (16)
 1925 after applying N iterations. This routine does not determine
 1926 how many iterations are required to reach the desired alignment. */
1927
1928static bool
3101faab 1929rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
1930{
1931 if (is_packed)
1932 return false;
1933
1934 if (TARGET_32BIT)
1935 {
1936 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1937 return true;
1938
1939 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1940 return true;
1941
1942 return false;
1943 }
1944 else
1945 {
1946 if (TARGET_MACHO)
1947 return false;
1948
 1949 /* Assume that all other types are naturally aligned. CHECKME! */
1950 return true;
1951 }
1952}
1953
5da702b1
AH
1954/* Handle generic options of the form -mfoo=yes/no.
1955 NAME is the option name.
1956 VALUE is the option value.
 1957 FLAG points to the flag in which to store 1 or 0, depending on
1958 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1959static void
5da702b1 1960rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1961{
5da702b1 1962 if (value == 0)
993f19a8 1963 return;
5da702b1
AH
1964 else if (!strcmp (value, "yes"))
1965 *flag = 1;
1966 else if (!strcmp (value, "no"))
1967 *flag = 0;
08b57fb3 1968 else
5da702b1 1969 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1970}
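/* Illustrative usage (not part of the original source): -mvrsave=yes is
   handled below as
     rs6000_parse_yes_no_option ("vrsave", "yes", &(TARGET_ALTIVEC_VRSAVE));
   which stores 1 through the flag pointer; "no" stores 0, and any other
   value triggers the "unknown -m%s= option" error above.  */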
1971
c4501e62
JJ
1972/* Validate and record the size specified with the -mtls-size option. */
1973
1974static void
863d938c 1975rs6000_parse_tls_size_option (void)
c4501e62
JJ
1976{
1977 if (rs6000_tls_size_string == 0)
1978 return;
1979 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1980 rs6000_tls_size = 16;
1981 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1982 rs6000_tls_size = 32;
1983 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1984 rs6000_tls_size = 64;
1985 else
9e637a26 1986 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1987}
1988
5accd822 1989void
a2369ed3 1990optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1991{
2e3f0db6
DJ
1992 if (DEFAULT_ABI == ABI_DARWIN)
1993 /* The Darwin libraries never set errno, so we might as well
1994 avoid calling them when that's the only reason we would. */
1995 flag_errno_math = 0;
59d6560b
DE
1996
1997 /* Double growth factor to counter reduced min jump length. */
1998 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1999
2000 /* Enable section anchors by default.
2001 Skip section anchors for Objective C and Objective C++
 2002 until the front ends are fixed. */
23f99493 2003 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2004 flag_section_anchors = 1;
5accd822 2005}
78f5898b
AH
2006
2007/* Implement TARGET_HANDLE_OPTION. */
2008
2009static bool
2010rs6000_handle_option (size_t code, const char *arg, int value)
2011{
2012 switch (code)
2013 {
2014 case OPT_mno_power:
2015 target_flags &= ~(MASK_POWER | MASK_POWER2
2016 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2017 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2018 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2019 break;
2020 case OPT_mno_powerpc:
2021 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2022 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2023 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2024 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2025 break;
2026 case OPT_mfull_toc:
d2894ab5
DE
2027 target_flags &= ~MASK_MINIMAL_TOC;
2028 TARGET_NO_FP_IN_TOC = 0;
2029 TARGET_NO_SUM_IN_TOC = 0;
2030 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2031#ifdef TARGET_USES_SYSV4_OPT
 2032 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
 2033 just the same as -mminimal-toc. */
2034 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2035 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2036#endif
2037 break;
2038
2039#ifdef TARGET_USES_SYSV4_OPT
2040 case OPT_mtoc:
2041 /* Make -mtoc behave like -mminimal-toc. */
2042 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2043 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2044 break;
2045#endif
2046
2047#ifdef TARGET_USES_AIX64_OPT
2048 case OPT_maix64:
2049#else
2050 case OPT_m64:
2051#endif
2c9c9afd
AM
2052 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2053 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2054 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2055 break;
2056
2057#ifdef TARGET_USES_AIX64_OPT
2058 case OPT_maix32:
2059#else
2060 case OPT_m32:
2061#endif
2062 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2063 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2064 break;
2065
2066 case OPT_minsert_sched_nops_:
2067 rs6000_sched_insert_nops_str = arg;
2068 break;
2069
2070 case OPT_mminimal_toc:
2071 if (value == 1)
2072 {
d2894ab5
DE
2073 TARGET_NO_FP_IN_TOC = 0;
2074 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2075 }
2076 break;
2077
2078 case OPT_mpower:
2079 if (value == 1)
c2dba4ab
AH
2080 {
2081 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2082 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2083 }
78f5898b
AH
2084 break;
2085
2086 case OPT_mpower2:
2087 if (value == 1)
c2dba4ab
AH
2088 {
2089 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2090 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2091 }
78f5898b
AH
2092 break;
2093
2094 case OPT_mpowerpc_gpopt:
2095 case OPT_mpowerpc_gfxopt:
2096 if (value == 1)
c2dba4ab
AH
2097 {
2098 target_flags |= MASK_POWERPC;
2099 target_flags_explicit |= MASK_POWERPC;
2100 }
78f5898b
AH
2101 break;
2102
df01da37
DE
2103 case OPT_maix_struct_return:
2104 case OPT_msvr4_struct_return:
2105 rs6000_explicit_options.aix_struct_ret = true;
2106 break;
2107
78f5898b
AH
2108 case OPT_mvrsave_:
2109 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2110 break;
78f5898b
AH
2111
2112 case OPT_misel_:
2113 rs6000_explicit_options.isel = true;
2114 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2115 break;
2116
2117 case OPT_mspe_:
2118 rs6000_explicit_options.spe = true;
2119 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2120 break;
2121
2122 case OPT_mdebug_:
2123 rs6000_debug_name = arg;
2124 break;
2125
2126#ifdef TARGET_USES_SYSV4_OPT
2127 case OPT_mcall_:
2128 rs6000_abi_name = arg;
2129 break;
2130
2131 case OPT_msdata_:
2132 rs6000_sdata_name = arg;
2133 break;
2134
2135 case OPT_mtls_size_:
2136 rs6000_tls_size_string = arg;
2137 break;
2138
2139 case OPT_mrelocatable:
2140 if (value == 1)
c2dba4ab 2141 {
e0bf274f
AM
2142 target_flags |= MASK_MINIMAL_TOC;
2143 target_flags_explicit |= MASK_MINIMAL_TOC;
2144 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2145 }
78f5898b
AH
2146 break;
2147
2148 case OPT_mrelocatable_lib:
2149 if (value == 1)
c2dba4ab 2150 {
e0bf274f
AM
2151 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2152 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2153 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2154 }
78f5898b 2155 else
c2dba4ab
AH
2156 {
2157 target_flags &= ~MASK_RELOCATABLE;
2158 target_flags_explicit |= MASK_RELOCATABLE;
2159 }
78f5898b
AH
2160 break;
2161#endif
2162
2163 case OPT_mabi_:
78f5898b
AH
2164 if (!strcmp (arg, "altivec"))
2165 {
d3603e8c 2166 rs6000_explicit_options.abi = true;
78f5898b
AH
2167 rs6000_altivec_abi = 1;
2168 rs6000_spe_abi = 0;
2169 }
2170 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2171 {
2172 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2173 the default for rs6000_spe_abi to be chosen later. */
2174 rs6000_altivec_abi = 0;
2175 }
78f5898b
AH
2176 else if (! strcmp (arg, "spe"))
2177 {
d3603e8c 2178 rs6000_explicit_options.abi = true;
78f5898b
AH
2179 rs6000_spe_abi = 1;
2180 rs6000_altivec_abi = 0;
2181 if (!TARGET_SPE_ABI)
2182 error ("not configured for ABI: '%s'", arg);
2183 }
2184 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2185 {
2186 rs6000_explicit_options.abi = true;
2187 rs6000_spe_abi = 0;
2188 }
78f5898b
AH
2189
 2190 /* These are here for testing during development only; please do not
 2191 document them in the manual. */
2192 else if (! strcmp (arg, "d64"))
2193 {
2194 rs6000_darwin64_abi = 1;
2195 warning (0, "Using darwin64 ABI");
2196 }
2197 else if (! strcmp (arg, "d32"))
2198 {
2199 rs6000_darwin64_abi = 0;
2200 warning (0, "Using old darwin ABI");
2201 }
2202
602ea4d3
JJ
2203 else if (! strcmp (arg, "ibmlongdouble"))
2204 {
d3603e8c 2205 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2206 rs6000_ieeequad = 0;
2207 warning (0, "Using IBM extended precision long double");
2208 }
2209 else if (! strcmp (arg, "ieeelongdouble"))
2210 {
d3603e8c 2211 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2212 rs6000_ieeequad = 1;
2213 warning (0, "Using IEEE extended precision long double");
2214 }
2215
78f5898b
AH
2216 else
2217 {
2218 error ("unknown ABI specified: '%s'", arg);
2219 return false;
2220 }
2221 break;
2222
2223 case OPT_mcpu_:
2224 rs6000_select[1].string = arg;
2225 break;
2226
2227 case OPT_mtune_:
2228 rs6000_select[2].string = arg;
2229 break;
2230
2231 case OPT_mtraceback_:
2232 rs6000_traceback_name = arg;
2233 break;
2234
2235 case OPT_mfloat_gprs_:
2236 rs6000_explicit_options.float_gprs = true;
2237 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2238 rs6000_float_gprs = 1;
2239 else if (! strcmp (arg, "double"))
2240 rs6000_float_gprs = 2;
2241 else if (! strcmp (arg, "no"))
2242 rs6000_float_gprs = 0;
2243 else
2244 {
2245 error ("invalid option for -mfloat-gprs: '%s'", arg);
2246 return false;
2247 }
2248 break;
2249
2250 case OPT_mlong_double_:
2251 rs6000_explicit_options.long_double = true;
2252 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2253 if (value != 64 && value != 128)
2254 {
2255 error ("Unknown switch -mlong-double-%s", arg);
2256 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2257 return false;
2258 }
2259 else
2260 rs6000_long_double_type_size = value;
2261 break;
2262
2263 case OPT_msched_costly_dep_:
2264 rs6000_sched_costly_dep_str = arg;
2265 break;
2266
2267 case OPT_malign_:
2268 rs6000_explicit_options.alignment = true;
2269 if (! strcmp (arg, "power"))
2270 {
2271 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2272 some C library functions, so warn about it. The flag may be
2273 useful for performance studies from time to time though, so
2274 don't disable it entirely. */
2275 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2276 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2277 " it is incompatible with the installed C and C++ libraries");
2278 rs6000_alignment_flags = MASK_ALIGN_POWER;
2279 }
2280 else if (! strcmp (arg, "natural"))
2281 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2282 else
2283 {
2284 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2285 return false;
2286 }
2287 break;
2288 }
2289 return true;
2290}
3cfa4909
MM
2291\f
2292/* Do anything needed at the start of the asm file. */
2293
1bc7c5b6 2294static void
863d938c 2295rs6000_file_start (void)
3cfa4909 2296{
c4d38ccb 2297 size_t i;
3cfa4909 2298 char buffer[80];
d330fd93 2299 const char *start = buffer;
3cfa4909 2300 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2301 const char *default_cpu = TARGET_CPU_DEFAULT;
2302 FILE *file = asm_out_file;
2303
2304 default_file_start ();
2305
2306#ifdef TARGET_BI_ARCH
2307 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2308 default_cpu = 0;
2309#endif
3cfa4909
MM
2310
2311 if (flag_verbose_asm)
2312 {
2313 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2314 rs6000_select[0].string = default_cpu;
2315
b6a1cbae 2316 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2317 {
2318 ptr = &rs6000_select[i];
2319 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2320 {
2321 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2322 start = "";
2323 }
2324 }
2325
9c6b4ed9 2326 if (PPC405_ERRATUM77)
b0bfee6e 2327 {
9c6b4ed9 2328 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2329 start = "";
2330 }
b0bfee6e 2331
b91da81f 2332#ifdef USING_ELFOS_H
3cfa4909
MM
2333 switch (rs6000_sdata)
2334 {
2335 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2336 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2337 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2338 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2339 }
2340
2341 if (rs6000_sdata && g_switch_value)
2342 {
307b599c
MK
2343 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2344 g_switch_value);
3cfa4909
MM
2345 start = "";
2346 }
2347#endif
2348
2349 if (*start == '\0')
949ea356 2350 putc ('\n', file);
3cfa4909 2351 }
b723e82f 2352
e51917ae
JM
2353#ifdef HAVE_AS_GNU_ATTRIBUTE
2354 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2355 {
2356 fprintf (file, "\t.gnu_attribute 4, %d\n",
2357 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2358 fprintf (file, "\t.gnu_attribute 8, %d\n",
2359 (TARGET_ALTIVEC_ABI ? 2
2360 : TARGET_SPE_ABI ? 3
2361 : 1));
2362 }
e51917ae
JM
2363#endif
2364
b723e82f
JJ
2365 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2366 {
d6b5193b
RS
2367 switch_to_section (toc_section);
2368 switch_to_section (text_section);
b723e82f 2369 }
3cfa4909 2370}
c4e18b1c 2371
5248c961 2372\f
a0ab749a 2373/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2374
2375int
863d938c 2376direct_return (void)
9878760c 2377{
4697a36c
MM
2378 if (reload_completed)
2379 {
2380 rs6000_stack_t *info = rs6000_stack_info ();
2381
2382 if (info->first_gp_reg_save == 32
2383 && info->first_fp_reg_save == 64
00b960c7 2384 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2385 && ! info->lr_save_p
2386 && ! info->cr_save_p
00b960c7 2387 && info->vrsave_mask == 0
c81fc13e 2388 && ! info->push_p)
4697a36c
MM
2389 return 1;
2390 }
2391
2392 return 0;
9878760c
RK
2393}
2394
4e74d8ec
MM
2395/* Return the number of instructions it takes to form a constant in an
2396 integer register. */
2397
48d72335 2398int
a2369ed3 2399num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2400{
2401 /* signed constant loadable with {cal|addi} */
547b216d 2402 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2403 return 1;
2404
4e74d8ec 2405 /* constant loadable with {cau|addis} */
547b216d
DE
2406 else if ((value & 0xffff) == 0
2407 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2408 return 1;
2409
5f59ecb7 2410#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2411 else if (TARGET_POWERPC64)
4e74d8ec 2412 {
a65c591c
DE
2413 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2414 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2415
a65c591c 2416 if (high == 0 || high == -1)
4e74d8ec
MM
2417 return 2;
2418
a65c591c 2419 high >>= 1;
4e74d8ec 2420
a65c591c 2421 if (low == 0)
4e74d8ec 2422 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2423 else
2424 return (num_insns_constant_wide (high)
e396202a 2425 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2426 }
2427#endif
2428
2429 else
2430 return 2;
2431}
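/* Worked example (illustrative, not part of the original source):
     0x00001234 -> 1 insn (addi/li, fits a signed 16-bit immediate)
     0x12340000 -> 1 insn (addis/lis, low 16 bits are zero)
     0x12345678 -> 2 insns (addis followed by ori)
   A value needing both 32-bit halves costs the sum of the two halves plus
   one more instruction for the shift that joins them.  */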
2432
2433int
a2369ed3 2434num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2435{
37409796 2436 HOST_WIDE_INT low, high;
bb8df8a6 2437
37409796 2438 switch (GET_CODE (op))
0d30d435 2439 {
37409796 2440 case CONST_INT:
0d30d435 2441#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2442 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2443 && mask64_operand (op, mode))
c4ad648e 2444 return 2;
0d30d435
DE
2445 else
2446#endif
2447 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2448
37409796
NS
2449 case CONST_DOUBLE:
2450 if (mode == SFmode)
2451 {
2452 long l;
2453 REAL_VALUE_TYPE rv;
bb8df8a6 2454
37409796
NS
2455 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2456 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2457 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2458 }
a260abc9 2459
37409796
NS
2460 if (mode == VOIDmode || mode == DImode)
2461 {
2462 high = CONST_DOUBLE_HIGH (op);
2463 low = CONST_DOUBLE_LOW (op);
2464 }
2465 else
2466 {
2467 long l[2];
2468 REAL_VALUE_TYPE rv;
bb8df8a6 2469
37409796 2470 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2471 if (DECIMAL_FLOAT_MODE_P (mode))
2472 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2473 else
2474 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2475 high = l[WORDS_BIG_ENDIAN == 0];
2476 low = l[WORDS_BIG_ENDIAN != 0];
2477 }
47ad8c61 2478
37409796
NS
2479 if (TARGET_32BIT)
2480 return (num_insns_constant_wide (low)
2481 + num_insns_constant_wide (high));
2482 else
2483 {
2484 if ((high == 0 && low >= 0)
2485 || (high == -1 && low < 0))
2486 return num_insns_constant_wide (low);
bb8df8a6 2487
1990cd79 2488 else if (mask64_operand (op, mode))
37409796 2489 return 2;
bb8df8a6 2490
37409796
NS
2491 else if (low == 0)
2492 return num_insns_constant_wide (high) + 1;
bb8df8a6 2493
37409796
NS
2494 else
2495 return (num_insns_constant_wide (high)
2496 + num_insns_constant_wide (low) + 1);
2497 }
bb8df8a6 2498
37409796
NS
2499 default:
2500 gcc_unreachable ();
4e74d8ec 2501 }
4e74d8ec
MM
2502}
2503
0972012c
RS
2504/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2505 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2506 corresponding element of the vector, but for V4SFmode and V2SFmode,
2507 the corresponding "float" is interpreted as an SImode integer. */
2508
2509static HOST_WIDE_INT
2510const_vector_elt_as_int (rtx op, unsigned int elt)
2511{
2512 rtx tmp = CONST_VECTOR_ELT (op, elt);
2513 if (GET_MODE (op) == V4SFmode
2514 || GET_MODE (op) == V2SFmode)
2515 tmp = gen_lowpart (SImode, tmp);
2516 return INTVAL (tmp);
2517}
452a7d36 2518
77ccdfed 2519/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2520 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2521 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2522 all items are set to the same value and contain COPIES replicas of the
 2523 vsplt's operand; if STEP > 1, one in every STEP elements is set to the vsplt's
2524 operand and the others are set to the value of the operand's msb. */
2525
2526static bool
2527vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2528{
66180ff3
PB
2529 enum machine_mode mode = GET_MODE (op);
2530 enum machine_mode inner = GET_MODE_INNER (mode);
2531
2532 unsigned i;
2533 unsigned nunits = GET_MODE_NUNITS (mode);
2534 unsigned bitsize = GET_MODE_BITSIZE (inner);
2535 unsigned mask = GET_MODE_MASK (inner);
2536
0972012c 2537 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2538 HOST_WIDE_INT splat_val = val;
2539 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2540
2541 /* Construct the value to be splatted, if possible. If not, return 0. */
2542 for (i = 2; i <= copies; i *= 2)
452a7d36 2543 {
66180ff3
PB
2544 HOST_WIDE_INT small_val;
2545 bitsize /= 2;
2546 small_val = splat_val >> bitsize;
2547 mask >>= bitsize;
2548 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2549 return false;
2550 splat_val = small_val;
2551 }
c4ad648e 2552
66180ff3
PB
2553 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2554 if (EASY_VECTOR_15 (splat_val))
2555 ;
2556
2557 /* Also check if we can splat, and then add the result to itself. Do so if
 2558 the value is positive, or if the splat instruction is using OP's mode;
2559 for splat_val < 0, the splat and the add should use the same mode. */
2560 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2561 && (splat_val >= 0 || (step == 1 && copies == 1)))
2562 ;
2563
2564 else
2565 return false;
2566
2567 /* Check if VAL is present in every STEP-th element, and the
2568 other elements are filled with its most significant bit. */
2569 for (i = 0; i < nunits - 1; ++i)
2570 {
2571 HOST_WIDE_INT desired_val;
2572 if (((i + 1) & (step - 1)) == 0)
2573 desired_val = val;
2574 else
2575 desired_val = msb_val;
2576
0972012c 2577 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2578 return false;
452a7d36 2579 }
66180ff3
PB
2580
2581 return true;
452a7d36
HP
2582}
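/* Worked example (illustrative, not part of the original source): the
   V8HImode vector { 5, 5, 5, 5, 5, 5, 5, 5 } matches with STEP == 1 and
   COPIES == 1 and is generated as "vspltish vD,5", while the V4SImode
   vector whose every element is 0x00050005 matches with COPIES == 2,
   because each 32-bit element holds two replicas of the 16-bit splat
   value 5 and so can also be generated with vspltish.  */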
2583
69ef87e2 2584
77ccdfed 2585/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2586 with a vspltisb, vspltish or vspltisw. */
2587
2588bool
2589easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2590{
66180ff3 2591 unsigned step, copies;
d744e06e 2592
66180ff3
PB
2593 if (mode == VOIDmode)
2594 mode = GET_MODE (op);
2595 else if (mode != GET_MODE (op))
2596 return false;
d744e06e 2597
66180ff3
PB
2598 /* Start with a vspltisw. */
2599 step = GET_MODE_NUNITS (mode) / 4;
2600 copies = 1;
2601
2602 if (vspltis_constant (op, step, copies))
2603 return true;
2604
2605 /* Then try with a vspltish. */
2606 if (step == 1)
2607 copies <<= 1;
2608 else
2609 step >>= 1;
2610
2611 if (vspltis_constant (op, step, copies))
2612 return true;
2613
2614 /* And finally a vspltisb. */
2615 if (step == 1)
2616 copies <<= 1;
2617 else
2618 step >>= 1;
2619
2620 if (vspltis_constant (op, step, copies))
2621 return true;
2622
2623 return false;
d744e06e
AH
2624}
2625
66180ff3
PB
2626/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2627 result is OP. Abort if it is not possible. */
d744e06e 2628
f676971a 2629rtx
66180ff3 2630gen_easy_altivec_constant (rtx op)
452a7d36 2631{
66180ff3
PB
2632 enum machine_mode mode = GET_MODE (op);
2633 int nunits = GET_MODE_NUNITS (mode);
2634 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2635 unsigned step = nunits / 4;
2636 unsigned copies = 1;
2637
2638 /* Start with a vspltisw. */
2639 if (vspltis_constant (op, step, copies))
2640 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2641
2642 /* Then try with a vspltish. */
2643 if (step == 1)
2644 copies <<= 1;
2645 else
2646 step >>= 1;
2647
2648 if (vspltis_constant (op, step, copies))
2649 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2650
2651 /* And finally a vspltisb. */
2652 if (step == 1)
2653 copies <<= 1;
2654 else
2655 step >>= 1;
2656
2657 if (vspltis_constant (op, step, copies))
2658 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2659
2660 gcc_unreachable ();
d744e06e
AH
2661}
2662
2663const char *
a2369ed3 2664output_vec_const_move (rtx *operands)
d744e06e
AH
2665{
2666 int cst, cst2;
2667 enum machine_mode mode;
2668 rtx dest, vec;
2669
2670 dest = operands[0];
2671 vec = operands[1];
d744e06e 2672 mode = GET_MODE (dest);
69ef87e2 2673
d744e06e
AH
2674 if (TARGET_ALTIVEC)
2675 {
66180ff3 2676 rtx splat_vec;
d744e06e
AH
2677 if (zero_constant (vec, mode))
2678 return "vxor %0,%0,%0";
37409796 2679
66180ff3
PB
2680 splat_vec = gen_easy_altivec_constant (vec);
2681 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2682 operands[1] = XEXP (splat_vec, 0);
2683 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2684 return "#";
bb8df8a6 2685
66180ff3 2686 switch (GET_MODE (splat_vec))
98ef3137 2687 {
37409796 2688 case V4SImode:
66180ff3 2689 return "vspltisw %0,%1";
c4ad648e 2690
37409796 2691 case V8HImode:
66180ff3 2692 return "vspltish %0,%1";
c4ad648e 2693
37409796 2694 case V16QImode:
66180ff3 2695 return "vspltisb %0,%1";
bb8df8a6 2696
37409796
NS
2697 default:
2698 gcc_unreachable ();
98ef3137 2699 }
69ef87e2
AH
2700 }
2701
37409796 2702 gcc_assert (TARGET_SPE);
bb8df8a6 2703
37409796
NS
2704 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2705 pattern of V1DI, V4HI, and V2SF.
2706
2707 FIXME: We should probably return # and add post reload
2708 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2709 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2710 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2711 operands[1] = CONST_VECTOR_ELT (vec, 0);
2712 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2713 if (cst == cst2)
2714 return "li %0,%1\n\tevmergelo %0,%0,%0";
2715 else
2716 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2717}
2718
f5027409
RE
 2719/* Initialize the paired-single vector TARGET to VALS. */
2720
2721void
2722paired_expand_vector_init (rtx target, rtx vals)
2723{
2724 enum machine_mode mode = GET_MODE (target);
2725 int n_elts = GET_MODE_NUNITS (mode);
2726 int n_var = 0;
2727 rtx x, new, tmp, constant_op, op1, op2;
2728 int i;
2729
2730 for (i = 0; i < n_elts; ++i)
2731 {
2732 x = XVECEXP (vals, 0, i);
2733 if (!CONSTANT_P (x))
2734 ++n_var;
2735 }
2736 if (n_var == 0)
2737 {
2738 /* Load from constant pool. */
2739 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2740 return;
2741 }
2742
2743 if (n_var == 2)
2744 {
2745 /* The vector is initialized only with non-constants. */
2746 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2747 XVECEXP (vals, 0, 1));
2748
2749 emit_move_insn (target, new);
2750 return;
2751 }
2752
2753 /* One field is non-constant and the other one is a constant. Load the
2754 constant from the constant pool and use ps_merge instruction to
2755 construct the whole vector. */
2756 op1 = XVECEXP (vals, 0, 0);
2757 op2 = XVECEXP (vals, 0, 1);
2758
2759 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2760
2761 tmp = gen_reg_rtx (GET_MODE (constant_op));
2762 emit_move_insn (tmp, constant_op);
2763
2764 if (CONSTANT_P (op1))
2765 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2766 else
2767 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2768
2769 emit_move_insn (target, new);
2770}
2771
7a4eca66
DE
2772/* Initialize vector TARGET to VALS. */
2773
2774void
2775rs6000_expand_vector_init (rtx target, rtx vals)
2776{
2777 enum machine_mode mode = GET_MODE (target);
2778 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2779 int n_elts = GET_MODE_NUNITS (mode);
2780 int n_var = 0, one_var = -1;
2781 bool all_same = true, all_const_zero = true;
2782 rtx x, mem;
2783 int i;
2784
2785 for (i = 0; i < n_elts; ++i)
2786 {
2787 x = XVECEXP (vals, 0, i);
2788 if (!CONSTANT_P (x))
2789 ++n_var, one_var = i;
2790 else if (x != CONST0_RTX (inner_mode))
2791 all_const_zero = false;
2792
2793 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2794 all_same = false;
2795 }
2796
2797 if (n_var == 0)
2798 {
2799 if (mode != V4SFmode && all_const_zero)
2800 {
2801 /* Zero register. */
2802 emit_insn (gen_rtx_SET (VOIDmode, target,
2803 gen_rtx_XOR (mode, target, target)));
2804 return;
2805 }
66180ff3 2806 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2807 {
2808 /* Splat immediate. */
66180ff3 2809 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2810 return;
2811 }
2812 else if (all_same)
2813 ; /* Splat vector element. */
2814 else
2815 {
2816 /* Load from constant pool. */
2817 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2818 return;
2819 }
2820 }
2821
2822 /* Store value to stack temp. Load vector element. Splat. */
2823 if (all_same)
2824 {
2825 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2826 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2827 XVECEXP (vals, 0, 0));
2828 x = gen_rtx_UNSPEC (VOIDmode,
2829 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2830 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2831 gen_rtvec (2,
2832 gen_rtx_SET (VOIDmode,
2833 target, mem),
2834 x)));
2835 x = gen_rtx_VEC_SELECT (inner_mode, target,
2836 gen_rtx_PARALLEL (VOIDmode,
2837 gen_rtvec (1, const0_rtx)));
2838 emit_insn (gen_rtx_SET (VOIDmode, target,
2839 gen_rtx_VEC_DUPLICATE (mode, x)));
2840 return;
2841 }
2842
2843 /* One field is non-constant. Load constant then overwrite
2844 varying field. */
2845 if (n_var == 1)
2846 {
2847 rtx copy = copy_rtx (vals);
2848
57b51d4d 2849 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2850 varying element. */
2851 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2852 rs6000_expand_vector_init (target, copy);
2853
2854 /* Insert variable. */
2855 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2856 return;
2857 }
2858
2859 /* Construct the vector in memory one field at a time
2860 and load the whole vector. */
2861 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2862 for (i = 0; i < n_elts; i++)
2863 emit_move_insn (adjust_address_nv (mem, inner_mode,
2864 i * GET_MODE_SIZE (inner_mode)),
2865 XVECEXP (vals, 0, i));
2866 emit_move_insn (target, mem);
2867}
2868
2869/* Set field ELT of TARGET to VAL. */
2870
2871void
2872rs6000_expand_vector_set (rtx target, rtx val, int elt)
2873{
2874 enum machine_mode mode = GET_MODE (target);
2875 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2876 rtx reg = gen_reg_rtx (mode);
2877 rtx mask, mem, x;
2878 int width = GET_MODE_SIZE (inner_mode);
2879 int i;
2880
2881 /* Load single variable value. */
2882 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2883 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2884 x = gen_rtx_UNSPEC (VOIDmode,
2885 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2886 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2887 gen_rtvec (2,
2888 gen_rtx_SET (VOIDmode,
2889 reg, mem),
2890 x)));
2891
2892 /* Linear sequence. */
2893 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2894 for (i = 0; i < 16; ++i)
2895 XVECEXP (mask, 0, i) = GEN_INT (i);
2896
2897 /* Set permute mask to insert element into target. */
2898 for (i = 0; i < width; ++i)
2899 XVECEXP (mask, 0, elt*width + i)
2900 = GEN_INT (i + 0x10);
2901 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2902 x = gen_rtx_UNSPEC (mode,
2903 gen_rtvec (3, target, reg,
2904 force_reg (V16QImode, x)),
2905 UNSPEC_VPERM);
2906 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2907}
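/* Illustrative note (not part of the original source): for a V4SImode
   TARGET and ELT == 2, the permute mask built above is
     { 0, 1, 2, 3, 4, 5, 6, 7, 0x10, 0x11, 0x12, 0x13, 12, 13, 14, 15 }
   so bytes 8..11 of the result are taken from REG, the register holding
   the newly loaded value, and the remaining bytes are taken unchanged
   from TARGET.  */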
2908
2909/* Extract field ELT from VEC into TARGET. */
2910
2911void
2912rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2913{
2914 enum machine_mode mode = GET_MODE (vec);
2915 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2916 rtx mem, x;
2917
2918 /* Allocate mode-sized buffer. */
2919 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2920
2921 /* Add offset to field within buffer matching vector element. */
2922 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2923
2924 /* Store single field into mode-sized buffer. */
2925 x = gen_rtx_UNSPEC (VOIDmode,
2926 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2927 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2928 gen_rtvec (2,
2929 gen_rtx_SET (VOIDmode,
2930 mem, vec),
2931 x)));
2932 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2933}
2934
0ba1b2ff
AM
2935/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2936 implement ANDing by the mask IN. */
2937void
a2369ed3 2938build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2939{
2940#if HOST_BITS_PER_WIDE_INT >= 64
2941 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2942 int shift;
2943
37409796 2944 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2945
2946 c = INTVAL (in);
2947 if (c & 1)
2948 {
2949 /* Assume c initially something like 0x00fff000000fffff. The idea
2950 is to rotate the word so that the middle ^^^^^^ group of zeros
2951 is at the MS end and can be cleared with an rldicl mask. We then
2952 rotate back and clear off the MS ^^ group of zeros with a
2953 second rldicl. */
2954 c = ~c; /* c == 0xff000ffffff00000 */
2955 lsb = c & -c; /* lsb == 0x0000000000100000 */
2956 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2957 c = ~c; /* c == 0x00fff000000fffff */
2958 c &= -lsb; /* c == 0x00fff00000000000 */
2959 lsb = c & -c; /* lsb == 0x0000100000000000 */
2960 c = ~c; /* c == 0xff000fffffffffff */
2961 c &= -lsb; /* c == 0xff00000000000000 */
2962 shift = 0;
2963 while ((lsb >>= 1) != 0)
2964 shift++; /* shift == 44 on exit from loop */
2965 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2966 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2967 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2968 }
2969 else
0ba1b2ff
AM
2970 {
2971 /* Assume c initially something like 0xff000f0000000000. The idea
2972 is to rotate the word so that the ^^^ middle group of zeros
2973 is at the LS end and can be cleared with an rldicr mask. We then
2974 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2975 a second rldicr. */
2976 lsb = c & -c; /* lsb == 0x0000010000000000 */
2977 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2978 c = ~c; /* c == 0x00fff0ffffffffff */
2979 c &= -lsb; /* c == 0x00fff00000000000 */
2980 lsb = c & -c; /* lsb == 0x0000100000000000 */
2981 c = ~c; /* c == 0xff000fffffffffff */
2982 c &= -lsb; /* c == 0xff00000000000000 */
2983 shift = 0;
2984 while ((lsb >>= 1) != 0)
2985 shift++; /* shift == 44 on exit from loop */
2986 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2987 m1 >>= shift; /* m1 == 0x0000000000000fff */
2988 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2989 }
2990
2991 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2992 masks will be all 1's. We are guaranteed more than one transition. */
2993 out[0] = GEN_INT (64 - shift);
2994 out[1] = GEN_INT (m1);
2995 out[2] = GEN_INT (shift);
2996 out[3] = GEN_INT (m2);
2997#else
045572c7
GK
2998 (void)in;
2999 (void)out;
37409796 3000 gcc_unreachable ();
0ba1b2ff 3001#endif
a260abc9
DE
3002}
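/* Illustrative note (not part of the original source): the caller turns
   these four outputs into two rotate-and-mask instructions: rotate left
   by OUT[0] and AND with OUT[1], then rotate left by OUT[2] and AND with
   OUT[3].  The two rotate amounts sum to 64 and therefore cancel, so the
   combined effect is simply an AND with the original constant IN.  */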
3003
54b695e7 3004/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3005
3006bool
54b695e7
AH
3007invalid_e500_subreg (rtx op, enum machine_mode mode)
3008{
61c76239
JM
3009 if (TARGET_E500_DOUBLE)
3010 {
17caeff2
JM
3011 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3012 subreg:TI and reg:TF. */
61c76239 3013 if (GET_CODE (op) == SUBREG
17caeff2 3014 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 3015 && REG_P (SUBREG_REG (op))
17caeff2
JM
3016 && (GET_MODE (SUBREG_REG (op)) == DFmode
3017 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3018 return true;
3019
17caeff2
JM
3020 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3021 reg:TI. */
61c76239 3022 if (GET_CODE (op) == SUBREG
17caeff2 3023 && (mode == DFmode || mode == TFmode)
61c76239 3024 && REG_P (SUBREG_REG (op))
17caeff2
JM
3025 && (GET_MODE (SUBREG_REG (op)) == DImode
3026 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
3027 return true;
3028 }
54b695e7 3029
61c76239
JM
3030 if (TARGET_SPE
3031 && GET_CODE (op) == SUBREG
3032 && mode == SImode
54b695e7 3033 && REG_P (SUBREG_REG (op))
14502dad 3034 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3035 return true;
3036
3037 return false;
3038}
3039
58182de3 3040/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3041 field is an FP double while the FP fields remain word aligned. */
3042
19d66194 3043unsigned int
fa5b0972
AM
3044rs6000_special_round_type_align (tree type, unsigned int computed,
3045 unsigned int specified)
95727fb8 3046{
fa5b0972 3047 unsigned int align = MAX (computed, specified);
95727fb8 3048 tree field = TYPE_FIELDS (type);
95727fb8 3049
bb8df8a6 3050 /* Skip all non-field decls. */
85962ac8 3051 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3052 field = TREE_CHAIN (field);
3053
fa5b0972
AM
3054 if (field != NULL && field != type)
3055 {
3056 type = TREE_TYPE (field);
3057 while (TREE_CODE (type) == ARRAY_TYPE)
3058 type = TREE_TYPE (type);
3059
3060 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3061 align = MAX (align, 64);
3062 }
95727fb8 3063
fa5b0972 3064 return align;
95727fb8
AP
3065}
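/* Worked example (illustrative, not part of the original source): on AIX
     struct s { double d; int i; };
   is rounded up to 64-bit alignment here because its first field has
   DFmode, even though the double itself remains word aligned inside the
   record.  */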
3066
58182de3
GK
3067/* Darwin increases record alignment to the natural alignment of
3068 the first field. */
3069
3070unsigned int
3071darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3072 unsigned int specified)
3073{
3074 unsigned int align = MAX (computed, specified);
3075
3076 if (TYPE_PACKED (type))
3077 return align;
3078
3079 /* Find the first field, looking down into aggregates. */
3080 do {
3081 tree field = TYPE_FIELDS (type);
 3082 /* Skip all non-field decls. */
3083 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3084 field = TREE_CHAIN (field);
3085 if (! field)
3086 break;
3087 type = TREE_TYPE (field);
3088 while (TREE_CODE (type) == ARRAY_TYPE)
3089 type = TREE_TYPE (type);
3090 } while (AGGREGATE_TYPE_P (type));
3091
3092 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3093 align = MAX (align, TYPE_ALIGN (type));
3094
3095 return align;
3096}
3097
a4f6c312 3098/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3099
3100int
f676971a 3101small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3102 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3103{
38c1f2d7 3104#if TARGET_ELF
5f59ecb7 3105 rtx sym_ref;
7509c759 3106
d9407988 3107 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3108 return 0;
a54d04b7 3109
f607bc57 3110 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3111 return 0;
3112
88228c4b
MM
3113 if (GET_CODE (op) == SYMBOL_REF)
3114 sym_ref = op;
3115
3116 else if (GET_CODE (op) != CONST
3117 || GET_CODE (XEXP (op, 0)) != PLUS
3118 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3119 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3120 return 0;
3121
88228c4b 3122 else
dbf55e53
MM
3123 {
3124 rtx sum = XEXP (op, 0);
3125 HOST_WIDE_INT summand;
3126
3127 /* We have to be careful here, because it is the referenced address
c4ad648e 3128 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3129 summand = INTVAL (XEXP (sum, 1));
307b599c 3130 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3131 return 0;
dbf55e53
MM
3132
3133 sym_ref = XEXP (sum, 0);
3134 }
88228c4b 3135
20bfcd69 3136 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3137#else
3138 return 0;
3139#endif
7509c759 3140}
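/* Illustrative example: with small data enabled on a V.4/eabi target
   (e.g. -msdata=sysv) and the default -G limit, an operand such as

     (const (plus (symbol_ref "small_var") (const_int 4)))

   is accepted provided SYMBOL_REF_SMALL_P holds for "small_var" and
   the addend does not exceed g_switch_value, so that the referenced
   address stays within reach of _SDA_BASE_.  */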
46c07df8 3141
3a1f863f 3142/* Return true if either operand is a general purpose register. */
46c07df8 3143
3a1f863f
DE
3144bool
3145gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3146{
3a1f863f
DE
3147 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3148 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3149}
3150
9ebbca7d 3151\f
4d588c14
RH
3152/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3153
f676971a
EC
3154static int
3155constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3156{
9390387d 3157 switch (GET_CODE (op))
9ebbca7d
GK
3158 {
3159 case SYMBOL_REF:
c4501e62
JJ
3160 if (RS6000_SYMBOL_REF_TLS_P (op))
3161 return 0;
3162 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3163 {
3164 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3165 {
3166 *have_sym = 1;
3167 return 1;
3168 }
3169 else
3170 return 0;
3171 }
3172 else if (! strcmp (XSTR (op, 0), toc_label_name))
3173 {
3174 *have_toc = 1;
3175 return 1;
3176 }
3177 else
3178 return 0;
9ebbca7d
GK
3179 case PLUS:
3180 case MINUS:
c1f11548
DE
3181 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3182 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3183 case CONST:
a4f6c312 3184 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3185 case CONST_INT:
a4f6c312 3186 return 1;
9ebbca7d 3187 default:
a4f6c312 3188 return 0;
9ebbca7d
GK
3189 }
3190}
3191
4d588c14 3192static bool
a2369ed3 3193constant_pool_expr_p (rtx op)
9ebbca7d
GK
3194{
3195 int have_sym = 0;
3196 int have_toc = 0;
3197 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3198}
3199
48d72335 3200bool
a2369ed3 3201toc_relative_expr_p (rtx op)
9ebbca7d 3202{
4d588c14
RH
3203 int have_sym = 0;
3204 int have_toc = 0;
3205 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3206}
3207
4d588c14 3208bool
a2369ed3 3209legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3210{
3211 return (TARGET_TOC
3212 && GET_CODE (x) == PLUS
3213 && GET_CODE (XEXP (x, 0)) == REG
3214 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3215 && constant_pool_expr_p (XEXP (x, 1)));
3216}
3217
d04b6e6e
EB
3218static bool
3219legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3220{
3221 return (DEFAULT_ABI == ABI_V4
3222 && !flag_pic && !TARGET_TOC
3223 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3224 && small_data_operand (x, mode));
3225}
3226
60cdabab
DE
3227/* SPE offset addressing is limited to 5-bits worth of double words. */
3228#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
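/* Illustration: x & ~0xf8 must be zero, so only bits 3..7 of the
   offset may be set.  That accepts exactly the doubleword multiples
   0, 8, 16, ..., 248 and rejects negative, unaligned or larger
   offsets, e.g. SPE_CONST_OFFSET_OK (240) is true while
   SPE_CONST_OFFSET_OK (244) and SPE_CONST_OFFSET_OK (256) are not.  */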
3229
76d2b81d
DJ
3230bool
3231rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3232{
3233 unsigned HOST_WIDE_INT offset, extra;
3234
3235 if (GET_CODE (x) != PLUS)
3236 return false;
3237 if (GET_CODE (XEXP (x, 0)) != REG)
3238 return false;
3239 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3240 return false;
60cdabab
DE
3241 if (legitimate_constant_pool_address_p (x))
3242 return true;
4d588c14
RH
3243 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3244 return false;
3245
3246 offset = INTVAL (XEXP (x, 1));
3247 extra = 0;
3248 switch (mode)
3249 {
3250 case V16QImode:
3251 case V8HImode:
3252 case V4SFmode:
3253 case V4SImode:
7a4eca66
DE
3254 /* AltiVec vector modes. Only reg+reg addressing is valid and
3255 constant offset zero should not occur due to canonicalization.
3256 Allow any offset when not strict before reload. */
3257 return !strict;
4d588c14
RH
3258
3259 case V4HImode:
3260 case V2SImode:
3261 case V1DImode:
3262 case V2SFmode:
d42a3bae
RE
3263 /* Paired vector modes. Only reg+reg addressing is valid and
3264 constant offset zero should not occur due to canonicalization.
3265 Allow any offset when not strict before reload. */
3266 if (TARGET_PAIRED_FLOAT)
3267 return !strict;
4d588c14
RH
3268 /* SPE vector modes. */
3269 return SPE_CONST_OFFSET_OK (offset);
3270
3271 case DFmode:
7393f7f8 3272 case DDmode:
4d4cbc0e
AH
3273 if (TARGET_E500_DOUBLE)
3274 return SPE_CONST_OFFSET_OK (offset);
3275
4d588c14 3276 case DImode:
54b695e7
AH
3277 /* On e500v2, we may have:
3278
3279 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3280
3281 Which gets addressed with evldd instructions. */
3282 if (TARGET_E500_DOUBLE)
3283 return SPE_CONST_OFFSET_OK (offset);
3284
7393f7f8 3285 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3286 extra = 4;
3287 else if (offset & 3)
3288 return false;
3289 break;
3290
3291 case TFmode:
17caeff2
JM
3292 if (TARGET_E500_DOUBLE)
3293 return (SPE_CONST_OFFSET_OK (offset)
3294 && SPE_CONST_OFFSET_OK (offset + 8));
3295
4d588c14 3296 case TImode:
7393f7f8
BE
3297 case TDmode:
3298 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3299 extra = 12;
3300 else if (offset & 3)
3301 return false;
3302 else
3303 extra = 8;
3304 break;
3305
3306 default:
3307 break;
3308 }
3309
b1917422
AM
3310 offset += 0x8000;
3311 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3312}
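/* Worked example (illustrative): for DImode without TARGET_POWERPC64,
   'extra' is 4, so (plus (reg) (const_int 32764)) is rejected:
   32764 + 0x8000 = 0xfffc passes the first test, but 0xfffc + 4 does
   not, because the second word at offset 32768 would fall outside the
   16-bit signed displacement range.  An offset of 32760 is accepted.  */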
3313
6fb5fa3c 3314bool
a2369ed3 3315legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3316{
3317 rtx op0, op1;
3318
3319 if (GET_CODE (x) != PLUS)
3320 return false;
850e8d3d 3321
4d588c14
RH
3322 op0 = XEXP (x, 0);
3323 op1 = XEXP (x, 1);
3324
bf00cc0f 3325 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3326 replaced with proper base and index regs. */
3327 if (!strict
3328 && reload_in_progress
3329 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3330 && REG_P (op1))
3331 return true;
3332
3333 return (REG_P (op0) && REG_P (op1)
3334 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3335 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3336 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3337 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3338}
3339
48d72335 3340inline bool
a2369ed3 3341legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3342{
3343 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3344}
3345
48d72335 3346bool
4c81e946
FJ
3347macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3348{
c4ad648e 3349 if (!TARGET_MACHO || !flag_pic
9390387d 3350 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3351 return false;
3352 x = XEXP (x, 0);
4c81e946
FJ
3353
3354 if (GET_CODE (x) != LO_SUM)
3355 return false;
3356 if (GET_CODE (XEXP (x, 0)) != REG)
3357 return false;
3358 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3359 return false;
3360 x = XEXP (x, 1);
3361
3362 return CONSTANT_P (x);
3363}
3364
4d588c14 3365static bool
a2369ed3 3366legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3367{
3368 if (GET_CODE (x) != LO_SUM)
3369 return false;
3370 if (GET_CODE (XEXP (x, 0)) != REG)
3371 return false;
3372 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3373 return false;
54b695e7 3374 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3375 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3376 || mode == DImode))
f82f556d 3377 return false;
4d588c14
RH
3378 x = XEXP (x, 1);
3379
8622e235 3380 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3381 {
a29077da 3382 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3383 return false;
3384 if (TARGET_TOC)
3385 return false;
3386 if (GET_MODE_NUNITS (mode) != 1)
3387 return false;
5e5f01b9 3388 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3389 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3390 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3391 return false;
3392
3393 return CONSTANT_P (x);
3394 }
3395
3396 return false;
3397}
3398
3399
9ebbca7d
GK
3400/* Try machine-dependent ways of modifying an illegitimate address
3401 to be legitimate. If we find one, return the new, valid address.
3402 This is used from only one place: `memory_address' in explow.c.
3403
a4f6c312
SS
3404 OLDX is the address as it was before break_out_memory_refs was
3405 called. In some cases it is useful to look at this to decide what
3406 needs to be done.
9ebbca7d 3407
a4f6c312 3408 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3409
a4f6c312
SS
3410 It is always safe for this function to do nothing. It exists to
3411 recognize opportunities to optimize the output.
9ebbca7d
GK
3412
3413 On RS/6000, first check for the sum of a register with a constant
3414 integer that is out of range. If so, generate code to add the
3415 constant with the low-order 16 bits masked to the register and force
3416 this result into another register (this can be done with `cau').
3417 Then generate an address of REG+(CONST&0xffff), allowing for the
3418 possibility of bit 16 being a one.
3419
3420 Then check for the sum of a register and something not constant, try to
3421 load the other things into a register and return the sum. */
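/* Worked example (illustrative): for (plus (reg) (const_int 0x1234abcd))
   the code below computes
     low_int  = ((0xabcd ^ 0x8000) - 0x8000) = -0x5433
     high_int = 0x1234abcd - low_int         = 0x12350000
   so the high part is added into the register first and the final
   address is REG + (-0x5433), which fits a 16-bit displacement.  */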
4d588c14 3422
9ebbca7d 3423rtx
a2369ed3
DJ
3424rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3425 enum machine_mode mode)
0ac081f6 3426{
c4501e62
JJ
3427 if (GET_CODE (x) == SYMBOL_REF)
3428 {
3429 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3430 if (model != 0)
3431 return rs6000_legitimize_tls_address (x, model);
3432 }
3433
f676971a 3434 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3435 && GET_CODE (XEXP (x, 0)) == REG
3436 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3437 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3438 && !(SPE_VECTOR_MODE (mode)
3439 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3440 || mode == DImode))))
f676971a 3441 {
9ebbca7d
GK
3442 HOST_WIDE_INT high_int, low_int;
3443 rtx sum;
a65c591c
DE
3444 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3445 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3446 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3447 GEN_INT (high_int)), 0);
3448 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3449 }
f676971a 3450 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3451 && GET_CODE (XEXP (x, 0)) == REG
3452 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3453 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3454 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3455 || TARGET_POWERPC64
7393f7f8
BE
3456 || (((mode != DImode && mode != DFmode && mode != DDmode)
3457 || TARGET_E500_DOUBLE)
3458 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3459 && (TARGET_POWERPC64 || mode != DImode)
3460 && mode != TImode)
3461 {
3462 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3463 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3464 }
0ac081f6
AH
3465 else if (ALTIVEC_VECTOR_MODE (mode))
3466 {
3467 rtx reg;
3468
3469 /* Make sure both operands are registers. */
3470 if (GET_CODE (x) == PLUS)
9f85ed45 3471 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3472 force_reg (Pmode, XEXP (x, 1)));
3473
3474 reg = force_reg (Pmode, x);
3475 return reg;
3476 }
4d4cbc0e 3477 else if (SPE_VECTOR_MODE (mode)
17caeff2 3478 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3479 || mode == DDmode || mode == TDmode
54b695e7 3480 || mode == DImode)))
a3170dc6 3481 {
54b695e7
AH
3482 if (mode == DImode)
3483 return NULL_RTX;
a3170dc6
AH
3484 /* We accept [reg + reg] and [reg + OFFSET]. */
3485
3486 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3487 {
3488 rtx op1 = XEXP (x, 0);
3489 rtx op2 = XEXP (x, 1);
a3170dc6 3490
c4ad648e 3491 op1 = force_reg (Pmode, op1);
a3170dc6 3492
c4ad648e
AM
3493 if (GET_CODE (op2) != REG
3494 && (GET_CODE (op2) != CONST_INT
3495 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3496 op2 = force_reg (Pmode, op2);
a3170dc6 3497
c4ad648e
AM
3498 return gen_rtx_PLUS (Pmode, op1, op2);
3499 }
a3170dc6
AH
3500
3501 return force_reg (Pmode, x);
3502 }
f1384257
AM
3503 else if (TARGET_ELF
3504 && TARGET_32BIT
3505 && TARGET_NO_TOC
3506 && ! flag_pic
9ebbca7d 3507 && GET_CODE (x) != CONST_INT
f676971a 3508 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3509 && CONSTANT_P (x)
6ac7bf2c
GK
3510 && GET_MODE_NUNITS (mode) == 1
3511 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3512 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3513 {
3514 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3515 emit_insn (gen_elf_high (reg, x));
3516 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3517 }
ee890fe2
SS
3518 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3519 && ! flag_pic
ab82a49f
AP
3520#if TARGET_MACHO
3521 && ! MACHO_DYNAMIC_NO_PIC_P
3522#endif
ee890fe2 3523 && GET_CODE (x) != CONST_INT
f676971a 3524 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3525 && CONSTANT_P (x)
f82f556d 3526 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3527 && mode != DImode
ee890fe2
SS
3528 && mode != TImode)
3529 {
3530 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3531 emit_insn (gen_macho_high (reg, x));
3532 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3533 }
f676971a 3534 else if (TARGET_TOC
4d588c14 3535 && constant_pool_expr_p (x)
a9098fd0 3536 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3537 {
3538 return create_TOC_reference (x);
3539 }
3540 else
3541 return NULL_RTX;
3542}
258bfae2 3543
fdbe66f2 3544/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3545 We need to emit DTP-relative relocations. */
3546
fdbe66f2 3547static void
c973d557
JJ
3548rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3549{
3550 switch (size)
3551 {
3552 case 4:
3553 fputs ("\t.long\t", file);
3554 break;
3555 case 8:
3556 fputs (DOUBLE_INT_ASM_OP, file);
3557 break;
3558 default:
37409796 3559 gcc_unreachable ();
c973d557
JJ
3560 }
3561 output_addr_const (file, x);
3562 fputs ("@dtprel+0x8000", file);
3563}
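/* Illustrative output: for a 4-byte entry referring to a symbol FOO
   the function above emits

       .long   FOO@dtprel+0x8000

   while 8-byte entries use DOUBLE_INT_ASM_OP instead of ".long".  */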
3564
c4501e62
JJ
3565/* Construct the SYMBOL_REF for the tls_get_addr function. */
3566
3567static GTY(()) rtx rs6000_tls_symbol;
3568static rtx
863d938c 3569rs6000_tls_get_addr (void)
c4501e62
JJ
3570{
3571 if (!rs6000_tls_symbol)
3572 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3573
3574 return rs6000_tls_symbol;
3575}
3576
3577/* Construct the SYMBOL_REF for TLS GOT references. */
3578
3579static GTY(()) rtx rs6000_got_symbol;
3580static rtx
863d938c 3581rs6000_got_sym (void)
c4501e62
JJ
3582{
3583 if (!rs6000_got_symbol)
3584 {
3585 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3586 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3587 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3588 }
c4501e62
JJ
3589
3590 return rs6000_got_symbol;
3591}
3592
3593/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3594 this (thread-local) address. */
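/* For instance (illustrative), with TLS_MODEL_LOCAL_EXEC and
   -mtls-size=16 the code below adds the @tprel offset of ADDR directly
   to the thread pointer (r13 on 64-bit, r2 on 32-bit); the IE and
   global/local-dynamic models instead go through the GOT, the dynamic
   ones via a call to __tls_get_addr.  */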
3595
3596static rtx
a2369ed3 3597rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3598{
3599 rtx dest, insn;
3600
3601 dest = gen_reg_rtx (Pmode);
3602 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3603 {
3604 rtx tlsreg;
3605
3606 if (TARGET_64BIT)
3607 {
3608 tlsreg = gen_rtx_REG (Pmode, 13);
3609 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3610 }
3611 else
3612 {
3613 tlsreg = gen_rtx_REG (Pmode, 2);
3614 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3615 }
3616 emit_insn (insn);
3617 }
3618 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3619 {
3620 rtx tlsreg, tmp;
3621
3622 tmp = gen_reg_rtx (Pmode);
3623 if (TARGET_64BIT)
3624 {
3625 tlsreg = gen_rtx_REG (Pmode, 13);
3626 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3627 }
3628 else
3629 {
3630 tlsreg = gen_rtx_REG (Pmode, 2);
3631 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3632 }
3633 emit_insn (insn);
3634 if (TARGET_64BIT)
3635 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3636 else
3637 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3638 emit_insn (insn);
3639 }
3640 else
3641 {
3642 rtx r3, got, tga, tmp1, tmp2, eqv;
3643
4fed8f8f
AM
3644 /* We currently use relocations like @got@tlsgd for tls, which
3645 means the linker will handle allocation of tls entries, placing
3646 them in the .got section. So use a pointer to the .got section,
3647 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3648 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3649 if (TARGET_64BIT)
972f427b 3650 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3651 else
3652 {
3653 if (flag_pic == 1)
3654 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3655 else
3656 {
3657 rtx gsym = rs6000_got_sym ();
3658 got = gen_reg_rtx (Pmode);
3659 if (flag_pic == 0)
3660 rs6000_emit_move (got, gsym, Pmode);
3661 else
3662 {
e65a3857 3663 rtx tmp3, mem;
c4501e62
JJ
3664 rtx first, last;
3665
c4501e62
JJ
3666 tmp1 = gen_reg_rtx (Pmode);
3667 tmp2 = gen_reg_rtx (Pmode);
3668 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3669 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3670
e65a3857
DE
3671 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3672 emit_move_insn (tmp1,
1de43f85 3673 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3674 emit_move_insn (tmp2, mem);
3675 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3676 last = emit_move_insn (got, tmp3);
bd94cb6e 3677 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3678 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3679 }
3680 }
3681 }
3682
3683 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3684 {
3685 r3 = gen_rtx_REG (Pmode, 3);
3686 if (TARGET_64BIT)
3687 insn = gen_tls_gd_64 (r3, got, addr);
3688 else
3689 insn = gen_tls_gd_32 (r3, got, addr);
3690 start_sequence ();
3691 emit_insn (insn);
3692 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3693 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3694 insn = emit_call_insn (insn);
3695 CONST_OR_PURE_CALL_P (insn) = 1;
3696 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3697 insn = get_insns ();
3698 end_sequence ();
3699 emit_libcall_block (insn, dest, r3, addr);
3700 }
3701 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3702 {
3703 r3 = gen_rtx_REG (Pmode, 3);
3704 if (TARGET_64BIT)
3705 insn = gen_tls_ld_64 (r3, got);
3706 else
3707 insn = gen_tls_ld_32 (r3, got);
3708 start_sequence ();
3709 emit_insn (insn);
3710 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3711 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3712 insn = emit_call_insn (insn);
3713 CONST_OR_PURE_CALL_P (insn) = 1;
3714 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3715 insn = get_insns ();
3716 end_sequence ();
3717 tmp1 = gen_reg_rtx (Pmode);
3718 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3719 UNSPEC_TLSLD);
3720 emit_libcall_block (insn, tmp1, r3, eqv);
3721 if (rs6000_tls_size == 16)
3722 {
3723 if (TARGET_64BIT)
3724 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3725 else
3726 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3727 }
3728 else if (rs6000_tls_size == 32)
3729 {
3730 tmp2 = gen_reg_rtx (Pmode);
3731 if (TARGET_64BIT)
3732 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3733 else
3734 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3735 emit_insn (insn);
3736 if (TARGET_64BIT)
3737 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3738 else
3739 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3740 }
3741 else
3742 {
3743 tmp2 = gen_reg_rtx (Pmode);
3744 if (TARGET_64BIT)
3745 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3746 else
3747 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3748 emit_insn (insn);
3749 insn = gen_rtx_SET (Pmode, dest,
3750 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3751 }
3752 emit_insn (insn);
3753 }
3754 else
3755 {
a7b376ee 3756 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3757 tmp2 = gen_reg_rtx (Pmode);
3758 if (TARGET_64BIT)
3759 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3760 else
3761 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3762 emit_insn (insn);
3763 if (TARGET_64BIT)
3764 insn = gen_tls_tls_64 (dest, tmp2, addr);
3765 else
3766 insn = gen_tls_tls_32 (dest, tmp2, addr);
3767 emit_insn (insn);
3768 }
3769 }
3770
3771 return dest;
3772}
3773
c4501e62
JJ
3774/* Return 1 if X contains a thread-local symbol. */
3775
3776bool
a2369ed3 3777rs6000_tls_referenced_p (rtx x)
c4501e62 3778{
cd413cab
AP
3779 if (! TARGET_HAVE_TLS)
3780 return false;
3781
c4501e62
JJ
3782 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3783}
3784
3785/* Return 1 if *X is a thread-local symbol. This is the same as
3786 rs6000_tls_symbol_ref except for the type of the unused argument. */
3787
9390387d 3788static int
a2369ed3 3789rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3790{
3791 return RS6000_SYMBOL_REF_TLS_P (*x);
3792}
3793
24ea750e
DJ
3794/* The convention appears to be to define this wherever it is used.
3795 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3796 is now used here. */
3797#ifndef REG_MODE_OK_FOR_BASE_P
3798#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3799#endif
3800
3801/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3802 replace the input X, or the original X if no replacement is called for.
3803 The output parameter *WIN is 1 if the calling macro should goto WIN,
3804 0 if it should not.
3805
3806 For RS/6000, we wish to handle large displacements off a base
3807   register by splitting the addend across an addis and the mem insn.
3808   This cuts the number of extra insns needed from 3 to 1.
3809
3810 On Darwin, we use this to generate code for floating point constants.
3811 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3812 The Darwin code is inside #if TARGET_MACHO because only then is
3813 machopic_function_base_name() defined. */
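/* Worked example (illustrative): reloading (plus (reg) (const_int 0x12348))
   below yields low = 0x2348 and high = 0x10000; the inner
   (plus (reg) (const_int 0x10000)) is pushed as a reload (a single
   addis), while the memory reference keeps the 16-bit offset 0x2348.  */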
3814rtx
f676971a 3815rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3816 int opnum, int type,
3817 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3818{
f676971a 3819 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3820 if (GET_CODE (x) == PLUS
3821 && GET_CODE (XEXP (x, 0)) == PLUS
3822 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3823 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3824 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3825 {
3826 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3827 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3828 opnum, (enum reload_type)type);
24ea750e
DJ
3829 *win = 1;
3830 return x;
3831 }
3deb2758 3832
24ea750e
DJ
3833#if TARGET_MACHO
3834 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3835 && GET_CODE (x) == LO_SUM
3836 && GET_CODE (XEXP (x, 0)) == PLUS
3837 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3838 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3839 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3840 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3841 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3842 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3843 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3844 {
3845 /* Result of previous invocation of this function on Darwin
6f317ef3 3846 floating point constant. */
24ea750e 3847 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3848 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3849 opnum, (enum reload_type)type);
24ea750e
DJ
3850 *win = 1;
3851 return x;
3852 }
3853#endif
4937d02d
DE
3854
3855 /* Force ld/std non-word aligned offset into base register by wrapping
3856 in offset 0. */
3857 if (GET_CODE (x) == PLUS
3858 && GET_CODE (XEXP (x, 0)) == REG
3859 && REGNO (XEXP (x, 0)) < 32
3860 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3861 && GET_CODE (XEXP (x, 1)) == CONST_INT
3862 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3863 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3864 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3865 && TARGET_POWERPC64)
3866 {
3867 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3868 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3869 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3870 opnum, (enum reload_type) type);
3871 *win = 1;
3872 return x;
3873 }
3874
24ea750e
DJ
3875 if (GET_CODE (x) == PLUS
3876 && GET_CODE (XEXP (x, 0)) == REG
3877 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3878 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3879 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3880 && !SPE_VECTOR_MODE (mode)
17caeff2 3881 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3882 || mode == DImode))
78c875e8 3883 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3884 {
3885 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3886 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3887 HOST_WIDE_INT high
c4ad648e 3888 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3889
3890 /* Check for 32-bit overflow. */
3891 if (high + low != val)
c4ad648e 3892 {
24ea750e
DJ
3893 *win = 0;
3894 return x;
3895 }
3896
3897 /* Reload the high part into a base reg; leave the low part
c4ad648e 3898 in the mem directly. */
24ea750e
DJ
3899
3900 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3901 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3902 GEN_INT (high)),
3903 GEN_INT (low));
24ea750e
DJ
3904
3905 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3906 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3907 opnum, (enum reload_type)type);
24ea750e
DJ
3908 *win = 1;
3909 return x;
3910 }
4937d02d 3911
24ea750e 3912 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3913 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3914 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3915#if TARGET_MACHO
3916 && DEFAULT_ABI == ABI_DARWIN
a29077da 3917 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3918#else
3919 && DEFAULT_ABI == ABI_V4
3920 && !flag_pic
3921#endif
7393f7f8 3922 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
3923 The same goes for DImode without 64-bit gprs and DFmode
3924 without fprs. */
0d8c1c97 3925 && mode != TFmode
7393f7f8 3926 && mode != TDmode
7b5d92b2
AM
3927 && (mode != DImode || TARGET_POWERPC64)
3928 && (mode != DFmode || TARGET_POWERPC64
3929 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3930 {
8308679f 3931#if TARGET_MACHO
a29077da
GK
3932 if (flag_pic)
3933 {
3934 rtx offset = gen_rtx_CONST (Pmode,
3935 gen_rtx_MINUS (Pmode, x,
11abc112 3936 machopic_function_base_sym ()));
a29077da
GK
3937 x = gen_rtx_LO_SUM (GET_MODE (x),
3938 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3939 gen_rtx_HIGH (Pmode, offset)), offset);
3940 }
3941 else
8308679f 3942#endif
a29077da 3943 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3944 gen_rtx_HIGH (Pmode, x), x);
a29077da 3945
24ea750e 3946 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3947 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3948 opnum, (enum reload_type)type);
24ea750e
DJ
3949 *win = 1;
3950 return x;
3951 }
4937d02d 3952
dec1f3aa
DE
3953 /* Reload an offset address wrapped by an AND that represents the
3954 masking of the lower bits. Strip the outer AND and let reload
3955 convert the offset address into an indirect address. */
3956 if (TARGET_ALTIVEC
3957 && ALTIVEC_VECTOR_MODE (mode)
3958 && GET_CODE (x) == AND
3959 && GET_CODE (XEXP (x, 0)) == PLUS
3960 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3961 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3962 && GET_CODE (XEXP (x, 1)) == CONST_INT
3963 && INTVAL (XEXP (x, 1)) == -16)
3964 {
3965 x = XEXP (x, 0);
3966 *win = 1;
3967 return x;
3968 }
3969
24ea750e 3970 if (TARGET_TOC
4d588c14 3971 && constant_pool_expr_p (x)
c1f11548 3972 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3973 {
194c524a 3974 x = create_TOC_reference (x);
24ea750e
DJ
3975 *win = 1;
3976 return x;
3977 }
3978 *win = 0;
3979 return x;
f676971a 3980}
24ea750e 3981
258bfae2
FS
3982/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3983 that is a valid memory address for an instruction.
3984 The MODE argument is the machine mode for the MEM expression
3985 that wants to use this address.
3986
3987   On the RS/6000, there are four valid types of address: a SYMBOL_REF that
3988 refers to a constant pool entry of an address (or the sum of it
3989 plus a constant), a short (16-bit signed) constant plus a register,
3990 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3991 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3992 we must ensure that both words are addressable or PowerPC64 with offset
3993 word aligned.
3994
3995 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
3996 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
3997 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
3998 during assembly output. */
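/* Illustrative forms accepted below (not exhaustive):
     (reg 9)                                  register indirect
     (plus (reg 9) (const_int 100))           reg + 16-bit offset
     (plus (reg 9) (reg 10))                  indexed, where allowed
     (lo_sum (reg 9) (symbol_ref "x"))        ELF / Darwin lo_sum
     (pre_inc (reg 9)), (pre_modify ...)      only with TARGET_UPDATE  */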
3999int
a2369ed3 4000rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4001{
850e8d3d
DN
4002 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4003 if (TARGET_ALTIVEC
4004 && ALTIVEC_VECTOR_MODE (mode)
4005 && GET_CODE (x) == AND
4006 && GET_CODE (XEXP (x, 1)) == CONST_INT
4007 && INTVAL (XEXP (x, 1)) == -16)
4008 x = XEXP (x, 0);
4009
c4501e62
JJ
4010 if (RS6000_SYMBOL_REF_TLS_P (x))
4011 return 0;
4d588c14 4012 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4013 return 1;
4014 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4015 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4016 && !SPE_VECTOR_MODE (mode)
429ec7dc 4017 && mode != TFmode
7393f7f8 4018 && mode != TDmode
54b695e7 4019 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
4020 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4021 || mode == DImode))
258bfae2 4022 && TARGET_UPDATE
4d588c14 4023 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4024 return 1;
d04b6e6e 4025 if (legitimate_small_data_p (mode, x))
258bfae2 4026 return 1;
4d588c14 4027 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4028 return 1;
4029 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4030 if (! reg_ok_strict
4031 && GET_CODE (x) == PLUS
4032 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4033 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4034 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4035 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4036 return 1;
76d2b81d 4037 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4038 return 1;
4039 if (mode != TImode
76d2b81d 4040 && mode != TFmode
7393f7f8 4041 && mode != TDmode
a3170dc6
AH
4042 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4043 || TARGET_POWERPC64
4d4cbc0e 4044 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 4045 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4046 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4047 return 1;
6fb5fa3c
DB
4048 if (GET_CODE (x) == PRE_MODIFY
4049 && mode != TImode
4050 && mode != TFmode
4051 && mode != TDmode
4052 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4053 || TARGET_POWERPC64
4054 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
4055 && (TARGET_POWERPC64 || mode != DImode)
4056 && !ALTIVEC_VECTOR_MODE (mode)
4057 && !SPE_VECTOR_MODE (mode)
4058 /* Restrict addressing for DI because of our SUBREG hackery. */
4059 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
4060 && TARGET_UPDATE
4061 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4062 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4063 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4064 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4065 return 1;
4d588c14 4066 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4067 return 1;
4068 return 0;
4069}
4d588c14
RH
4070
4071/* Go to LABEL if ADDR (a legitimate address expression)
4072 has an effect that depends on the machine mode it is used for.
4073
4074 On the RS/6000 this is true of all integral offsets (since AltiVec
4075   modes don't allow them) and of any pre-increment or decrement.
4076
4077 ??? Except that due to conceptual problems in offsettable_address_p
4078 we can't really report the problems of integral offsets. So leave
f676971a 4079 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4080 sub-words of a TFmode operand, which is what we had before. */
4081
4082bool
a2369ed3 4083rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4084{
4085 switch (GET_CODE (addr))
4086 {
4087 case PLUS:
4088 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4089 {
4090 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4091 return val + 12 + 0x8000 >= 0x10000;
4092 }
4093 break;
4094
4095 case LO_SUM:
4096 return true;
4097
6fb5fa3c
DB
4098 case PRE_INC:
4099 case PRE_DEC:
4100 case PRE_MODIFY:
4101 return TARGET_UPDATE;
4d588c14
RH
4102
4103 default:
4104 break;
4105 }
4106
4107 return false;
4108}
d8ecbcdb 4109
d04b6e6e
EB
4110/* More elaborate version of recog's offsettable_memref_p predicate
4111 that works around the ??? note of rs6000_mode_dependent_address.
4112 In particular it accepts
4113
4114 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4115
4116   in 32-bit mode, which the recog predicate rejects.  */
4117
4118bool
4119rs6000_offsettable_memref_p (rtx op)
4120{
4121 if (!MEM_P (op))
4122 return false;
4123
4124 /* First mimic offsettable_memref_p. */
4125 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4126 return true;
4127
4128 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4129 the latter predicate knows nothing about the mode of the memory
4130 reference and, therefore, assumes that it is the largest supported
4131 mode (TFmode). As a consequence, legitimate offsettable memory
4132 references are rejected. rs6000_legitimate_offset_address_p contains
4133 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4134 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4135}
4136
d8ecbcdb
AH
4137/* Return number of consecutive hard regs needed starting at reg REGNO
4138 to hold something of mode MODE.
4139 This is ordinarily the length in words of a value of mode MODE
4140 but can be less for certain modes in special long registers.
4141
4142 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4143 scalar instructions. The upper 32 bits are only available to the
4144 SIMD instructions.
4145
4146 POWER and PowerPC GPRs hold 32 bits worth;
4147   PowerPC64 GPRs and FPRs hold 64 bits worth.  */
4148
4149int
4150rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4151{
4152 if (FP_REGNO_P (regno))
4153 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4154
4155 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4156 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4157
4158 if (ALTIVEC_REGNO_P (regno))
4159 return
4160 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4161
8521c414
JM
4162 /* The value returned for SCmode in the E500 double case is 2 for
4163 ABI compatibility; storing an SCmode value in a single register
4164 would require function_arg and rs6000_spe_function_arg to handle
4165 SCmode so as to pass the value correctly in a pair of
4166 registers. */
4167 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4168 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4169
d8ecbcdb
AH
4170 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4171}
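/* Illustrative results of the computation above:
     DFmode (8 bytes) in an FPR              -> 1 register
     DFmode in 32-bit GPRs                   -> 2 registers
     V4SImode (16 bytes) in an AltiVec reg   -> 1 register
     TImode (16 bytes) in 64-bit GPRs        -> 2 registers  */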
2aa4498c
AH
4172
4173/* Change register usage conditional on target flags. */
4174void
4175rs6000_conditional_register_usage (void)
4176{
4177 int i;
4178
4179 /* Set MQ register fixed (already call_used) if not POWER
4180 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4181 be allocated. */
4182 if (! TARGET_POWER)
4183 fixed_regs[64] = 1;
4184
7c9ac5c0 4185 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4186 if (TARGET_64BIT)
4187 fixed_regs[13] = call_used_regs[13]
4188 = call_really_used_regs[13] = 1;
4189
4190 /* Conditionally disable FPRs. */
4191 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4192 for (i = 32; i < 64; i++)
4193 fixed_regs[i] = call_used_regs[i]
c4ad648e 4194 = call_really_used_regs[i] = 1;
2aa4498c 4195
7c9ac5c0
PH
4196 /* The TOC register is not killed across calls in a way that is
4197 visible to the compiler. */
4198 if (DEFAULT_ABI == ABI_AIX)
4199 call_really_used_regs[2] = 0;
4200
2aa4498c
AH
4201 if (DEFAULT_ABI == ABI_V4
4202 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4203 && flag_pic == 2)
4204 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4205
4206 if (DEFAULT_ABI == ABI_V4
4207 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4208 && flag_pic == 1)
4209 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4210 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4211 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4212
4213 if (DEFAULT_ABI == ABI_DARWIN
4214 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4215 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4216 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4217 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4218
b4db40bf
JJ
4219 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4220 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4221 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4222
2aa4498c
AH
4223 if (TARGET_ALTIVEC)
4224 global_regs[VSCR_REGNO] = 1;
4225
4226 if (TARGET_SPE)
4227 {
4228 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4229 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4230 registers in prologues and epilogues. We no longer use r14
4231 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4232 pool for link-compatibility with older versions of GCC. Once
4233 "old" code has died out, we can return r14 to the allocation
4234 pool. */
4235 fixed_regs[14]
4236 = call_used_regs[14]
4237 = call_really_used_regs[14] = 1;
2aa4498c
AH
4238 }
4239
4240 if (! TARGET_ALTIVEC)
4241 {
4242 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4243 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4244 call_really_used_regs[VRSAVE_REGNO] = 1;
4245 }
4246
4247 if (TARGET_ALTIVEC_ABI)
4248 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4249 call_used_regs[i] = call_really_used_regs[i] = 1;
4250}
fb4d4348 4251\f
a4f6c312
SS
4252/* Try to output insns to set DEST equal to the constant SOURCE if it
4253   can be done in less than N insns.  Do all computations in MODE.
4254   Returns the place where the output has been placed if it can be
4255   done and the insns have been emitted.  If it would take more than N
4256   insns, zero is returned and no insns are emitted.  */
2bfcf297
DB
4257
4258rtx
f676971a 4259rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4260 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4261{
af8cb5c5 4262 rtx result, insn, set;
2bfcf297
DB
4263 HOST_WIDE_INT c0, c1;
4264
37409796 4265 switch (mode)
2bfcf297 4266 {
37409796
NS
4267 case QImode:
4268 case HImode:
2bfcf297 4269 if (dest == NULL)
c4ad648e 4270 dest = gen_reg_rtx (mode);
2bfcf297
DB
4271 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4272 return dest;
bb8df8a6 4273
37409796 4274 case SImode:
b3a13419 4275 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4276
d448860e 4277 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4278 GEN_INT (INTVAL (source)
4279 & (~ (HOST_WIDE_INT) 0xffff))));
4280 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4281 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4282 GEN_INT (INTVAL (source) & 0xffff))));
4283 result = dest;
37409796
NS
4284 break;
4285
4286 case DImode:
4287 switch (GET_CODE (source))
af8cb5c5 4288 {
37409796 4289 case CONST_INT:
af8cb5c5
DE
4290 c0 = INTVAL (source);
4291 c1 = -(c0 < 0);
37409796 4292 break;
bb8df8a6 4293
37409796 4294 case CONST_DOUBLE:
2bfcf297 4295#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4296 c0 = CONST_DOUBLE_LOW (source);
4297 c1 = -(c0 < 0);
2bfcf297 4298#else
af8cb5c5
DE
4299 c0 = CONST_DOUBLE_LOW (source);
4300 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4301#endif
37409796
NS
4302 break;
4303
4304 default:
4305 gcc_unreachable ();
af8cb5c5 4306 }
af8cb5c5
DE
4307
4308 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4309 break;
4310
4311 default:
4312 gcc_unreachable ();
2bfcf297 4313 }
2bfcf297 4314
af8cb5c5
DE
4315 insn = get_last_insn ();
4316 set = single_set (insn);
4317 if (! CONSTANT_P (SET_SRC (set)))
4318 set_unique_reg_note (insn, REG_EQUAL, source);
4319
4320 return result;
2bfcf297
DB
4321}
4322
4323/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4324   fall back to a straightforward decomposition.  We do this to avoid
4325 exponential run times encountered when looking for longer sequences
4326 with rs6000_emit_set_const. */
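/* Worked example (illustrative): on a 64-bit target, loading the
   constant 0x123456789abcdef0 splits into ud4=0x1234, ud3=0x5678,
   ud2=0x9abc, ud1=0xdef0 and takes the final branch below, giving
   roughly
     lis r,0x1234 ; ori r,r,0x5678 ; sldi r,r,32
     oris r,r,0x9abc ; ori r,r,0xdef0
   i.e. five insns for a full 64-bit constant.  */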
4327static rtx
a2369ed3 4328rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4329{
4330 if (!TARGET_POWERPC64)
4331 {
4332 rtx operand1, operand2;
4333
4334 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4335 DImode);
d448860e 4336 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4337 DImode);
4338 emit_move_insn (operand1, GEN_INT (c1));
4339 emit_move_insn (operand2, GEN_INT (c2));
4340 }
4341 else
4342 {
bc06712d 4343 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4344
bc06712d 4345 ud1 = c1 & 0xffff;
f921c9c9 4346 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4347#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4348 c2 = c1 >> 32;
2bfcf297 4349#endif
bc06712d 4350 ud3 = c2 & 0xffff;
f921c9c9 4351 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4352
f676971a 4353 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4354 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4355 {
bc06712d 4356 if (ud1 & 0x8000)
b78d48dd 4357 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4358 else
4359 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4360 }
2bfcf297 4361
f676971a 4362 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4363 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4364 {
bc06712d 4365 if (ud2 & 0x8000)
f676971a 4366 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4367 - 0x80000000));
252b88f7 4368 else
bc06712d
TR
4369 emit_move_insn (dest, GEN_INT (ud2 << 16));
4370 if (ud1 != 0)
d448860e
JH
4371 emit_move_insn (copy_rtx (dest),
4372 gen_rtx_IOR (DImode, copy_rtx (dest),
4373 GEN_INT (ud1)));
252b88f7 4374 }
f676971a 4375 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4376 || (ud4 == 0 && ! (ud3 & 0x8000)))
4377 {
4378 if (ud3 & 0x8000)
f676971a 4379 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4380 - 0x80000000));
4381 else
4382 emit_move_insn (dest, GEN_INT (ud3 << 16));
4383
4384 if (ud2 != 0)
d448860e
JH
4385 emit_move_insn (copy_rtx (dest),
4386 gen_rtx_IOR (DImode, copy_rtx (dest),
4387 GEN_INT (ud2)));
4388 emit_move_insn (copy_rtx (dest),
4389 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4390 GEN_INT (16)));
bc06712d 4391 if (ud1 != 0)
d448860e
JH
4392 emit_move_insn (copy_rtx (dest),
4393 gen_rtx_IOR (DImode, copy_rtx (dest),
4394 GEN_INT (ud1)));
bc06712d 4395 }
f676971a 4396 else
bc06712d
TR
4397 {
4398 if (ud4 & 0x8000)
f676971a 4399 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4400 - 0x80000000));
4401 else
4402 emit_move_insn (dest, GEN_INT (ud4 << 16));
4403
4404 if (ud3 != 0)
d448860e
JH
4405 emit_move_insn (copy_rtx (dest),
4406 gen_rtx_IOR (DImode, copy_rtx (dest),
4407 GEN_INT (ud3)));
2bfcf297 4408
d448860e
JH
4409 emit_move_insn (copy_rtx (dest),
4410 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4411 GEN_INT (32)));
bc06712d 4412 if (ud2 != 0)
d448860e
JH
4413 emit_move_insn (copy_rtx (dest),
4414 gen_rtx_IOR (DImode, copy_rtx (dest),
4415 GEN_INT (ud2 << 16)));
bc06712d 4416 if (ud1 != 0)
d448860e
JH
4417 emit_move_insn (copy_rtx (dest),
4418 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4419 }
4420 }
2bfcf297
DB
4421 return dest;
4422}
4423
76d2b81d 4424/* Helper for rs6000_emit_move below.  Get rid of [r+r] memory refs
7393f7f8 4425 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4426
4427static void
4428rs6000_eliminate_indexed_memrefs (rtx operands[2])
4429{
4430 if (GET_CODE (operands[0]) == MEM
4431 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4432 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4433 && ! reload_in_progress)
4434 operands[0]
4435 = replace_equiv_address (operands[0],
4436 copy_addr_to_reg (XEXP (operands[0], 0)));
4437
4438 if (GET_CODE (operands[1]) == MEM
4439 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4440 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4441 && ! reload_in_progress)
4442 operands[1]
4443 = replace_equiv_address (operands[1],
4444 copy_addr_to_reg (XEXP (operands[1], 0)));
4445}
4446
fb4d4348
GK
4447/* Emit a move from SOURCE to DEST in mode MODE. */
4448void
a2369ed3 4449rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4450{
4451 rtx operands[2];
4452 operands[0] = dest;
4453 operands[1] = source;
f676971a 4454
fb4d4348
GK
4455 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4456 if (GET_CODE (operands[1]) == CONST_DOUBLE
4457 && ! FLOAT_MODE_P (mode)
4458 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4459 {
4460 /* FIXME. This should never happen. */
4461 /* Since it seems that it does, do the safe thing and convert
4462 to a CONST_INT. */
2496c7bd 4463 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4464 }
37409796
NS
4465 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4466 || FLOAT_MODE_P (mode)
4467 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4468 || CONST_DOUBLE_LOW (operands[1]) < 0)
4469 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4470 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4471
c9e8cb32
DD
4472 /* Check if GCC is setting up a block move that will end up using FP
4473 registers as temporaries. We must make sure this is acceptable. */
4474 if (GET_CODE (operands[0]) == MEM
4475 && GET_CODE (operands[1]) == MEM
4476 && mode == DImode
41543739
GK
4477 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4478 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4479 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4480 ? 32 : MEM_ALIGN (operands[0])))
4481 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4482 ? 32
41543739
GK
4483 : MEM_ALIGN (operands[1]))))
4484 && ! MEM_VOLATILE_P (operands [0])
4485 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4486 {
41543739
GK
4487 emit_move_insn (adjust_address (operands[0], SImode, 0),
4488 adjust_address (operands[1], SImode, 0));
d448860e
JH
4489 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4490 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4491 return;
4492 }
630d42a0 4493
b3a13419 4494 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4495 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4496 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4497
a3170dc6
AH
4498 if (mode == SFmode && ! TARGET_POWERPC
4499 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4500 && GET_CODE (operands[0]) == MEM)
fb4d4348 4501 {
ffc14f31
GK
4502 int regnum;
4503
4504 if (reload_in_progress || reload_completed)
4505 regnum = true_regnum (operands[1]);
4506 else if (GET_CODE (operands[1]) == REG)
4507 regnum = REGNO (operands[1]);
4508 else
4509 regnum = -1;
f676971a 4510
fb4d4348
GK
4511 /* If operands[1] is a register, on POWER it may have
4512 double-precision data in it, so truncate it to single
4513 precision. */
4514 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4515 {
4516 rtx newreg;
b3a13419 4517 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4518 : gen_reg_rtx (mode));
fb4d4348
GK
4519 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4520 operands[1] = newreg;
4521 }
4522 }
4523
c4501e62
JJ
4524  /* Recognize the case where operands[1] is a reference to thread-local
4525 data and load its address to a register. */
84f52ebd 4526 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4527 {
84f52ebd
RH
4528 enum tls_model model;
4529 rtx tmp = operands[1];
4530 rtx addend = NULL;
4531
4532 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4533 {
4534 addend = XEXP (XEXP (tmp, 0), 1);
4535 tmp = XEXP (XEXP (tmp, 0), 0);
4536 }
4537
4538 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4539 model = SYMBOL_REF_TLS_MODEL (tmp);
4540 gcc_assert (model != 0);
4541
4542 tmp = rs6000_legitimize_tls_address (tmp, model);
4543 if (addend)
4544 {
4545 tmp = gen_rtx_PLUS (mode, tmp, addend);
4546 tmp = force_operand (tmp, operands[0]);
4547 }
4548 operands[1] = tmp;
c4501e62
JJ
4549 }
4550
8f4e6caf
RH
4551 /* Handle the case where reload calls us with an invalid address. */
4552 if (reload_in_progress && mode == Pmode
69ef87e2 4553 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4554 || ! nonimmediate_operand (operands[0], mode)))
4555 goto emit_set;
4556
a9baceb1
GK
4557 /* 128-bit constant floating-point values on Darwin should really be
4558 loaded as two parts. */
8521c414 4559 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4560 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4561 {
4562 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4563 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4564 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4565 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4566 simplify_gen_subreg (imode, operands[1], mode, 0),
4567 imode);
4568 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4569 GET_MODE_SIZE (imode)),
4570 simplify_gen_subreg (imode, operands[1], mode,
4571 GET_MODE_SIZE (imode)),
4572 imode);
a9baceb1
GK
4573 return;
4574 }
4575
fb4d4348
GK
4576 /* FIXME: In the long term, this switch statement should go away
4577 and be replaced by a sequence of tests based on things like
4578 mode == Pmode. */
4579 switch (mode)
4580 {
4581 case HImode:
4582 case QImode:
4583 if (CONSTANT_P (operands[1])
4584 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4585 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4586 break;
4587
06f4e019 4588 case TFmode:
7393f7f8 4589 case TDmode:
76d2b81d
DJ
4590 rs6000_eliminate_indexed_memrefs (operands);
4591 /* fall through */
4592
fb4d4348 4593 case DFmode:
7393f7f8 4594 case DDmode:
fb4d4348 4595 case SFmode:
f676971a 4596 if (CONSTANT_P (operands[1])
fb4d4348 4597 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4598 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4599 break;
f676971a 4600
0ac081f6
AH
4601 case V16QImode:
4602 case V8HImode:
4603 case V4SFmode:
4604 case V4SImode:
a3170dc6
AH
4605 case V4HImode:
4606 case V2SFmode:
4607 case V2SImode:
00a892b8 4608 case V1DImode:
69ef87e2 4609 if (CONSTANT_P (operands[1])
d744e06e 4610 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4611 operands[1] = force_const_mem (mode, operands[1]);
4612 break;
f676971a 4613
fb4d4348 4614 case SImode:
a9098fd0 4615 case DImode:
fb4d4348
GK
4616      /* Use default pattern for address of ELF small data.  */
4617 if (TARGET_ELF
a9098fd0 4618 && mode == Pmode
f607bc57 4619 && DEFAULT_ABI == ABI_V4
f676971a 4620 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4621 || GET_CODE (operands[1]) == CONST)
4622 && small_data_operand (operands[1], mode))
fb4d4348
GK
4623 {
4624 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4625 return;
4626 }
4627
f607bc57 4628 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4629 && mode == Pmode && mode == SImode
4630 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4631 {
4632 emit_insn (gen_movsi_got (operands[0], operands[1]));
4633 return;
4634 }
4635
ee890fe2 4636 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4637 && TARGET_NO_TOC
4638 && ! flag_pic
a9098fd0 4639 && mode == Pmode
fb4d4348
GK
4640 && CONSTANT_P (operands[1])
4641 && GET_CODE (operands[1]) != HIGH
4642 && GET_CODE (operands[1]) != CONST_INT)
4643 {
b3a13419
ILT
4644 rtx target = (!can_create_pseudo_p ()
4645 ? operands[0]
4646 : gen_reg_rtx (mode));
fb4d4348
GK
4647
4648 /* If this is a function address on -mcall-aixdesc,
4649 convert it to the address of the descriptor. */
4650 if (DEFAULT_ABI == ABI_AIX
4651 && GET_CODE (operands[1]) == SYMBOL_REF
4652 && XSTR (operands[1], 0)[0] == '.')
4653 {
4654 const char *name = XSTR (operands[1], 0);
4655 rtx new_ref;
4656 while (*name == '.')
4657 name++;
4658 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4659 CONSTANT_POOL_ADDRESS_P (new_ref)
4660 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4661 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4662 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4663 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4664 operands[1] = new_ref;
4665 }
7509c759 4666
ee890fe2
SS
4667 if (DEFAULT_ABI == ABI_DARWIN)
4668 {
ab82a49f
AP
4669#if TARGET_MACHO
4670 if (MACHO_DYNAMIC_NO_PIC_P)
4671 {
4672 /* Take care of any required data indirection. */
4673 operands[1] = rs6000_machopic_legitimize_pic_address (
4674 operands[1], mode, operands[0]);
4675 if (operands[0] != operands[1])
4676 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4677 operands[0], operands[1]));
ab82a49f
AP
4678 return;
4679 }
4680#endif
b8a55285
AP
4681 emit_insn (gen_macho_high (target, operands[1]));
4682 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4683 return;
4684 }
4685
fb4d4348
GK
4686 emit_insn (gen_elf_high (target, operands[1]));
4687 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4688 return;
4689 }
4690
a9098fd0
GK
4691 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4692 and we have put it in the TOC, we just need to make a TOC-relative
4693 reference to it. */
4694 if (TARGET_TOC
4695 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4696 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4697 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4698 get_pool_mode (operands[1])))
fb4d4348 4699 {
a9098fd0 4700 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4701 }
a9098fd0
GK
4702 else if (mode == Pmode
4703 && CONSTANT_P (operands[1])
38886f37
AO
4704 && ((GET_CODE (operands[1]) != CONST_INT
4705 && ! easy_fp_constant (operands[1], mode))
4706 || (GET_CODE (operands[1]) == CONST_INT
4707 && num_insns_constant (operands[1], mode) > 2)
4708 || (GET_CODE (operands[0]) == REG
4709 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4710 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4711 && ! legitimate_constant_pool_address_p (operands[1])
4712 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4713 {
4714 /* Emit a USE operation so that the constant isn't deleted if
4715 expensive optimizations are turned on because nobody
4716 references it. This should only be done for operands that
4717 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4718 This should not be done for operands that contain LABEL_REFs.
4719 For now, we just handle the obvious case. */
4720 if (GET_CODE (operands[1]) != LABEL_REF)
4721 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4722
c859cda6 4723#if TARGET_MACHO
ee890fe2 4724 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4725 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4726 {
ee890fe2
SS
4727 operands[1] =
4728 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4729 operands[0]);
4730 if (operands[0] != operands[1])
4731 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4732 return;
4733 }
c859cda6 4734#endif
ee890fe2 4735
fb4d4348
GK
4736 /* If we are to limit the number of things we put in the TOC and
4737 this is a symbol plus a constant we can add in one insn,
4738 just put the symbol in the TOC and add the constant. Don't do
4739 this if reload is in progress. */
4740 if (GET_CODE (operands[1]) == CONST
4741 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4742 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4743 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4744 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4745 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4746 && ! side_effects_p (operands[0]))
4747 {
a4f6c312
SS
4748 rtx sym =
4749 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4750 rtx other = XEXP (XEXP (operands[1], 0), 1);
4751
a9098fd0
GK
4752 sym = force_reg (mode, sym);
4753 if (mode == SImode)
4754 emit_insn (gen_addsi3 (operands[0], sym, other));
4755 else
4756 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4757 return;
4758 }
4759
a9098fd0 4760 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4761
f676971a 4762 if (TARGET_TOC
4d588c14 4763 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4764 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4765 get_pool_constant (XEXP (operands[1], 0)),
4766 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4767 {
ba4828e0 4768 operands[1]
542a8afa 4769 = gen_const_mem (mode,
c4ad648e 4770 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4771 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4772 }
fb4d4348
GK
4773 }
4774 break;
a9098fd0 4775
fb4d4348 4776 case TImode:
76d2b81d
DJ
4777 rs6000_eliminate_indexed_memrefs (operands);
4778
27dc0551
DE
4779 if (TARGET_POWER)
4780 {
4781 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4782 gen_rtvec (2,
4783 gen_rtx_SET (VOIDmode,
4784 operands[0], operands[1]),
4785 gen_rtx_CLOBBER (VOIDmode,
4786 gen_rtx_SCRATCH (SImode)))));
4787 return;
4788 }
fb4d4348
GK
4789 break;
4790
4791 default:
37409796 4792 gcc_unreachable ();
fb4d4348
GK
4793 }
4794
a9098fd0
GK
4795 /* Above, we may have called force_const_mem which may have returned
4796 an invalid address. If we can, fix this up; otherwise, reload will
4797 have to deal with it. */
8f4e6caf
RH
4798 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4799 operands[1] = validize_mem (operands[1]);
a9098fd0 4800
8f4e6caf 4801 emit_set:
fb4d4348
GK
4802 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4803}
4697a36c 4804\f
2858f73a
GK
4805/* Nonzero if we can use a floating-point register to pass this arg. */
4806#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4807 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4808 && (MODE) != SDmode \
2858f73a
GK
4809 && (CUM)->fregno <= FP_ARG_MAX_REG \
4810 && TARGET_HARD_FLOAT && TARGET_FPRS)
4811
4812/* Nonzero if we can use an AltiVec register to pass this arg. */
4813#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4814 (ALTIVEC_VECTOR_MODE (MODE) \
4815 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4816 && TARGET_ALTIVEC_ABI \
83953138 4817 && (NAMED))
2858f73a 4818
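/* Under the AIX and Darwin ABIs the first 13 FP args go in f1..f13,
   while the 32-bit SVR4 ABI only uses f1..f8, so FP_ARG_MAX_REG depends
   on the ABI in effect.  The AltiVec ABI passes vector arguments in
   v2..v13 (ALTIVEC_ARG_MAX_REG).  */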
c6e8c921
GK
4819/* Return a nonzero value to say to return the function value in
4820 memory, just as large structures are always returned. TYPE will be
4821 the data type of the value, and FNTYPE will be the type of the
4822 function doing the returning, or @code{NULL} for libcalls.
4823
4824 The AIX ABI for the RS/6000 specifies that all structures are
4825 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4826 specifies that structures <= 8 bytes are returned in r3/r4, but a
4827 draft put them in memory, and GCC used to implement the draft
df01da37 4828 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4829 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4830 compatibility can change DRAFT_V4_STRUCT_RET to override the
4831 default, and -m switches get the final word. See
4832 rs6000_override_options for more details.
4833
4834 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4835 long double support is enabled. These values are returned in memory.
4836
4837 int_size_in_bytes returns -1 for variable size objects, which go in
4838 memory always. The cast to unsigned makes -1 > 8. */
4839
4840static bool
586de218 4841rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4842{
594a51fe
SS
4843 /* In the darwin64 abi, try to use registers for larger structs
4844 if possible. */
0b5383eb 4845 if (rs6000_darwin64_abi
594a51fe 4846 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4847 && int_size_in_bytes (type) > 0)
4848 {
4849 CUMULATIVE_ARGS valcum;
4850 rtx valret;
4851
4852 valcum.words = 0;
4853 valcum.fregno = FP_ARG_MIN_REG;
4854 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4855 /* Do a trial code generation as if this were going to be passed
4856 as an argument; if any part goes in memory, we return NULL. */
4857 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4858 if (valret)
4859 return false;
4860 /* Otherwise fall through to more conventional ABI rules. */
4861 }
594a51fe 4862
c6e8c921 4863 if (AGGREGATE_TYPE_P (type)
df01da37 4864 && (aix_struct_return
c6e8c921
GK
4865 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4866 return true;
b693336b 4867
bada2eb8
DE
4868 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4869 modes only exist for GCC vector types if -maltivec. */
4870 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4871 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4872 return false;
4873
b693336b
PB
4874 /* Return synthetic vectors in memory. */
4875 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4876 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4877 {
4878 static bool warned_for_return_big_vectors = false;
4879 if (!warned_for_return_big_vectors)
4880 {
d4ee4d25 4881 warning (0, "GCC vector returned by reference: "
b693336b
PB
4882 "non-standard ABI extension with no compatibility guarantee");
4883 warned_for_return_big_vectors = true;
4884 }
4885 return true;
4886 }
4887
602ea4d3 4888 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4889 return true;
ad630bef 4890
c6e8c921
GK
4891 return false;
4892}
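/* For example: under the AIX and Darwin ABIs every aggregate return
   uses memory; under the 32-bit SVR4 ABI an 8-byte struct comes back in
   r3/r4, while a 12-byte struct, or any variable-sized type (where
   int_size_in_bytes gives -1 and the unsigned cast makes it huge), is
   returned in memory.  */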
4893
4697a36c
MM
4894/* Initialize a variable CUM of type CUMULATIVE_ARGS
4895 for a call to a function whose data type is FNTYPE.
4896 For a library call, FNTYPE is 0.
4897
4898 For incoming args we set the number of arguments in the prototype large
1c20ae99 4899 so we never return a PARALLEL. */
4697a36c
MM
4900
4901void
f676971a 4902init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4903 rtx libname ATTRIBUTE_UNUSED, int incoming,
4904 int libcall, int n_named_args)
4697a36c
MM
4905{
4906 static CUMULATIVE_ARGS zero_cumulative;
4907
4908 *cum = zero_cumulative;
4909 cum->words = 0;
4910 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4911 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4912 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4913 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4914 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4915 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4916 cum->stdarg = fntype
4917 && (TYPE_ARG_TYPES (fntype) != 0
4918 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4919 != void_type_node));
4697a36c 4920
0f6937fe
AM
4921 cum->nargs_prototype = 0;
4922 if (incoming || cum->prototype)
4923 cum->nargs_prototype = n_named_args;
4697a36c 4924
a5c76ee6 4925 /* Check for a longcall attribute. */
3eb4e360
AM
4926 if ((!fntype && rs6000_default_long_calls)
4927 || (fntype
4928 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4929 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4930 cum->call_cookie |= CALL_LONG;
6a4cee5f 4931
4697a36c
MM
4932 if (TARGET_DEBUG_ARG)
4933 {
4934 fprintf (stderr, "\ninit_cumulative_args:");
4935 if (fntype)
4936 {
4937 tree ret_type = TREE_TYPE (fntype);
4938 fprintf (stderr, " ret code = %s,",
4939 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4940 }
4941
6a4cee5f
MM
4942 if (cum->call_cookie & CALL_LONG)
4943 fprintf (stderr, " longcall,");
4944
4697a36c
MM
4945 fprintf (stderr, " proto = %d, nargs = %d\n",
4946 cum->prototype, cum->nargs_prototype);
4947 }
f676971a 4948
c4ad648e
AM
4949 if (fntype
4950 && !TARGET_ALTIVEC
4951 && TARGET_ALTIVEC_ABI
4952 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4953 {
c85ce869 4954 error ("cannot return value in vector register because"
c4ad648e 4955 " altivec instructions are disabled, use -maltivec"
c85ce869 4956 " to enable them");
c4ad648e 4957 }
4697a36c
MM
4958}
4959\f
fe984136
RH
4960/* Return true if TYPE must be passed on the stack and not in registers. */
4961
4962static bool
586de218 4963rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
4964{
4965 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4966 return must_pass_in_stack_var_size (mode, type);
4967 else
4968 return must_pass_in_stack_var_size_or_pad (mode, type);
4969}
4970
c229cba9
DE
4971/* If defined, a C expression which determines whether, and in which
4972 direction, to pad out an argument with extra space. The value
4973 should be of type `enum direction': either `upward' to pad above
4974 the argument, `downward' to pad below, or `none' to inhibit
4975 padding.
4976
4977 For the AIX ABI, structs are always stored left-shifted in their
4978 argument slot. */
4979
9ebbca7d 4980enum direction
586de218 4981function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 4982{
6e985040
AM
4983#ifndef AGGREGATE_PADDING_FIXED
4984#define AGGREGATE_PADDING_FIXED 0
4985#endif
4986#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4987#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4988#endif
4989
4990 if (!AGGREGATE_PADDING_FIXED)
4991 {
4992 /* GCC used to pass structures of the same size as integer types as
4993 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4994 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4995 passed padded downward, except that -mstrict-align further
4996 muddied the water in that multi-component structures of 2 and 4
4997 bytes in size were passed padded upward.
4998
4999 The following arranges for best compatibility with previous
5000 versions of gcc, but removes the -mstrict-align dependency. */
5001 if (BYTES_BIG_ENDIAN)
5002 {
5003 HOST_WIDE_INT size = 0;
5004
5005 if (mode == BLKmode)
5006 {
5007 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5008 size = int_size_in_bytes (type);
5009 }
5010 else
5011 size = GET_MODE_SIZE (mode);
5012
5013 if (size == 1 || size == 2 || size == 4)
5014 return downward;
5015 }
5016 return upward;
5017 }
5018
5019 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5020 {
5021 if (type != 0 && AGGREGATE_TYPE_P (type))
5022 return upward;
5023 }
c229cba9 5024
d3704c46
KH
5025 /* Fall back to the default. */
5026 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5027}
5028
b6c9286a 5029/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5030 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5031 PARM_BOUNDARY is used for all arguments.
f676971a 5032
84e9ad15
AM
5033 V.4 wants long longs and doubles to be double word aligned. Just
5034 testing the mode size is a boneheaded way to do this as it means
5035 that other types such as complex int are also double word aligned.
5036 However, we're stuck with this because changing the ABI might break
5037 existing library interfaces.
5038
b693336b
PB
5039 Doubleword align SPE vectors.
5040 Quadword align Altivec vectors.
5041 Quadword align large synthetic vector types. */
b6c9286a
MM
5042
5043int
b693336b 5044function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5045{
84e9ad15
AM
5046 if (DEFAULT_ABI == ABI_V4
5047 && (GET_MODE_SIZE (mode) == 8
5048 || (TARGET_HARD_FLOAT
5049 && TARGET_FPRS
7393f7f8 5050 && (mode == TFmode || mode == TDmode))))
4ed78545 5051 return 64;
ad630bef
DE
5052 else if (SPE_VECTOR_MODE (mode)
5053 || (type && TREE_CODE (type) == VECTOR_TYPE
5054 && int_size_in_bytes (type) >= 8
5055 && int_size_in_bytes (type) < 16))
e1f83b4d 5056 return 64;
ad630bef
DE
5057 else if (ALTIVEC_VECTOR_MODE (mode)
5058 || (type && TREE_CODE (type) == VECTOR_TYPE
5059 && int_size_in_bytes (type) >= 16))
0ac081f6 5060 return 128;
0b5383eb
DJ
5061 else if (rs6000_darwin64_abi && mode == BLKmode
5062 && type && TYPE_ALIGN (type) > 64)
5063 return 128;
9ebbca7d 5064 else
b6c9286a 5065 return PARM_BOUNDARY;
b6c9286a 5066}
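/* So, for instance, a V.4 long long or double (mode size 8) lands on a
   doubleword boundary, a 16-byte AltiVec vector such as V4SImode on a
   quadword boundary, and everything else falls back to PARM_BOUNDARY.  */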
c53bdcf5 5067
294bd182
AM
5068/* For a function parm of MODE and TYPE, return the starting word in
5069 the parameter area. NWORDS of the parameter area are already used. */
5070
5071static unsigned int
5072rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5073{
5074 unsigned int align;
5075 unsigned int parm_offset;
5076
5077 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5078 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5079 return nwords + (-(parm_offset + nwords) & align);
5080}
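/* A worked example of the expression above: under the 32-bit AIX ABI
   the parameter save area starts 6 words (24 bytes) into the frame, so
   parm_offset is 6.  For a quadword-aligned vector argument, align is
   128 / PARM_BOUNDARY - 1 = 3, and with nwords == 0 the result is
   0 + (-(6 + 0) & 3) = 2, i.e. the argument starts at word 2 (2 mod 4),
   matching the comments about vector parameters below.  */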
5081
c53bdcf5
AM
5082/* Compute the size (in words) of a function argument. */
5083
5084static unsigned long
5085rs6000_arg_size (enum machine_mode mode, tree type)
5086{
5087 unsigned long size;
5088
5089 if (mode != BLKmode)
5090 size = GET_MODE_SIZE (mode);
5091 else
5092 size = int_size_in_bytes (type);
5093
5094 if (TARGET_32BIT)
5095 return (size + 3) >> 2;
5096 else
5097 return (size + 7) >> 3;
5098}
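/* For example, a 10-byte BLKmode struct occupies (10 + 3) >> 2 = 3
   argument words on a 32-bit target and (10 + 7) >> 3 = 2 words on a
   64-bit target.  */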
b6c9286a 5099\f
0b5383eb 5100/* Use this to flush pending int fields. */
594a51fe
SS
5101
5102static void
0b5383eb
DJ
5103rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5104 HOST_WIDE_INT bitpos)
594a51fe 5105{
0b5383eb
DJ
5106 unsigned int startbit, endbit;
5107 int intregs, intoffset;
5108 enum machine_mode mode;
594a51fe 5109
0b5383eb
DJ
5110 if (cum->intoffset == -1)
5111 return;
594a51fe 5112
0b5383eb
DJ
5113 intoffset = cum->intoffset;
5114 cum->intoffset = -1;
5115
5116 if (intoffset % BITS_PER_WORD != 0)
5117 {
5118 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5119 MODE_INT, 0);
5120 if (mode == BLKmode)
594a51fe 5121 {
0b5383eb
DJ
5122 /* We couldn't find an appropriate mode, which happens,
5123 e.g., in packed structs when there are 3 bytes to load.
5124 Move intoffset back to the beginning of the word in this
5125 case. */
5126 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5127 }
594a51fe 5128 }
0b5383eb
DJ
5129
5130 startbit = intoffset & -BITS_PER_WORD;
5131 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5132 intregs = (endbit - startbit) / BITS_PER_WORD;
5133 cum->words += intregs;
5134}
5135
5136/* The darwin64 ABI calls for us to recurse down through structs,
5137 looking for elements passed in registers. Unfortunately, we have
5138 to track int register count here also because of misalignments
5139 in powerpc alignment mode. */
5140
5141static void
5142rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5143 tree type,
5144 HOST_WIDE_INT startbitpos)
5145{
5146 tree f;
5147
5148 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5149 if (TREE_CODE (f) == FIELD_DECL)
5150 {
5151 HOST_WIDE_INT bitpos = startbitpos;
5152 tree ftype = TREE_TYPE (f);
70fb00df
AP
5153 enum machine_mode mode;
5154 if (ftype == error_mark_node)
5155 continue;
5156 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5157
5158 if (DECL_SIZE (f) != 0
5159 && host_integerp (bit_position (f), 1))
5160 bitpos += int_bit_position (f);
5161
5162 /* ??? FIXME: else assume zero offset. */
5163
5164 if (TREE_CODE (ftype) == RECORD_TYPE)
5165 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5166 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5167 {
5168 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5169 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5170 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5171 }
5172 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5173 {
5174 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5175 cum->vregno++;
5176 cum->words += 2;
5177 }
5178 else if (cum->intoffset == -1)
5179 cum->intoffset = bitpos;
5180 }
594a51fe
SS
5181}
5182
4697a36c
MM
5183/* Update the data in CUM to advance over an argument
5184 of mode MODE and data type TYPE.
b2d04ecf
AM
5185 (TYPE is null for libcalls where that information may not be available.)
5186
5187 Note that for args passed by reference, function_arg will be called
5188 with MODE and TYPE set to that of the pointer to the arg, not the arg
5189 itself. */
4697a36c
MM
5190
5191void
f676971a 5192function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5193 tree type, int named, int depth)
4697a36c 5194{
0b5383eb
DJ
5195 int size;
5196
594a51fe
SS
5197 /* Only tick off an argument if we're not recursing. */
5198 if (depth == 0)
5199 cum->nargs_prototype--;
4697a36c 5200
ad630bef
DE
5201 if (TARGET_ALTIVEC_ABI
5202 && (ALTIVEC_VECTOR_MODE (mode)
5203 || (type && TREE_CODE (type) == VECTOR_TYPE
5204 && int_size_in_bytes (type) == 16)))
0ac081f6 5205 {
4ed78545
AM
5206 bool stack = false;
5207
2858f73a 5208 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5209 {
6d0ef01e
HP
5210 cum->vregno++;
5211 if (!TARGET_ALTIVEC)
c85ce869 5212 error ("cannot pass argument in vector register because"
6d0ef01e 5213 " altivec instructions are disabled, use -maltivec"
c85ce869 5214 " to enable them");
4ed78545
AM
5215
5216 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5217 even if it is going to be passed in a vector register.
4ed78545
AM
5218 Darwin does the same for variable-argument functions. */
5219 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5220 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5221 stack = true;
6d0ef01e 5222 }
4ed78545
AM
5223 else
5224 stack = true;
5225
5226 if (stack)
c4ad648e 5227 {
a594a19c 5228 int align;
f676971a 5229
2858f73a
GK
5230 /* Vector parameters must be 16-byte aligned. This places
5231 them at 2 mod 4 in terms of words in 32-bit mode, since
5232 the parameter save area starts at offset 24 from the
5233 stack. In 64-bit mode, they just have to start on an
5234 even word, since the parameter save area is 16-byte
5235 aligned. Space for GPRs is reserved even if the argument
5236 will be passed in memory. */
5237 if (TARGET_32BIT)
4ed78545 5238 align = (2 - cum->words) & 3;
2858f73a
GK
5239 else
5240 align = cum->words & 1;
c53bdcf5 5241 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5242
a594a19c
GK
5243 if (TARGET_DEBUG_ARG)
5244 {
f676971a 5245 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5246 cum->words, align);
5247 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5248 cum->nargs_prototype, cum->prototype,
2858f73a 5249 GET_MODE_NAME (mode));
a594a19c
GK
5250 }
5251 }
0ac081f6 5252 }
a4b0320c 5253 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5254 && !cum->stdarg
5255 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5256 cum->sysv_gregno++;
594a51fe
SS
5257
5258 else if (rs6000_darwin64_abi
5259 && mode == BLKmode
0b5383eb
DJ
5260 && TREE_CODE (type) == RECORD_TYPE
5261 && (size = int_size_in_bytes (type)) > 0)
5262 {
5263 /* Variable sized types have size == -1 and are
5264 treated as if consisting entirely of ints.
5265 Pad to 16 byte boundary if needed. */
5266 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5267 && (cum->words % 2) != 0)
5268 cum->words++;
5269 /* For varargs, we can just go up by the size of the struct. */
5270 if (!named)
5271 cum->words += (size + 7) / 8;
5272 else
5273 {
5274 /* It is tempting to say int register count just goes up by
5275 sizeof(type)/8, but this is wrong in a case such as
5276 { int; double; int; } [powerpc alignment]. We have to
5277 grovel through the fields for these too. */
5278 cum->intoffset = 0;
5279 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5280 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5281 size * BITS_PER_UNIT);
5282 }
5283 }
f607bc57 5284 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5285 {
a3170dc6 5286 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5287 && (mode == SFmode || mode == DFmode
7393f7f8 5288 || mode == DDmode || mode == TDmode
602ea4d3 5289 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5290 {
2d83f070
JJ
5291 /* _Decimal128 must use an even/odd register pair. This assumes
5292 that the register number is odd when fregno is odd. */
5293 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5294 cum->fregno++;
5295
5296 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5297 <= FP_ARG_V4_MAX_REG)
602ea4d3 5298 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5299 else
5300 {
602ea4d3 5301 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5302 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5303 cum->words += cum->words & 1;
c53bdcf5 5304 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5305 }
4697a36c 5306 }
4cc833b7
RH
5307 else
5308 {
b2d04ecf 5309 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5310 int gregno = cum->sysv_gregno;
5311
4ed78545
AM
5312 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5313 (r7,r8) or (r9,r10). As does any other 2 word item such
5314 as complex int due to a historical mistake. */
5315 if (n_words == 2)
5316 gregno += (1 - gregno) & 1;
4cc833b7 5317
4ed78545 5318 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5319 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5320 {
4ed78545
AM
5321 /* Long long and SPE vectors are aligned on the stack.
5322 So are other 2 word items such as complex int due to
5323 a historical mistake. */
4cc833b7
RH
5324 if (n_words == 2)
5325 cum->words += cum->words & 1;
5326 cum->words += n_words;
5327 }
4697a36c 5328
4cc833b7
RH
5329 /* Note: we continue to accumulate gregno even after we have started
5330 spilling to the stack; this is how expand_builtin_saveregs can tell
5331 that spilling to the stack has begun. */
5332 cum->sysv_gregno = gregno + n_words;
5333 }
4697a36c 5334
4cc833b7
RH
5335 if (TARGET_DEBUG_ARG)
5336 {
5337 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5338 cum->words, cum->fregno);
5339 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5340 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5341 fprintf (stderr, "mode = %4s, named = %d\n",
5342 GET_MODE_NAME (mode), named);
5343 }
4697a36c
MM
5344 }
5345 else
4cc833b7 5346 {
b2d04ecf 5347 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5348 int start_words = cum->words;
5349 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5350
294bd182 5351 cum->words = align_words + n_words;
4697a36c 5352
ebb109ad 5353 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5354 && mode != SDmode
a3170dc6 5355 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5356 {
5357 /* _Decimal128 must be passed in an even/odd float register pair.
5358 This assumes that the register number is odd when fregno is
5359 odd. */
5360 if (mode == TDmode && (cum->fregno % 2) == 1)
5361 cum->fregno++;
5362 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5363 }
4cc833b7
RH
5364
5365 if (TARGET_DEBUG_ARG)
5366 {
5367 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5368 cum->words, cum->fregno);
5369 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5370 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5371 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5372 named, align_words - start_words, depth);
4cc833b7
RH
5373 }
5374 }
4697a36c 5375}
a6c9bed4 5376
f82f556d
AH
5377static rtx
5378spe_build_register_parallel (enum machine_mode mode, int gregno)
5379{
17caeff2 5380 rtx r1, r3, r5, r7;
f82f556d 5381
37409796 5382 switch (mode)
f82f556d 5383 {
37409796 5384 case DFmode:
54b695e7
AH
5385 r1 = gen_rtx_REG (DImode, gregno);
5386 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5387 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5388
5389 case DCmode:
17caeff2 5390 case TFmode:
54b695e7
AH
5391 r1 = gen_rtx_REG (DImode, gregno);
5392 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5393 r3 = gen_rtx_REG (DImode, gregno + 2);
5394 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5395 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5396
17caeff2
JM
5397 case TCmode:
5398 r1 = gen_rtx_REG (DImode, gregno);
5399 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5400 r3 = gen_rtx_REG (DImode, gregno + 2);
5401 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5402 r5 = gen_rtx_REG (DImode, gregno + 4);
5403 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5404 r7 = gen_rtx_REG (DImode, gregno + 6);
5405 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5406 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5407
37409796
NS
5408 default:
5409 gcc_unreachable ();
f82f556d 5410 }
f82f556d 5411}
b78d48dd 5412
f82f556d 5413/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5414static rtx
f676971a 5415rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5416 tree type)
a6c9bed4 5417{
f82f556d
AH
5418 int gregno = cum->sysv_gregno;
5419
5420 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5421 are passed and returned in a pair of GPRs for ABI compatibility. */
17caeff2
JM
5422 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5423 || mode == TFmode || mode == TCmode))
f82f556d 5424 {
b5870bee
AH
5425 int n_words = rs6000_arg_size (mode, type);
5426
f82f556d 5427 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
5428 if (mode == DFmode)
5429 gregno += (1 - gregno) & 1;
f82f556d 5430
b5870bee
AH
5431 /* Multi-reg args are not split between registers and stack. */
5432 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5433 return NULL_RTX;
5434
5435 return spe_build_register_parallel (mode, gregno);
5436 }
a6c9bed4
AH
5437 if (cum->stdarg)
5438 {
c53bdcf5 5439 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5440
5441 /* SPE vectors are put in odd registers. */
5442 if (n_words == 2 && (gregno & 1) == 0)
5443 gregno += 1;
5444
5445 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5446 {
5447 rtx r1, r2;
5448 enum machine_mode m = SImode;
5449
5450 r1 = gen_rtx_REG (m, gregno);
5451 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5452 r2 = gen_rtx_REG (m, gregno + 1);
5453 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5454 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5455 }
5456 else
b78d48dd 5457 return NULL_RTX;
a6c9bed4
AH
5458 }
5459 else
5460 {
f82f556d
AH
5461 if (gregno <= GP_ARG_MAX_REG)
5462 return gen_rtx_REG (mode, gregno);
a6c9bed4 5463 else
b78d48dd 5464 return NULL_RTX;
a6c9bed4
AH
5465 }
5466}
5467
0b5383eb
DJ
5468/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5469 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5470
0b5383eb 5471static void
bb8df8a6 5472rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5473 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5474{
0b5383eb
DJ
5475 enum machine_mode mode;
5476 unsigned int regno;
5477 unsigned int startbit, endbit;
5478 int this_regno, intregs, intoffset;
5479 rtx reg;
594a51fe 5480
0b5383eb
DJ
5481 if (cum->intoffset == -1)
5482 return;
5483
5484 intoffset = cum->intoffset;
5485 cum->intoffset = -1;
5486
5487 /* If this is the trailing part of a word, try to only load that
5488 much into the register. Otherwise load the whole register. Note
5489 that in the latter case we may pick up unwanted bits. It's not a
5490 problem at the moment, but we may wish to revisit this. */
5491
5492 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5493 {
0b5383eb
DJ
5494 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5495 MODE_INT, 0);
5496 if (mode == BLKmode)
5497 {
5498 /* We couldn't find an appropriate mode, which happens,
5499 e.g., in packed structs when there are 3 bytes to load.
5500 Move intoffset back to the beginning of the word in this
5501 case. */
5502 intoffset = intoffset & -BITS_PER_WORD;
5503 mode = word_mode;
5504 }
5505 }
5506 else
5507 mode = word_mode;
5508
5509 startbit = intoffset & -BITS_PER_WORD;
5510 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5511 intregs = (endbit - startbit) / BITS_PER_WORD;
5512 this_regno = cum->words + intoffset / BITS_PER_WORD;
5513
5514 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5515 cum->use_stack = 1;
bb8df8a6 5516
0b5383eb
DJ
5517 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5518 if (intregs <= 0)
5519 return;
5520
5521 intoffset /= BITS_PER_UNIT;
5522 do
5523 {
5524 regno = GP_ARG_MIN_REG + this_regno;
5525 reg = gen_rtx_REG (mode, regno);
5526 rvec[(*k)++] =
5527 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5528
5529 this_regno += 1;
5530 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5531 mode = word_mode;
5532 intregs -= 1;
5533 }
5534 while (intregs > 0);
5535}
5536
5537/* Recursive workhorse for the following. */
5538
5539static void
586de218 5540rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5541 HOST_WIDE_INT startbitpos, rtx rvec[],
5542 int *k)
5543{
5544 tree f;
5545
5546 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5547 if (TREE_CODE (f) == FIELD_DECL)
5548 {
5549 HOST_WIDE_INT bitpos = startbitpos;
5550 tree ftype = TREE_TYPE (f);
70fb00df
AP
5551 enum machine_mode mode;
5552 if (ftype == error_mark_node)
5553 continue;
5554 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5555
5556 if (DECL_SIZE (f) != 0
5557 && host_integerp (bit_position (f), 1))
5558 bitpos += int_bit_position (f);
5559
5560 /* ??? FIXME: else assume zero offset. */
5561
5562 if (TREE_CODE (ftype) == RECORD_TYPE)
5563 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5564 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5565 {
0b5383eb
DJ
5566#if 0
5567 switch (mode)
594a51fe 5568 {
0b5383eb
DJ
5569 case SCmode: mode = SFmode; break;
5570 case DCmode: mode = DFmode; break;
5571 case TCmode: mode = TFmode; break;
5572 default: break;
594a51fe 5573 }
0b5383eb
DJ
5574#endif
5575 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5576 rvec[(*k)++]
bb8df8a6 5577 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5578 gen_rtx_REG (mode, cum->fregno++),
5579 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5580 if (mode == TFmode || mode == TDmode)
0b5383eb 5581 cum->fregno++;
594a51fe 5582 }
0b5383eb
DJ
5583 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5584 {
5585 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5586 rvec[(*k)++]
bb8df8a6
EC
5587 = gen_rtx_EXPR_LIST (VOIDmode,
5588 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5589 GEN_INT (bitpos / BITS_PER_UNIT));
5590 }
5591 else if (cum->intoffset == -1)
5592 cum->intoffset = bitpos;
5593 }
5594}
594a51fe 5595
0b5383eb
DJ
5596/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5597 the register(s) to be used for each field and subfield of a struct
5598 being passed by value, along with the offset of where the
5599 register's value may be found in the block. FP fields go in FP
5600 register, vector fields go in vector registers, and everything
bb8df8a6 5601 else goes in int registers, packed as in memory.
8ff40a74 5602
0b5383eb
DJ
5603 This code is also used for function return values. RETVAL indicates
5604 whether this is the case.
8ff40a74 5605
a4d05547 5606 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5607 calling convention. */
594a51fe 5608
0b5383eb 5609static rtx
586de218 5610rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5611 int named, bool retval)
5612{
5613 rtx rvec[FIRST_PSEUDO_REGISTER];
5614 int k = 1, kbase = 1;
5615 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5616 /* This is a copy; modifications are not visible to our caller. */
5617 CUMULATIVE_ARGS copy_cum = *orig_cum;
5618 CUMULATIVE_ARGS *cum = &copy_cum;
5619
5620 /* Pad to 16 byte boundary if needed. */
5621 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5622 && (cum->words % 2) != 0)
5623 cum->words++;
5624
5625 cum->intoffset = 0;
5626 cum->use_stack = 0;
5627 cum->named = named;
5628
5629 /* Put entries into rvec[] for individual FP and vector fields, and
5630 for the chunks of memory that go in int regs. Note we start at
5631 element 1; 0 is reserved for an indication of using memory, and
5632 may or may not be filled in below. */
5633 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5634 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5635
5636 /* If any part of the struct went on the stack put all of it there.
5637 This hack is because the generic code for
5638 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5639 parts of the struct are not at the beginning. */
5640 if (cum->use_stack)
5641 {
5642 if (retval)
5643 return NULL_RTX; /* doesn't go in registers at all */
5644 kbase = 0;
5645 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5646 }
5647 if (k > 1 || cum->use_stack)
5648 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5649 else
5650 return NULL_RTX;
5651}
5652
b78d48dd
FJ
5653/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5654
5655static rtx
ec6376ab 5656rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5657{
ec6376ab
AM
5658 int n_units;
5659 int i, k;
5660 rtx rvec[GP_ARG_NUM_REG + 1];
5661
5662 if (align_words >= GP_ARG_NUM_REG)
5663 return NULL_RTX;
5664
5665 n_units = rs6000_arg_size (mode, type);
5666
5667 /* Optimize the simple case where the arg fits in one gpr, except in
5668 the case of BLKmode due to assign_parms assuming that registers are
5669 BITS_PER_WORD wide. */
5670 if (n_units == 0
5671 || (n_units == 1 && mode != BLKmode))
5672 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5673
5674 k = 0;
5675 if (align_words + n_units > GP_ARG_NUM_REG)
5676 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5677 using a magic NULL_RTX component.
79773478
AM
5678 This is not strictly correct. Only some of the arg belongs in
5679 memory, not all of it. However, the normal scheme using
5680 function_arg_partial_nregs can result in unusual subregs, eg.
5681 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5682 store the whole arg to memory is often more efficient than code
5683 to store pieces, and we know that space is available in the right
5684 place for the whole arg. */
ec6376ab
AM
5685 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5686
5687 i = 0;
5688 do
36a454e1 5689 {
ec6376ab
AM
5690 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5691 rtx off = GEN_INT (i++ * 4);
5692 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5693 }
ec6376ab
AM
5694 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5695
5696 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5697}
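/* For example, a DImode argument whose first word lands in the last
   GPR (align_words == GP_ARG_NUM_REG - 1, i.e. r10) yields a PARALLEL
   whose first element is the magic NULL_RTX memory marker and whose
   second is the SImode register holding the first word of the argument;
   the other word lives in the parameter save area.  */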
5698
4697a36c
MM
5699/* Determine where to put an argument to a function.
5700 Value is zero to push the argument on the stack,
5701 or a hard register in which to store the argument.
5702
5703 MODE is the argument's machine mode.
5704 TYPE is the data type of the argument (as a tree).
5705 This is null for libcalls where that information may
5706 not be available.
5707 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5708 the preceding args and about the function being called. It is
5709 not modified in this routine.
4697a36c
MM
5710 NAMED is nonzero if this argument is a named parameter
5711 (otherwise it is an extra parameter matching an ellipsis).
5712
5713 On RS/6000 the first eight words of non-FP are normally in registers
5714 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5715 Under V.4, the first 8 FP args are in registers.
5716
5717 If this is floating-point and no prototype is specified, we use
5718 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5719 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5720 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5721 doesn't support PARALLEL anyway.
5722
5723 Note that for args passed by reference, function_arg will be called
5724 with MODE and TYPE set to that of the pointer to the arg, not the arg
5725 itself. */
4697a36c 5726
9390387d 5727rtx
f676971a 5728function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5729 tree type, int named)
4697a36c 5730{
4cc833b7 5731 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5732
a4f6c312
SS
5733 /* Return a marker to indicate whether CR1 needs to set or clear the
5734 bit that V.4 uses to say fp args were passed in registers.
5735 Assume that we don't need the marker for software floating point,
5736 or compiler generated library calls. */
4697a36c
MM
5737 if (mode == VOIDmode)
5738 {
f607bc57 5739 if (abi == ABI_V4
b9599e46 5740 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5741 && (cum->stdarg
5742 || (cum->nargs_prototype < 0
5743 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5744 {
a3170dc6
AH
5745 /* For the SPE, we need to crxor CR6 always. */
5746 if (TARGET_SPE_ABI)
5747 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5748 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5749 return GEN_INT (cum->call_cookie
5750 | ((cum->fregno == FP_ARG_MIN_REG)
5751 ? CALL_V4_SET_FP_ARGS
5752 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5753 }
4697a36c 5754
7509c759 5755 return GEN_INT (cum->call_cookie);
4697a36c
MM
5756 }
5757
0b5383eb
DJ
5758 if (rs6000_darwin64_abi && mode == BLKmode
5759 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5760 {
0b5383eb 5761 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5762 if (rslt != NULL_RTX)
5763 return rslt;
5764 /* Else fall through to usual handling. */
5765 }
5766
2858f73a 5767 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5768 if (TARGET_64BIT && ! cum->prototype)
5769 {
c4ad648e
AM
5770 /* Vector parameters get passed in vector register
5771 and also in GPRs or memory, in absence of prototype. */
5772 int align_words;
5773 rtx slot;
5774 align_words = (cum->words + 1) & ~1;
5775
5776 if (align_words >= GP_ARG_NUM_REG)
5777 {
5778 slot = NULL_RTX;
5779 }
5780 else
5781 {
5782 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5783 }
5784 return gen_rtx_PARALLEL (mode,
5785 gen_rtvec (2,
5786 gen_rtx_EXPR_LIST (VOIDmode,
5787 slot, const0_rtx),
5788 gen_rtx_EXPR_LIST (VOIDmode,
5789 gen_rtx_REG (mode, cum->vregno),
5790 const0_rtx)));
c72d6c26
HP
5791 }
5792 else
5793 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5794 else if (TARGET_ALTIVEC_ABI
5795 && (ALTIVEC_VECTOR_MODE (mode)
5796 || (type && TREE_CODE (type) == VECTOR_TYPE
5797 && int_size_in_bytes (type) == 16)))
0ac081f6 5798 {
2858f73a 5799 if (named || abi == ABI_V4)
a594a19c 5800 return NULL_RTX;
0ac081f6 5801 else
a594a19c
GK
5802 {
5803 /* Vector parameters to varargs functions under AIX or Darwin
5804 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5805 int align, align_words, n_words;
5806 enum machine_mode part_mode;
a594a19c
GK
5807
5808 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5809 2 mod 4 in terms of words in 32-bit mode, since the parameter
5810 save area starts at offset 24 from the stack. In 64-bit mode,
5811 they just have to start on an even word, since the parameter
5812 save area is 16-byte aligned. */
5813 if (TARGET_32BIT)
4ed78545 5814 align = (2 - cum->words) & 3;
2858f73a
GK
5815 else
5816 align = cum->words & 1;
a594a19c
GK
5817 align_words = cum->words + align;
5818
5819 /* Out of registers? Memory, then. */
5820 if (align_words >= GP_ARG_NUM_REG)
5821 return NULL_RTX;
ec6376ab
AM
5822
5823 if (TARGET_32BIT && TARGET_POWERPC64)
5824 return rs6000_mixed_function_arg (mode, type, align_words);
5825
2858f73a
GK
5826 /* The vector value goes in GPRs. Only the part of the
5827 value in GPRs is reported here. */
ec6376ab
AM
5828 part_mode = mode;
5829 n_words = rs6000_arg_size (mode, type);
5830 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5831 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
5832 is either wholly in GPRs or half in GPRs and half not. */
5833 part_mode = DImode;
ec6376ab
AM
5834
5835 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5836 }
0ac081f6 5837 }
f82f556d
AH
5838 else if (TARGET_SPE_ABI && TARGET_SPE
5839 && (SPE_VECTOR_MODE (mode)
18f63bfa 5840 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5841 || mode == DDmode
17caeff2
JM
5842 || mode == DCmode
5843 || mode == TFmode
7393f7f8 5844 || mode == TDmode
17caeff2 5845 || mode == TCmode))))
a6c9bed4 5846 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5847
f607bc57 5848 else if (abi == ABI_V4)
4697a36c 5849 {
a3170dc6 5850 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5851 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5852 || (mode == TFmode && !TARGET_IEEEQUAD)
5853 || mode == DDmode || mode == TDmode))
4cc833b7 5854 {
2d83f070
JJ
5855 /* _Decimal128 must use an even/odd register pair. This assumes
5856 that the register number is odd when fregno is odd. */
5857 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5858 cum->fregno++;
5859
5860 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5861 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5862 return gen_rtx_REG (mode, cum->fregno);
5863 else
b78d48dd 5864 return NULL_RTX;
4cc833b7
RH
5865 }
5866 else
5867 {
b2d04ecf 5868 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5869 int gregno = cum->sysv_gregno;
5870
4ed78545
AM
5871 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5872 (r7,r8) or (r9,r10). As does any other 2 word item such
5873 as complex int due to a historical mistake. */
5874 if (n_words == 2)
5875 gregno += (1 - gregno) & 1;
4cc833b7 5876
4ed78545 5877 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5878 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5879 return NULL_RTX;
ec6376ab
AM
5880
5881 if (TARGET_32BIT && TARGET_POWERPC64)
5882 return rs6000_mixed_function_arg (mode, type,
5883 gregno - GP_ARG_MIN_REG);
5884 return gen_rtx_REG (mode, gregno);
4cc833b7 5885 }
4697a36c 5886 }
4cc833b7
RH
5887 else
5888 {
294bd182 5889 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5890
2d83f070
JJ
5891 /* _Decimal128 must be passed in an even/odd float register pair.
5892 This assumes that the register number is odd when fregno is odd. */
5893 if (mode == TDmode && (cum->fregno % 2) == 1)
5894 cum->fregno++;
5895
2858f73a 5896 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5897 {
ec6376ab
AM
5898 rtx rvec[GP_ARG_NUM_REG + 1];
5899 rtx r;
5900 int k;
c53bdcf5
AM
5901 bool needs_psave;
5902 enum machine_mode fmode = mode;
c53bdcf5
AM
5903 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5904
5905 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5906 {
c53bdcf5
AM
5907 /* Currently, we only ever need one reg here because complex
5908 doubles are split. */
7393f7f8
BE
5909 gcc_assert (cum->fregno == FP_ARG_MAX_REG
5910 && (fmode == TFmode || fmode == TDmode));
ec6376ab 5911
7393f7f8
BE
5912 /* Long double or _Decimal128 split over regs and memory. */
5913 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 5914 }
c53bdcf5
AM
5915
5916 /* Do we also need to pass this arg in the parameter save
5917 area? */
5918 needs_psave = (type
5919 && (cum->nargs_prototype <= 0
5920 || (DEFAULT_ABI == ABI_AIX
de17c25f 5921 && TARGET_XL_COMPAT
c53bdcf5
AM
5922 && align_words >= GP_ARG_NUM_REG)));
5923
5924 if (!needs_psave && mode == fmode)
ec6376ab 5925 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5926
ec6376ab 5927 k = 0;
c53bdcf5
AM
5928 if (needs_psave)
5929 {
ec6376ab 5930 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5931 This piece must come first, before the fprs. */
c53bdcf5
AM
5932 if (align_words < GP_ARG_NUM_REG)
5933 {
5934 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5935
5936 if (align_words + n_words > GP_ARG_NUM_REG
5937 || (TARGET_32BIT && TARGET_POWERPC64))
5938 {
5939 /* If this is partially on the stack, then we only
5940 include the portion actually in registers here. */
5941 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5942 rtx off;
79773478
AM
5943 int i = 0;
5944 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
5945 /* Not all of the arg fits in gprs. Say that it
5946 goes in memory too, using a magic NULL_RTX
5947 component. Also see comment in
5948 rs6000_mixed_function_arg for why the normal
5949 function_arg_partial_nregs scheme doesn't work
5950 in this case. */
5951 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5952 const0_rtx);
ec6376ab
AM
5953 do
5954 {
5955 r = gen_rtx_REG (rmode,
5956 GP_ARG_MIN_REG + align_words);
2e6c9641 5957 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5958 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5959 }
5960 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5961 }
5962 else
5963 {
5964 /* The whole arg fits in gprs. */
5965 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5966 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5967 }
c53bdcf5 5968 }
ec6376ab
AM
5969 else
5970 /* It's entirely in memory. */
5971 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5972 }
5973
ec6376ab
AM
5974 /* Describe where this piece goes in the fprs. */
5975 r = gen_rtx_REG (fmode, cum->fregno);
5976 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5977
5978 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5979 }
5980 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5981 {
ec6376ab
AM
5982 if (TARGET_32BIT && TARGET_POWERPC64)
5983 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5984
4eeca74f
AM
5985 if (mode == BLKmode)
5986 mode = Pmode;
5987
b2d04ecf
AM
5988 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5989 }
4cc833b7
RH
5990 else
5991 return NULL_RTX;
4697a36c 5992 }
4697a36c
MM
5993}
5994\f
ec6376ab 5995/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5996 the number of bytes passed in registers. For args passed entirely in
5997 registers or entirely in memory, zero. When an arg is described by a
5998 PARALLEL, perhaps using more than one register type, this function
5999 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6000
78a52f11
RH
6001static int
6002rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6003 tree type, bool named)
4697a36c 6004{
c53bdcf5 6005 int ret = 0;
ec6376ab 6006 int align_words;
c53bdcf5 6007
f607bc57 6008 if (DEFAULT_ABI == ABI_V4)
4697a36c 6009 return 0;
4697a36c 6010
c53bdcf5
AM
6011 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6012 && cum->nargs_prototype >= 0)
6013 return 0;
6014
0b5383eb
DJ
6015 /* In this complicated case we just disable the partial_nregs code. */
6016 if (rs6000_darwin64_abi && mode == BLKmode
6017 && TREE_CODE (type) == RECORD_TYPE
6018 && int_size_in_bytes (type) > 0)
6019 return 0;
6020
294bd182 6021 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6022
79773478
AM
6023 if (USE_FP_FOR_ARG_P (cum, mode, type))
6024 {
fb63c729
AM
6025 /* If we are passing this arg in the fixed parameter save area
6026 (gprs or memory) as well as fprs, then this function should
79773478
AM
6027 return the number of partial bytes passed in the parameter
6028 save area rather than partial bytes passed in fprs. */
6029 if (type
6030 && (cum->nargs_prototype <= 0
6031 || (DEFAULT_ABI == ABI_AIX
6032 && TARGET_XL_COMPAT
6033 && align_words >= GP_ARG_NUM_REG)))
6034 return 0;
6035 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6036 > FP_ARG_MAX_REG + 1)
ac7e839c 6037 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6038 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6039 return 0;
6040 }
6041
ec6376ab
AM
6042 if (align_words < GP_ARG_NUM_REG
6043 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6044 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6045
c53bdcf5 6046 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6047 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6048
c53bdcf5 6049 return ret;
4697a36c
MM
6050}
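/* Example: a 6-word aggregate whose first word falls at align_words == 6
   gets only GP_ARG_NUM_REG - 6 = 2 of its words in r9/r10, so this
   returns 8 bytes on a 32-bit target (16 on a 64-bit one); the rest of
   the argument lives in the parameter save area.  */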
6051\f
6052/* A C expression that indicates when an argument must be passed by
6053 reference. If nonzero for an argument, a copy of that argument is
6054 made in memory and a pointer to the argument is passed instead of
6055 the argument itself. The pointer is passed in whatever way is
6056 appropriate for passing a pointer to that type.
6057
b2d04ecf
AM
6058 Under V.4, aggregates and long double are passed by reference.
6059
6060 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6061 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6062
6063 As an extension to all ABIs, variable sized types are passed by
6064 reference. */
4697a36c 6065
8cd5a4e0 6066static bool
f676971a 6067rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6068 enum machine_mode mode, const_tree type,
bada2eb8 6069 bool named ATTRIBUTE_UNUSED)
4697a36c 6070{
602ea4d3 6071 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6072 {
6073 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6074 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6075 return 1;
6076 }
6077
6078 if (!type)
6079 return 0;
4697a36c 6080
bada2eb8
DE
6081 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6082 {
6083 if (TARGET_DEBUG_ARG)
6084 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6085 return 1;
6086 }
6087
6088 if (int_size_in_bytes (type) < 0)
6089 {
6090 if (TARGET_DEBUG_ARG)
6091 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6092 return 1;
6093 }
6094
6095 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6096 modes only exist for GCC vector types if -maltivec. */
6097 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6098 {
6099 if (TARGET_DEBUG_ARG)
6100 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6101 return 1;
6102 }
b693336b
PB
6103
6104 /* Pass synthetic vectors in memory. */
bada2eb8 6105 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6106 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6107 {
6108 static bool warned_for_pass_big_vectors = false;
6109 if (TARGET_DEBUG_ARG)
6110 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6111 if (!warned_for_pass_big_vectors)
6112 {
d4ee4d25 6113 warning (0, "GCC vector passed by reference: "
b693336b
PB
6114 "non-standard ABI extension with no compatibility guarantee");
6115 warned_for_pass_big_vectors = true;
6116 }
6117 return 1;
6118 }
6119
b2d04ecf 6120 return 0;
4697a36c 6121}
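/* In practice this means that under the 32-bit SVR4 ABI all aggregates,
   and TFmode long double when TARGET_IEEEQUAD, are passed by reference,
   while variable-sized types and over-size synthetic vectors are passed
   by reference under every ABI, as the checks above spell out.  */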
5985c7a6
FJ
6122
6123static void
2d9db8eb 6124rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6125{
6126 int i;
6127 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6128
6129 if (nregs == 0)
6130 return;
6131
c4ad648e 6132 for (i = 0; i < nregs; i++)
5985c7a6 6133 {
9390387d 6134 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6135 if (reload_completed)
c4ad648e
AM
6136 {
6137 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6138 tem = NULL_RTX;
6139 else
6140 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6141 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6142 }
5985c7a6
FJ
6143 else
6144 tem = replace_equiv_address (tem, XEXP (tem, 0));
6145
37409796 6146 gcc_assert (tem);
5985c7a6
FJ
6147
6148 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6149 }
6150}
4697a36c
MM
6151\f
6152/* Perform any needed actions needed for a function that is receiving a
f676971a 6153 variable number of arguments.
4697a36c
MM
6154
6155 CUM is as above.
6156
6157 MODE and TYPE are the mode and type of the current parameter.
6158
6159 PRETEND_SIZE is a variable that should be set to the amount of stack
6160 that must be pushed by the prolog to pretend that our caller pushed
6161 it.
6162
6163 Normally, this macro will push all remaining incoming registers on the
6164 stack and set PRETEND_SIZE to the length of the registers pushed. */
6165
c6e8c921 6166static void
f676971a 6167setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6168 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6169 int no_rtl)
4697a36c 6170{
4cc833b7
RH
6171 CUMULATIVE_ARGS next_cum;
6172 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6173 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6174 int first_reg_offset;
6175 alias_set_type set;
4697a36c 6176
f31bf321 6177 /* Skip the last named argument. */
d34c5b80 6178 next_cum = *cum;
594a51fe 6179 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6180
f607bc57 6181 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6182 {
5b667039
JJ
6183 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6184
60e2d0ca 6185 if (! no_rtl)
5b667039
JJ
6186 {
6187 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6188 HOST_WIDE_INT offset = 0;
6189
6190 /* Try to optimize the size of the varargs save area.
6191 The ABI requires that ap.reg_save_area is doubleword
6192 aligned, but we don't need to allocate space for all
6193 the bytes, only those to which we actually will save
6194 anything. */
6195 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6196 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6197 if (TARGET_HARD_FLOAT && TARGET_FPRS
6198 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6199 && cfun->va_list_fpr_size)
6200 {
6201 if (gpr_reg_num)
6202 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6203 * UNITS_PER_FP_WORD;
6204 if (cfun->va_list_fpr_size
6205 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6206 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6207 else
6208 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6209 * UNITS_PER_FP_WORD;
6210 }
6211 if (gpr_reg_num)
6212 {
6213 offset = -((first_reg_offset * reg_size) & ~7);
6214 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6215 {
6216 gpr_reg_num = cfun->va_list_gpr_size;
6217 if (reg_size == 4 && (first_reg_offset & 1))
6218 gpr_reg_num++;
6219 }
6220 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6221 }
6222 else if (fpr_size)
6223 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6224 * UNITS_PER_FP_WORD
6225 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6226
5b667039
JJ
6227 if (gpr_size + fpr_size)
6228 {
6229 rtx reg_save_area
6230 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6231 gcc_assert (GET_CODE (reg_save_area) == MEM);
6232 reg_save_area = XEXP (reg_save_area, 0);
6233 if (GET_CODE (reg_save_area) == PLUS)
6234 {
6235 gcc_assert (XEXP (reg_save_area, 0)
6236 == virtual_stack_vars_rtx);
6237 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6238 offset += INTVAL (XEXP (reg_save_area, 1));
6239 }
6240 else
6241 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6242 }
6243
6244 cfun->machine->varargs_save_offset = offset;
6245 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6246 }
4697a36c 6247 }
60e2d0ca 6248 else
4697a36c 6249 {
d34c5b80 6250 first_reg_offset = next_cum.words;
4cc833b7 6251 save_area = virtual_incoming_args_rtx;
4697a36c 6252
fe984136 6253 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6254 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6255 }
4697a36c 6256
dfafc897 6257 set = get_varargs_alias_set ();
9d30f3c1
JJ
6258 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6259 && cfun->va_list_gpr_size)
4cc833b7 6260 {
9d30f3c1
JJ
6261 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6262
6263 if (va_list_gpr_counter_field)
6264 {
6265 /* V4 va_list_gpr_size counts number of registers needed. */
6266 if (nregs > cfun->va_list_gpr_size)
6267 nregs = cfun->va_list_gpr_size;
6268 }
6269 else
6270 {
6271 /* char * va_list instead counts number of bytes needed. */
6272 if (nregs > cfun->va_list_gpr_size / reg_size)
6273 nregs = cfun->va_list_gpr_size / reg_size;
6274 }
6275
dfafc897 6276 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6277 plus_constant (save_area,
13e2e16e
DE
6278 first_reg_offset * reg_size));
6279 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6280 set_mem_alias_set (mem, set);
8ac61af7 6281 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6282
f676971a 6283 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6284 nregs);
4697a36c
MM
6285 }
6286
4697a36c 6287 /* Save FP registers if needed. */
f607bc57 6288 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6289 && TARGET_HARD_FLOAT && TARGET_FPRS
6290 && ! no_rtl
9d30f3c1
JJ
6291 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6292 && cfun->va_list_fpr_size)
4697a36c 6293 {
9d30f3c1 6294 int fregno = next_cum.fregno, nregs;
9ebbca7d 6295 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6296 rtx lab = gen_label_rtx ();
5b667039
JJ
6297 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6298 * UNITS_PER_FP_WORD);
4697a36c 6299
c4ad648e
AM
6300 emit_jump_insn
6301 (gen_rtx_SET (VOIDmode,
6302 pc_rtx,
6303 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6304 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6305 const0_rtx),
39403d82 6306 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6307 pc_rtx)));
6308
9d30f3c1
JJ
6309 for (nregs = 0;
6310 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6311 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6312 {
5496b36f 6313 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6314 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6315 set_mem_alias_set (mem, set);
94ff898d 6316 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6317 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6318 }
4cc833b7
RH
6319
6320 emit_label (lab);
4697a36c 6321 }
4697a36c 6322}
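/* Editor's sketch (not original source) of the V4 register save area
   set up above, assuming 32-bit V4 (reg_size == 4, UNITS_PER_FP_WORD
   == 8) and anonymous arguments starting at r3/f1:

       save_area +  0 .. +31 : r3 .. r10   (GP_ARG_NUM_REG * reg_size)
       save_area + 32 .. +95 : f1 .. f8    (written only when the CR1
                                            bit the V4 ABI uses to flag
                                            FP args in registers is set)

   gpr_size is rounded up to a multiple of 8, and varargs_save_offset
   records where the block sits relative to virtual_stack_vars_rtx.  */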
4697a36c 6323
dfafc897 6324/* Create the va_list data type. */
2c4974b7 6325
c35d187f
RH
6326static tree
6327rs6000_build_builtin_va_list (void)
dfafc897 6328{
64c2816f 6329 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6330
9ebbca7d
GK
6331 /* For AIX, prefer 'char *' because that's what the system
6332 header files like. */
f607bc57 6333 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6334 return build_pointer_type (char_type_node);
dfafc897 6335
f1e639b1 6336 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6337 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6338
f676971a 6339 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6340 unsigned_char_type_node);
f676971a 6341 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6342 unsigned_char_type_node);
64c2816f
DT
6343 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6344 every user file. */
6345 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6346 short_unsigned_type_node);
dfafc897
FS
6347 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6348 ptr_type_node);
6349 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6350 ptr_type_node);
6351
9d30f3c1
JJ
6352 va_list_gpr_counter_field = f_gpr;
6353 va_list_fpr_counter_field = f_fpr;
6354
dfafc897
FS
6355 DECL_FIELD_CONTEXT (f_gpr) = record;
6356 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6357 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6358 DECL_FIELD_CONTEXT (f_ovf) = record;
6359 DECL_FIELD_CONTEXT (f_sav) = record;
6360
bab45a51
FS
6361 TREE_CHAIN (record) = type_decl;
6362 TYPE_NAME (record) = type_decl;
dfafc897
FS
6363 TYPE_FIELDS (record) = f_gpr;
6364 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6365 TREE_CHAIN (f_fpr) = f_res;
6366 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6367 TREE_CHAIN (f_ovf) = f_sav;
6368
6369 layout_type (record);
6370
6371 /* The correct type is an array type of one element. */
6372 return build_array_type (record, build_index_type (size_zero_node));
6373}
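/* Editor's sketch (not original source) of the user-visible layout this
   builds for the V4 ABI; field names and types match the FIELD_DECLs
   created above:

       struct __va_list_tag {
         unsigned char  gpr;            // GPR slots already consumed
         unsigned char  fpr;            // FPR slots already consumed
         unsigned short reserved;       // padding, named to quiet -Wpadded
         void *overflow_arg_area;       // stack-passed arguments
         void *reg_save_area;           // block written at function entry
       };

   va_list itself is a one-element array of this record, while the
   non-V4 ABIs simply use 'char *'.  */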
6374
6375/* Implement va_start. */
6376
6377void
a2369ed3 6378rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6379{
dfafc897 6380 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6381 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6382 tree gpr, fpr, ovf, sav, t;
2c4974b7 6383
dfafc897 6384 /* Only SVR4 needs something special. */
f607bc57 6385 if (DEFAULT_ABI != ABI_V4)
dfafc897 6386 {
e5faf155 6387 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6388 return;
6389 }
6390
973a648b 6391 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6392 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6393 f_res = TREE_CHAIN (f_fpr);
6394 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6395 f_sav = TREE_CHAIN (f_ovf);
6396
872a65b5 6397 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6398 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6399 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6400 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6401 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6402
6403 /* Count number of gp and fp argument registers used. */
4cc833b7 6404 words = current_function_args_info.words;
987732e0
DE
6405 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6406 GP_ARG_NUM_REG);
6407 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6408 FP_ARG_NUM_REG);
dfafc897
FS
6409
6410 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6411 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6412 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6413 words, n_gpr, n_fpr);
dfafc897 6414
9d30f3c1
JJ
6415 if (cfun->va_list_gpr_size)
6416 {
07beea0d 6417 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6418 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6419 TREE_SIDE_EFFECTS (t) = 1;
6420 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6421 }
58c8adc1 6422
9d30f3c1
JJ
6423 if (cfun->va_list_fpr_size)
6424 {
07beea0d 6425 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6426 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6427 TREE_SIDE_EFFECTS (t) = 1;
6428 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6429 }
dfafc897
FS
6430
6431 /* Find the overflow area. */
6432 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6433 if (words != 0)
5be014d5
AP
6434 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6435 size_int (words * UNITS_PER_WORD));
07beea0d 6436 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6437 TREE_SIDE_EFFECTS (t) = 1;
6438 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6439
9d30f3c1
JJ
6440 /* If there were no va_arg invocations, don't set up the register
6441 save area. */
6442 if (!cfun->va_list_gpr_size
6443 && !cfun->va_list_fpr_size
6444 && n_gpr < GP_ARG_NUM_REG
6445 && n_fpr < FP_ARG_V4_MAX_REG)
6446 return;
6447
dfafc897
FS
6448 /* Find the register save area. */
6449 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6450 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6451 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6452 size_int (cfun->machine->varargs_save_offset));
07beea0d 6453 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6454 TREE_SIDE_EFFECTS (t) = 1;
6455 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6456}
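/* In C terms the trees emitted above amount to roughly the following
   (editor's sketch; 'ap' names the single __va_list_tag element, and
   __incoming_args/__stack_vars stand in for the virtual registers):

       ap->gpr = n_gpr;                      // only if va_list_gpr_size
       ap->fpr = n_fpr;                      // only if va_list_fpr_size
       ap->overflow_arg_area = __incoming_args + words * UNITS_PER_WORD;
       ap->reg_save_area     = __stack_vars
                               + cfun->machine->varargs_save_offset;

   with the reg_save_area assignment skipped entirely when no va_arg
   ever reads the saved registers.  */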
6457
6458/* Implement va_arg. */
6459
23a60a04
JM
6460tree
6461rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6462{
cd3ce9b4
JM
6463 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6464 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6465 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6466 tree lab_false, lab_over, addr;
6467 int align;
6468 tree ptrtype = build_pointer_type (type);
7393f7f8 6469 int regalign = 0;
cd3ce9b4 6470
08b0dc1b
RH
6471 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6472 {
6473 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6474 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6475 }
6476
cd3ce9b4
JM
6477 if (DEFAULT_ABI != ABI_V4)
6478 {
08b0dc1b 6479 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6480 {
6481 tree elem_type = TREE_TYPE (type);
6482 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6483 int elem_size = GET_MODE_SIZE (elem_mode);
6484
6485 if (elem_size < UNITS_PER_WORD)
6486 {
23a60a04 6487 tree real_part, imag_part;
cd3ce9b4
JM
6488 tree post = NULL_TREE;
6489
23a60a04
JM
6490 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6491 &post);
6492 /* Copy the value into a temporary, lest the formal temporary
6493 be reused out from under us. */
6494 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6495 append_to_statement_list (post, pre_p);
6496
23a60a04
JM
6497 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6498 post_p);
cd3ce9b4 6499
47a25a46 6500 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6501 }
6502 }
6503
23a60a04 6504 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6505 }
6506
6507 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6508 f_fpr = TREE_CHAIN (f_gpr);
6509 f_res = TREE_CHAIN (f_fpr);
6510 f_ovf = TREE_CHAIN (f_res);
6511 f_sav = TREE_CHAIN (f_ovf);
6512
872a65b5 6513 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6514 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6515 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6516 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6517 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6518
6519 size = int_size_in_bytes (type);
6520 rsize = (size + 3) / 4;
6521 align = 1;
6522
08b0dc1b 6523 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6524 && (TYPE_MODE (type) == SFmode
6525 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6526 || TYPE_MODE (type) == TFmode
6527 || TYPE_MODE (type) == DDmode
6528 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6529 {
6530 /* FP args go in FP registers, if present. */
cd3ce9b4 6531 reg = fpr;
602ea4d3 6532 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6533 sav_ofs = 8*4;
6534 sav_scale = 8;
602ea4d3 6535 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6536 align = 8;
6537 }
6538 else
6539 {
6540 /* Otherwise into GP registers. */
cd3ce9b4
JM
6541 reg = gpr;
6542 n_reg = rsize;
6543 sav_ofs = 0;
6544 sav_scale = 4;
6545 if (n_reg == 2)
6546 align = 8;
6547 }
6548
6549 /* Pull the value out of the saved registers.... */
6550
6551 lab_over = NULL;
6552 addr = create_tmp_var (ptr_type_node, "addr");
6553 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6554
6555 /* AltiVec vectors never go in registers when -mabi=altivec. */
6556 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6557 align = 16;
6558 else
6559 {
6560 lab_false = create_artificial_label ();
6561 lab_over = create_artificial_label ();
6562
6563 /* Long long and SPE vectors are aligned in the registers,
6564 as is any other 2-gpr item such as complex int, due to a
6565 historical mistake. */
6566 u = reg;
602ea4d3 6567 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6568 {
7393f7f8 6569 regalign = 1;
cd3ce9b4 6570 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6571 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6572 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6573 }
7393f7f8
BE
6574 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6575 reg number is 0 for f1, so we want to make it odd. */
6576 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6577 {
6578 regalign = 1;
6579 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6580 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6581 }
cd3ce9b4 6582
95674810 6583 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6584 t = build2 (GE_EXPR, boolean_type_node, u, t);
6585 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6586 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6587 gimplify_and_add (t, pre_p);
6588
6589 t = sav;
6590 if (sav_ofs)
5be014d5 6591 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6592
8fb632eb
ZD
6593 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6594 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6595 u = fold_convert (sizetype, u);
6596 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6597 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6598
07beea0d 6599 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6600 gimplify_and_add (t, pre_p);
6601
6602 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6603 gimplify_and_add (t, pre_p);
6604
6605 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6606 append_to_statement_list (t, pre_p);
6607
7393f7f8 6608 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6609 {
6610 /* Ensure that we don't find any more args in regs.
7393f7f8 6611 Alignment has already taken care of the special cases. */
07beea0d 6612 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6613 gimplify_and_add (t, pre_p);
6614 }
6615 }
6616
6617 /* ... otherwise out of the overflow area. */
6618
6619 /* Care for on-stack alignment if needed. */
6620 t = ovf;
6621 if (align != 1)
6622 {
5be014d5
AP
6623 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6624 t = fold_convert (sizetype, t);
4a90aeeb 6625 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6626 size_int (-align));
6627 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6628 }
6629 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6630
07beea0d 6631 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6632 gimplify_and_add (u, pre_p);
6633
5be014d5 6634 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6635 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6636 gimplify_and_add (t, pre_p);
6637
6638 if (lab_over)
6639 {
6640 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6641 append_to_statement_list (t, pre_p);
6642 }
6643
0cfbc62b
JM
6644 if (STRICT_ALIGNMENT
6645 && (TYPE_ALIGN (type)
6646 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6647 {
6648 /* The value (of type complex double, for example) may not be
6649 aligned in memory in the saved registers, so copy via a
6650 temporary. (This is the same code as used for SPARC.) */
6651 tree tmp = create_tmp_var (type, "va_arg_tmp");
6652 tree dest_addr = build_fold_addr_expr (tmp);
6653
5039610b
SL
6654 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6655 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6656
6657 gimplify_and_add (copy, pre_p);
6658 addr = dest_addr;
6659 }
6660
08b0dc1b 6661 addr = fold_convert (ptrtype, addr);
872a65b5 6662 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6663}
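/* Editor's condensed sketch of the V4 va_arg path gimplified above, for
   a scalar type T that is neither pass-by-reference nor AltiVec:

       n_reg = fp_type ? (size + 7) / 8 : (size + 3) / 4;
       scale = fp_type ? 8 : 4;
       base  = fp_type ? ap->reg_save_area + 32 : ap->reg_save_area;
       count = fp_type ? ap->fpr : ap->gpr;   // first bumped to an even
                                              // (or, for TDmode, odd)
                                              // register where required
       if (count + n_reg <= 8)                // still fits in saved regs
         {
           addr = base + count * scale;
           count += n_reg;
         }
       else
         {
           count = 8;                         // no more register args
           ovf  = round_up (ap->overflow_arg_area, align);
           addr = ovf;
           ap->overflow_arg_area = ovf + size;
         }
       return *(T *) addr;

   Misaligned aggregates are additionally copied through a temporary
   when STRICT_ALIGNMENT demands it.  */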
6664
0ac081f6
AH
6665/* Builtins. */
6666
58646b77
PB
6667static void
6668def_builtin (int mask, const char *name, tree type, int code)
6669{
96038623 6670 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6671 {
6672 if (rs6000_builtin_decls[code])
6673 abort ();
6674
6675 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6676 add_builtin_function (name, type, code, BUILT_IN_MD,
6677 NULL, NULL_TREE);
58646b77
PB
6678 }
6679}
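/* Illustrative call (editor's sketch; the real calls are made from the
   *_init_builtins routines elsewhere in this file, and the ftype name
   below is only a stand-in):

       def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
                    v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   The builtin is registered only when its mask is present in
   target_flags (or paired-single float is enabled), and registering
   the same code twice aborts.  */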
0ac081f6 6680
24408032
AH
6681/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6682
2212663f 6683static const struct builtin_description bdesc_3arg[] =
24408032
AH
6684{
6685 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6686 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6687 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6688 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6689 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6690 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6691 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6692 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6693 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6694 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6695 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6696 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6697 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6698 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6699 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6700 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6701 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6702 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6703 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6704 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6705 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6706 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6707 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6708
6709 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6710 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6711 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6712 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6713 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6714 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6715 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6716 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6717 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6718 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6719 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6720 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6721 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6722 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6723 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
6724
6725 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
6726 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
6727 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
6728 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
6729 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
6730 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
6731 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
6732 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 6733 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 6734};
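/* User-level view of one ternary entry above (editor's sketch; any
   AltiVec-enabled compile with -maltivec and <altivec.h> accepts it):

       #include <altivec.h>

       vector float
       fmadd4 (vector float a, vector float b, vector float c)
       {
         return vec_madd (a, b, c);   // resolves to __builtin_altivec_vmaddfp
       }
*/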
2212663f 6735
95385cbb
AH
6736/* DST operations: void foo (void *, const int, const char). */
6737
6738static const struct builtin_description bdesc_dst[] =
6739{
6740 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6741 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6742 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6743 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6744
6745 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6746 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6747 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6748 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6749};
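/* Illustrative use of the data-stream-touch entries above (editor's
   sketch; 'ctl' encodes the usual block size/count/stride control
   word):

       vec_dst (src, ctl, 0);   // start prefetch stream 0 from src
*/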
6750
2212663f 6751/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6752
a3170dc6 6753static struct builtin_description bdesc_2arg[] =
0ac081f6 6754{
f18c054f
DB
6755 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6756 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6757 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6758 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6759 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6760 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6761 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6762 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6763 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6764 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6765 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6766 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6767 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6768 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6769 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6770 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6771 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6772 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6773 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6774 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6775 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6776 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6777 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6778 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6779 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6780 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6781 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6782 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6783 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6784 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6785 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6786 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6787 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6788 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6789 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6790 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6791 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6792 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6793 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6794 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6795 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6796 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6797 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6798 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6799 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6800 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6801 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6802 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6803 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6804 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6805 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6806 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6807 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6808 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6809 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6810 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6811 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6812 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6813 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6814 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6815 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6816 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6817 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6818 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6819 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6820 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6821 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6822 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6823 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6824 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6825 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6826 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6827 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6828 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6829 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6830 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6831 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6832 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6833 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6834 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6835 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6836 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6837 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6838 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6839 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6840 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6841 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6842 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6843 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6844 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6845 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6846 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6847 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6848 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6849 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6850 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6851 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6852 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6853 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6854 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6855 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6856 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6857 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6858 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6859 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6860 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6861 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6862 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6863 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6864 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6865 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6866
58646b77
PB
6867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6880 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6881 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6882 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6883 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6884 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6885 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6886 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6887 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6888 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6889 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6890 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6891 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6892 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6893 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6894 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6895 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6896 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6897 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6898 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6899 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6900 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6901 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6902 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6903 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6904 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6905 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6906 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6907 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6908 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6909 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6910 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6911 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6912 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6913 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6914 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6915 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6916 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6917 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6918 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6919 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6920 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6921 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6922 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6923 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6924 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6925 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6926 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6927 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6928 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6929 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6930 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6931 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6932 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6933 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6934 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6935 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6936 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6937 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6938 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6939 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6940 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6941 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6942 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6943 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6944 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6945 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6946 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6947 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6948 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6949 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6950 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6951 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6952 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6953 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6954 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6955 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6956 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6957 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6958 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6959 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6960 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6961 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6962 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6963 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6964 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6965 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6966 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6967 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6968 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6969 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6970 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6971 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6972 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6973 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6974 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6975 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6976 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6977 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6978 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6979 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6980 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6981 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6982 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6983 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6984 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6985 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6986 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6987 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6988 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6989 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6990 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6991 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6992 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6993 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6994
96038623
DE
6995 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
6996 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
6997 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
6998 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
6999 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7000 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7001 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7002 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7003 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7004 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7005
a3170dc6
AH
7006 /* Place-holder. Leave as first SPE builtin. */
7007 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7008 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7009 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7010 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7011 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7012 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7013 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7014 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7015 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7016 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7017 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7018 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7019 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7020 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7021 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7022 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7023 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7024 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7025 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7026 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7027 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7028 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7029 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7030 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7031 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7032 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7033 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7034 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7035 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7036 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7037 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7038 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7039 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7040 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7041 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7042 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7043 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7044 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7045 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7046 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7047 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7048 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7049 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7050 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7051 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7052 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7053 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7054 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7055 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7056 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7057 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7058 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7059 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7060 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7061 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7062 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7063 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7064 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7065 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7066 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7067 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7068 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7069 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7070 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7071 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7072 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7073 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7074 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7075 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7076 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7077 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7078 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7079 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7080 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7081 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7082 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7083 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7084 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7085 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7086 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7087 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7088 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7089 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7090 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7091 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7092 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7093 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7094 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7095 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7096 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7097 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7098 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7099 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7100 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7101 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7102 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7103 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7104 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7105 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7106 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7107 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7108 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7109 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7110 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7111 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7112 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7113 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7114 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7115 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7116
7117 /* SPE binary operations expecting a 5-bit unsigned literal. */
7118 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7119
7120 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7121 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7122 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7123 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7124 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7125 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7126 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7127 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7128 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7129 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7130 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7131 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7132 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7133 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7134 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7135 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7136 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7137 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7138 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7139 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7140 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7141 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7142 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7143 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7144 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7145 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7146
7147 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7148 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7149};
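/* User-level view of a binary entry above (editor's sketch):

       #include <altivec.h>

       vector signed int
       add4 (vector signed int a, vector signed int b)
       {
         return vec_add (a, b);   // overload resolves to vadduwm here
       }
*/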
7150
7151/* AltiVec predicates. */
7152
7153struct builtin_description_predicates
7154{
7155 const unsigned int mask;
7156 const enum insn_code icode;
7157 const char *opcode;
7158 const char *const name;
7159 const enum rs6000_builtins code;
7160};
7161
7162static const struct builtin_description_predicates bdesc_altivec_preds[] =
7163{
7164 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7165 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7166 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7167 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7168 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7169 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7170 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7171 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7172 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7173 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7174 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7175 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7176 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7177
7178 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7179 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7180 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7181};
24408032 7182
a3170dc6
AH
7183/* SPE predicates. */
7184static struct builtin_description bdesc_spe_predicates[] =
7185{
7186 /* Place-holder. Leave as first. */
7187 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7188 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7189 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7190 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7191 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7192 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7193 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7194 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7195 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7196 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7197 /* Place-holder. Leave as last. */
7198 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7199};
7200
7201/* SPE evsel predicates. */
7202static struct builtin_description bdesc_spe_evsel[] =
7203{
7204 /* Place-holder. Leave as first. */
7205 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7206 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7207 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7208 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7209 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7210 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7211 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7212 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7213 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7214 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7215 /* Place-holder. Leave as last. */
7216 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7217};
7218
96038623
DE
7219/* PAIRED predicates. */
7220static const struct builtin_description bdesc_paired_preds[] =
7221{
7222 /* Place-holder. Leave as first. */
7223 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7224 /* Place-holder. Leave as last. */
7225 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7226};
7227
b6d08ca1 7228/* ABS* operations. */
100c4561
AH
7229
7230static const struct builtin_description bdesc_abs[] =
7231{
7232 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7233 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7234 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7235 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7236 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7237 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7238 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7239};
7240
617e0e1d
DB
7241/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7242 foo (VECa). */
24408032 7243
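/* For example, __builtin_altivec_vrefp below takes a vector operand
   ("VECb = foo (VECa)"), while __builtin_altivec_vspltisb takes a
   literal -- for the vspltis* and evsplat* entries the literal is in
   fact a 5-bit *signed* value, as enforced in
   rs6000_expand_unop_builtin.  */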
a3170dc6 7244static struct builtin_description bdesc_1arg[] =
2212663f 7245{
617e0e1d
DB
7246 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7247 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7248 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7249 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7250 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7251 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7252 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7253 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7254 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7255 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7256 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7257 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7258 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7259 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7260 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7261 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7262 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7263
58646b77
PB
7264 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7265 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7266 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7267 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7268 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7269 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7270 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7271 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7272 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7273 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7274 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7275 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7276 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7277 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7278 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7279 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7280 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7281 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7282 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7283
a3170dc6
AH
7284 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7285 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7286 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7287 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7288 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7289 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7290 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7291 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7292 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7293 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7294 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7295 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7296 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7297 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7298 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7299 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7300 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7301 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7302 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7303 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7304 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7305 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7306 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7307 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7308 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7309 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7310 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7311 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7312 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7313 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7314
7315 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7316 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7317
7318 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7319 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7320 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7321 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7322 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7323};
7324
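/* Expand a call EXP to a builtin that maps onto the single-operand
   insn pattern ICODE.  Check the 5-bit signed literal required by the
   vspltis* and evsplat* patterns, emit the insn, and return its
   result, in TARGET if convenient.  */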
7325static rtx
5039610b 7326rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7327{
7328 rtx pat;
5039610b 7329 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7330 rtx op0 = expand_normal (arg0);
2212663f
DB
7331 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7332 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7333
0559cc77
DE
7334 if (icode == CODE_FOR_nothing)
7335 /* Builtin not supported on this processor. */
7336 return 0;
7337
20e26713
AH
7338 /* If we got invalid arguments, bail out before generating bad rtl. */
7339 if (arg0 == error_mark_node)
9a171fcd 7340 return const0_rtx;
20e26713 7341
0559cc77
DE
7342 if (icode == CODE_FOR_altivec_vspltisb
7343 || icode == CODE_FOR_altivec_vspltish
7344 || icode == CODE_FOR_altivec_vspltisw
7345 || icode == CODE_FOR_spe_evsplatfi
7346 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7347 {
7348 /* Only allow 5-bit *signed* literals. */
b44140e7 7349 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7350 || INTVAL (op0) > 15
7351 || INTVAL (op0) < -16)
b44140e7
AH
7352 {
7353 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7354 return const0_rtx;
b44140e7 7355 }
b44140e7
AH
7356 }
7357
c62f2db5 7358 if (target == 0
2212663f
DB
7359 || GET_MODE (target) != tmode
7360 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7361 target = gen_reg_rtx (tmode);
7362
7363 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7364 op0 = copy_to_mode_reg (mode0, op0);
7365
7366 pat = GEN_FCN (icode) (target, op0);
7367 if (! pat)
7368 return 0;
7369 emit_insn (pat);
0ac081f6 7370
2212663f
DB
7371 return target;
7372}
ae4b4a02 7373
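/* Expand a call EXP to an AltiVec abs/abss builtin; the insn pattern
   ICODE takes two scratch registers in addition to the operand.  */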
100c4561 7374static rtx
5039610b 7375altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7376{
7377 rtx pat, scratch1, scratch2;
5039610b 7378 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7379 rtx op0 = expand_normal (arg0);
100c4561
AH
7380 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7381 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7382
7383 /* If we have invalid arguments, bail out before generating bad rtl. */
7384 if (arg0 == error_mark_node)
9a171fcd 7385 return const0_rtx;
100c4561
AH
7386
7387 if (target == 0
7388 || GET_MODE (target) != tmode
7389 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7390 target = gen_reg_rtx (tmode);
7391
7392 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7393 op0 = copy_to_mode_reg (mode0, op0);
7394
7395 scratch1 = gen_reg_rtx (mode0);
7396 scratch2 = gen_reg_rtx (mode0);
7397
7398 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7399 if (! pat)
7400 return 0;
7401 emit_insn (pat);
7402
7403 return target;
7404}
7405
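/* Expand a call EXP to a two-operand builtin with insn pattern ICODE.
   Check the 5-bit unsigned literal required as the second operand by
   the splat, shift and indexed-load patterns listed below.  */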
0ac081f6 7406static rtx
5039610b 7407rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7408{
7409 rtx pat;
5039610b
SL
7410 tree arg0 = CALL_EXPR_ARG (exp, 0);
7411 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7412 rtx op0 = expand_normal (arg0);
7413 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7414 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7415 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7416 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7417
0559cc77
DE
7418 if (icode == CODE_FOR_nothing)
7419 /* Builtin not supported on this processor. */
7420 return 0;
7421
20e26713
AH
7422 /* If we got invalid arguments, bail out before generating bad rtl. */
7423 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7424 return const0_rtx;
20e26713 7425
0559cc77
DE
7426 if (icode == CODE_FOR_altivec_vcfux
7427 || icode == CODE_FOR_altivec_vcfsx
7428 || icode == CODE_FOR_altivec_vctsxs
7429 || icode == CODE_FOR_altivec_vctuxs
7430 || icode == CODE_FOR_altivec_vspltb
7431 || icode == CODE_FOR_altivec_vsplth
7432 || icode == CODE_FOR_altivec_vspltw
7433 || icode == CODE_FOR_spe_evaddiw
7434 || icode == CODE_FOR_spe_evldd
7435 || icode == CODE_FOR_spe_evldh
7436 || icode == CODE_FOR_spe_evldw
7437 || icode == CODE_FOR_spe_evlhhesplat
7438 || icode == CODE_FOR_spe_evlhhossplat
7439 || icode == CODE_FOR_spe_evlhhousplat
7440 || icode == CODE_FOR_spe_evlwhe
7441 || icode == CODE_FOR_spe_evlwhos
7442 || icode == CODE_FOR_spe_evlwhou
7443 || icode == CODE_FOR_spe_evlwhsplat
7444 || icode == CODE_FOR_spe_evlwwsplat
7445 || icode == CODE_FOR_spe_evrlwi
7446 || icode == CODE_FOR_spe_evslwi
7447 || icode == CODE_FOR_spe_evsrwis
f5119d10 7448 || icode == CODE_FOR_spe_evsubifw
0559cc77 7449 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7450 {
7451 /* Only allow 5-bit unsigned literals. */
8bb418a3 7452 STRIP_NOPS (arg1);
b44140e7
AH
7453 if (TREE_CODE (arg1) != INTEGER_CST
7454 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7455 {
7456 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7457 return const0_rtx;
b44140e7 7458 }
b44140e7
AH
7459 }
7460
c62f2db5 7461 if (target == 0
0ac081f6
AH
7462 || GET_MODE (target) != tmode
7463 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7464 target = gen_reg_rtx (tmode);
7465
7466 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7467 op0 = copy_to_mode_reg (mode0, op0);
7468 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7469 op1 = copy_to_mode_reg (mode1, op1);
7470
7471 pat = GEN_FCN (icode) (target, op0, op1);
7472 if (! pat)
7473 return 0;
7474 emit_insn (pat);
7475
7476 return target;
7477}
6525c0e7 7478
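/* Expand a call EXP to an AltiVec predicate builtin.  ICODE is the
   compare pattern, OPCODE the mnemonic passed to it as a SYMBOL_REF,
   and the first argument of EXP selects which CR6 bit to test.  */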
ae4b4a02 7479static rtx
f676971a 7480altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7481 tree exp, rtx target)
ae4b4a02
AH
7482{
7483 rtx pat, scratch;
5039610b
SL
7484 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7485 tree arg0 = CALL_EXPR_ARG (exp, 1);
7486 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7487 rtx op0 = expand_normal (arg0);
7488 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7489 enum machine_mode tmode = SImode;
7490 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7491 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7492 int cr6_form_int;
7493
7494 if (TREE_CODE (cr6_form) != INTEGER_CST)
7495 {
7496 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7497 return const0_rtx;
ae4b4a02
AH
7498 }
7499 else
7500 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7501
37409796 7502 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7503
7504 /* If we have invalid arguments, bail out before generating bad rtl. */
7505 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7506 return const0_rtx;
ae4b4a02
AH
7507
7508 if (target == 0
7509 || GET_MODE (target) != tmode
7510 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7511 target = gen_reg_rtx (tmode);
7512
7513 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7514 op0 = copy_to_mode_reg (mode0, op0);
7515 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7516 op1 = copy_to_mode_reg (mode1, op1);
7517
7518 scratch = gen_reg_rtx (mode0);
7519
7520 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7521 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7522 if (! pat)
7523 return 0;
7524 emit_insn (pat);
7525
7526 /* The vec_any* and vec_all* predicates use the same opcodes for two
7527 different operations, but the bits in CR6 will be different
7528 depending on what information we want. So we have to play tricks
7529 with CR6 to get the right bits out.
7530
7531 If you think this is disgusting, look at the specs for the
7532 AltiVec predicates. */
7533
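   /* For example (illustrative only -- the exact forms chosen by the
      <altivec.h> wrappers are not shown here), one "*vcmpequw."
      compare can answer both questions:

          all_eq = __builtin_altivec_vcmpequw_p (2, a, b);
          any_eq = __builtin_altivec_vcmpequw_p (1, a, b);

      where the first argument is the cr6_form decoded in the switch
      below (2 tests the "lt" bit, 1 the reversed "eq" bit).  */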
c4ad648e
AM
7534 switch (cr6_form_int)
7535 {
7536 case 0:
7537 emit_insn (gen_cr6_test_for_zero (target));
7538 break;
7539 case 1:
7540 emit_insn (gen_cr6_test_for_zero_reverse (target));
7541 break;
7542 case 2:
7543 emit_insn (gen_cr6_test_for_lt (target));
7544 break;
7545 case 3:
7546 emit_insn (gen_cr6_test_for_lt_reverse (target));
7547 break;
7548 default:
7549 error ("argument 1 of __builtin_altivec_predicate is out of range");
7550 break;
7551 }
ae4b4a02
AH
7552
7553 return target;
7554}
7555
96038623
DE
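/* Expand a call EXP to a PAIRED load builtin: form the address from
   the two arguments (adding them unless the first is zero) and load
   through insn pattern ICODE into TARGET.  */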
7556static rtx
7557paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7558{
7559 rtx pat, addr;
7560 tree arg0 = CALL_EXPR_ARG (exp, 0);
7561 tree arg1 = CALL_EXPR_ARG (exp, 1);
7562 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7563 enum machine_mode mode0 = Pmode;
7564 enum machine_mode mode1 = Pmode;
7565 rtx op0 = expand_normal (arg0);
7566 rtx op1 = expand_normal (arg1);
7567
7568 if (icode == CODE_FOR_nothing)
7569 /* Builtin not supported on this processor. */
7570 return 0;
7571
7572 /* If we got invalid arguments, bail out before generating bad rtl. */
7573 if (arg0 == error_mark_node || arg1 == error_mark_node)
7574 return const0_rtx;
7575
7576 if (target == 0
7577 || GET_MODE (target) != tmode
7578 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7579 target = gen_reg_rtx (tmode);
7580
7581 op1 = copy_to_mode_reg (mode1, op1);
7582
7583 if (op0 == const0_rtx)
7584 {
7585 addr = gen_rtx_MEM (tmode, op1);
7586 }
7587 else
7588 {
7589 op0 = copy_to_mode_reg (mode0, op0);
7590 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7591 }
7592
7593 pat = GEN_FCN (icode) (target, addr);
7594
7595 if (! pat)
7596 return 0;
7597 emit_insn (pat);
7598
7599 return target;
7600}
7601
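/* Likewise for the AltiVec lv* builtins: form the address from the
   two arguments of EXP and load through insn pattern ICODE into
   TARGET.  */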
b4a62fa0 7602static rtx
5039610b 7603altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7604{
7605 rtx pat, addr;
5039610b
SL
7606 tree arg0 = CALL_EXPR_ARG (exp, 0);
7607 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7608 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7609 enum machine_mode mode0 = Pmode;
7610 enum machine_mode mode1 = Pmode;
84217346
MD
7611 rtx op0 = expand_normal (arg0);
7612 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7613
7614 if (icode == CODE_FOR_nothing)
7615 /* Builtin not supported on this processor. */
7616 return 0;
7617
7618 /* If we got invalid arguments, bail out before generating bad rtl. */
7619 if (arg0 == error_mark_node || arg1 == error_mark_node)
7620 return const0_rtx;
7621
7622 if (target == 0
7623 || GET_MODE (target) != tmode
7624 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7625 target = gen_reg_rtx (tmode);
7626
f676971a 7627 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7628
7629 if (op0 == const0_rtx)
7630 {
7631 addr = gen_rtx_MEM (tmode, op1);
7632 }
7633 else
7634 {
7635 op0 = copy_to_mode_reg (mode0, op0);
7636 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7637 }
7638
7639 pat = GEN_FCN (icode) (target, addr);
7640
7641 if (! pat)
7642 return 0;
7643 emit_insn (pat);
7644
7645 return target;
7646}
7647
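/* Expand a call EXP to an SPE store (evst*) builtin through insn
   pattern ICODE; no rtx result is returned.  */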
61bea3b0 7648static rtx
5039610b 7649spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7650{
5039610b
SL
7651 tree arg0 = CALL_EXPR_ARG (exp, 0);
7652 tree arg1 = CALL_EXPR_ARG (exp, 1);
7653 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7654 rtx op0 = expand_normal (arg0);
7655 rtx op1 = expand_normal (arg1);
7656 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7657 rtx pat;
7658 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7659 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7660 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7661
7662 /* Invalid arguments; bail out before generating bad rtl. */
7663 if (arg0 == error_mark_node
7664 || arg1 == error_mark_node
7665 || arg2 == error_mark_node)
7666 return const0_rtx;
7667
7668 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7669 op0 = copy_to_mode_reg (mode2, op0);
7670 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7671 op1 = copy_to_mode_reg (mode0, op1);
7672 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7673 op2 = copy_to_mode_reg (mode1, op2);
7674
7675 pat = GEN_FCN (icode) (op1, op2, op0);
7676 if (pat)
7677 emit_insn (pat);
7678 return NULL_RTX;
7679}
7680
96038623
DE
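/* Expand a call EXP to a PAIRED store builtin: store the value given
   by the first argument to the address formed from the other two,
   using insn pattern ICODE.  */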
7681static rtx
7682paired_expand_stv_builtin (enum insn_code icode, tree exp)
7683{
7684 tree arg0 = CALL_EXPR_ARG (exp, 0);
7685 tree arg1 = CALL_EXPR_ARG (exp, 1);
7686 tree arg2 = CALL_EXPR_ARG (exp, 2);
7687 rtx op0 = expand_normal (arg0);
7688 rtx op1 = expand_normal (arg1);
7689 rtx op2 = expand_normal (arg2);
7690 rtx pat, addr;
7691 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7692 enum machine_mode mode1 = Pmode;
7693 enum machine_mode mode2 = Pmode;
7694
7695 /* Invalid arguments; bail out before generating bad rtl. */
7696 if (arg0 == error_mark_node
7697 || arg1 == error_mark_node
7698 || arg2 == error_mark_node)
7699 return const0_rtx;
7700
7701 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7702 op0 = copy_to_mode_reg (tmode, op0);
7703
7704 op2 = copy_to_mode_reg (mode2, op2);
7705
7706 if (op1 == const0_rtx)
7707 {
7708 addr = gen_rtx_MEM (tmode, op2);
7709 }
7710 else
7711 {
7712 op1 = copy_to_mode_reg (mode1, op1);
7713 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7714 }
7715
7716 pat = GEN_FCN (icode) (addr, op0);
7717 if (pat)
7718 emit_insn (pat);
7719 return NULL_RTX;
7720}
7721
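/* Likewise for the AltiVec stv* builtins.  */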
6525c0e7 7722static rtx
5039610b 7723altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7724{
5039610b
SL
7725 tree arg0 = CALL_EXPR_ARG (exp, 0);
7726 tree arg1 = CALL_EXPR_ARG (exp, 1);
7727 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7728 rtx op0 = expand_normal (arg0);
7729 rtx op1 = expand_normal (arg1);
7730 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7731 rtx pat, addr;
7732 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7733 enum machine_mode mode1 = Pmode;
7734 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7735
7736 /* Invalid arguments; bail out before generating bad rtl. */
7737 if (arg0 == error_mark_node
7738 || arg1 == error_mark_node
7739 || arg2 == error_mark_node)
9a171fcd 7740 return const0_rtx;
6525c0e7 7741
b4a62fa0
SB
7742 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7743 op0 = copy_to_mode_reg (tmode, op0);
7744
f676971a 7745 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7746
7747 if (op1 == const0_rtx)
7748 {
7749 addr = gen_rtx_MEM (tmode, op2);
7750 }
7751 else
7752 {
7753 op1 = copy_to_mode_reg (mode1, op1);
7754 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7755 }
6525c0e7 7756
b4a62fa0 7757 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7758 if (pat)
7759 emit_insn (pat);
7760 return NULL_RTX;
7761}
7762
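/* Expand a call EXP to a three-operand builtin with insn pattern
   ICODE.  Check the 4-bit unsigned literal required as the third
   operand of vsldoi.  */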
2212663f 7763static rtx
5039610b 7764rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7765{
7766 rtx pat;
5039610b
SL
7767 tree arg0 = CALL_EXPR_ARG (exp, 0);
7768 tree arg1 = CALL_EXPR_ARG (exp, 1);
7769 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7770 rtx op0 = expand_normal (arg0);
7771 rtx op1 = expand_normal (arg1);
7772 rtx op2 = expand_normal (arg2);
2212663f
DB
7773 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7774 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7775 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7776 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7777
774b5662
DE
7778 if (icode == CODE_FOR_nothing)
7779 /* Builtin not supported on this processor. */
7780 return 0;
7781
20e26713
AH
7782 /* If we got invalid arguments, bail out before generating bad rtl. */
7783 if (arg0 == error_mark_node
7784 || arg1 == error_mark_node
7785 || arg2 == error_mark_node)
9a171fcd 7786 return const0_rtx;
20e26713 7787
aba5fb01
NS
7788 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7789 || icode == CODE_FOR_altivec_vsldoi_v4si
7790 || icode == CODE_FOR_altivec_vsldoi_v8hi
7791 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7792 {
7793 /* Only allow 4-bit unsigned literals. */
8bb418a3 7794 STRIP_NOPS (arg2);
b44140e7
AH
7795 if (TREE_CODE (arg2) != INTEGER_CST
7796 || TREE_INT_CST_LOW (arg2) & ~0xf)
7797 {
7798 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7799 return const0_rtx;
b44140e7 7800 }
b44140e7
AH
7801 }
7802
c62f2db5 7803 if (target == 0
2212663f
DB
7804 || GET_MODE (target) != tmode
7805 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7806 target = gen_reg_rtx (tmode);
7807
7808 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7809 op0 = copy_to_mode_reg (mode0, op0);
7810 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7811 op1 = copy_to_mode_reg (mode1, op1);
7812 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7813 op2 = copy_to_mode_reg (mode2, op2);
7814
49e39588
RE
7815 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
7816 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
7817 else
7818 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
7819 if (! pat)
7820 return 0;
7821 emit_insn (pat);
7822
7823 return target;
7824}
92898235 7825
3a9b8c7e 7826/* Expand the lvx builtins. */
0ac081f6 7827static rtx
a2369ed3 7828altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7829{
5039610b 7830 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7831 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7832 tree arg0;
7833 enum machine_mode tmode, mode0;
7c3abc73 7834 rtx pat, op0;
3a9b8c7e 7835 enum insn_code icode;
92898235 7836
0ac081f6
AH
7837 switch (fcode)
7838 {
f18c054f 7839 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7840 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7841 break;
f18c054f 7842 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7843 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7844 break;
7845 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7846 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7847 break;
7848 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7849 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7850 break;
7851 default:
7852 *expandedp = false;
7853 return NULL_RTX;
7854 }
0ac081f6 7855
3a9b8c7e 7856 *expandedp = true;
f18c054f 7857
5039610b 7858 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7859 op0 = expand_normal (arg0);
3a9b8c7e
AH
7860 tmode = insn_data[icode].operand[0].mode;
7861 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7862
3a9b8c7e
AH
7863 if (target == 0
7864 || GET_MODE (target) != tmode
7865 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7866 target = gen_reg_rtx (tmode);
24408032 7867
3a9b8c7e
AH
7868 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7869 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7870
3a9b8c7e
AH
7871 pat = GEN_FCN (icode) (target, op0);
7872 if (! pat)
7873 return 0;
7874 emit_insn (pat);
7875 return target;
7876}
f18c054f 7877
3a9b8c7e
AH
7878/* Expand the stvx builtins. */
7879static rtx
f676971a 7880altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7881 bool *expandedp)
3a9b8c7e 7882{
5039610b 7883 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7884 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7885 tree arg0, arg1;
7886 enum machine_mode mode0, mode1;
7c3abc73 7887 rtx pat, op0, op1;
3a9b8c7e 7888 enum insn_code icode;
f18c054f 7889
3a9b8c7e
AH
7890 switch (fcode)
7891 {
7892 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7893 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7894 break;
7895 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7896 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7897 break;
7898 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7899 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7900 break;
7901 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7902 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7903 break;
7904 default:
7905 *expandedp = false;
7906 return NULL_RTX;
7907 }
24408032 7908
5039610b
SL
7909 arg0 = CALL_EXPR_ARG (exp, 0);
7910 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7911 op0 = expand_normal (arg0);
7912 op1 = expand_normal (arg1);
3a9b8c7e
AH
7913 mode0 = insn_data[icode].operand[0].mode;
7914 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7915
3a9b8c7e
AH
7916 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7917 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7918 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7919 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7920
3a9b8c7e
AH
7921 pat = GEN_FCN (icode) (op0, op1);
7922 if (pat)
7923 emit_insn (pat);
f18c054f 7924
3a9b8c7e
AH
7925 *expandedp = true;
7926 return NULL_RTX;
7927}
f18c054f 7928
3a9b8c7e
AH
7929/* Expand the dst builtins. */
7930static rtx
f676971a 7931altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7932 bool *expandedp)
3a9b8c7e 7933{
5039610b 7934 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7935 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7936 tree arg0, arg1, arg2;
7937 enum machine_mode mode0, mode1, mode2;
7c3abc73 7938 rtx pat, op0, op1, op2;
586de218 7939 const struct builtin_description *d;
a3170dc6 7940 size_t i;
f18c054f 7941
3a9b8c7e 7942 *expandedp = false;
f18c054f 7943
3a9b8c7e 7944 /* Handle DST variants. */
586de218 7945 d = bdesc_dst;
3a9b8c7e
AH
7946 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7947 if (d->code == fcode)
7948 {
5039610b
SL
7949 arg0 = CALL_EXPR_ARG (exp, 0);
7950 arg1 = CALL_EXPR_ARG (exp, 1);
7951 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7952 op0 = expand_normal (arg0);
7953 op1 = expand_normal (arg1);
7954 op2 = expand_normal (arg2);
3a9b8c7e
AH
7955 mode0 = insn_data[d->icode].operand[0].mode;
7956 mode1 = insn_data[d->icode].operand[1].mode;
7957 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7958
3a9b8c7e
AH
7959 /* Invalid arguments, bail out before generating bad rtl. */
7960 if (arg0 == error_mark_node
7961 || arg1 == error_mark_node
7962 || arg2 == error_mark_node)
7963 return const0_rtx;
f18c054f 7964
86e7df90 7965 *expandedp = true;
8bb418a3 7966 STRIP_NOPS (arg2);
3a9b8c7e
AH
7967 if (TREE_CODE (arg2) != INTEGER_CST
7968 || TREE_INT_CST_LOW (arg2) & ~0x3)
7969 {
9e637a26 7970 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7971 return const0_rtx;
7972 }
f18c054f 7973
3a9b8c7e 7974 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7975 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7976 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7977 op1 = copy_to_mode_reg (mode1, op1);
24408032 7978
3a9b8c7e
AH
7979 pat = GEN_FCN (d->icode) (op0, op1, op2);
7980 if (pat != 0)
7981 emit_insn (pat);
f18c054f 7982
3a9b8c7e
AH
7983 return NULL_RTX;
7984 }
f18c054f 7985
3a9b8c7e
AH
7986 return NULL_RTX;
7987}
24408032 7988
7a4eca66
DE
7989/* Expand vec_init builtin. */
7990static rtx
5039610b 7991altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
7992{
7993 enum machine_mode tmode = TYPE_MODE (type);
7994 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7995 int i, n_elt = GET_MODE_NUNITS (tmode);
7996 rtvec v = rtvec_alloc (n_elt);
7997
7998 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 7999 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8000
5039610b 8001 for (i = 0; i < n_elt; ++i)
7a4eca66 8002 {
5039610b 8003 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8004 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8005 }
8006
7a4eca66
DE
8007 if (!target || !register_operand (target, tmode))
8008 target = gen_reg_rtx (tmode);
8009
8010 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8011 return target;
8012}
8013
8014/* Return the integer constant in ARG. Constrain it to be in the range
8015 of the subparts of VEC_TYPE; issue an error if not. */
8016
8017static int
8018get_element_number (tree vec_type, tree arg)
8019{
8020 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8021
8022 if (!host_integerp (arg, 1)
8023 || (elt = tree_low_cst (arg, 1), elt > max))
8024 {
8025 error ("selector must be an integer constant in the range 0..%wi", max);
8026 return 0;
8027 }
8028
8029 return elt;
8030}
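/* For a 4-element vector type the accepted selectors are therefore
   0..3.  Illustrative call (the user-level spelling is assumed from
   the ALTIVEC_BUILTIN_VEC_EXT_V4SI code handled further below):

       x = __builtin_vec_ext_v4si (v, 3);   OK
       x = __builtin_vec_ext_v4si (v, 7);   error, selector out of range
*/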
8031
8032/* Expand vec_set builtin. */
8033static rtx
5039610b 8034altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8035{
8036 enum machine_mode tmode, mode1;
8037 tree arg0, arg1, arg2;
8038 int elt;
8039 rtx op0, op1;
8040
5039610b
SL
8041 arg0 = CALL_EXPR_ARG (exp, 0);
8042 arg1 = CALL_EXPR_ARG (exp, 1);
8043 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8044
8045 tmode = TYPE_MODE (TREE_TYPE (arg0));
8046 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8047 gcc_assert (VECTOR_MODE_P (tmode));
8048
8049 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8050 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8051 elt = get_element_number (TREE_TYPE (arg0), arg2);
8052
8053 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8054 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8055
8056 op0 = force_reg (tmode, op0);
8057 op1 = force_reg (mode1, op1);
8058
8059 rs6000_expand_vector_set (op0, op1, elt);
8060
8061 return op0;
8062}
8063
8064/* Expand vec_ext builtin. */
8065static rtx
5039610b 8066altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8067{
8068 enum machine_mode tmode, mode0;
8069 tree arg0, arg1;
8070 int elt;
8071 rtx op0;
8072
5039610b
SL
8073 arg0 = CALL_EXPR_ARG (exp, 0);
8074 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8075
84217346 8076 op0 = expand_normal (arg0);
7a4eca66
DE
8077 elt = get_element_number (TREE_TYPE (arg0), arg1);
8078
8079 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8080 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8081 gcc_assert (VECTOR_MODE_P (mode0));
8082
8083 op0 = force_reg (mode0, op0);
8084
8085 if (optimize || !target || !register_operand (target, tmode))
8086 target = gen_reg_rtx (tmode);
8087
8088 rs6000_expand_vector_extract (target, op0, elt);
8089
8090 return target;
8091}
8092
3a9b8c7e
AH
8093/* Expand the builtin in EXP and store the result in TARGET. Store
8094 true in *EXPANDEDP if we found a builtin to expand. */
8095static rtx
a2369ed3 8096altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8097{
586de218
KG
8098 const struct builtin_description *d;
8099 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8100 size_t i;
8101 enum insn_code icode;
5039610b 8102 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8103 tree arg0;
8104 rtx op0, pat;
8105 enum machine_mode tmode, mode0;
3a9b8c7e 8106 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8107
58646b77
PB
8108 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8109 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8110 {
8111 *expandedp = true;
ea40ba9c 8112 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8113 return const0_rtx;
8114 }
8115
3a9b8c7e
AH
8116 target = altivec_expand_ld_builtin (exp, target, expandedp);
8117 if (*expandedp)
8118 return target;
0ac081f6 8119
3a9b8c7e
AH
8120 target = altivec_expand_st_builtin (exp, target, expandedp);
8121 if (*expandedp)
8122 return target;
8123
8124 target = altivec_expand_dst_builtin (exp, target, expandedp);
8125 if (*expandedp)
8126 return target;
8127
8128 *expandedp = true;
95385cbb 8129
3a9b8c7e
AH
8130 switch (fcode)
8131 {
6525c0e7 8132 case ALTIVEC_BUILTIN_STVX:
5039610b 8133 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8134 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8135 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8136 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8137 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8138 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8139 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8140 case ALTIVEC_BUILTIN_STVXL:
5039610b 8141 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8142
95385cbb
AH
8143 case ALTIVEC_BUILTIN_MFVSCR:
8144 icode = CODE_FOR_altivec_mfvscr;
8145 tmode = insn_data[icode].operand[0].mode;
8146
8147 if (target == 0
8148 || GET_MODE (target) != tmode
8149 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8150 target = gen_reg_rtx (tmode);
f676971a 8151
95385cbb 8152 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8153 if (! pat)
8154 return 0;
8155 emit_insn (pat);
95385cbb
AH
8156 return target;
8157
8158 case ALTIVEC_BUILTIN_MTVSCR:
8159 icode = CODE_FOR_altivec_mtvscr;
5039610b 8160 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8161 op0 = expand_normal (arg0);
95385cbb
AH
8162 mode0 = insn_data[icode].operand[0].mode;
8163
8164 /* If we got invalid arguments, bail out before generating bad rtl. */
8165 if (arg0 == error_mark_node)
9a171fcd 8166 return const0_rtx;
95385cbb
AH
8167
8168 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8169 op0 = copy_to_mode_reg (mode0, op0);
8170
8171 pat = GEN_FCN (icode) (op0);
8172 if (pat)
8173 emit_insn (pat);
8174 return NULL_RTX;
3a9b8c7e 8175
95385cbb
AH
8176 case ALTIVEC_BUILTIN_DSSALL:
8177 emit_insn (gen_altivec_dssall ());
8178 return NULL_RTX;
8179
8180 case ALTIVEC_BUILTIN_DSS:
8181 icode = CODE_FOR_altivec_dss;
5039610b 8182 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8183 STRIP_NOPS (arg0);
84217346 8184 op0 = expand_normal (arg0);
95385cbb
AH
8185 mode0 = insn_data[icode].operand[0].mode;
8186
8187 /* If we got invalid arguments, bail out before generating bad rtl. */
8188 if (arg0 == error_mark_node)
9a171fcd 8189 return const0_rtx;
95385cbb 8190
b44140e7
AH
8191 if (TREE_CODE (arg0) != INTEGER_CST
8192 || TREE_INT_CST_LOW (arg0) & ~0x3)
8193 {
8194 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8195 return const0_rtx;
b44140e7
AH
8196 }
8197
95385cbb
AH
8198 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8199 op0 = copy_to_mode_reg (mode0, op0);
8200
8201 emit_insn (gen_altivec_dss (op0));
0ac081f6 8202 return NULL_RTX;
7a4eca66
DE
8203
8204 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8205 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8206 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8207 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8208 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8209
8210 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8211 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8212 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8213 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8214 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8215
8216 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8217 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8218 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8219 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8220 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8221
8222 default:
8223 break;
8224 /* Fall through to the remaining expansions below. */
0ac081f6 8225 }
24408032 8226
100c4561 8227 /* Expand abs* operations. */
586de218 8228 d = bdesc_abs;
ca7558fc 8229 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8230 if (d->code == fcode)
5039610b 8231 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8232
ae4b4a02 8233 /* Expand the AltiVec predicates. */
586de218 8234 dp = bdesc_altivec_preds;
ca7558fc 8235 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8236 if (dp->code == fcode)
c4ad648e 8237 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8238 exp, target);
ae4b4a02 8239
6525c0e7
AH
8240 /* LV* are funky. We initialized them differently. */
8241 switch (fcode)
8242 {
8243 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8244 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8245 exp, target);
6525c0e7 8246 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8247 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8248 exp, target);
6525c0e7 8249 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8250 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8251 exp, target);
6525c0e7 8252 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8253 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8254 exp, target);
6525c0e7 8255 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8256 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8257 exp, target);
6525c0e7 8258 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8259 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8260 exp, target);
6525c0e7 8261 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8262 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8263 exp, target);
6525c0e7
AH
8264 default:
8265 break;
8266 /* Fall through to the code after the switch. */
8267 }
95385cbb 8268
92898235 8269 *expandedp = false;
0ac081f6
AH
8270 return NULL_RTX;
8271}
8272
96038623
DE
8273/* Expand the builtin in EXP and store the result in TARGET. Store
8274 true in *EXPANDEDP if we found a builtin to expand. */
8275static rtx
8276paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8277{
8278 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8279 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8280 const struct builtin_description *d;
96038623
DE
8281 size_t i;
8282
8283 *expandedp = true;
8284
8285 switch (fcode)
8286 {
8287 case PAIRED_BUILTIN_STX:
8288 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8289 case PAIRED_BUILTIN_LX:
8290 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8291 default:
8292 break;
8293 /* Fall through to the predicate handling below. */
8294 }
8295
8296 /* Expand the paired predicates. */
23a651fc 8297 d = bdesc_paired_preds;
96038623
DE
8298 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8299 if (d->code == fcode)
8300 return paired_expand_predicate_builtin (d->icode, exp, target);
8301
8302 *expandedp = false;
8303 return NULL_RTX;
8304}
8305
a3170dc6
AH
8306/* Binops that need to be initialized manually, but can be expanded
8307 automatically by rs6000_expand_binop_builtin. */
8308static struct builtin_description bdesc_2arg_spe[] =
8309{
8310 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8311 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8312 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8313 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8314 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8315 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8316 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8317 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8318 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8319 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8320 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8321 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8322 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8323 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8324 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8325 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8326 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8327 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8328 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8329 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8330 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8331 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8332};
8333
8334/* Expand the builtin in EXP and store the result in TARGET. Store
8335 true in *EXPANDEDP if we found a builtin to expand.
8336
8337 This expands the SPE builtins that are not simple unary and binary
8338 operations. */
8339static rtx
a2369ed3 8340spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8341{
5039610b 8342 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8343 tree arg1, arg0;
8344 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8345 enum insn_code icode;
8346 enum machine_mode tmode, mode0;
8347 rtx pat, op0;
8348 struct builtin_description *d;
8349 size_t i;
8350
8351 *expandedp = true;
8352
8353 /* Syntax check for a 5-bit unsigned immediate. */
8354 switch (fcode)
8355 {
8356 case SPE_BUILTIN_EVSTDD:
8357 case SPE_BUILTIN_EVSTDH:
8358 case SPE_BUILTIN_EVSTDW:
8359 case SPE_BUILTIN_EVSTWHE:
8360 case SPE_BUILTIN_EVSTWHO:
8361 case SPE_BUILTIN_EVSTWWE:
8362 case SPE_BUILTIN_EVSTWWO:
5039610b 8363 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8364 if (TREE_CODE (arg1) != INTEGER_CST
8365 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8366 {
8367 error ("argument 2 must be a 5-bit unsigned literal");
8368 return const0_rtx;
8369 }
8370 break;
8371 default:
8372 break;
8373 }
8374
00332c9f
AH
8375 /* The evsplat*i instructions are not quite generic: they take a 5-bit signed literal, which rs6000_expand_unop_builtin checks for us. */
8376 switch (fcode)
8377 {
8378 case SPE_BUILTIN_EVSPLATFI:
8379 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8380 exp, target);
00332c9f
AH
8381 case SPE_BUILTIN_EVSPLATI:
8382 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8383 exp, target);
00332c9f
AH
8384 default:
8385 break;
8386 }
8387
a3170dc6
AH
8388 d = (struct builtin_description *) bdesc_2arg_spe;
8389 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8390 if (d->code == fcode)
5039610b 8391 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8392
8393 d = (struct builtin_description *) bdesc_spe_predicates;
8394 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8395 if (d->code == fcode)
5039610b 8396 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8397
8398 d = (struct builtin_description *) bdesc_spe_evsel;
8399 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8400 if (d->code == fcode)
5039610b 8401 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8402
8403 switch (fcode)
8404 {
8405 case SPE_BUILTIN_EVSTDDX:
5039610b 8406 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8407 case SPE_BUILTIN_EVSTDHX:
5039610b 8408 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8409 case SPE_BUILTIN_EVSTDWX:
5039610b 8410 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8411 case SPE_BUILTIN_EVSTWHEX:
5039610b 8412 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8413 case SPE_BUILTIN_EVSTWHOX:
5039610b 8414 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8415 case SPE_BUILTIN_EVSTWWEX:
5039610b 8416 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8417 case SPE_BUILTIN_EVSTWWOX:
5039610b 8418 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8419 case SPE_BUILTIN_EVSTDD:
5039610b 8420 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8421 case SPE_BUILTIN_EVSTDH:
5039610b 8422 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8423 case SPE_BUILTIN_EVSTDW:
5039610b 8424 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8425 case SPE_BUILTIN_EVSTWHE:
5039610b 8426 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8427 case SPE_BUILTIN_EVSTWHO:
5039610b 8428 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8429 case SPE_BUILTIN_EVSTWWE:
5039610b 8430 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8431 case SPE_BUILTIN_EVSTWWO:
5039610b 8432 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8433 case SPE_BUILTIN_MFSPEFSCR:
8434 icode = CODE_FOR_spe_mfspefscr;
8435 tmode = insn_data[icode].operand[0].mode;
8436
8437 if (target == 0
8438 || GET_MODE (target) != tmode
8439 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8440 target = gen_reg_rtx (tmode);
f676971a 8441
a3170dc6
AH
8442 pat = GEN_FCN (icode) (target);
8443 if (! pat)
8444 return 0;
8445 emit_insn (pat);
8446 return target;
8447 case SPE_BUILTIN_MTSPEFSCR:
8448 icode = CODE_FOR_spe_mtspefscr;
5039610b 8449 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8450 op0 = expand_normal (arg0);
a3170dc6
AH
8451 mode0 = insn_data[icode].operand[0].mode;
8452
8453 if (arg0 == error_mark_node)
8454 return const0_rtx;
8455
8456 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8457 op0 = copy_to_mode_reg (mode0, op0);
8458
8459 pat = GEN_FCN (icode) (op0);
8460 if (pat)
8461 emit_insn (pat);
8462 return NULL_RTX;
8463 default:
8464 break;
8465 }
8466
8467 *expandedp = false;
8468 return NULL_RTX;
8469}
8470
96038623
DE
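/* Expand a call EXP to a PAIRED comparison predicate builtin: emit
   the CCFPmode compare given by ICODE and put the CR bit selected by
   the first argument of EXP into TARGET.  */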
8471static rtx
8472paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8473{
8474 rtx pat, scratch, tmp;
8475 tree form = CALL_EXPR_ARG (exp, 0);
8476 tree arg0 = CALL_EXPR_ARG (exp, 1);
8477 tree arg1 = CALL_EXPR_ARG (exp, 2);
8478 rtx op0 = expand_normal (arg0);
8479 rtx op1 = expand_normal (arg1);
8480 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8481 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8482 int form_int;
8483 enum rtx_code code;
8484
8485 if (TREE_CODE (form) != INTEGER_CST)
8486 {
8487 error ("argument 1 of __builtin_paired_predicate must be a constant");
8488 return const0_rtx;
8489 }
8490 else
8491 form_int = TREE_INT_CST_LOW (form);
8492
8493 gcc_assert (mode0 == mode1);
8494
8495 if (arg0 == error_mark_node || arg1 == error_mark_node)
8496 return const0_rtx;
8497
8498 if (target == 0
8499 || GET_MODE (target) != SImode
8500 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8501 target = gen_reg_rtx (SImode);
8502 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8503 op0 = copy_to_mode_reg (mode0, op0);
8504 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8505 op1 = copy_to_mode_reg (mode1, op1);
8506
8507 scratch = gen_reg_rtx (CCFPmode);
8508
8509 pat = GEN_FCN (icode) (scratch, op0, op1);
8510 if (!pat)
8511 return const0_rtx;
8512
8513 emit_insn (pat);
8514
8515 switch (form_int)
8516 {
8517 /* LT bit. */
8518 case 0:
8519 code = LT;
8520 break;
8521 /* GT bit. */
8522 case 1:
8523 code = GT;
8524 break;
8525 /* EQ bit. */
8526 case 2:
8527 code = EQ;
8528 break;
8529 /* UN bit. */
8530 case 3:
8531 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8532 return target;
8533 default:
8534 error ("argument 1 of __builtin_paired_predicate is out of range");
8535 return const0_rtx;
8536 }
8537
8538 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8539 emit_move_insn (target, tmp);
8540 return target;
8541}
8542
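/* Expand a call EXP to an SPE comparison predicate builtin; the first
   argument of EXP selects the all/any/upper/lower variant, as
   described in the comment on the CR bits below.  */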
a3170dc6 8543static rtx
5039610b 8544spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8545{
8546 rtx pat, scratch, tmp;
5039610b
SL
8547 tree form = CALL_EXPR_ARG (exp, 0);
8548 tree arg0 = CALL_EXPR_ARG (exp, 1);
8549 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8550 rtx op0 = expand_normal (arg0);
8551 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8552 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8553 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8554 int form_int;
8555 enum rtx_code code;
8556
8557 if (TREE_CODE (form) != INTEGER_CST)
8558 {
8559 error ("argument 1 of __builtin_spe_predicate must be a constant");
8560 return const0_rtx;
8561 }
8562 else
8563 form_int = TREE_INT_CST_LOW (form);
8564
37409796 8565 gcc_assert (mode0 == mode1);
a3170dc6
AH
8566
8567 if (arg0 == error_mark_node || arg1 == error_mark_node)
8568 return const0_rtx;
8569
8570 if (target == 0
8571 || GET_MODE (target) != SImode
8572 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8573 target = gen_reg_rtx (SImode);
8574
8575 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8576 op0 = copy_to_mode_reg (mode0, op0);
8577 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8578 op1 = copy_to_mode_reg (mode1, op1);
8579
8580 scratch = gen_reg_rtx (CCmode);
8581
8582 pat = GEN_FCN (icode) (scratch, op0, op1);
8583 if (! pat)
8584 return const0_rtx;
8585 emit_insn (pat);
8586
8587 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8588 _lower_. We use one compare, but look in different bits of the
8589 CR for each variant.
8590
8591 There are 2 elements in each SPE simd type (upper/lower). The CR
8592 bits are set as follows:
8593
8594 BIT0 | BIT 1 | BIT 2 | BIT 3
8595 U | L | (U | L) | (U & L)
8596
8597 So, for an "all" relationship, BIT 3 would be set.
8598 For an "any" relationship, BIT 2 would be set. Etc.
8599
8600 Following traditional nomenclature, these bits map to:
8601
8602 BIT0 | BIT 1 | BIT 2 | BIT 3
8603 LT | GT | EQ | OV
8604
8605 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
8606 */
8607
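  /* For example, with the __builtin_spe_evcmpgts entry from
     bdesc_spe_predicates:

         all_gt = __builtin_spe_evcmpgts (0, a, b);   both elements
         any_gt = __builtin_spe_evcmpgts (1, a, b);   either element

     with forms 2 and 3 testing only the upper or lower element.  */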
8608 switch (form_int)
8609 {
8610 /* All variant. OV bit. */
8611 case 0:
8612 /* We need to get to the OV bit, which is the ORDERED bit. We
8613 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8614 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8615 So let's just use another pattern. */
8616 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8617 return target;
8618 /* Any variant. EQ bit. */
8619 case 1:
8620 code = EQ;
8621 break;
8622 /* Upper variant. LT bit. */
8623 case 2:
8624 code = LT;
8625 break;
8626 /* Lower variant. GT bit. */
8627 case 3:
8628 code = GT;
8629 break;
8630 default:
8631 error ("argument 1 of __builtin_spe_predicate is out of range");
8632 return const0_rtx;
8633 }
8634
8635 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8636 emit_move_insn (target, tmp);
8637
8638 return target;
8639}
8640
8641/* The evsel builtins look like this:
8642
8643 e = __builtin_spe_evsel_OP (a, b, c, d);
8644
8645 and work like this:
8646
8647 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8648 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8649*/
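/* Illustrative sketch, not part of the original source: using the
   __ev64_opaque__ type registered below, and assuming the signed
   greater-than variant in bdesc_spe_evsel is spelled along the lines of
   __builtin_spe_evsel_gts, a concrete call would be

     __ev64_opaque__ e = __builtin_spe_evsel_gts (a, b, c, d);

   which fills each element of E from C or D according to the per-element
   comparison of A and B, exactly as described above.  */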
8650
8651static rtx
5039610b 8652spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
8653{
8654 rtx pat, scratch;
8655 tree arg0 = CALL_EXPR_ARG (exp, 0);
8656 tree arg1 = CALL_EXPR_ARG (exp, 1);
8657 tree arg2 = CALL_EXPR_ARG (exp, 2);
8658 tree arg3 = CALL_EXPR_ARG (exp, 3);
8659 rtx op0 = expand_normal (arg0);
8660 rtx op1 = expand_normal (arg1);
8661 rtx op2 = expand_normal (arg2);
8662 rtx op3 = expand_normal (arg3);
8663 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8664 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8665
37409796 8666 gcc_assert (mode0 == mode1);
8667
8668 if (arg0 == error_mark_node || arg1 == error_mark_node
8669 || arg2 == error_mark_node || arg3 == error_mark_node)
8670 return const0_rtx;
8671
8672 if (target == 0
8673 || GET_MODE (target) != mode0
8674 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8675 target = gen_reg_rtx (mode0);
8676
8677 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8678 op0 = copy_to_mode_reg (mode0, op0);
8679 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8680 op1 = copy_to_mode_reg (mode0, op1);
8681 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8682 op2 = copy_to_mode_reg (mode0, op2);
8683 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8684 op3 = copy_to_mode_reg (mode0, op3);
8685
8686 /* Generate the compare. */
8687 scratch = gen_reg_rtx (CCmode);
8688 pat = GEN_FCN (icode) (scratch, op0, op1);
8689 if (! pat)
8690 return const0_rtx;
8691 emit_insn (pat);
8692
8693 if (mode0 == V2SImode)
8694 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8695 else
8696 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8697
8698 return target;
8699}
8700
8701/* Expand an expression EXP that calls a built-in function,
8702 with result going to TARGET if that's convenient
8703 (and in mode MODE if that's convenient).
8704 SUBTARGET may be used as the target for computing one of EXP's operands.
8705 IGNORE is nonzero if the value is to be ignored. */
8706
8707static rtx
a2369ed3 8708rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8709 enum machine_mode mode ATTRIBUTE_UNUSED,
8710 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8711{
5039610b 8712 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8713 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8714 const struct builtin_description *d;
8715 size_t i;
8716 rtx ret;
8717 bool success;
f676971a 8718
8719 if (fcode == RS6000_BUILTIN_RECIP)
8720 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
8721
8722 if (fcode == RS6000_BUILTIN_RECIPF)
8723 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
8724
8725 if (fcode == RS6000_BUILTIN_RSQRTF)
8726 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
8727
8728 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8729 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8730 {
8731 int icode = (int) CODE_FOR_altivec_lvsr;
8732 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8733 enum machine_mode mode = insn_data[icode].operand[1].mode;
8734 tree arg;
8735 rtx op, addr, pat;
8736
37409796 8737 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8738
5039610b 8739 arg = CALL_EXPR_ARG (exp, 0);
37409796 8740 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
8741 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8742 addr = memory_address (mode, op);
8743 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8744 op = addr;
8745 else
8746 {
8747 /* For the load case we need to negate the address. */
8748 op = gen_reg_rtx (GET_MODE (addr));
8749 emit_insn (gen_rtx_SET (VOIDmode, op,
8750 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8751 }
8752 op = gen_rtx_MEM (mode, op);
8753
8754 if (target == 0
8755 || GET_MODE (target) != tmode
8756 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8757 target = gen_reg_rtx (tmode);
8758
8759 /*pat = gen_altivec_lvsr (target, op);*/
8760 pat = GEN_FCN (icode) (target, op);
8761 if (!pat)
8762 return 0;
8763 emit_insn (pat);
8764
8765 return target;
8766 }
8767
8768 /* FIXME: There's got to be a nicer way to handle this case than
8769 constructing a new CALL_EXPR. */
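  /* Illustrative note, not part of the original source: the fix-up below
     means a one-argument call such as __builtin_altivec_vcfsx (v) is
     rebuilt as the two-argument form __builtin_altivec_vcfsx (v, 0),
     i.e. a default scale operand of zero is supplied.  */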
8770 if (fcode == ALTIVEC_BUILTIN_VCFUX
8771 || fcode == ALTIVEC_BUILTIN_VCFSX)
8772 {
8773 if (call_expr_nargs (exp) == 1)
8774 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8775 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8776 }
7ccf35ed 8777
0ac081f6 8778 if (TARGET_ALTIVEC)
8779 {
8780 ret = altivec_expand_builtin (exp, target, &success);
8781
8782 if (success)
8783 return ret;
8784 }
8785 if (TARGET_SPE)
8786 {
8787 ret = spe_expand_builtin (exp, target, &success);
8788
8789 if (success)
8790 return ret;
8791 }
8792 if (TARGET_PAIRED_FLOAT)
8793 {
8794 ret = paired_expand_builtin (exp, target, &success);
8795
8796 if (success)
8797 return ret;
8798 }
92898235 8799
96038623 8800 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 8801
8802 /* Handle simple unary operations. */
8803 d = (struct builtin_description *) bdesc_1arg;
8804 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8805 if (d->code == fcode)
5039610b 8806 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8807
8808 /* Handle simple binary operations. */
8809 d = (struct builtin_description *) bdesc_2arg;
8810 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8811 if (d->code == fcode)
5039610b 8812 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8813
37409796 8814 /* Handle simple ternary operations. */
586de218 8815 d = bdesc_3arg;
8816 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8817 if (d->code == fcode)
5039610b 8818 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8819
37409796 8820 gcc_unreachable ();
8821}
8822
8823static tree
8824build_opaque_vector_type (tree node, int nunits)
8825{
8826 node = copy_node (node);
8827 TYPE_MAIN_VARIANT (node) = node;
8828 return build_vector_type (node, nunits);
8829}
8830
0ac081f6 8831static void
863d938c 8832rs6000_init_builtins (void)
0ac081f6 8833{
8834 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8835 V2SF_type_node = build_vector_type (float_type_node, 2);
8836 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8837 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8838 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8839 V8HI_type_node = build_vector_type (intHI_type_node, 8);
8840 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8841
8842 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8843 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8844 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8845
8846 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8847 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8848 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8849 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8850
8851 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8852 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8853 'vector unsigned short'. */
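/* Illustrative example, not part of the original source: because the
   copies below are distinct types, declarations such as

     __vector __bool char m;
     __vector unsigned char v;

   can be told apart (e.g. by C++ overload resolution) even though both
   have the same 16-byte layout.  */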
8854
8855 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8856 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8857 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8858 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8859
8860 long_integer_type_internal_node = long_integer_type_node;
8861 long_unsigned_type_internal_node = long_unsigned_type_node;
8862 intQI_type_internal_node = intQI_type_node;
8863 uintQI_type_internal_node = unsigned_intQI_type_node;
8864 intHI_type_internal_node = intHI_type_node;
8865 uintHI_type_internal_node = unsigned_intHI_type_node;
8866 intSI_type_internal_node = intSI_type_node;
8867 uintSI_type_internal_node = unsigned_intSI_type_node;
8868 float_type_internal_node = float_type_node;
8869 void_type_internal_node = void_type_node;
8870
8871 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8872 get_identifier ("__bool char"),
8873 bool_char_type_node));
8874 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8875 get_identifier ("__bool short"),
8876 bool_short_type_node));
8877 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8878 get_identifier ("__bool int"),
8879 bool_int_type_node));
8880 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8881 get_identifier ("__pixel"),
8882 pixel_type_node));
8883
8884 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8885 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8886 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8887 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8888
8889 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8890 get_identifier ("__vector unsigned char"),
8891 unsigned_V16QI_type_node));
8892 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8893 get_identifier ("__vector signed char"),
8894 V16QI_type_node));
8895 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8896 get_identifier ("__vector __bool char"),
8897 bool_V16QI_type_node));
8898
8899 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8900 get_identifier ("__vector unsigned short"),
8901 unsigned_V8HI_type_node));
8902 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8903 get_identifier ("__vector signed short"),
8904 V8HI_type_node));
8905 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8906 get_identifier ("__vector __bool short"),
8907 bool_V8HI_type_node));
8908
8909 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8910 get_identifier ("__vector unsigned int"),
8911 unsigned_V4SI_type_node));
8912 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8913 get_identifier ("__vector signed int"),
8914 V4SI_type_node));
8915 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8916 get_identifier ("__vector __bool int"),
8917 bool_V4SI_type_node));
8918
8919 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8920 get_identifier ("__vector float"),
8921 V4SF_type_node));
8922 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8923 get_identifier ("__vector __pixel"),
8924 pixel_V8HI_type_node));
8925
8926 if (TARGET_PAIRED_FLOAT)
8927 paired_init_builtins ();
a3170dc6 8928 if (TARGET_SPE)
3fdaa45a 8929 spe_init_builtins ();
8930 if (TARGET_ALTIVEC)
8931 altivec_init_builtins ();
96038623 8932 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 8933 rs6000_common_init_builtins ();
8934 if (TARGET_PPC_GFXOPT)
8935 {
8936 tree ftype = build_function_type_list (float_type_node,
8937 float_type_node,
8938 float_type_node,
8939 NULL_TREE);
8940 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
8941 RS6000_BUILTIN_RECIPF);
8942
8943 ftype = build_function_type_list (float_type_node,
8944 float_type_node,
8945 NULL_TREE);
8946 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
8947 RS6000_BUILTIN_RSQRTF);
8948 }
8949 if (TARGET_POPCNTB)
8950 {
8951 tree ftype = build_function_type_list (double_type_node,
8952 double_type_node,
8953 double_type_node,
8954 NULL_TREE);
8955 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
8956 RS6000_BUILTIN_RECIP);
8957
8958 }
8959
8960#if TARGET_XCOFF
8961 /* AIX libm provides clog as __clog. */
8962 if (built_in_decls [BUILT_IN_CLOG])
8963 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8964#endif
8965}
8966
8967/* Search through a set of builtins and enable the mask bits.
8968 DESC is an array of builtins.
b6d08ca1 8969 SIZE is the total number of builtins.
8970 START is the builtin enum at which to start.
8971 END is the builtin enum at which to end. */
0ac081f6 8972static void
a2369ed3 8973enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8974 enum rs6000_builtins start,
a2369ed3 8975 enum rs6000_builtins end)
8976{
8977 int i;
8978
8979 for (i = 0; i < size; ++i)
8980 if (desc[i].code == start)
8981 break;
8982
8983 if (i == size)
8984 return;
8985
8986 for (; i < size; ++i)
8987 {
8988 /* Flip all the bits on. */
8989 desc[i].mask = target_flags;
8990 if (desc[i].code == end)
8991 break;
8992 }
8993}
8994
8995static void
863d938c 8996spe_init_builtins (void)
0ac081f6 8997{
8998 tree endlink = void_list_node;
8999 tree puint_type_node = build_pointer_type (unsigned_type_node);
9000 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9001 struct builtin_description *d;
9002 size_t i;
9003
9004 tree v2si_ftype_4_v2si
9005 = build_function_type
9006 (opaque_V2SI_type_node,
9007 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9008 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9009 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9010 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9011 endlink)))));
9012
9013 tree v2sf_ftype_4_v2sf
9014 = build_function_type
9015 (opaque_V2SF_type_node,
9016 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9017 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9018 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9019 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9020 endlink)))));
9021
9022 tree int_ftype_int_v2si_v2si
9023 = build_function_type
9024 (integer_type_node,
9025 tree_cons (NULL_TREE, integer_type_node,
9026 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9027 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9028 endlink))));
9029
9030 tree int_ftype_int_v2sf_v2sf
9031 = build_function_type
9032 (integer_type_node,
9033 tree_cons (NULL_TREE, integer_type_node,
9034 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9035 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9036 endlink))));
9037
9038 tree void_ftype_v2si_puint_int
9039 = build_function_type (void_type_node,
3fdaa45a 9040 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9041 tree_cons (NULL_TREE, puint_type_node,
9042 tree_cons (NULL_TREE,
9043 integer_type_node,
9044 endlink))));
9045
9046 tree void_ftype_v2si_puint_char
9047 = build_function_type (void_type_node,
3fdaa45a 9048 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9049 tree_cons (NULL_TREE, puint_type_node,
9050 tree_cons (NULL_TREE,
9051 char_type_node,
9052 endlink))));
9053
9054 tree void_ftype_v2si_pv2si_int
9055 = build_function_type (void_type_node,
3fdaa45a 9056 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9057 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9058 tree_cons (NULL_TREE,
9059 integer_type_node,
9060 endlink))));
9061
9062 tree void_ftype_v2si_pv2si_char
9063 = build_function_type (void_type_node,
3fdaa45a 9064 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9065 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9066 tree_cons (NULL_TREE,
9067 char_type_node,
9068 endlink))));
9069
9070 tree void_ftype_int
9071 = build_function_type (void_type_node,
9072 tree_cons (NULL_TREE, integer_type_node, endlink));
9073
9074 tree int_ftype_void
36e8d515 9075 = build_function_type (integer_type_node, endlink);
9076
9077 tree v2si_ftype_pv2si_int
3fdaa45a 9078 = build_function_type (opaque_V2SI_type_node,
6035d635 9079 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9080 tree_cons (NULL_TREE, integer_type_node,
9081 endlink)));
9082
9083 tree v2si_ftype_puint_int
3fdaa45a 9084 = build_function_type (opaque_V2SI_type_node,
9085 tree_cons (NULL_TREE, puint_type_node,
9086 tree_cons (NULL_TREE, integer_type_node,
9087 endlink)));
9088
9089 tree v2si_ftype_pushort_int
3fdaa45a 9090 = build_function_type (opaque_V2SI_type_node,
9091 tree_cons (NULL_TREE, pushort_type_node,
9092 tree_cons (NULL_TREE, integer_type_node,
9093 endlink)));
9094
9095 tree v2si_ftype_signed_char
9096 = build_function_type (opaque_V2SI_type_node,
9097 tree_cons (NULL_TREE, signed_char_type_node,
9098 endlink));
9099
9100 /* The initialization of the simple binary and unary builtins is
9101 done in rs6000_common_init_builtins, but we have to enable the
9102 mask bits here manually because we have run out of `target_flags'
9103 bits. We really need to redesign this mask business. */
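  /* Illustrative note, not part of the original source: each call below
     walks the named descriptor table and stores the current target_flags
     into desc[i].mask for every entry from START to END, so that the
     later def_builtin calls see a nonzero mask and actually register
     those builtins.  */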
9104
9105 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9106 ARRAY_SIZE (bdesc_2arg),
9107 SPE_BUILTIN_EVADDW,
9108 SPE_BUILTIN_EVXOR);
9109 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9110 ARRAY_SIZE (bdesc_1arg),
9111 SPE_BUILTIN_EVABS,
9112 SPE_BUILTIN_EVSUBFUSIAAW);
9113 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9114 ARRAY_SIZE (bdesc_spe_predicates),
9115 SPE_BUILTIN_EVCMPEQ,
9116 SPE_BUILTIN_EVFSTSTLT);
9117 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9118 ARRAY_SIZE (bdesc_spe_evsel),
9119 SPE_BUILTIN_EVSEL_CMPGTS,
9120 SPE_BUILTIN_EVSEL_FSTSTEQ);
9121
9122 (*lang_hooks.decls.pushdecl)
9123 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9124 opaque_V2SI_type_node));
9125
a3170dc6 9126 /* Initialize irregular SPE builtins. */
f676971a 9127
9128 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9129 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9130 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9131 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9132 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9133 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9134 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9135 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9136 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9137 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9138 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9139 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9140 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9141 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9142 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9143 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
9144 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9145 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
9146
9147 /* Loads. */
9148 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9149 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9150 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9151 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9152 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9153 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9154 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9155 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9156 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9157 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9158 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9159 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9160 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9161 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9162 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9163 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9164 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9165 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9166 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9167 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9168 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9169 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9170
9171 /* Predicates. */
9172 d = (struct builtin_description *) bdesc_spe_predicates;
9173 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9174 {
9175 tree type;
9176
9177 switch (insn_data[d->icode].operand[1].mode)
9178 {
9179 case V2SImode:
9180 type = int_ftype_int_v2si_v2si;
9181 break;
9182 case V2SFmode:
9183 type = int_ftype_int_v2sf_v2sf;
9184 break;
9185 default:
37409796 9186 gcc_unreachable ();
9187 }
9188
9189 def_builtin (d->mask, d->name, type, d->code);
9190 }
9191
9192 /* Evsel predicates. */
9193 d = (struct builtin_description *) bdesc_spe_evsel;
9194 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9195 {
9196 tree type;
9197
9198 switch (insn_data[d->icode].operand[1].mode)
9199 {
9200 case V2SImode:
9201 type = v2si_ftype_4_v2si;
9202 break;
9203 case V2SFmode:
9204 type = v2sf_ftype_4_v2sf;
9205 break;
9206 default:
37409796 9207 gcc_unreachable ();
9208 }
9209
9210 def_builtin (d->mask, d->name, type, d->code);
9211 }
9212}
9213
9214static void
9215paired_init_builtins (void)
9216{
23a651fc 9217 const struct builtin_description *d;
9218 size_t i;
9219 tree endlink = void_list_node;
9220
9221 tree int_ftype_int_v2sf_v2sf
9222 = build_function_type
9223 (integer_type_node,
9224 tree_cons (NULL_TREE, integer_type_node,
9225 tree_cons (NULL_TREE, V2SF_type_node,
9226 tree_cons (NULL_TREE, V2SF_type_node,
9227 endlink))));
9228 tree pcfloat_type_node =
9229 build_pointer_type (build_qualified_type
9230 (float_type_node, TYPE_QUAL_CONST));
9231
9232 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9233 long_integer_type_node,
9234 pcfloat_type_node,
9235 NULL_TREE);
9236 tree void_ftype_v2sf_long_pcfloat =
9237 build_function_type_list (void_type_node,
9238 V2SF_type_node,
9239 long_integer_type_node,
9240 pcfloat_type_node,
9241 NULL_TREE);
9242
9243
9244 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9245 PAIRED_BUILTIN_LX);
9246
9247
9248 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9249 PAIRED_BUILTIN_STX);
9250
9251 /* Predicates. */
23a651fc 9252 d = bdesc_paired_preds;
9253 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9254 {
9255 tree type;
9256
9257 switch (insn_data[d->icode].operand[1].mode)
9258 {
9259 case V2SFmode:
9260 type = int_ftype_int_v2sf_v2sf;
9261 break;
9262 default:
9263 gcc_unreachable ();
9264 }
9265
9266 def_builtin (d->mask, d->name, type, d->code);
9267 }
9268}
9269
a3170dc6 9270static void
863d938c 9271altivec_init_builtins (void)
a3170dc6 9272{
9273 const struct builtin_description *d;
9274 const struct builtin_description_predicates *dp;
a3170dc6 9275 size_t i;
9276 tree ftype;
9277
9278 tree pfloat_type_node = build_pointer_type (float_type_node);
9279 tree pint_type_node = build_pointer_type (integer_type_node);
9280 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9281 tree pchar_type_node = build_pointer_type (char_type_node);
9282
9283 tree pvoid_type_node = build_pointer_type (void_type_node);
9284
9285 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9286 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9287 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9288 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9289
9290 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9291
9292 tree int_ftype_opaque
9293 = build_function_type_list (integer_type_node,
9294 opaque_V4SI_type_node, NULL_TREE);
9295
9296 tree opaque_ftype_opaque_int
9297 = build_function_type_list (opaque_V4SI_type_node,
9298 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9299 tree opaque_ftype_opaque_opaque_int
9300 = build_function_type_list (opaque_V4SI_type_node,
9301 opaque_V4SI_type_node, opaque_V4SI_type_node,
9302 integer_type_node, NULL_TREE);
9303 tree int_ftype_int_opaque_opaque
9304 = build_function_type_list (integer_type_node,
9305 integer_type_node, opaque_V4SI_type_node,
9306 opaque_V4SI_type_node, NULL_TREE);
9307 tree int_ftype_int_v4si_v4si
9308 = build_function_type_list (integer_type_node,
9309 integer_type_node, V4SI_type_node,
9310 V4SI_type_node, NULL_TREE);
9311 tree v4sf_ftype_pcfloat
9312 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9313 tree void_ftype_pfloat_v4sf
b4de2f7d 9314 = build_function_type_list (void_type_node,
a3170dc6 9315 pfloat_type_node, V4SF_type_node, NULL_TREE);
9316 tree v4si_ftype_pcint
9317 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9318 tree void_ftype_pint_v4si
9319 = build_function_type_list (void_type_node,
9320 pint_type_node, V4SI_type_node, NULL_TREE);
9321 tree v8hi_ftype_pcshort
9322 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9323 tree void_ftype_pshort_v8hi
9324 = build_function_type_list (void_type_node,
9325 pshort_type_node, V8HI_type_node, NULL_TREE);
9326 tree v16qi_ftype_pcchar
9327 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9328 tree void_ftype_pchar_v16qi
9329 = build_function_type_list (void_type_node,
9330 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9331 tree void_ftype_v4si
b4de2f7d 9332 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
9333 tree v8hi_ftype_void
9334 = build_function_type (V8HI_type_node, void_list_node);
9335 tree void_ftype_void
9336 = build_function_type (void_type_node, void_list_node);
9337 tree void_ftype_int
9338 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9339
9340 tree opaque_ftype_long_pcvoid
9341 = build_function_type_list (opaque_V4SI_type_node,
9342 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9343 tree v16qi_ftype_long_pcvoid
a3170dc6 9344 = build_function_type_list (V16QI_type_node,
9345 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9346 tree v8hi_ftype_long_pcvoid
a3170dc6 9347 = build_function_type_list (V8HI_type_node,
9348 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9349 tree v4si_ftype_long_pcvoid
a3170dc6 9350 = build_function_type_list (V4SI_type_node,
b4a62fa0 9351 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9352
9353 tree void_ftype_opaque_long_pvoid
9354 = build_function_type_list (void_type_node,
9355 opaque_V4SI_type_node, long_integer_type_node,
9356 pvoid_type_node, NULL_TREE);
b4a62fa0 9357 tree void_ftype_v4si_long_pvoid
b4de2f7d 9358 = build_function_type_list (void_type_node,
b4a62fa0 9359 V4SI_type_node, long_integer_type_node,
b4de2f7d 9360 pvoid_type_node, NULL_TREE);
b4a62fa0 9361 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9362 = build_function_type_list (void_type_node,
b4a62fa0 9363 V16QI_type_node, long_integer_type_node,
b4de2f7d 9364 pvoid_type_node, NULL_TREE);
b4a62fa0 9365 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9366 = build_function_type_list (void_type_node,
b4a62fa0 9367 V8HI_type_node, long_integer_type_node,
b4de2f7d 9368 pvoid_type_node, NULL_TREE);
9369 tree int_ftype_int_v8hi_v8hi
9370 = build_function_type_list (integer_type_node,
9371 integer_type_node, V8HI_type_node,
9372 V8HI_type_node, NULL_TREE);
9373 tree int_ftype_int_v16qi_v16qi
9374 = build_function_type_list (integer_type_node,
9375 integer_type_node, V16QI_type_node,
9376 V16QI_type_node, NULL_TREE);
9377 tree int_ftype_int_v4sf_v4sf
9378 = build_function_type_list (integer_type_node,
9379 integer_type_node, V4SF_type_node,
9380 V4SF_type_node, NULL_TREE);
9381 tree v4si_ftype_v4si
9382 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9383 tree v8hi_ftype_v8hi
9384 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9385 tree v16qi_ftype_v16qi
9386 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9387 tree v4sf_ftype_v4sf
9388 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9389 tree void_ftype_pcvoid_int_int
a3170dc6 9390 = build_function_type_list (void_type_node,
0dbc3651 9391 pcvoid_type_node, integer_type_node,
8bb418a3 9392 integer_type_node, NULL_TREE);
8bb418a3 9393
9394 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9395 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9396 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9397 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9398 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9399 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9400 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9401 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9402 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9403 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9404 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9405 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9406 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9407 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9408 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9409 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
9410 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9411 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9412 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9413 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
9414 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9415 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9416 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9417 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9418 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9419 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9420 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9421 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9422 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9423 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9424 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9425 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
9426 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9427 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9428 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9429 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9430 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9431 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9432 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9433 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9434 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9435 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9436 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9437 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9438 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9439 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9440
9441 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9442
9443 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9444 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9445 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9446 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9447 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9448 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9449 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9450 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9451 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9452 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9453
a3170dc6 9454 /* Add the DST variants. */
586de218 9455 d = bdesc_dst;
a3170dc6 9456 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9457 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
9458
9459 /* Initialize the predicates. */
586de218 9460 dp = bdesc_altivec_preds;
9461 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9462 {
9463 enum machine_mode mode1;
9464 tree type;
9465 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9466 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9467
9468 if (is_overloaded)
9469 mode1 = VOIDmode;
9470 else
9471 mode1 = insn_data[dp->icode].operand[1].mode;
9472
9473 switch (mode1)
9474 {
9475 case VOIDmode:
9476 type = int_ftype_int_opaque_opaque;
9477 break;
9478 case V4SImode:
9479 type = int_ftype_int_v4si_v4si;
9480 break;
9481 case V8HImode:
9482 type = int_ftype_int_v8hi_v8hi;
9483 break;
9484 case V16QImode:
9485 type = int_ftype_int_v16qi_v16qi;
9486 break;
9487 case V4SFmode:
9488 type = int_ftype_int_v4sf_v4sf;
9489 break;
9490 default:
37409796 9491 gcc_unreachable ();
a3170dc6 9492 }
f676971a 9493
9494 def_builtin (dp->mask, dp->name, type, dp->code);
9495 }
9496
9497 /* Initialize the abs* operators. */
586de218 9498 d = bdesc_abs;
9499 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9500 {
9501 enum machine_mode mode0;
9502 tree type;
9503
9504 mode0 = insn_data[d->icode].operand[0].mode;
9505
9506 switch (mode0)
9507 {
9508 case V4SImode:
9509 type = v4si_ftype_v4si;
9510 break;
9511 case V8HImode:
9512 type = v8hi_ftype_v8hi;
9513 break;
9514 case V16QImode:
9515 type = v16qi_ftype_v16qi;
9516 break;
9517 case V4SFmode:
9518 type = v4sf_ftype_v4sf;
9519 break;
9520 default:
37409796 9521 gcc_unreachable ();
a3170dc6 9522 }
f676971a 9523
9524 def_builtin (d->mask, d->name, type, d->code);
9525 }
7ccf35ed 9526
9527 if (TARGET_ALTIVEC)
9528 {
9529 tree decl;
9530
9531 /* Initialize target builtin that implements
9532 targetm.vectorize.builtin_mask_for_load. */
9533
9534 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9535 v16qi_ftype_long_pcvoid,
9536 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
9537 BUILT_IN_MD, NULL, NULL_TREE);
9538 TREE_READONLY (decl) = 1;
9539 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9540 altivec_builtin_mask_for_load = decl;
13c62176 9541 }
9542
9543 /* Access to the vec_init patterns. */
9544 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9545 integer_type_node, integer_type_node,
9546 integer_type_node, NULL_TREE);
9547 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9548 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9549
9550 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9551 short_integer_type_node,
9552 short_integer_type_node,
9553 short_integer_type_node,
9554 short_integer_type_node,
9555 short_integer_type_node,
9556 short_integer_type_node,
9557 short_integer_type_node, NULL_TREE);
9558 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9559 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9560
9561 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9562 char_type_node, char_type_node,
9563 char_type_node, char_type_node,
9564 char_type_node, char_type_node,
9565 char_type_node, char_type_node,
9566 char_type_node, char_type_node,
9567 char_type_node, char_type_node,
9568 char_type_node, char_type_node,
9569 char_type_node, NULL_TREE);
9570 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9571 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9572
9573 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9574 float_type_node, float_type_node,
9575 float_type_node, NULL_TREE);
9576 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9577 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
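  /* Illustrative usage, not part of the original source: with the
     signatures above, a vector can be built element-wise, e.g.

       vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);
       vector float f = __builtin_vec_init_v4sf (1.0f, 2.0f, 3.0f, 4.0f);

     (assuming the usual -maltivec vector keyword support).  */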
9578
9579 /* Access to the vec_set patterns. */
9580 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9581 intSI_type_node,
9582 integer_type_node, NULL_TREE);
9583 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9584 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9585
9586 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9587 intHI_type_node,
9588 integer_type_node, NULL_TREE);
9589 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9590 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9591
9592 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9593 intQI_type_node,
9594 integer_type_node, NULL_TREE);
9595 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9596 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9597
9598 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9599 float_type_node,
9600 integer_type_node, NULL_TREE);
9601 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9602 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9603
9604 /* Access to the vec_extract patterns. */
9605 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9606 integer_type_node, NULL_TREE);
9607 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9608 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9609
9610 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9611 integer_type_node, NULL_TREE);
9612 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9613 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9614
9615 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9616 integer_type_node, NULL_TREE);
9617 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9618 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9619
9620 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9621 integer_type_node, NULL_TREE);
9622 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9623 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
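  /* Illustrative usage, not part of the original source: the extract
     builtins take a vector and an element index, e.g.

       int elt = __builtin_vec_ext_v4si (v, 2);
       float fe = __builtin_vec_ext_v4sf (f, 0);  */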
9624}
9625
9626static void
863d938c 9627rs6000_common_init_builtins (void)
a3170dc6 9628{
586de218 9629 const struct builtin_description *d;
9630 size_t i;
9631
9632 tree v2sf_ftype_v2sf_v2sf_v2sf
9633 = build_function_type_list (V2SF_type_node,
9634 V2SF_type_node, V2SF_type_node,
9635 V2SF_type_node, NULL_TREE);
9636
9637 tree v4sf_ftype_v4sf_v4sf_v16qi
9638 = build_function_type_list (V4SF_type_node,
9639 V4SF_type_node, V4SF_type_node,
9640 V16QI_type_node, NULL_TREE);
9641 tree v4si_ftype_v4si_v4si_v16qi
9642 = build_function_type_list (V4SI_type_node,
9643 V4SI_type_node, V4SI_type_node,
9644 V16QI_type_node, NULL_TREE);
9645 tree v8hi_ftype_v8hi_v8hi_v16qi
9646 = build_function_type_list (V8HI_type_node,
9647 V8HI_type_node, V8HI_type_node,
9648 V16QI_type_node, NULL_TREE);
9649 tree v16qi_ftype_v16qi_v16qi_v16qi
9650 = build_function_type_list (V16QI_type_node,
9651 V16QI_type_node, V16QI_type_node,
9652 V16QI_type_node, NULL_TREE);
9653 tree v4si_ftype_int
9654 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9655 tree v8hi_ftype_int
9656 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9657 tree v16qi_ftype_int
9658 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
9659 tree v8hi_ftype_v16qi
9660 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9661 tree v4sf_ftype_v4sf
9662 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9663
9664 tree v2si_ftype_v2si_v2si
9665 = build_function_type_list (opaque_V2SI_type_node,
9666 opaque_V2SI_type_node,
9667 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9668
96038623 9669 tree v2sf_ftype_v2sf_v2sf_spe
9670 = build_function_type_list (opaque_V2SF_type_node,
9671 opaque_V2SF_type_node,
9672 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 9673
9674 tree v2sf_ftype_v2sf_v2sf
9675 = build_function_type_list (V2SF_type_node,
9676 V2SF_type_node,
9677 V2SF_type_node, NULL_TREE);
9678
9679
a3170dc6 9680 tree v2si_ftype_int_int
2abe3e28 9681 = build_function_type_list (opaque_V2SI_type_node,
9682 integer_type_node, integer_type_node,
9683 NULL_TREE);
9684
9685 tree opaque_ftype_opaque
9686 = build_function_type_list (opaque_V4SI_type_node,
9687 opaque_V4SI_type_node, NULL_TREE);
9688
a3170dc6 9689 tree v2si_ftype_v2si
9690 = build_function_type_list (opaque_V2SI_type_node,
9691 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 9692
96038623 9693 tree v2sf_ftype_v2sf_spe
9694 = build_function_type_list (opaque_V2SF_type_node,
9695 opaque_V2SF_type_node, NULL_TREE);
f676971a 9696
9697 tree v2sf_ftype_v2sf
9698 = build_function_type_list (V2SF_type_node,
9699 V2SF_type_node, NULL_TREE);
9700
a3170dc6 9701 tree v2sf_ftype_v2si
9702 = build_function_type_list (opaque_V2SF_type_node,
9703 opaque_V2SI_type_node, NULL_TREE);
9704
9705 tree v2si_ftype_v2sf
9706 = build_function_type_list (opaque_V2SI_type_node,
9707 opaque_V2SF_type_node, NULL_TREE);
9708
9709 tree v2si_ftype_v2si_char
9710 = build_function_type_list (opaque_V2SI_type_node,
9711 opaque_V2SI_type_node,
9712 char_type_node, NULL_TREE);
9713
9714 tree v2si_ftype_int_char
2abe3e28 9715 = build_function_type_list (opaque_V2SI_type_node,
9716 integer_type_node, char_type_node, NULL_TREE);
9717
9718 tree v2si_ftype_char
9719 = build_function_type_list (opaque_V2SI_type_node,
9720 char_type_node, NULL_TREE);
9721
9722 tree int_ftype_int_int
9723 = build_function_type_list (integer_type_node,
9724 integer_type_node, integer_type_node,
9725 NULL_TREE);
95385cbb 9726
9727 tree opaque_ftype_opaque_opaque
9728 = build_function_type_list (opaque_V4SI_type_node,
9729 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9730 tree v4si_ftype_v4si_v4si
9731 = build_function_type_list (V4SI_type_node,
9732 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9733 tree v4sf_ftype_v4si_int
b4de2f7d 9734 = build_function_type_list (V4SF_type_node,
9735 V4SI_type_node, integer_type_node, NULL_TREE);
9736 tree v4si_ftype_v4sf_int
b4de2f7d 9737 = build_function_type_list (V4SI_type_node,
9738 V4SF_type_node, integer_type_node, NULL_TREE);
9739 tree v4si_ftype_v4si_int
b4de2f7d 9740 = build_function_type_list (V4SI_type_node,
9741 V4SI_type_node, integer_type_node, NULL_TREE);
9742 tree v8hi_ftype_v8hi_int
b4de2f7d 9743 = build_function_type_list (V8HI_type_node,
9744 V8HI_type_node, integer_type_node, NULL_TREE);
9745 tree v16qi_ftype_v16qi_int
b4de2f7d 9746 = build_function_type_list (V16QI_type_node,
9747 V16QI_type_node, integer_type_node, NULL_TREE);
9748 tree v16qi_ftype_v16qi_v16qi_int
9749 = build_function_type_list (V16QI_type_node,
9750 V16QI_type_node, V16QI_type_node,
9751 integer_type_node, NULL_TREE);
9752 tree v8hi_ftype_v8hi_v8hi_int
9753 = build_function_type_list (V8HI_type_node,
9754 V8HI_type_node, V8HI_type_node,
9755 integer_type_node, NULL_TREE);
9756 tree v4si_ftype_v4si_v4si_int
9757 = build_function_type_list (V4SI_type_node,
9758 V4SI_type_node, V4SI_type_node,
9759 integer_type_node, NULL_TREE);
9760 tree v4sf_ftype_v4sf_v4sf_int
9761 = build_function_type_list (V4SF_type_node,
9762 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9763 integer_type_node, NULL_TREE);
0ac081f6 9764 tree v4sf_ftype_v4sf_v4sf
9765 = build_function_type_list (V4SF_type_node,
9766 V4SF_type_node, V4SF_type_node, NULL_TREE);
9767 tree opaque_ftype_opaque_opaque_opaque
9768 = build_function_type_list (opaque_V4SI_type_node,
9769 opaque_V4SI_type_node, opaque_V4SI_type_node,
9770 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9771 tree v4sf_ftype_v4sf_v4sf_v4si
9772 = build_function_type_list (V4SF_type_node,
9773 V4SF_type_node, V4SF_type_node,
9774 V4SI_type_node, NULL_TREE);
2212663f 9775 tree v4sf_ftype_v4sf_v4sf_v4sf
9776 = build_function_type_list (V4SF_type_node,
9777 V4SF_type_node, V4SF_type_node,
9778 V4SF_type_node, NULL_TREE);
f676971a 9779 tree v4si_ftype_v4si_v4si_v4si
9780 = build_function_type_list (V4SI_type_node,
9781 V4SI_type_node, V4SI_type_node,
9782 V4SI_type_node, NULL_TREE);
0ac081f6 9783 tree v8hi_ftype_v8hi_v8hi
9784 = build_function_type_list (V8HI_type_node,
9785 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9786 tree v8hi_ftype_v8hi_v8hi_v8hi
9787 = build_function_type_list (V8HI_type_node,
9788 V8HI_type_node, V8HI_type_node,
9789 V8HI_type_node, NULL_TREE);
c4ad648e 9790 tree v4si_ftype_v8hi_v8hi_v4si
9791 = build_function_type_list (V4SI_type_node,
9792 V8HI_type_node, V8HI_type_node,
9793 V4SI_type_node, NULL_TREE);
c4ad648e 9794 tree v4si_ftype_v16qi_v16qi_v4si
9795 = build_function_type_list (V4SI_type_node,
9796 V16QI_type_node, V16QI_type_node,
9797 V4SI_type_node, NULL_TREE);
0ac081f6 9798 tree v16qi_ftype_v16qi_v16qi
9799 = build_function_type_list (V16QI_type_node,
9800 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9801 tree v4si_ftype_v4sf_v4sf
9802 = build_function_type_list (V4SI_type_node,
9803 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9804 tree v8hi_ftype_v16qi_v16qi
9805 = build_function_type_list (V8HI_type_node,
9806 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9807 tree v4si_ftype_v8hi_v8hi
9808 = build_function_type_list (V4SI_type_node,
9809 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9810 tree v8hi_ftype_v4si_v4si
9811 = build_function_type_list (V8HI_type_node,
9812 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9813 tree v16qi_ftype_v8hi_v8hi
9814 = build_function_type_list (V16QI_type_node,
9815 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9816 tree v4si_ftype_v16qi_v4si
9817 = build_function_type_list (V4SI_type_node,
9818 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9819 tree v4si_ftype_v16qi_v16qi
9820 = build_function_type_list (V4SI_type_node,
9821 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9822 tree v4si_ftype_v8hi_v4si
9823 = build_function_type_list (V4SI_type_node,
9824 V8HI_type_node, V4SI_type_node, NULL_TREE);
9825 tree v4si_ftype_v8hi
9826 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9827 tree int_ftype_v4si_v4si
9828 = build_function_type_list (integer_type_node,
9829 V4SI_type_node, V4SI_type_node, NULL_TREE);
9830 tree int_ftype_v4sf_v4sf
9831 = build_function_type_list (integer_type_node,
9832 V4SF_type_node, V4SF_type_node, NULL_TREE);
9833 tree int_ftype_v16qi_v16qi
9834 = build_function_type_list (integer_type_node,
9835 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9836 tree int_ftype_v8hi_v8hi
9837 = build_function_type_list (integer_type_node,
9838 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9839
6f317ef3 9840 /* Add the simple ternary operators. */
586de218 9841 d = bdesc_3arg;
ca7558fc 9842 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9843 {
9844 enum machine_mode mode0, mode1, mode2, mode3;
9845 tree type;
9846 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9847 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9848
9849 if (is_overloaded)
9850 {
9851 mode0 = VOIDmode;
9852 mode1 = VOIDmode;
9853 mode2 = VOIDmode;
9854 mode3 = VOIDmode;
9855 }
9856 else
9857 {
9858 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9859 continue;
f676971a 9860
9861 mode0 = insn_data[d->icode].operand[0].mode;
9862 mode1 = insn_data[d->icode].operand[1].mode;
9863 mode2 = insn_data[d->icode].operand[2].mode;
9864 mode3 = insn_data[d->icode].operand[3].mode;
9865 }
bb8df8a6 9866
9867 /* When all four are of the same mode. */
9868 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9869 {
9870 switch (mode0)
9871 {
9872 case VOIDmode:
9873 type = opaque_ftype_opaque_opaque_opaque;
9874 break;
9875 case V4SImode:
9876 type = v4si_ftype_v4si_v4si_v4si;
9877 break;
9878 case V4SFmode:
9879 type = v4sf_ftype_v4sf_v4sf_v4sf;
9880 break;
9881 case V8HImode:
9882 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9883 break;
9884 case V16QImode:
9885 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9886 break;
9887 case V2SFmode:
9888 type = v2sf_ftype_v2sf_v2sf_v2sf;
9889 break;
2212663f 9890 default:
37409796 9891 gcc_unreachable ();
9892 }
9893 }
9894 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 9895 {
9896 switch (mode0)
9897 {
9898 case V4SImode:
9899 type = v4si_ftype_v4si_v4si_v16qi;
9900 break;
9901 case V4SFmode:
9902 type = v4sf_ftype_v4sf_v4sf_v16qi;
9903 break;
9904 case V8HImode:
9905 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 9906 break;
9907 case V16QImode:
9908 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9909 break;
2212663f 9910 default:
37409796 9911 gcc_unreachable ();
9912 }
9913 }
f676971a 9914 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 9915 && mode3 == V4SImode)
24408032 9916 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 9917 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 9918 && mode3 == V4SImode)
24408032 9919 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 9920 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 9921 && mode3 == V4SImode)
9922 type = v4sf_ftype_v4sf_v4sf_v4si;
9923
a7b376ee 9924 /* vchar, vchar, vchar, 4-bit literal. */
9925 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9926 && mode3 == QImode)
b9e4e5d1 9927 type = v16qi_ftype_v16qi_v16qi_int;
24408032 9928
a7b376ee 9929 /* vshort, vshort, vshort, 4-bit literal. */
9930 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9931 && mode3 == QImode)
b9e4e5d1 9932 type = v8hi_ftype_v8hi_v8hi_int;
24408032 9933
a7b376ee 9934 /* vint, vint, vint, 4-bit literal. */
9935 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9936 && mode3 == QImode)
b9e4e5d1 9937 type = v4si_ftype_v4si_v4si_int;
24408032 9938
a7b376ee 9939 /* vfloat, vfloat, vfloat, 4-bit literal. */
9940 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9941 && mode3 == QImode)
b9e4e5d1 9942 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9943
2212663f 9944 else
37409796 9945 gcc_unreachable ();
9946
9947 def_builtin (d->mask, d->name, type, d->code);
9948 }
9949
0ac081f6 9950 /* Add the simple binary operators. */
00b960c7 9951 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9952 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9953 {
9954 enum machine_mode mode0, mode1, mode2;
9955 tree type;
9956 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9957 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9958
9959 if (is_overloaded)
9960 {
9961 mode0 = VOIDmode;
9962 mode1 = VOIDmode;
9963 mode2 = VOIDmode;
9964 }
9965 else
bb8df8a6 9966 {
9967 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9968 continue;
f676971a 9969
9970 mode0 = insn_data[d->icode].operand[0].mode;
9971 mode1 = insn_data[d->icode].operand[1].mode;
9972 mode2 = insn_data[d->icode].operand[2].mode;
9973 }
9974
9975 /* When all three operands are of the same mode. */
9976 if (mode0 == mode1 && mode1 == mode2)
9977 {
9978 switch (mode0)
9979 {
9980 case VOIDmode:
9981 type = opaque_ftype_opaque_opaque;
9982 break;
9983 case V4SFmode:
9984 type = v4sf_ftype_v4sf_v4sf;
9985 break;
9986 case V4SImode:
9987 type = v4si_ftype_v4si_v4si;
9988 break;
9989 case V16QImode:
9990 type = v16qi_ftype_v16qi_v16qi;
9991 break;
9992 case V8HImode:
9993 type = v8hi_ftype_v8hi_v8hi;
9994 break;
a3170dc6
AH
9995 case V2SImode:
9996 type = v2si_ftype_v2si_v2si;
9997 break;
96038623
DE
9998 case V2SFmode:
9999 if (TARGET_PAIRED_FLOAT)
10000 type = v2sf_ftype_v2sf_v2sf;
10001 else
10002 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10003 break;
10004 case SImode:
10005 type = int_ftype_int_int;
10006 break;
0ac081f6 10007 default:
37409796 10008 gcc_unreachable ();
0ac081f6
AH
10009 }
10010 }
10011
10012 /* A few other combos we really don't want to do manually. */
10013
10014 /* vint, vfloat, vfloat. */
10015 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10016 type = v4si_ftype_v4sf_v4sf;
10017
10018 /* vshort, vchar, vchar. */
10019 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10020 type = v8hi_ftype_v16qi_v16qi;
10021
10022 /* vint, vshort, vshort. */
10023 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10024 type = v4si_ftype_v8hi_v8hi;
10025
10026 /* vshort, vint, vint. */
10027 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10028 type = v8hi_ftype_v4si_v4si;
10029
10030 /* vchar, vshort, vshort. */
10031 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10032 type = v16qi_ftype_v8hi_v8hi;
10033
10034 /* vint, vchar, vint. */
10035 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10036 type = v4si_ftype_v16qi_v4si;
10037
fa066a23
AH
10038 /* vint, vchar, vchar. */
10039 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10040 type = v4si_ftype_v16qi_v16qi;
10041
0ac081f6
AH
10042 /* vint, vshort, vint. */
10043 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10044 type = v4si_ftype_v8hi_v4si;
f676971a 10045
a7b376ee 10046 /* vint, vint, 5-bit literal. */
2212663f 10047 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10048 type = v4si_ftype_v4si_int;
f676971a 10049
a7b376ee 10050 /* vshort, vshort, 5-bit literal. */
2212663f 10051 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10052 type = v8hi_ftype_v8hi_int;
f676971a 10053
a7b376ee 10054 /* vchar, vchar, 5-bit literal. */
2212663f 10055 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10056 type = v16qi_ftype_v16qi_int;
0ac081f6 10057
a7b376ee 10058 /* vfloat, vint, 5-bit literal. */
617e0e1d 10059 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10060 type = v4sf_ftype_v4si_int;
f676971a 10061
a7b376ee 10062 /* vint, vfloat, 5-bit literal. */
617e0e1d 10063 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10064 type = v4si_ftype_v4sf_int;
617e0e1d 10065
a3170dc6
AH
10066 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10067 type = v2si_ftype_int_int;
10068
10069 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10070 type = v2si_ftype_v2si_char;
10071
10072 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10073 type = v2si_ftype_int_char;
10074
37409796 10075 else
0ac081f6 10076 {
37409796
NS
10077 /* int, x, x. */
10078 gcc_assert (mode0 == SImode);
0ac081f6
AH
10079 switch (mode1)
10080 {
10081 case V4SImode:
10082 type = int_ftype_v4si_v4si;
10083 break;
10084 case V4SFmode:
10085 type = int_ftype_v4sf_v4sf;
10086 break;
10087 case V16QImode:
10088 type = int_ftype_v16qi_v16qi;
10089 break;
10090 case V8HImode:
10091 type = int_ftype_v8hi_v8hi;
10092 break;
10093 default:
37409796 10094 gcc_unreachable ();
0ac081f6
AH
10095 }
10096 }
10097
2212663f
DB
10098 def_builtin (d->mask, d->name, type, d->code);
10099 }
24408032 10100
2212663f
DB
10101 /* Add the simple unary operators. */
10102 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10103 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10104 {
10105 enum machine_mode mode0, mode1;
10106 tree type;
58646b77
PB
10107 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10108 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10109
10110 if (is_overloaded)
10111 {
10112 mode0 = VOIDmode;
10113 mode1 = VOIDmode;
10114 }
10115 else
10116 {
10117 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10118 continue;
bb8df8a6 10119
58646b77
PB
10120 mode0 = insn_data[d->icode].operand[0].mode;
10121 mode1 = insn_data[d->icode].operand[1].mode;
10122 }
2212663f
DB
10123
10124 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10125 type = v4si_ftype_int;
2212663f 10126 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10127 type = v8hi_ftype_int;
2212663f 10128 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10129 type = v16qi_ftype_int;
58646b77
PB
10130 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10131 type = opaque_ftype_opaque;
617e0e1d
DB
10132 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10133 type = v4sf_ftype_v4sf;
20e26713
AH
10134 else if (mode0 == V8HImode && mode1 == V16QImode)
10135 type = v8hi_ftype_v16qi;
10136 else if (mode0 == V4SImode && mode1 == V8HImode)
10137 type = v4si_ftype_v8hi;
a3170dc6
AH
10138 else if (mode0 == V2SImode && mode1 == V2SImode)
10139 type = v2si_ftype_v2si;
10140 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10141 {
10142 if (TARGET_PAIRED_FLOAT)
10143 type = v2sf_ftype_v2sf;
10144 else
10145 type = v2sf_ftype_v2sf_spe;
10146 }
a3170dc6
AH
10147 else if (mode0 == V2SFmode && mode1 == V2SImode)
10148 type = v2sf_ftype_v2si;
10149 else if (mode0 == V2SImode && mode1 == V2SFmode)
10150 type = v2si_ftype_v2sf;
10151 else if (mode0 == V2SImode && mode1 == QImode)
10152 type = v2si_ftype_char;
2212663f 10153 else
37409796 10154 gcc_unreachable ();
2212663f 10155
0ac081f6
AH
10156 def_builtin (d->mask, d->name, type, d->code);
10157 }
10158}
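/* Illustrative sketch, not part of rs6000.c: the loops above derive each
   builtin's C prototype from the machine modes of its insn pattern's
   operands.  This standalone helper reduces the same idea to strings so
   it can be read (and compiled) in isolation; the mode and type spellings
   are local stand-ins, not the compiler's tree nodes.  */
#include <stdio.h>
#include <string.h>

static const char *
binary_builtin_type (const char *m0, const char *m1, const char *m2)
{
  /* All three operands in the same mode, as in the common cases above.  */
  if (!strcmp (m0, m1) && !strcmp (m1, m2))
    {
      if (!strcmp (m0, "V4SF"))  return "v4sf (*) (v4sf, v4sf)";
      if (!strcmp (m0, "V4SI"))  return "v4si (*) (v4si, v4si)";
      if (!strcmp (m0, "V8HI"))  return "v8hi (*) (v8hi, v8hi)";
      if (!strcmp (m0, "V16QI")) return "v16qi (*) (v16qi, v16qi)";
    }
  /* One of the mixed-mode combinations handled explicitly above.  */
  if (!strcmp (m0, "V8HI") && !strcmp (m1, "V16QI") && !strcmp (m2, "V16QI"))
    return "v8hi (*) (v16qi, v16qi)";
  return "unhandled";
}

int
main (void)
{
  printf ("%s\n", binary_builtin_type ("V4SI", "V4SI", "V4SI"));
  printf ("%s\n", binary_builtin_type ("V8HI", "V16QI", "V16QI"));
  return 0;
}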
10159
c15c90bb
ZW
10160static void
10161rs6000_init_libfuncs (void)
10162{
602ea4d3
JJ
10163 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10164 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10165 {
602ea4d3
JJ
10166 /* AIX library routines for float->int conversion. */
10167 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10168 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10169 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10170 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10171 }
c15c90bb 10172
602ea4d3 10173 if (!TARGET_IEEEQUAD)
98c41d98 10174 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10175 if (!TARGET_XL_COMPAT)
10176 {
10177 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10178 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10179 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10180 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10181
17caeff2 10182 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10183 {
10184 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10185 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10186 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10187 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10188 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10189 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10190 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10191
10192 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10193 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10194 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10195 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10196 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10197 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10198 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10199 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10200 }
b26941b4
JM
10201
10202 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10203 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10204 }
10205 else
10206 {
10207 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10208 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10209 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10210 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10211 }
c9034561 10212 else
c15c90bb 10213 {
c9034561 10214 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10215
10216 set_optab_libfunc (add_optab, TFmode, "_q_add");
10217 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10218 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10219 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10220 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10221 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10222 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10223
c9034561
ZW
10224 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10225 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10226 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10227 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10228 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10229 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10230
85363ca0
ZW
10231 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10232 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10233 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10234 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10235 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10236 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10237 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10238 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10239 }
10240}
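/* Illustrative sketch, not part of rs6000.c: on targets where long double
   is the 128-bit IBM format and !TARGET_XL_COMPAT holds, ordinary long
   double arithmetic like the expression below is compiled into calls to
   the __gcc_qmul/__gcc_qadd routines registered above rather than inline
   floating-point instructions.  */
#include <stdio.h>

int
main (void)
{
  long double a = 1.0L, b = 3.0L;
  long double c = a * b + a;   /* __gcc_qmul, then __gcc_qadd, on such targets */
  printf ("%Lf\n", c);
  return 0;
}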
fba73eb1
DE
10241
10242\f
10243/* Expand a block clear operation, and return 1 if successful. Return 0
10244 if we should let the compiler generate normal code.
10245
10246 operands[0] is the destination
10247 operands[1] is the length
57e84f18 10248 operands[3] is the alignment */
fba73eb1
DE
10249
10250int
10251expand_block_clear (rtx operands[])
10252{
10253 rtx orig_dest = operands[0];
10254 rtx bytes_rtx = operands[1];
57e84f18 10255 rtx align_rtx = operands[3];
5514620a
GK
10256 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10257 HOST_WIDE_INT align;
10258 HOST_WIDE_INT bytes;
fba73eb1
DE
10259 int offset;
10260 int clear_bytes;
5514620a 10261 int clear_step;
fba73eb1
DE
10262
 10263 /* If this is not a fixed size clear, just call memset. */
10264 if (! constp)
10265 return 0;
10266
37409796
NS
10267 /* This must be a fixed size alignment */
10268 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10269 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10270
10271 /* Anything to clear? */
10272 bytes = INTVAL (bytes_rtx);
10273 if (bytes <= 0)
10274 return 1;
10275
5514620a
GK
10276 /* Use the builtin memset after a point, to avoid huge code bloat.
10277 When optimize_size, avoid any significant code bloat; calling
10278 memset is about 4 instructions, so allow for one instruction to
10279 load zero and three to do clearing. */
10280 if (TARGET_ALTIVEC && align >= 128)
10281 clear_step = 16;
10282 else if (TARGET_POWERPC64 && align >= 32)
10283 clear_step = 8;
21d818ff
NF
10284 else if (TARGET_SPE && align >= 64)
10285 clear_step = 8;
5514620a
GK
10286 else
10287 clear_step = 4;
fba73eb1 10288
5514620a
GK
10289 if (optimize_size && bytes > 3 * clear_step)
10290 return 0;
10291 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10292 return 0;
10293
10294 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10295 {
fba73eb1
DE
10296 enum machine_mode mode = BLKmode;
10297 rtx dest;
f676971a 10298
5514620a
GK
10299 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10300 {
10301 clear_bytes = 16;
10302 mode = V4SImode;
10303 }
21d818ff
NF
10304 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10305 {
10306 clear_bytes = 8;
10307 mode = V2SImode;
10308 }
5514620a 10309 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10310 /* 64-bit loads and stores require word-aligned
10311 displacements. */
10312 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10313 {
10314 clear_bytes = 8;
10315 mode = DImode;
fba73eb1 10316 }
5514620a 10317 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10318 { /* move 4 bytes */
10319 clear_bytes = 4;
10320 mode = SImode;
fba73eb1 10321 }
ec53fc93 10322 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10323 { /* move 2 bytes */
10324 clear_bytes = 2;
10325 mode = HImode;
fba73eb1
DE
10326 }
10327 else /* move 1 byte at a time */
10328 {
10329 clear_bytes = 1;
10330 mode = QImode;
fba73eb1 10331 }
f676971a 10332
fba73eb1 10333 dest = adjust_address (orig_dest, mode, offset);
f676971a 10334
5514620a 10335 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10336 }
10337
10338 return 1;
10339}
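/* Illustrative sketch, not part of rs6000.c: the chunk-size selection in
   expand_block_clear, mirrored over plain integers so it can be exercised
   outside the compiler.  pick_clear_chunk and its boolean feature flags
   are hypothetical stand-ins for the target macros used above, and the
   optimize_size / clear_step cutoffs are deliberately left out.  */
#include <stdio.h>

static int
pick_clear_chunk (long bytes, int align_bits, int have_altivec,
                  int have_64bit, int strict_align)
{
  if (have_altivec && bytes >= 16 && align_bits >= 128)
    return 16;
  if (have_64bit && bytes >= 8
      && (align_bits >= 64 || (!strict_align && align_bits >= 32)))
    return 8;
  if (bytes >= 4 && (align_bits >= 32 || !strict_align))
    return 4;
  if (bytes >= 2 && (align_bits >= 16 || !strict_align))
    return 2;
  return 1;
}

int
main (void)
{
  long bytes = 37;
  int offset = 0;
  while (bytes > 0)
    {
      int chunk = pick_clear_chunk (bytes, 128, 1, 1, 0);
      printf ("clear %d bytes at offset %d\n", chunk, offset);
      offset += chunk;
      bytes -= chunk;
    }
  return 0;
}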
10340
35aff10b 10341\f
7e69e155
MM
10342/* Expand a block move operation, and return 1 if successful. Return 0
10343 if we should let the compiler generate normal code.
10344
10345 operands[0] is the destination
10346 operands[1] is the source
10347 operands[2] is the length
10348 operands[3] is the alignment */
10349
3933e0e1
MM
10350#define MAX_MOVE_REG 4
10351
7e69e155 10352int
a2369ed3 10353expand_block_move (rtx operands[])
7e69e155 10354{
b6c9286a
MM
10355 rtx orig_dest = operands[0];
10356 rtx orig_src = operands[1];
7e69e155 10357 rtx bytes_rtx = operands[2];
7e69e155 10358 rtx align_rtx = operands[3];
3933e0e1 10359 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10360 int align;
3933e0e1
MM
10361 int bytes;
10362 int offset;
7e69e155 10363 int move_bytes;
cabfd258
GK
10364 rtx stores[MAX_MOVE_REG];
10365 int num_reg = 0;
7e69e155 10366
3933e0e1 10367 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10368 if (! constp)
3933e0e1
MM
10369 return 0;
10370
37409796
NS
10371 /* This must be a fixed size alignment */
10372 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10373 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10374
7e69e155 10375 /* Anything to move? */
3933e0e1
MM
10376 bytes = INTVAL (bytes_rtx);
10377 if (bytes <= 0)
7e69e155
MM
10378 return 1;
10379
ea9982a8 10380 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10381 reg_parm_stack_space. */
ea9982a8 10382 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10383 return 0;
10384
cabfd258 10385 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10386 {
cabfd258 10387 union {
70128ad9 10388 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10389 rtx (*mov) (rtx, rtx);
cabfd258
GK
10390 } gen_func;
10391 enum machine_mode mode = BLKmode;
10392 rtx src, dest;
f676971a 10393
5514620a
GK
10394 /* Altivec first, since it will be faster than a string move
10395 when it applies, and usually not significantly larger. */
10396 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10397 {
10398 move_bytes = 16;
10399 mode = V4SImode;
10400 gen_func.mov = gen_movv4si;
10401 }
21d818ff
NF
10402 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10403 {
10404 move_bytes = 8;
10405 mode = V2SImode;
10406 gen_func.mov = gen_movv2si;
10407 }
5514620a 10408 else if (TARGET_STRING
cabfd258
GK
10409 && bytes > 24 /* move up to 32 bytes at a time */
10410 && ! fixed_regs[5]
10411 && ! fixed_regs[6]
10412 && ! fixed_regs[7]
10413 && ! fixed_regs[8]
10414 && ! fixed_regs[9]
10415 && ! fixed_regs[10]
10416 && ! fixed_regs[11]
10417 && ! fixed_regs[12])
7e69e155 10418 {
cabfd258 10419 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10420 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10421 }
10422 else if (TARGET_STRING
10423 && bytes > 16 /* move up to 24 bytes at a time */
10424 && ! fixed_regs[5]
10425 && ! fixed_regs[6]
10426 && ! fixed_regs[7]
10427 && ! fixed_regs[8]
10428 && ! fixed_regs[9]
10429 && ! fixed_regs[10])
10430 {
10431 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10432 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10433 }
10434 else if (TARGET_STRING
10435 && bytes > 8 /* move up to 16 bytes at a time */
10436 && ! fixed_regs[5]
10437 && ! fixed_regs[6]
10438 && ! fixed_regs[7]
10439 && ! fixed_regs[8])
10440 {
10441 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10442 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10443 }
10444 else if (bytes >= 8 && TARGET_POWERPC64
10445 /* 64-bit loads and stores require word-aligned
10446 displacements. */
fba73eb1 10447 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10448 {
10449 move_bytes = 8;
10450 mode = DImode;
10451 gen_func.mov = gen_movdi;
10452 }
10453 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10454 { /* move up to 8 bytes at a time */
10455 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10456 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10457 }
cd7d9ca4 10458 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10459 { /* move 4 bytes */
10460 move_bytes = 4;
10461 mode = SImode;
10462 gen_func.mov = gen_movsi;
10463 }
ec53fc93 10464 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10465 { /* move 2 bytes */
10466 move_bytes = 2;
10467 mode = HImode;
10468 gen_func.mov = gen_movhi;
10469 }
10470 else if (TARGET_STRING && bytes > 1)
10471 { /* move up to 4 bytes at a time */
10472 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10473 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10474 }
10475 else /* move 1 byte at a time */
10476 {
10477 move_bytes = 1;
10478 mode = QImode;
10479 gen_func.mov = gen_movqi;
10480 }
f676971a 10481
cabfd258
GK
10482 src = adjust_address (orig_src, mode, offset);
10483 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10484
10485 if (mode != BLKmode)
cabfd258
GK
10486 {
10487 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10488
cabfd258
GK
10489 emit_insn ((*gen_func.mov) (tmp_reg, src));
10490 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10491 }
3933e0e1 10492
cabfd258
GK
10493 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10494 {
10495 int i;
10496 for (i = 0; i < num_reg; i++)
10497 emit_insn (stores[i]);
10498 num_reg = 0;
10499 }
35aff10b 10500
cabfd258 10501 if (mode == BLKmode)
7e69e155 10502 {
70128ad9 10503 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10504 patterns require zero offset. */
10505 if (!REG_P (XEXP (src, 0)))
b6c9286a 10506 {
cabfd258
GK
10507 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10508 src = replace_equiv_address (src, src_reg);
b6c9286a 10509 }
cabfd258 10510 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10511
cabfd258 10512 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10513 {
cabfd258
GK
10514 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10515 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10516 }
cabfd258 10517 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10518
70128ad9 10519 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10520 GEN_INT (move_bytes & 31),
10521 align_rtx));
7e69e155 10522 }
7e69e155
MM
10523 }
10524
10525 return 1;
10526}
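/* Illustrative sketch, not part of rs6000.c: the load-then-store batching
   that expand_block_move performs through its stores[] array, restated as
   a plain byte copy in groups of up to MAX_MOVE_REG temporaries.  Issuing
   all loads of a batch before any of its stores mirrors the order of the
   insns emitted above.  */
#include <stdio.h>

#define MAX_MOVE_REG 4

static void
chunked_copy (unsigned char *dest, const unsigned char *src, unsigned long n)
{
  unsigned char tmp[MAX_MOVE_REG];
  unsigned long off = 0;

  while (off < n)
    {
      unsigned long batch = n - off < MAX_MOVE_REG ? n - off : MAX_MOVE_REG;
      unsigned long i;
      for (i = 0; i < batch; i++)      /* all loads of the batch first...  */
        tmp[i] = src[off + i];
      for (i = 0; i < batch; i++)      /* ...then all of its stores */
        dest[off + i] = tmp[i];
      off += batch;
    }
}

int
main (void)
{
  unsigned char buf[16];
  chunked_copy (buf, (const unsigned char *) "block move test", 16);
  printf ("%s\n", buf);
  return 0;
}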
10527
d62294f5 10528\f
9caa3eb2
DE
10529/* Return a string to perform a load_multiple operation.
10530 operands[0] is the vector.
10531 operands[1] is the source address.
10532 operands[2] is the first destination register. */
10533
10534const char *
a2369ed3 10535rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10536{
10537 /* We have to handle the case where the pseudo used to contain the address
10538 is assigned to one of the output registers. */
10539 int i, j;
10540 int words = XVECLEN (operands[0], 0);
10541 rtx xop[10];
10542
10543 if (XVECLEN (operands[0], 0) == 1)
10544 return "{l|lwz} %2,0(%1)";
10545
10546 for (i = 0; i < words; i++)
10547 if (refers_to_regno_p (REGNO (operands[2]) + i,
10548 REGNO (operands[2]) + i + 1, operands[1], 0))
10549 {
10550 if (i == words-1)
10551 {
10552 xop[0] = GEN_INT (4 * (words-1));
10553 xop[1] = operands[1];
10554 xop[2] = operands[2];
10555 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10556 return "";
10557 }
10558 else if (i == 0)
10559 {
10560 xop[0] = GEN_INT (4 * (words-1));
10561 xop[1] = operands[1];
10562 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10563 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10564 return "";
10565 }
10566 else
10567 {
10568 for (j = 0; j < words; j++)
10569 if (j != i)
10570 {
10571 xop[0] = GEN_INT (j * 4);
10572 xop[1] = operands[1];
10573 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10574 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10575 }
10576 xop[0] = GEN_INT (i * 4);
10577 xop[1] = operands[1];
10578 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10579 return "";
10580 }
10581 }
10582
10583 return "{lsi|lswi} %2,%1,%N0";
10584}
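/* Illustrative sketch, not part of rs6000.c: the hazard that
   rs6000_output_load_multiple works around.  A load-multiple whose
   destination range includes its own address register must either load
   the address word last, step the pointer first and reload it last, or
   fall back to individual lwz's.  classify_lmw_overlap is a hypothetical
   helper that names the three branches of the function above.  */
#include <stdio.h>

enum lmw_strategy { LMW_PLAIN, LMW_ADDR_IS_LAST, LMW_ADDR_IS_FIRST, LMW_SPLIT };

static enum lmw_strategy
classify_lmw_overlap (int first_dest_reg, int words, int addr_reg)
{
  if (addr_reg < first_dest_reg || addr_reg >= first_dest_reg + words)
    return LMW_PLAIN;                  /* no overlap: plain lswi */
  if (addr_reg == first_dest_reg + words - 1)
    return LMW_ADDR_IS_LAST;           /* lswi the rest, load address word last */
  if (addr_reg == first_dest_reg)
    return LMW_ADDR_IS_FIRST;          /* bump pointer, reload it last */
  return LMW_SPLIT;                    /* emit individual lwz's */
}

int
main (void)
{
  printf ("%d\n", classify_lmw_overlap (3, 4, 9));   /* 0: no overlap */
  printf ("%d\n", classify_lmw_overlap (3, 4, 6));   /* 1: address is last word */
  return 0;
}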
10585
9878760c 10586\f
a4f6c312
SS
10587/* A validation routine: say whether CODE, a condition code, and MODE
10588 match. The other alternatives either don't make sense or should
10589 never be generated. */
39a10a29 10590
48d72335 10591void
a2369ed3 10592validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10593{
37409796
NS
10594 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10595 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10596 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10597
10598 /* These don't make sense. */
37409796
NS
10599 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10600 || mode != CCUNSmode);
39a10a29 10601
37409796
NS
10602 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10603 || mode == CCUNSmode);
39a10a29 10604
37409796
NS
10605 gcc_assert (mode == CCFPmode
10606 || (code != ORDERED && code != UNORDERED
10607 && code != UNEQ && code != LTGT
10608 && code != UNGT && code != UNLT
10609 && code != UNGE && code != UNLE));
f676971a
EC
10610
10611 /* These should never be generated except for
bc9ec0e0 10612 flag_finite_math_only. */
37409796
NS
10613 gcc_assert (mode != CCFPmode
10614 || flag_finite_math_only
10615 || (code != LE && code != GE
10616 && code != UNEQ && code != LTGT
10617 && code != UNGT && code != UNLT));
39a10a29
GK
10618
10619 /* These are invalid; the information is not there. */
37409796 10620 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10621}
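/* Illustrative sketch, not part of rs6000.c: the pairing rules that
   validate_condition_mode asserts, restated as a boolean predicate over
   local enums (the flag_finite_math_only restriction is left out).
   Signed orderings must not use CCUNSmode, unsigned orderings must,
   the IEEE unordered-aware codes need CCFPmode, and CCEQmode only
   answers EQ/NE.  */
#include <stdio.h>
#include <stdbool.h>

enum cmp { EQ, NE, GT, LT, GE, LE, GTU, LTU, GEU, LEU,
           ORDERED, UNORDERED, UNEQ, LTGT, UNGT, UNLT, UNGE, UNLE };
enum ccmode { CCmode, CCUNSmode, CCFPmode, CCEQmode };

static bool
condition_mode_ok (enum cmp code, enum ccmode mode)
{
  bool is_signed = code == GT || code == LT || code == GE || code == LE;
  bool is_unsigned = code == GTU || code == LTU || code == GEU || code == LEU;
  bool is_fp_only = code >= ORDERED;   /* unordered-aware codes */

  if (is_signed && mode == CCUNSmode)
    return false;
  if (is_unsigned && mode != CCUNSmode)
    return false;
  if (is_fp_only && mode != CCFPmode)
    return false;
  if (mode == CCEQmode && code != EQ && code != NE)
    return false;
  return true;
}

int
main (void)
{
  printf ("%d %d\n", condition_mode_ok (GEU, CCUNSmode),   /* 1 */
          condition_mode_ok (GT, CCUNSmode));              /* 0 */
  return 0;
}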
10622
9878760c
RK
10623\f
 10624/* Return 1 if ANDOP is a mask that has no bits set that are not in the
10625 mask required to convert the result of a rotate insn into a shift
b1765bde 10626 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10627
10628int
a2369ed3 10629includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10630{
e2c953b6
DE
10631 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10632
10633 shift_mask <<= INTVAL (shiftop);
9878760c 10634
b1765bde 10635 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10636}
10637
10638/* Similar, but for right shift. */
10639
10640int
a2369ed3 10641includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10642{
a7653a2c 10643 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10644
10645 shift_mask >>= INTVAL (shiftop);
10646
b1765bde 10647 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10648}
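/* Illustrative sketch, not part of rs6000.c: the test used by
   includes_lshift_p, restated for plain 32-bit values.  A rotate-and-mask
   can stand in for a shift left by N only if the mask keeps no bits in
   the low N positions that the real shift would have cleared.  */
#include <stdio.h>
#include <stdint.h>

static int
mask_ok_for_lshift (int shift, uint32_t mask)
{
  uint32_t shift_mask = 0xffffffffu << shift;   /* bits a real shift keeps */
  return (mask & ~shift_mask) == 0;
}

int
main (void)
{
  printf ("%d\n", mask_ok_for_lshift (8, 0xffffff00u));  /* 1: ok */
  printf ("%d\n", mask_ok_for_lshift (8, 0xfffffff0u));  /* 0: low bits set */
  return 0;
}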
10649
c5059423
AM
10650/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10651 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10652 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10653
10654int
a2369ed3 10655includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10656{
c5059423
AM
10657 if (GET_CODE (andop) == CONST_INT)
10658 {
02071907 10659 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10660
c5059423 10661 c = INTVAL (andop);
02071907 10662 if (c == 0 || c == ~0)
c5059423 10663 return 0;
e2c953b6 10664
02071907 10665 shift_mask = ~0;
c5059423
AM
10666 shift_mask <<= INTVAL (shiftop);
10667
b6d08ca1 10668 /* Find the least significant one bit. */
c5059423
AM
10669 lsb = c & -c;
10670
10671 /* It must coincide with the LSB of the shift mask. */
10672 if (-lsb != shift_mask)
10673 return 0;
e2c953b6 10674
c5059423
AM
10675 /* Invert to look for the next transition (if any). */
10676 c = ~c;
10677
10678 /* Remove the low group of ones (originally low group of zeros). */
10679 c &= -lsb;
10680
10681 /* Again find the lsb, and check we have all 1's above. */
10682 lsb = c & -c;
10683 return c == -lsb;
10684 }
10685 else if (GET_CODE (andop) == CONST_DOUBLE
10686 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10687 {
02071907
AM
10688 HOST_WIDE_INT low, high, lsb;
10689 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10690
10691 low = CONST_DOUBLE_LOW (andop);
10692 if (HOST_BITS_PER_WIDE_INT < 64)
10693 high = CONST_DOUBLE_HIGH (andop);
10694
10695 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10696 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10697 return 0;
10698
10699 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10700 {
02071907 10701 shift_mask_high = ~0;
c5059423
AM
10702 if (INTVAL (shiftop) > 32)
10703 shift_mask_high <<= INTVAL (shiftop) - 32;
10704
10705 lsb = high & -high;
10706
10707 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10708 return 0;
10709
10710 high = ~high;
10711 high &= -lsb;
10712
10713 lsb = high & -high;
10714 return high == -lsb;
10715 }
10716
02071907 10717 shift_mask_low = ~0;
c5059423
AM
10718 shift_mask_low <<= INTVAL (shiftop);
10719
10720 lsb = low & -low;
10721
10722 if (-lsb != shift_mask_low)
10723 return 0;
10724
10725 if (HOST_BITS_PER_WIDE_INT < 64)
10726 high = ~high;
10727 low = ~low;
10728 low &= -lsb;
10729
10730 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10731 {
10732 lsb = high & -high;
10733 return high == -lsb;
10734 }
10735
10736 lsb = low & -low;
10737 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10738 }
10739 else
10740 return 0;
10741}
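/* Illustrative sketch, not part of rs6000.c: the shape test from
   includes_rldic_lshift_p for the 64-bit CONST_INT case, written with
   uint64_t.  The mask must be exactly `shift` low 0s, then one block of
   1s, then 0s up to the top bit; the code finds the least significant 1,
   requires it to sit at bit `shift`, and then requires everything above
   the next transition to be a single run of 1s.  */
#include <stdio.h>
#include <stdint.h>

static int
rldic_lshift_mask_p (int shift, uint64_t c)
{
  uint64_t lsb, shift_mask;

  if (c == 0 || c == ~(uint64_t) 0)
    return 0;

  shift_mask = ~(uint64_t) 0 << shift;
  lsb = c & -c;                        /* least significant 1 bit */
  if (lsb != (shift_mask & -shift_mask))
    return 0;                          /* lowest 1 must be at bit `shift` */

  c = ~c;                              /* look for the next transition */
  c &= -lsb;                           /* drop the low run of original 1s */
  lsb = c & -c;
  return c + lsb == 0;                 /* all 1s above the transition */
}

int
main (void)
{
  printf ("%d\n", rldic_lshift_mask_p (8, 0x0000ffffffffff00ull));  /* 1 */
  printf ("%d\n", rldic_lshift_mask_p (8, 0x0000ffff0000ff00ull));  /* 0 */
  return 0;
}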
e2c953b6 10742
c5059423
AM
10743/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10744 to perform a left shift. It must have SHIFTOP or more least
c1207243 10745 significant 0's, with the remainder of the word 1's. */
e2c953b6 10746
c5059423 10747int
a2369ed3 10748includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10749{
e2c953b6 10750 if (GET_CODE (andop) == CONST_INT)
c5059423 10751 {
02071907 10752 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10753
02071907 10754 shift_mask = ~0;
c5059423
AM
10755 shift_mask <<= INTVAL (shiftop);
10756 c = INTVAL (andop);
10757
c1207243 10758 /* Find the least significant one bit. */
c5059423
AM
10759 lsb = c & -c;
10760
10761 /* It must be covered by the shift mask.
a4f6c312 10762 This test also rejects c == 0. */
c5059423
AM
10763 if ((lsb & shift_mask) == 0)
10764 return 0;
10765
10766 /* Check we have all 1's above the transition, and reject all 1's. */
10767 return c == -lsb && lsb != 1;
10768 }
10769 else if (GET_CODE (andop) == CONST_DOUBLE
10770 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10771 {
02071907 10772 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10773
10774 low = CONST_DOUBLE_LOW (andop);
10775
10776 if (HOST_BITS_PER_WIDE_INT < 64)
10777 {
02071907 10778 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10779
10780 high = CONST_DOUBLE_HIGH (andop);
10781
10782 if (low == 0)
10783 {
02071907 10784 shift_mask_high = ~0;
c5059423
AM
10785 if (INTVAL (shiftop) > 32)
10786 shift_mask_high <<= INTVAL (shiftop) - 32;
10787
10788 lsb = high & -high;
10789
10790 if ((lsb & shift_mask_high) == 0)
10791 return 0;
10792
10793 return high == -lsb;
10794 }
10795 if (high != ~0)
10796 return 0;
10797 }
10798
02071907 10799 shift_mask_low = ~0;
c5059423
AM
10800 shift_mask_low <<= INTVAL (shiftop);
10801
10802 lsb = low & -low;
10803
10804 if ((lsb & shift_mask_low) == 0)
10805 return 0;
10806
10807 return low == -lsb && lsb != 1;
10808 }
e2c953b6 10809 else
c5059423 10810 return 0;
9878760c 10811}
35068b43 10812
11ac38b2
DE
 10813/* Return 1 if the operands will generate valid arguments to an rlwimi
 10814instruction for an insert with right shift in 64-bit mode. The mask may
 10815not start on the first bit or stop on the last bit because the wrap-around
 10816effects of the instruction do not correspond to the semantics of the RTL insn. */
10817
10818int
10819insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10820{
429ec7dc
DE
10821 if (INTVAL (startop) > 32
10822 && INTVAL (startop) < 64
10823 && INTVAL (sizeop) > 1
10824 && INTVAL (sizeop) + INTVAL (startop) < 64
10825 && INTVAL (shiftop) > 0
10826 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10827 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10828 return 1;
10829
10830 return 0;
10831}
10832
35068b43 10833/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 10834 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10835
10836int
a2369ed3 10837registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10838{
10839 /* We might have been passed a SUBREG. */
f676971a 10840 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10841 return 0;
f676971a 10842
90f81f99
AP
10843 /* We might have been passed non floating point registers. */
10844 if (!FP_REGNO_P (REGNO (reg1))
10845 || !FP_REGNO_P (REGNO (reg2)))
10846 return 0;
35068b43
RK
10847
10848 return (REGNO (reg1) == REGNO (reg2) - 1);
10849}
10850
a4f6c312
SS
10851/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10852 addr1 and addr2 must be in consecutive memory locations
10853 (addr2 == addr1 + 8). */
35068b43
RK
10854
10855int
90f81f99 10856mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10857{
90f81f99 10858 rtx addr1, addr2;
bb8df8a6
EC
10859 unsigned int reg1, reg2;
10860 int offset1, offset2;
35068b43 10861
90f81f99
AP
10862 /* The mems cannot be volatile. */
10863 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10864 return 0;
f676971a 10865
90f81f99
AP
10866 addr1 = XEXP (mem1, 0);
10867 addr2 = XEXP (mem2, 0);
10868
35068b43
RK
10869 /* Extract an offset (if used) from the first addr. */
10870 if (GET_CODE (addr1) == PLUS)
10871 {
10872 /* If not a REG, return zero. */
10873 if (GET_CODE (XEXP (addr1, 0)) != REG)
10874 return 0;
10875 else
10876 {
c4ad648e 10877 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10878 /* The offset must be constant! */
10879 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10880 return 0;
10881 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10882 }
10883 }
10884 else if (GET_CODE (addr1) != REG)
10885 return 0;
10886 else
10887 {
10888 reg1 = REGNO (addr1);
10889 /* This was a simple (mem (reg)) expression. Offset is 0. */
10890 offset1 = 0;
10891 }
10892
bb8df8a6
EC
10893 /* And now for the second addr. */
10894 if (GET_CODE (addr2) == PLUS)
10895 {
10896 /* If not a REG, return zero. */
10897 if (GET_CODE (XEXP (addr2, 0)) != REG)
10898 return 0;
10899 else
10900 {
10901 reg2 = REGNO (XEXP (addr2, 0));
10902 /* The offset must be constant. */
10903 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
10904 return 0;
10905 offset2 = INTVAL (XEXP (addr2, 1));
10906 }
10907 }
10908 else if (GET_CODE (addr2) != REG)
35068b43 10909 return 0;
bb8df8a6
EC
10910 else
10911 {
10912 reg2 = REGNO (addr2);
10913 /* This was a simple (mem (reg)) expression. Offset is 0. */
10914 offset2 = 0;
10915 }
35068b43 10916
bb8df8a6
EC
10917 /* Both of these must have the same base register. */
10918 if (reg1 != reg2)
35068b43
RK
10919 return 0;
10920
10921 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 10922 if (offset2 != offset1 + 8)
35068b43
RK
10923 return 0;
10924
10925 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
10926 instructions. */
10927 return 1;
10928}
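/* Illustrative sketch, not part of rs6000.c: the address-side condition
   checked by mems_ok_for_quad_peep, restated for an already-decomposed
   address (base register number plus constant offset).  Two stacked
   8-byte FP accesses can be fused into lfq/stfq only when they share the
   base register and the second offset is exactly the first plus 8.  */
#include <stdio.h>

struct addr { int base_reg; long offset; };

static int
quad_peep_addrs_ok (struct addr a1, struct addr a2)
{
  return a1.base_reg == a2.base_reg && a2.offset == a1.offset + 8;
}

int
main (void)
{
  struct addr lo = { 1, 16 }, hi = { 1, 24 }, bad = { 1, 28 };
  printf ("%d %d\n", quad_peep_addrs_ok (lo, hi),    /* 1 */
          quad_peep_addrs_ok (lo, bad));             /* 0 */
  return 0;
}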
9878760c
RK
10929\f
10930/* Return the register class of a scratch register needed to copy IN into
10931 or out of a register in CLASS in MODE. If it can be done directly,
10932 NO_REGS is returned. */
10933
10934enum reg_class
3c4774e0
R
10935rs6000_secondary_reload_class (enum reg_class class,
10936 enum machine_mode mode ATTRIBUTE_UNUSED,
10937 rtx in)
9878760c 10938{
5accd822 10939 int regno;
9878760c 10940
ab82a49f
AP
10941 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10942#if TARGET_MACHO
c4ad648e 10943 && MACHOPIC_INDIRECT
ab82a49f 10944#endif
c4ad648e 10945 ))
46fad5b7
DJ
10946 {
10947 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
10948 other than BASE_REGS for TARGET_ELF. So indicate that a
10949 register from BASE_REGS is needed as an intermediate
10950 register.
f676971a 10951
46fad5b7
DJ
10952 On Darwin, pic addresses require a load from memory, which
10953 needs a base register. */
10954 if (class != BASE_REGS
c4ad648e
AM
10955 && (GET_CODE (in) == SYMBOL_REF
10956 || GET_CODE (in) == HIGH
10957 || GET_CODE (in) == LABEL_REF
10958 || GET_CODE (in) == CONST))
10959 return BASE_REGS;
46fad5b7 10960 }
e7b7998a 10961
5accd822
DE
10962 if (GET_CODE (in) == REG)
10963 {
10964 regno = REGNO (in);
10965 if (regno >= FIRST_PSEUDO_REGISTER)
10966 {
10967 regno = true_regnum (in);
10968 if (regno >= FIRST_PSEUDO_REGISTER)
10969 regno = -1;
10970 }
10971 }
10972 else if (GET_CODE (in) == SUBREG)
10973 {
10974 regno = true_regnum (in);
10975 if (regno >= FIRST_PSEUDO_REGISTER)
10976 regno = -1;
10977 }
10978 else
10979 regno = -1;
10980
9878760c
RK
10981 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10982 into anything. */
10983 if (class == GENERAL_REGS || class == BASE_REGS
10984 || (regno >= 0 && INT_REGNO_P (regno)))
10985 return NO_REGS;
10986
10987 /* Constants, memory, and FP registers can go into FP registers. */
10988 if ((regno == -1 || FP_REGNO_P (regno))
10989 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10990 return NO_REGS;
10991
0ac081f6
AH
10992 /* Memory, and AltiVec registers can go into AltiVec registers. */
10993 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10994 && class == ALTIVEC_REGS)
10995 return NO_REGS;
10996
9878760c
RK
10997 /* We can copy among the CR registers. */
10998 if ((class == CR_REGS || class == CR0_REGS)
10999 && regno >= 0 && CR_REGNO_P (regno))
11000 return NO_REGS;
11001
11002 /* Otherwise, we need GENERAL_REGS. */
11003 return GENERAL_REGS;
11004}
11005\f
11006/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11007 know this is a valid comparison.
9878760c
RK
11008
11009 SCC_P is 1 if this is for an scc. That means that %D will have been
11010 used instead of %C, so the bits will be in different places.
11011
b4ac57ab 11012 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11013
11014int
a2369ed3 11015ccr_bit (rtx op, int scc_p)
9878760c
RK
11016{
11017 enum rtx_code code = GET_CODE (op);
11018 enum machine_mode cc_mode;
11019 int cc_regnum;
11020 int base_bit;
9ebbca7d 11021 rtx reg;
9878760c 11022
ec8e098d 11023 if (!COMPARISON_P (op))
9878760c
RK
11024 return -1;
11025
9ebbca7d
GK
11026 reg = XEXP (op, 0);
11027
37409796 11028 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11029
11030 cc_mode = GET_MODE (reg);
11031 cc_regnum = REGNO (reg);
11032 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11033
39a10a29 11034 validate_condition_mode (code, cc_mode);
c5defebb 11035
b7053a3f
GK
11036 /* When generating a sCOND operation, only positive conditions are
11037 allowed. */
37409796
NS
11038 gcc_assert (!scc_p
11039 || code == EQ || code == GT || code == LT || code == UNORDERED
11040 || code == GTU || code == LTU);
f676971a 11041
9878760c
RK
11042 switch (code)
11043 {
11044 case NE:
11045 return scc_p ? base_bit + 3 : base_bit + 2;
11046 case EQ:
11047 return base_bit + 2;
1c882ea4 11048 case GT: case GTU: case UNLE:
9878760c 11049 return base_bit + 1;
1c882ea4 11050 case LT: case LTU: case UNGE:
9878760c 11051 return base_bit;
1c882ea4
GK
11052 case ORDERED: case UNORDERED:
11053 return base_bit + 3;
9878760c
RK
11054
11055 case GE: case GEU:
39a10a29 11056 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11057 unordered position. So test that bit. For integer, this is ! LT
11058 unless this is an scc insn. */
39a10a29 11059 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11060
11061 case LE: case LEU:
39a10a29 11062 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11063
9878760c 11064 default:
37409796 11065 gcc_unreachable ();
9878760c
RK
11066 }
11067}
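/* Illustrative sketch, not part of rs6000.c: the CR bit layout that
   ccr_bit relies on.  Each 4-bit condition-register field n starts at
   bit 4*n, with LT/GT/EQ/SO in that order, so e.g. the EQ bit of CR6 is
   4*6 + 2 = 26.  cr_field_bit is a hypothetical helper name.  */
#include <stdio.h>

enum cr_flag { CR_LT = 0, CR_GT = 1, CR_EQ = 2, CR_SO = 3 };

static int
cr_field_bit (int field, enum cr_flag flag)
{
  return 4 * field + (int) flag;
}

int
main (void)
{
  printf ("CR0 LT bit = %d\n", cr_field_bit (0, CR_LT));   /* 0 */
  printf ("CR6 EQ bit = %d\n", cr_field_bit (6, CR_EQ));   /* 26 */
  printf ("CR7 SO bit = %d\n", cr_field_bit (7, CR_SO));   /* 31 */
  return 0;
}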
1ff7789b 11068\f
8d30c4ee 11069/* Return the GOT register. */
1ff7789b 11070
9390387d 11071rtx
a2369ed3 11072rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11073{
a4f6c312
SS
11074 /* The second flow pass currently (June 1999) can't update
11075 regs_ever_live without disturbing other parts of the compiler, so
11076 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11077 if (!can_create_pseudo_p ()
11078 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11079 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11080
8d30c4ee 11081 current_function_uses_pic_offset_table = 1;
3cb999d8 11082
1ff7789b
MM
11083 return pic_offset_table_rtx;
11084}
a7df97e6 11085\f
e2500fed
GK
11086/* Function to init struct machine_function.
11087 This will be called, via a pointer variable,
11088 from push_function_context. */
a7df97e6 11089
e2500fed 11090static struct machine_function *
863d938c 11091rs6000_init_machine_status (void)
a7df97e6 11092{
e2500fed 11093 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 11094}
9878760c 11095\f
0ba1b2ff
AM
11096/* These macros test for integers and extract the low-order bits. */
11097#define INT_P(X) \
11098((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11099 && GET_MODE (X) == VOIDmode)
11100
11101#define INT_LOWPART(X) \
11102 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11103
11104int
a2369ed3 11105extract_MB (rtx op)
0ba1b2ff
AM
11106{
11107 int i;
11108 unsigned long val = INT_LOWPART (op);
11109
11110 /* If the high bit is zero, the value is the first 1 bit we find
11111 from the left. */
11112 if ((val & 0x80000000) == 0)
11113 {
37409796 11114 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11115
11116 i = 1;
11117 while (((val <<= 1) & 0x80000000) == 0)
11118 ++i;
11119 return i;
11120 }
11121
11122 /* If the high bit is set and the low bit is not, or the mask is all
11123 1's, the value is zero. */
11124 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11125 return 0;
11126
11127 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11128 from the right. */
11129 i = 31;
11130 while (((val >>= 1) & 1) != 0)
11131 --i;
11132
11133 return i;
11134}
11135
11136int
a2369ed3 11137extract_ME (rtx op)
0ba1b2ff
AM
11138{
11139 int i;
11140 unsigned long val = INT_LOWPART (op);
11141
11142 /* If the low bit is zero, the value is the first 1 bit we find from
11143 the right. */
11144 if ((val & 1) == 0)
11145 {
37409796 11146 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11147
11148 i = 30;
11149 while (((val >>= 1) & 1) == 0)
11150 --i;
11151
11152 return i;
11153 }
11154
11155 /* If the low bit is set and the high bit is not, or the mask is all
11156 1's, the value is 31. */
11157 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11158 return 31;
11159
11160 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11161 from the left. */
11162 i = 0;
11163 while (((val <<= 1) & 0x80000000) != 0)
11164 ++i;
11165
11166 return i;
11167}
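/* Illustrative sketch, not part of rs6000.c: standalone versions of the
   MB/ME scans above, operating on a plain 32-bit rlwinm mask.  In IBM
   bit numbering (bit 0 is the most significant), mb() returns the start
   and me() the end of the run of 1s, handling wrap-around masks by
   scanning from the other end exactly as extract_MB/extract_ME do.  The
   mask is assumed to be a valid, nonzero rlwinm mask.  */
#include <stdio.h>
#include <stdint.h>

static int
mb (uint32_t val)
{
  int i;
  if ((val & 0x80000000u) == 0)        /* first 1 from the left */
    {
      i = 1;
      while (((val <<= 1) & 0x80000000u) == 0)
        ++i;
      return i;
    }
  if ((val & 1) == 0 || val == 0xffffffffu)
    return 0;
  i = 31;                              /* wrap-around: first 0 from the right */
  while (((val >>= 1) & 1) != 0)
    --i;
  return i;
}

static int
me (uint32_t val)
{
  int i;
  if ((val & 1) == 0)                  /* first 1 from the right */
    {
      i = 30;
      while (((val >>= 1) & 1) == 0)
        --i;
      return i;
    }
  if ((val & 0x80000000u) == 0 || val == 0xffffffffu)
    return 31;
  i = 0;                               /* wrap-around: first 0 from the left */
  while (((val <<= 1) & 0x80000000u) != 0)
    ++i;
  return i;
}

int
main (void)
{
  printf ("0x00ffff00: MB=%d ME=%d\n", mb (0x00ffff00u), me (0x00ffff00u));  /* 8 23 */
  printf ("0xff0000ff: MB=%d ME=%d\n", mb (0xff0000ffu), me (0xff0000ffu));  /* 24 7 */
  return 0;
}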
11168
c4501e62
JJ
11169/* Locate some local-dynamic symbol still in use by this function
11170 so that we can print its name in some tls_ld pattern. */
11171
11172static const char *
863d938c 11173rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11174{
11175 rtx insn;
11176
11177 if (cfun->machine->some_ld_name)
11178 return cfun->machine->some_ld_name;
11179
11180 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11181 if (INSN_P (insn)
11182 && for_each_rtx (&PATTERN (insn),
11183 rs6000_get_some_local_dynamic_name_1, 0))
11184 return cfun->machine->some_ld_name;
11185
37409796 11186 gcc_unreachable ();
c4501e62
JJ
11187}
11188
11189/* Helper function for rs6000_get_some_local_dynamic_name. */
11190
11191static int
a2369ed3 11192rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11193{
11194 rtx x = *px;
11195
11196 if (GET_CODE (x) == SYMBOL_REF)
11197 {
11198 const char *str = XSTR (x, 0);
11199 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11200 {
11201 cfun->machine->some_ld_name = str;
11202 return 1;
11203 }
11204 }
11205
11206 return 0;
11207}
11208
85b776df
AM
11209/* Write out a function code label. */
11210
11211void
11212rs6000_output_function_entry (FILE *file, const char *fname)
11213{
11214 if (fname[0] != '.')
11215 {
11216 switch (DEFAULT_ABI)
11217 {
11218 default:
37409796 11219 gcc_unreachable ();
85b776df
AM
11220
11221 case ABI_AIX:
11222 if (DOT_SYMBOLS)
11223 putc ('.', file);
11224 else
11225 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11226 break;
11227
11228 case ABI_V4:
11229 case ABI_DARWIN:
11230 break;
11231 }
11232 }
11233 if (TARGET_AIX)
11234 RS6000_OUTPUT_BASENAME (file, fname);
11235 else
11236 assemble_name (file, fname);
11237}
11238
9878760c
RK
11239/* Print an operand. Recognize special options, documented below. */
11240
38c1f2d7 11241#if TARGET_ELF
d9407988 11242#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11243#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11244#else
11245#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11246#define SMALL_DATA_REG 0
ba5e43aa
MM
11247#endif
11248
9878760c 11249void
a2369ed3 11250print_operand (FILE *file, rtx x, int code)
9878760c
RK
11251{
11252 int i;
a260abc9 11253 HOST_WIDE_INT val;
0ba1b2ff 11254 unsigned HOST_WIDE_INT uval;
9878760c
RK
11255
11256 switch (code)
11257 {
a8b3aeda 11258 case '.':
a85d226b
RK
11259 /* Write out an instruction after the call which may be replaced
11260 with glue code by the loader. This depends on the AIX version. */
11261 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11262 return;
11263
81eace42
GK
11264 /* %a is output_address. */
11265
9854d9ed
RK
11266 case 'A':
11267 /* If X is a constant integer whose low-order 5 bits are zero,
11268 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11269 in the AIX assembler where "sri" with a zero shift count
20e26713 11270 writes a trash instruction. */
9854d9ed 11271 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11272 putc ('l', file);
9854d9ed 11273 else
76229ac8 11274 putc ('r', file);
9854d9ed
RK
11275 return;
11276
11277 case 'b':
e2c953b6
DE
11278 /* If constant, low-order 16 bits of constant, unsigned.
11279 Otherwise, write normally. */
11280 if (INT_P (x))
11281 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11282 else
11283 print_operand (file, x, 0);
cad12a8d
RK
11284 return;
11285
a260abc9
DE
11286 case 'B':
11287 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11288 for 64-bit mask direction. */
9390387d 11289 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11290 return;
a260abc9 11291
81eace42
GK
11292 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11293 output_operand. */
11294
423c1189
AH
11295 case 'c':
11296 /* X is a CR register. Print the number of the GT bit of the CR. */
11297 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11298 output_operand_lossage ("invalid %%c value");
11299 else
11300 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11301 return;
11302
11303 case 'D':
cef6b86c 11304 /* Like 'J' but get to the GT bit only. */
37409796 11305 gcc_assert (GET_CODE (x) == REG);
423c1189 11306
cef6b86c
EB
11307 /* Bit 1 is GT bit. */
11308 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11309
cef6b86c
EB
11310 /* Add one for shift count in rlinm for scc. */
11311 fprintf (file, "%d", i + 1);
423c1189
AH
11312 return;
11313
9854d9ed 11314 case 'E':
39a10a29 11315 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11316 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11317 output_operand_lossage ("invalid %%E value");
78fbdbf7 11318 else
39a10a29 11319 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11320 return;
9854d9ed
RK
11321
11322 case 'f':
11323 /* X is a CR register. Print the shift count needed to move it
11324 to the high-order four bits. */
11325 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11326 output_operand_lossage ("invalid %%f value");
11327 else
9ebbca7d 11328 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11329 return;
11330
11331 case 'F':
11332 /* Similar, but print the count for the rotate in the opposite
11333 direction. */
11334 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11335 output_operand_lossage ("invalid %%F value");
11336 else
9ebbca7d 11337 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11338 return;
11339
11340 case 'G':
11341 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11342 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11343 if (GET_CODE (x) != CONST_INT)
11344 output_operand_lossage ("invalid %%G value");
11345 else if (INTVAL (x) >= 0)
76229ac8 11346 putc ('z', file);
9854d9ed 11347 else
76229ac8 11348 putc ('m', file);
9854d9ed 11349 return;
e2c953b6 11350
9878760c 11351 case 'h':
a4f6c312
SS
11352 /* If constant, output low-order five bits. Otherwise, write
11353 normally. */
9878760c 11354 if (INT_P (x))
5f59ecb7 11355 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11356 else
11357 print_operand (file, x, 0);
11358 return;
11359
64305719 11360 case 'H':
a4f6c312
SS
11361 /* If constant, output low-order six bits. Otherwise, write
11362 normally. */
64305719 11363 if (INT_P (x))
5f59ecb7 11364 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11365 else
11366 print_operand (file, x, 0);
11367 return;
11368
9854d9ed
RK
11369 case 'I':
11370 /* Print `i' if this is a constant, else nothing. */
9878760c 11371 if (INT_P (x))
76229ac8 11372 putc ('i', file);
9878760c
RK
11373 return;
11374
9854d9ed
RK
11375 case 'j':
11376 /* Write the bit number in CCR for jump. */
11377 i = ccr_bit (x, 0);
11378 if (i == -1)
11379 output_operand_lossage ("invalid %%j code");
9878760c 11380 else
9854d9ed 11381 fprintf (file, "%d", i);
9878760c
RK
11382 return;
11383
9854d9ed
RK
11384 case 'J':
11385 /* Similar, but add one for shift count in rlinm for scc and pass
11386 scc flag to `ccr_bit'. */
11387 i = ccr_bit (x, 1);
11388 if (i == -1)
11389 output_operand_lossage ("invalid %%J code");
11390 else
a0466a68
RK
11391 /* If we want bit 31, write a shift count of zero, not 32. */
11392 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11393 return;
11394
9854d9ed
RK
11395 case 'k':
11396 /* X must be a constant. Write the 1's complement of the
11397 constant. */
9878760c 11398 if (! INT_P (x))
9854d9ed 11399 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11400 else
11401 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11402 return;
11403
81eace42 11404 case 'K':
9ebbca7d
GK
11405 /* X must be a symbolic constant on ELF. Write an
11406 expression suitable for an 'addi' that adds in the low 16
11407 bits of the MEM. */
11408 if (GET_CODE (x) != CONST)
11409 {
11410 print_operand_address (file, x);
11411 fputs ("@l", file);
11412 }
11413 else
11414 {
11415 if (GET_CODE (XEXP (x, 0)) != PLUS
11416 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11417 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11418 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11419 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11420 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11421 fputs ("@l", file);
ed8d2920
MM
11422 /* For GNU as, there must be a non-alphanumeric character
11423 between 'l' and the number. The '-' is added by
11424 print_operand() already. */
11425 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11426 fputs ("+", file);
9ebbca7d
GK
11427 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11428 }
81eace42
GK
11429 return;
11430
11431 /* %l is output_asm_label. */
9ebbca7d 11432
9854d9ed
RK
11433 case 'L':
11434 /* Write second word of DImode or DFmode reference. Works on register
11435 or non-indexed memory only. */
11436 if (GET_CODE (x) == REG)
fb5c67a7 11437 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11438 else if (GET_CODE (x) == MEM)
11439 {
11440 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11441 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11442 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11443 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11444 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11445 UNITS_PER_WORD));
6fb5fa3c
DB
11446 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11447 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11448 UNITS_PER_WORD));
9854d9ed 11449 else
d7624dc0
RK
11450 output_address (XEXP (adjust_address_nv (x, SImode,
11451 UNITS_PER_WORD),
11452 0));
ed8908e7 11453
ba5e43aa 11454 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11455 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11456 reg_names[SMALL_DATA_REG]);
9854d9ed 11457 }
9878760c 11458 return;
f676971a 11459
9878760c
RK
11460 case 'm':
11461 /* MB value for a mask operand. */
b1765bde 11462 if (! mask_operand (x, SImode))
9878760c
RK
11463 output_operand_lossage ("invalid %%m value");
11464
0ba1b2ff 11465 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11466 return;
11467
11468 case 'M':
11469 /* ME value for a mask operand. */
b1765bde 11470 if (! mask_operand (x, SImode))
a260abc9 11471 output_operand_lossage ("invalid %%M value");
9878760c 11472
0ba1b2ff 11473 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11474 return;
11475
81eace42
GK
11476 /* %n outputs the negative of its operand. */
11477
9878760c
RK
11478 case 'N':
11479 /* Write the number of elements in the vector times 4. */
11480 if (GET_CODE (x) != PARALLEL)
11481 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11482 else
11483 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11484 return;
11485
11486 case 'O':
11487 /* Similar, but subtract 1 first. */
11488 if (GET_CODE (x) != PARALLEL)
1427100a 11489 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11490 else
11491 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11492 return;
11493
9854d9ed
RK
11494 case 'p':
11495 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11496 if (! INT_P (x)
2bfcf297 11497 || INT_LOWPART (x) < 0
9854d9ed
RK
11498 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11499 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11500 else
11501 fprintf (file, "%d", i);
9854d9ed
RK
11502 return;
11503
9878760c
RK
11504 case 'P':
11505 /* The operand must be an indirect memory reference. The result
8bb418a3 11506 is the register name. */
9878760c
RK
11507 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11508 || REGNO (XEXP (x, 0)) >= 32)
11509 output_operand_lossage ("invalid %%P value");
e2c953b6 11510 else
fb5c67a7 11511 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11512 return;
11513
dfbdccdb
GK
11514 case 'q':
11515 /* This outputs the logical code corresponding to a boolean
11516 expression. The expression may have one or both operands
39a10a29 11517 negated (if one, only the first one). For condition register
c4ad648e
AM
11518 logical operations, it will also treat the negated
11519 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11520 {
63bc1d05 11521 const char *const *t = 0;
dfbdccdb
GK
11522 const char *s;
11523 enum rtx_code code = GET_CODE (x);
11524 static const char * const tbl[3][3] = {
11525 { "and", "andc", "nor" },
11526 { "or", "orc", "nand" },
11527 { "xor", "eqv", "xor" } };
11528
11529 if (code == AND)
11530 t = tbl[0];
11531 else if (code == IOR)
11532 t = tbl[1];
11533 else if (code == XOR)
11534 t = tbl[2];
11535 else
11536 output_operand_lossage ("invalid %%q value");
11537
11538 if (GET_CODE (XEXP (x, 0)) != NOT)
11539 s = t[0];
11540 else
11541 {
11542 if (GET_CODE (XEXP (x, 1)) == NOT)
11543 s = t[2];
11544 else
11545 s = t[1];
11546 }
f676971a 11547
dfbdccdb
GK
11548 fputs (s, file);
11549 }
11550 return;
11551
2c4a9cff
DE
11552 case 'Q':
11553 if (TARGET_MFCRF)
3b6ce0af 11554 fputc (',', file);
5efb1046 11555 /* FALLTHRU */
2c4a9cff
DE
11556 else
11557 return;
11558
9854d9ed
RK
11559 case 'R':
11560 /* X is a CR register. Print the mask for `mtcrf'. */
11561 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11562 output_operand_lossage ("invalid %%R value");
11563 else
9ebbca7d 11564 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11565 return;
9854d9ed
RK
11566
11567 case 's':
11568 /* Low 5 bits of 32 - value */
11569 if (! INT_P (x))
11570 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11571 else
11572 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11573 return;
9854d9ed 11574
a260abc9 11575 case 'S':
0ba1b2ff 11576 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11577 CONST_INT 32-bit mask is considered sign-extended so any
11578 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11579 if (! mask64_operand (x, DImode))
a260abc9
DE
11580 output_operand_lossage ("invalid %%S value");
11581
0ba1b2ff 11582 uval = INT_LOWPART (x);
a260abc9 11583
0ba1b2ff 11584 if (uval & 1) /* Clear Left */
a260abc9 11585 {
f099d360
GK
11586#if HOST_BITS_PER_WIDE_INT > 64
11587 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11588#endif
0ba1b2ff 11589 i = 64;
a260abc9 11590 }
0ba1b2ff 11591 else /* Clear Right */
a260abc9 11592 {
0ba1b2ff 11593 uval = ~uval;
f099d360
GK
11594#if HOST_BITS_PER_WIDE_INT > 64
11595 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11596#endif
0ba1b2ff 11597 i = 63;
a260abc9 11598 }
0ba1b2ff
AM
11599 while (uval != 0)
11600 --i, uval >>= 1;
37409796 11601 gcc_assert (i >= 0);
0ba1b2ff
AM
11602 fprintf (file, "%d", i);
11603 return;
a260abc9 11604
a3170dc6
AH
11605 case 't':
11606 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11607 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11608
11609 /* Bit 3 is OV bit. */
11610 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11611
11612 /* If we want bit 31, write a shift count of zero, not 32. */
11613 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11614 return;
11615
cccf3bdc
DE
11616 case 'T':
11617 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11618 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11619 && REGNO (x) != CTR_REGNO))
cccf3bdc 11620 output_operand_lossage ("invalid %%T value");
1de43f85 11621 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11622 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11623 else
11624 fputs ("ctr", file);
11625 return;
11626
9854d9ed 11627 case 'u':
802a0058 11628 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11629 if (! INT_P (x))
11630 output_operand_lossage ("invalid %%u value");
e2c953b6 11631 else
f676971a 11632 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11633 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11634 return;
11635
802a0058
MM
11636 case 'v':
11637 /* High-order 16 bits of constant for use in signed operand. */
11638 if (! INT_P (x))
11639 output_operand_lossage ("invalid %%v value");
e2c953b6 11640 else
134c32f6
DE
11641 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11642 (INT_LOWPART (x) >> 16) & 0xffff);
11643 return;
802a0058 11644
9854d9ed
RK
11645 case 'U':
11646 /* Print `u' if this has an auto-increment or auto-decrement. */
11647 if (GET_CODE (x) == MEM
11648 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11649 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11650 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11651 putc ('u', file);
9854d9ed 11652 return;
9878760c 11653
e0cd0770
JC
11654 case 'V':
11655 /* Print the trap code for this operand. */
11656 switch (GET_CODE (x))
11657 {
11658 case EQ:
11659 fputs ("eq", file); /* 4 */
11660 break;
11661 case NE:
11662 fputs ("ne", file); /* 24 */
11663 break;
11664 case LT:
11665 fputs ("lt", file); /* 16 */
11666 break;
11667 case LE:
11668 fputs ("le", file); /* 20 */
11669 break;
11670 case GT:
11671 fputs ("gt", file); /* 8 */
11672 break;
11673 case GE:
11674 fputs ("ge", file); /* 12 */
11675 break;
11676 case LTU:
11677 fputs ("llt", file); /* 2 */
11678 break;
11679 case LEU:
11680 fputs ("lle", file); /* 6 */
11681 break;
11682 case GTU:
11683 fputs ("lgt", file); /* 1 */
11684 break;
11685 case GEU:
11686 fputs ("lge", file); /* 5 */
11687 break;
11688 default:
37409796 11689 gcc_unreachable ();
e0cd0770
JC
11690 }
11691 break;
11692
9854d9ed
RK
11693 case 'w':
11694 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11695 normally. */
11696 if (INT_P (x))
f676971a 11697 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11698 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11699 else
11700 print_operand (file, x, 0);
9878760c
RK
11701 return;
11702
9854d9ed 11703 case 'W':
e2c953b6 11704 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11705 val = (GET_CODE (x) == CONST_INT
11706 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11707
11708 if (val < 0)
11709 i = -1;
9854d9ed 11710 else
e2c953b6
DE
11711 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11712 if ((val <<= 1) < 0)
11713 break;
11714
11715#if HOST_BITS_PER_WIDE_INT == 32
11716 if (GET_CODE (x) == CONST_INT && i >= 0)
11717 i += 32; /* zero-extend high-part was all 0's */
11718 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11719 {
11720 val = CONST_DOUBLE_LOW (x);
11721
37409796
NS
11722 gcc_assert (val);
11723 if (val < 0)
e2c953b6
DE
11724 --i;
11725 else
11726 for ( ; i < 64; i++)
11727 if ((val <<= 1) < 0)
11728 break;
11729 }
11730#endif
11731
11732 fprintf (file, "%d", i + 1);
9854d9ed 11733 return;
9878760c 11734
9854d9ed
RK
11735 case 'X':
11736 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11737 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11738 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11739 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11740 putc ('x', file);
9854d9ed 11741 return;
9878760c 11742
9854d9ed
RK
11743 case 'Y':
11744	 /* Like 'L', for the third word of TImode.  */
11745 if (GET_CODE (x) == REG)
fb5c67a7 11746 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11747 else if (GET_CODE (x) == MEM)
9878760c 11748 {
9854d9ed
RK
11749 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11750 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11751 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11752 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11753 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11754 else
d7624dc0 11755 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11756 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11757 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11758 reg_names[SMALL_DATA_REG]);
9878760c
RK
11759 }
11760 return;
f676971a 11761
9878760c 11762 case 'z':
b4ac57ab
RS
11763 /* X is a SYMBOL_REF. Write out the name preceded by a
11764 period and without any trailing data in brackets. Used for function
4d30c363
MM
11765 names. If we are configured for System V (or the embedded ABI) on
11766 the PowerPC, do not emit the period, since those systems do not use
11767 TOCs and the like. */
37409796 11768 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11769
c4ad648e
AM
11770 /* Mark the decl as referenced so that cgraph will output the
11771 function. */
9bf6462a 11772 if (SYMBOL_REF_DECL (x))
c4ad648e 11773 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11774
85b776df 11775 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11776 if (TARGET_MACHO)
11777 {
11778 const char *name = XSTR (x, 0);
a031e781 11779#if TARGET_MACHO
3b48085e 11780 if (MACHOPIC_INDIRECT
11abc112
MM
11781 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11782 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11783#endif
11784 assemble_name (file, name);
11785 }
85b776df 11786 else if (!DOT_SYMBOLS)
9739c90c 11787 assemble_name (file, XSTR (x, 0));
85b776df
AM
11788 else
11789 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11790 return;
11791
9854d9ed
RK
11792 case 'Z':
11793 /* Like 'L', for last word of TImode. */
11794 if (GET_CODE (x) == REG)
fb5c67a7 11795 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11796 else if (GET_CODE (x) == MEM)
11797 {
11798 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11799 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11800 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11801 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11802 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11803 else
d7624dc0 11804 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11805 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11806 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11807 reg_names[SMALL_DATA_REG]);
9854d9ed 11808 }
5c23c401 11809 return;
0ac081f6 11810
a3170dc6 11811 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11812 case 'y':
11813 {
11814 rtx tmp;
11815
37409796 11816 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11817
11818 tmp = XEXP (x, 0);
11819
90d3ff1c 11820 /* Ugly hack because %y is overloaded. */
8ef65e3d 11821 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11822 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11823 || GET_MODE (x) == TFmode
11824 || GET_MODE (x) == TImode))
a3170dc6
AH
11825 {
11826 /* Handle [reg]. */
11827 if (GET_CODE (tmp) == REG)
11828 {
11829 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11830 break;
11831 }
11832 /* Handle [reg+UIMM]. */
11833 else if (GET_CODE (tmp) == PLUS &&
11834 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11835 {
11836 int x;
11837
37409796 11838 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11839
11840 x = INTVAL (XEXP (tmp, 1));
11841 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11842 break;
11843 }
11844
11845 /* Fall through. Must be [reg+reg]. */
11846 }
850e8d3d
DN
11847 if (TARGET_ALTIVEC
11848 && GET_CODE (tmp) == AND
11849 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11850 && INTVAL (XEXP (tmp, 1)) == -16)
11851 tmp = XEXP (tmp, 0);
0ac081f6 11852 if (GET_CODE (tmp) == REG)
c62f2db5 11853 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11854 else
0ac081f6 11855 {
37409796 11856 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11857 && REG_P (XEXP (tmp, 0))
11858 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11859
0ac081f6
AH
11860 if (REGNO (XEXP (tmp, 0)) == 0)
11861 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11862 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11863 else
11864 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11865 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11866 }
0ac081f6
AH
11867 break;
11868 }
f676971a 11869
9878760c
RK
11870 case 0:
11871 if (GET_CODE (x) == REG)
11872 fprintf (file, "%s", reg_names[REGNO (x)]);
11873 else if (GET_CODE (x) == MEM)
11874 {
11875 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11876 know the width from the mode. */
11877 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11878 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11879 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11880 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11881 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11882 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
11883 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11884 output_address (XEXP (XEXP (x, 0), 1));
9878760c 11885 else
a54d04b7 11886 output_address (XEXP (x, 0));
9878760c
RK
11887 }
11888 else
a54d04b7 11889 output_addr_const (file, x);
a85d226b 11890 return;
9878760c 11891
c4501e62
JJ
11892 case '&':
11893 assemble_name (file, rs6000_get_some_local_dynamic_name ());
11894 return;
11895
9878760c
RK
11896 default:
11897 output_operand_lossage ("invalid %%xn code");
11898 }
11899}
11900\f
11901/* Print the address of an operand. */
11902
11903void
a2369ed3 11904print_operand_address (FILE *file, rtx x)
9878760c
RK
11905{
11906 if (GET_CODE (x) == REG)
4697a36c 11907 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
11908 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
11909 || GET_CODE (x) == LABEL_REF)
9878760c
RK
11910 {
11911 output_addr_const (file, x);
ba5e43aa 11912 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11913 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11914 reg_names[SMALL_DATA_REG]);
37409796
NS
11915 else
11916 gcc_assert (!TARGET_TOC);
9878760c
RK
11917 }
11918 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
11919 {
9024f4b8 11920 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 11921 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
11922 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
11923 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 11924 else
4697a36c
MM
11925 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
11926 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
11927 }
11928 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
11929 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
11930 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
11931#if TARGET_ELF
11932 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11933 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
11934 {
11935 output_addr_const (file, XEXP (x, 1));
11936 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11937 }
c859cda6
DJ
11938#endif
11939#if TARGET_MACHO
11940 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11941 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
11942 {
11943 fprintf (file, "lo16(");
11944 output_addr_const (file, XEXP (x, 1));
11945 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11946 }
3cb999d8 11947#endif
4d588c14 11948 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 11949 {
2bfcf297 11950 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 11951 {
2bfcf297
DB
11952 rtx contains_minus = XEXP (x, 1);
11953 rtx minus, symref;
11954 const char *name;
f676971a 11955
9ebbca7d 11956 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 11957 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
11958 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11959 contains_minus = XEXP (contains_minus, 0);
11960
2bfcf297
DB
11961 minus = XEXP (contains_minus, 0);
11962 symref = XEXP (minus, 0);
11963 XEXP (contains_minus, 0) = symref;
11964 if (TARGET_ELF)
11965 {
11966 char *newname;
11967
11968 name = XSTR (symref, 0);
11969 newname = alloca (strlen (name) + sizeof ("@toc"));
11970 strcpy (newname, name);
11971 strcat (newname, "@toc");
11972 XSTR (symref, 0) = newname;
11973 }
11974 output_addr_const (file, XEXP (x, 1));
11975 if (TARGET_ELF)
11976 XSTR (symref, 0) = name;
9ebbca7d
GK
11977 XEXP (contains_minus, 0) = minus;
11978 }
11979 else
11980 output_addr_const (file, XEXP (x, 1));
11981
11982 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11983 }
9878760c 11984 else
37409796 11985 gcc_unreachable ();
9878760c
RK
11986}
11987\f
88cad84b 11988/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
11989 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11990 is defined. It also needs to handle DI-mode objects on 64-bit
11991 targets. */
11992
11993static bool
a2369ed3 11994rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 11995{
f4f4921e 11996#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11997 /* Special handling for SI values. */
84dcde01 11998 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11999 {
301d03af 12000 static int recurse = 0;
f676971a 12001
301d03af
RS
12002 /* For -mrelocatable, we mark all addresses that need to be fixed up
12003 in the .fixup section. */
12004 if (TARGET_RELOCATABLE
d6b5193b
RS
12005 && in_section != toc_section
12006 && in_section != text_section
4325ca90 12007 && !unlikely_text_section_p (in_section)
301d03af
RS
12008 && !recurse
12009 && GET_CODE (x) != CONST_INT
12010 && GET_CODE (x) != CONST_DOUBLE
12011 && CONSTANT_P (x))
12012 {
12013 char buf[256];
12014
12015 recurse = 1;
12016 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12017 fixuplabelno++;
12018 ASM_OUTPUT_LABEL (asm_out_file, buf);
12019 fprintf (asm_out_file, "\t.long\t(");
12020 output_addr_const (asm_out_file, x);
12021 fprintf (asm_out_file, ")@fixup\n");
12022 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12023 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12024 fprintf (asm_out_file, "\t.long\t");
12025 assemble_name (asm_out_file, buf);
12026 fprintf (asm_out_file, "\n\t.previous\n");
12027 recurse = 0;
12028 return true;
12029 }
12030 /* Remove initial .'s to turn a -mcall-aixdesc function
12031 address into the address of the descriptor, not the function
12032 itself. */
12033 else if (GET_CODE (x) == SYMBOL_REF
12034 && XSTR (x, 0)[0] == '.'
12035 && DEFAULT_ABI == ABI_AIX)
12036 {
12037 const char *name = XSTR (x, 0);
12038 while (*name == '.')
12039 name++;
12040
12041 fprintf (asm_out_file, "\t.long\t%s\n", name);
12042 return true;
12043 }
12044 }
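	 /* Illustrative output of the fixup branch above (label and symbol
	    names are placeholders):

		.LCP0:
			.long	(sym)@fixup
			.section	".fixup","aw"
			.align	2
			.long	.LCP0
			.previous

	    so -mrelocatable startup code can find and adjust the word at
	    .LCP0 if the image is loaded at a different address.  */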
f4f4921e 12045#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12046 return default_assemble_integer (x, size, aligned_p);
12047}
93638d7a
AM
12048
12049#ifdef HAVE_GAS_HIDDEN
12050/* Emit an assembler directive to set symbol visibility for DECL to
12051 VISIBILITY_TYPE. */
12052
5add3202 12053static void
a2369ed3 12054rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12055{
93638d7a
AM
12056 /* Functions need to have their entry point symbol visibility set as
12057 well as their descriptor symbol visibility. */
85b776df
AM
12058 if (DEFAULT_ABI == ABI_AIX
12059 && DOT_SYMBOLS
12060 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12061 {
25fdb4dc 12062 static const char * const visibility_types[] = {
c4ad648e 12063 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12064 };
12065
12066 const char *name, *type;
93638d7a
AM
12067
12068 name = ((* targetm.strip_name_encoding)
12069 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12070 type = visibility_types[vis];
93638d7a 12071
25fdb4dc
RH
12072 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12073 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12074 }
25fdb4dc
RH
12075 else
12076 default_assemble_visibility (decl, vis);
93638d7a
AM
12077}
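/* For example, a hidden FUNCTION_DECL "foo" on a DOT_SYMBOLS target gets both

	.hidden	foo
	.hidden	.foo

   covering the function descriptor and its "." entry-point symbol
   (names here are illustrative).  */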
12078#endif
301d03af 12079\f
39a10a29 12080enum rtx_code
a2369ed3 12081rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12082{
12083	 /* Reversing an FP compare needs care -- an ordered compare
12084	    becomes an unordered compare and vice versa.  */
f676971a 12085 if (mode == CCFPmode
bc9ec0e0
GK
12086 && (!flag_finite_math_only
12087 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12088 || code == UNEQ || code == LTGT))
bab6226b 12089 return reverse_condition_maybe_unordered (code);
39a10a29 12090 else
bab6226b 12091 return reverse_condition (code);
39a10a29
GK
12092}
12093
39a10a29
GK
12094/* Generate a compare for CODE. Return a brand-new rtx that
12095 represents the result of the compare. */
a4f6c312 12096
39a10a29 12097static rtx
a2369ed3 12098rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12099{
12100 enum machine_mode comp_mode;
12101 rtx compare_result;
12102
12103 if (rs6000_compare_fp_p)
12104 comp_mode = CCFPmode;
12105 else if (code == GTU || code == LTU
c4ad648e 12106 || code == GEU || code == LEU)
39a10a29 12107 comp_mode = CCUNSmode;
60934f9c
NS
12108 else if ((code == EQ || code == NE)
12109 && GET_CODE (rs6000_compare_op0) == SUBREG
12110 && GET_CODE (rs6000_compare_op1) == SUBREG
12111 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12112 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12113	 /* These are unsigned values; perhaps there will be a later
12114	    ordering compare that can be shared with this one.
12115	    Unfortunately, we cannot detect the signedness of the operands
12116	    for non-subregs.  */
12117 comp_mode = CCUNSmode;
39a10a29
GK
12118 else
12119 comp_mode = CCmode;
12120
12121 /* First, the compare. */
12122 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12123
cef6b86c 12124 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12125 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12126 && rs6000_compare_fp_p)
a3170dc6 12127 {
64022b5d 12128 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12129 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12130
12131 if (op_mode == VOIDmode)
12132 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12133
cef6b86c
EB
12134 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12135 This explains the following mess. */
423c1189 12136
a3170dc6
AH
12137 switch (code)
12138 {
423c1189 12139 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12140 switch (op_mode)
12141 {
12142 case SFmode:
12143 cmp = flag_unsafe_math_optimizations
12144 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12145 rs6000_compare_op1)
12146 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12147 rs6000_compare_op1);
12148 break;
12149
12150 case DFmode:
12151 cmp = flag_unsafe_math_optimizations
12152 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12153 rs6000_compare_op1)
12154 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12155 rs6000_compare_op1);
12156 break;
12157
17caeff2
JM
12158 case TFmode:
12159 cmp = flag_unsafe_math_optimizations
12160 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12161 rs6000_compare_op1)
12162 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12163 rs6000_compare_op1);
12164 break;
12165
37409796
NS
12166 default:
12167 gcc_unreachable ();
12168 }
a3170dc6 12169 break;
bb8df8a6 12170
423c1189 12171 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12172 switch (op_mode)
12173 {
12174 case SFmode:
12175 cmp = flag_unsafe_math_optimizations
12176 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12177 rs6000_compare_op1)
12178 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12179 rs6000_compare_op1);
12180 break;
bb8df8a6 12181
37409796
NS
12182 case DFmode:
12183 cmp = flag_unsafe_math_optimizations
12184 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12185 rs6000_compare_op1)
12186 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12187 rs6000_compare_op1);
12188 break;
12189
17caeff2
JM
12190 case TFmode:
12191 cmp = flag_unsafe_math_optimizations
12192 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12193 rs6000_compare_op1)
12194 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12195 rs6000_compare_op1);
12196 break;
12197
37409796
NS
12198 default:
12199 gcc_unreachable ();
12200 }
a3170dc6 12201 break;
bb8df8a6 12202
423c1189 12203 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12204 switch (op_mode)
12205 {
12206 case SFmode:
12207 cmp = flag_unsafe_math_optimizations
12208 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12209 rs6000_compare_op1)
12210 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12211 rs6000_compare_op1);
12212 break;
bb8df8a6 12213
37409796
NS
12214 case DFmode:
12215 cmp = flag_unsafe_math_optimizations
12216 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12217 rs6000_compare_op1)
12218 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12219 rs6000_compare_op1);
12220 break;
12221
17caeff2
JM
12222 case TFmode:
12223 cmp = flag_unsafe_math_optimizations
12224 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12225 rs6000_compare_op1)
12226 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12227 rs6000_compare_op1);
12228 break;
12229
37409796
NS
12230 default:
12231 gcc_unreachable ();
12232 }
a3170dc6 12233 break;
4d4cbc0e 12234 default:
37409796 12235 gcc_unreachable ();
a3170dc6
AH
12236 }
12237
12238 /* Synthesize LE and GE from LT/GT || EQ. */
12239 if (code == LE || code == GE || code == LEU || code == GEU)
12240 {
a3170dc6
AH
12241 emit_insn (cmp);
12242
12243 switch (code)
12244 {
12245 case LE: code = LT; break;
12246 case GE: code = GT; break;
12247 case LEU: code = LT; break;
12248 case GEU: code = GT; break;
37409796 12249 default: gcc_unreachable ();
a3170dc6
AH
12250 }
12251
a3170dc6
AH
12252 compare_result2 = gen_reg_rtx (CCFPmode);
12253
12254 /* Do the EQ. */
37409796
NS
12255 switch (op_mode)
12256 {
12257 case SFmode:
12258 cmp = flag_unsafe_math_optimizations
12259 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12260 rs6000_compare_op1)
12261 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12262 rs6000_compare_op1);
12263 break;
12264
12265 case DFmode:
12266 cmp = flag_unsafe_math_optimizations
12267 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12268 rs6000_compare_op1)
12269 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12270 rs6000_compare_op1);
12271 break;
12272
17caeff2
JM
12273 case TFmode:
12274 cmp = flag_unsafe_math_optimizations
12275 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12276 rs6000_compare_op1)
12277 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12278 rs6000_compare_op1);
12279 break;
12280
37409796
NS
12281 default:
12282 gcc_unreachable ();
12283 }
a3170dc6
AH
12284 emit_insn (cmp);
12285
a3170dc6 12286 /* OR them together. */
64022b5d
AH
12287 or_result = gen_reg_rtx (CCFPmode);
12288 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12289 compare_result2);
a3170dc6
AH
12290 compare_result = or_result;
12291 code = EQ;
12292 }
12293 else
12294 {
a3170dc6 12295 if (code == NE || code == LTGT)
a3170dc6 12296 code = NE;
423c1189
AH
12297 else
12298 code = EQ;
a3170dc6
AH
12299 }
12300
12301 emit_insn (cmp);
12302 }
12303 else
de17c25f
DE
12304 {
12305 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12306 CLOBBERs to match cmptf_internal2 pattern. */
12307 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12308 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12309 && !TARGET_IEEEQUAD
de17c25f
DE
12310 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12311 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12312 gen_rtvec (9,
12313 gen_rtx_SET (VOIDmode,
12314 compare_result,
12315 gen_rtx_COMPARE (comp_mode,
12316 rs6000_compare_op0,
12317 rs6000_compare_op1)),
12318 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12319 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12320 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12321 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12322 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12323 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12324 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12325 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12326 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12327 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12328 {
12329 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12330 comp_mode = CCEQmode;
12331 compare_result = gen_reg_rtx (CCEQmode);
12332 if (TARGET_64BIT)
12333 emit_insn (gen_stack_protect_testdi (compare_result,
12334 rs6000_compare_op0, op1));
12335 else
12336 emit_insn (gen_stack_protect_testsi (compare_result,
12337 rs6000_compare_op0, op1));
12338 }
de17c25f
DE
12339 else
12340 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12341 gen_rtx_COMPARE (comp_mode,
12342 rs6000_compare_op0,
12343 rs6000_compare_op1)));
12344 }
f676971a 12345
ca5adc63 12346 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12347 under flag_finite_math_only we don't bother. */
39a10a29 12348 if (rs6000_compare_fp_p
e7108df9 12349 && !flag_finite_math_only
8ef65e3d 12350 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12351 && (code == LE || code == GE
12352 || code == UNEQ || code == LTGT
12353 || code == UNGT || code == UNLT))
12354 {
12355 enum rtx_code or1, or2;
12356 rtx or1_rtx, or2_rtx, compare2_rtx;
12357 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12358
39a10a29
GK
12359 switch (code)
12360 {
12361 case LE: or1 = LT; or2 = EQ; break;
12362 case GE: or1 = GT; or2 = EQ; break;
12363 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12364 case LTGT: or1 = LT; or2 = GT; break;
12365 case UNGT: or1 = UNORDERED; or2 = GT; break;
12366 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12367 default: gcc_unreachable ();
39a10a29
GK
12368 }
12369 validate_condition_mode (or1, comp_mode);
12370 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12371 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12372 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12373 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12374 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12375 const_true_rtx);
12376 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12377
12378 compare_result = or_result;
12379 code = EQ;
12380 }
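	 /* E.g. a floating-point LE becomes, in effect, a cror of the LT and
	    EQ bits into a CCEQ register, leaving a single EQ test for the
	    consumer (sketch; the exact insn comes from the machine
	    description).  */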
12381
12382 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12383
1c563bed 12384 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12385}
12386
12387
12388/* Emit the RTL for an sCOND pattern. */
12389
12390void
a2369ed3 12391rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12392{
12393 rtx condition_rtx;
12394 enum machine_mode op_mode;
b7053a3f 12395 enum rtx_code cond_code;
39a10a29
GK
12396
12397 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12398 cond_code = GET_CODE (condition_rtx);
12399
8ef65e3d 12400 if (rs6000_compare_fp_p
423c1189
AH
12401 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12402 {
12403 rtx t;
12404
12405 PUT_MODE (condition_rtx, SImode);
12406 t = XEXP (condition_rtx, 0);
12407
37409796 12408 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12409
12410 if (cond_code == NE)
64022b5d 12411 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12412
64022b5d 12413 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12414 return;
12415 }
12416
b7053a3f
GK
12417 if (cond_code == NE
12418 || cond_code == GE || cond_code == LE
12419 || cond_code == GEU || cond_code == LEU
12420 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12421 {
12422 rtx not_result = gen_reg_rtx (CCEQmode);
12423 rtx not_op, rev_cond_rtx;
12424 enum machine_mode cc_mode;
f676971a 12425
b7053a3f
GK
12426 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12427
1c563bed 12428 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12429 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12430 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12431 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12432 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12433 }
39a10a29
GK
12434
12435 op_mode = GET_MODE (rs6000_compare_op0);
12436 if (op_mode == VOIDmode)
12437 op_mode = GET_MODE (rs6000_compare_op1);
12438
12439 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12440 {
12441 PUT_MODE (condition_rtx, DImode);
12442 convert_move (result, condition_rtx, 0);
12443 }
12444 else
12445 {
12446 PUT_MODE (condition_rtx, SImode);
12447 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12448 }
12449}
12450
39a10a29
GK
12451/* Emit a branch of kind CODE to location LOC. */
12452
12453void
a2369ed3 12454rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12455{
12456 rtx condition_rtx, loc_ref;
12457
12458 condition_rtx = rs6000_generate_compare (code);
12459 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12460 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12461 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12462 loc_ref, pc_rtx)));
12463}
12464
12a4e8c5
GK
12465/* Return the string to output a conditional branch to LABEL, which is
12466 the operand number of the label, or -1 if the branch is really a
f676971a 12467 conditional return.
12a4e8c5
GK
12468
12469 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12470 condition code register and its mode specifies what kind of
12471 comparison we made.
12472
a0ab749a 12473 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12474
12475 INSN is the insn. */
12476
12477char *
a2369ed3 12478output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12479{
12480 static char string[64];
12481 enum rtx_code code = GET_CODE (op);
12482 rtx cc_reg = XEXP (op, 0);
12483 enum machine_mode mode = GET_MODE (cc_reg);
12484 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12485 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12486 int really_reversed = reversed ^ need_longbranch;
12487 char *s = string;
12488 const char *ccode;
12489 const char *pred;
12490 rtx note;
12491
39a10a29
GK
12492 validate_condition_mode (code, mode);
12493
12494 /* Work out which way this really branches. We could use
12495 reverse_condition_maybe_unordered here always but this
12496 makes the resulting assembler clearer. */
12a4e8c5 12497 if (really_reversed)
de40e1df
DJ
12498 {
12499	 /* Reversing an FP compare needs care -- an ordered compare
12500	    becomes an unordered compare and vice versa.  */
12501 if (mode == CCFPmode)
12502 code = reverse_condition_maybe_unordered (code);
12503 else
12504 code = reverse_condition (code);
12505 }
12a4e8c5 12506
8ef65e3d 12507 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12508 {
12509 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12510 to the GT bit. */
37409796
NS
12511 switch (code)
12512 {
12513 case EQ:
12514 /* Opposite of GT. */
12515 code = GT;
12516 break;
12517
12518 case NE:
12519 code = UNLE;
12520 break;
12521
12522 default:
12523 gcc_unreachable ();
12524 }
a3170dc6
AH
12525 }
12526
39a10a29 12527 switch (code)
12a4e8c5
GK
12528 {
12529 /* Not all of these are actually distinct opcodes, but
12530 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12531 case NE: case LTGT:
12532 ccode = "ne"; break;
12533 case EQ: case UNEQ:
12534 ccode = "eq"; break;
f676971a 12535 case GE: case GEU:
50a0b056 12536 ccode = "ge"; break;
f676971a 12537 case GT: case GTU: case UNGT:
50a0b056 12538 ccode = "gt"; break;
f676971a 12539 case LE: case LEU:
50a0b056 12540 ccode = "le"; break;
f676971a 12541 case LT: case LTU: case UNLT:
50a0b056 12542 ccode = "lt"; break;
12a4e8c5
GK
12543 case UNORDERED: ccode = "un"; break;
12544 case ORDERED: ccode = "nu"; break;
12545 case UNGE: ccode = "nl"; break;
12546 case UNLE: ccode = "ng"; break;
12547 default:
37409796 12548 gcc_unreachable ();
12a4e8c5 12549 }
f676971a
EC
12550
12551 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12552 The old mnemonics don't have a way to specify this information. */
f4857b9b 12553 pred = "";
12a4e8c5
GK
12554 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12555 if (note != NULL_RTX)
12556 {
12557 /* PROB is the difference from 50%. */
12558 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12559
12560 /* Only hint for highly probable/improbable branches on newer
12561 cpus as static prediction overrides processor dynamic
12562 prediction. For older cpus we may as well always hint, but
12563 assume not taken for branches that are very close to 50% as a
12564 mispredicted taken branch is more expensive than a
f676971a 12565 mispredicted not-taken branch. */
ec507f2d 12566 if (rs6000_always_hint
2c9e13f3
JH
12567 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12568 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12569 {
12570 if (abs (prob) > REG_BR_PROB_BASE / 20
12571 && ((prob > 0) ^ need_longbranch))
c4ad648e 12572 pred = "+";
f4857b9b
AM
12573 else
12574 pred = "-";
12575 }
12a4e8c5 12576 }
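	 /* Worked example (REG_BR_PROB_BASE is 10000): a note value of 9900
	    gives prob = 4900, which clears both the 48% (4800) and 5% (500)
	    thresholds, so a short branch that is predicted taken gets the
	    "+" suffix -- assuming the note is considered reliable.  */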
12a4e8c5
GK
12577
12578 if (label == NULL)
94a54f47 12579 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12580 else
94a54f47 12581 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12582
37c67319 12583 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12584 Assume they'd only be the first character.... */
37c67319
GK
12585 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12586 *s++ = '%';
94a54f47 12587 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12588
12589 if (label != NULL)
12590 {
12591 /* If the branch distance was too far, we may have to use an
12592 unconditional branch to go the distance. */
12593 if (need_longbranch)
44518ddd 12594 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12595 else
12596 s += sprintf (s, ",%s", label);
12597 }
12598
12599 return string;
12600}
50a0b056 12601
64022b5d 12602/* Return the string to flip the GT bit on a CR. */
423c1189 12603char *
64022b5d 12604output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12605{
12606 static char string[64];
12607 int a, b;
12608
37409796
NS
12609 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12610 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12611
64022b5d
AH
12612 /* GT bit. */
12613 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12614 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12615
12616 sprintf (string, "crnot %d,%d", a, b);
12617 return string;
12618}
12619
21213b4c
DP
12620/* Return the UNSPEC index of the vector compare instruction for the given
12621   CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no suitable
12622   insn is available.  */
12623
12624static int
94ff898d 12625get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12626 enum machine_mode dest_mode,
12627 enum machine_mode op_mode)
12628{
12629 if (!TARGET_ALTIVEC)
12630 return INSN_NOT_AVAILABLE;
12631
12632 switch (code)
12633 {
12634 case EQ:
12635 if (dest_mode == V16QImode && op_mode == V16QImode)
12636 return UNSPEC_VCMPEQUB;
12637 if (dest_mode == V8HImode && op_mode == V8HImode)
12638 return UNSPEC_VCMPEQUH;
12639 if (dest_mode == V4SImode && op_mode == V4SImode)
12640 return UNSPEC_VCMPEQUW;
12641 if (dest_mode == V4SImode && op_mode == V4SFmode)
12642 return UNSPEC_VCMPEQFP;
12643 break;
12644 case GE:
12645 if (dest_mode == V4SImode && op_mode == V4SFmode)
12646	 return UNSPEC_VCMPGEFP;
	 /* Integer GE has no single AltiVec compare instruction; stop here so
	    the caller falls back to the GT-or-EQ synthesis below rather than
	    falling through and emitting a plain GT compare.  */
	 break;
12647	 case GT:
12648 if (dest_mode == V16QImode && op_mode == V16QImode)
12649 return UNSPEC_VCMPGTSB;
12650 if (dest_mode == V8HImode && op_mode == V8HImode)
12651 return UNSPEC_VCMPGTSH;
12652 if (dest_mode == V4SImode && op_mode == V4SImode)
12653 return UNSPEC_VCMPGTSW;
12654 if (dest_mode == V4SImode && op_mode == V4SFmode)
12655 return UNSPEC_VCMPGTFP;
12656 break;
12657 case GTU:
12658 if (dest_mode == V16QImode && op_mode == V16QImode)
12659 return UNSPEC_VCMPGTUB;
12660 if (dest_mode == V8HImode && op_mode == V8HImode)
12661 return UNSPEC_VCMPGTUH;
12662 if (dest_mode == V4SImode && op_mode == V4SImode)
12663 return UNSPEC_VCMPGTUW;
12664 break;
12665 default:
12666 break;
12667 }
12668 return INSN_NOT_AVAILABLE;
12669}
12670
12671/* Emit a vector compare for operands OP0 and OP1 using code RCODE.
12672   DMODE is the expected destination mode.  This function may recurse.  */
12673
12674static rtx
12675rs6000_emit_vector_compare (enum rtx_code rcode,
12676 rtx op0, rtx op1,
12677 enum machine_mode dmode)
12678{
12679 int vec_cmp_insn;
12680 rtx mask;
12681 enum machine_mode dest_mode;
12682 enum machine_mode op_mode = GET_MODE (op1);
12683
37409796
NS
12684 gcc_assert (TARGET_ALTIVEC);
12685 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12686
12687 /* Floating point vector compare instructions uses destination V4SImode.
12688 Move destination to appropriate mode later. */
12689 if (dmode == V4SFmode)
12690 dest_mode = V4SImode;
12691 else
12692 dest_mode = dmode;
12693
12694 mask = gen_reg_rtx (dest_mode);
12695 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12696
12697 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12698 {
12699 bool swap_operands = false;
12700 bool try_again = false;
12701 switch (rcode)
12702 {
12703 case LT:
12704 rcode = GT;
12705 swap_operands = true;
12706 try_again = true;
12707 break;
12708 case LTU:
12709 rcode = GTU;
12710 swap_operands = true;
12711 try_again = true;
12712 break;
12713 case NE:
370df7db
JC
12714 case UNLE:
12715 case UNLT:
12716 case UNGE:
12717 case UNGT:
12718 /* Invert condition and try again.
12719 e.g., A != B becomes ~(A==B). */
21213b4c 12720 {
370df7db 12721 enum rtx_code rev_code;
21213b4c 12722 enum insn_code nor_code;
d1123cde 12723 rtx eq_rtx;
370df7db
JC
12724
12725 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12726 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12727 dest_mode);
94ff898d 12728
166cdb08 12729 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12730 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12731 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12732
12733 if (dmode != dest_mode)
12734 {
12735 rtx temp = gen_reg_rtx (dest_mode);
12736 convert_move (temp, mask, 0);
12737 return temp;
12738 }
12739 return mask;
12740 }
12741 break;
12742 case GE:
12743 case GEU:
12744 case LE:
12745 case LEU:
12746 /* Try GT/GTU/LT/LTU OR EQ */
12747 {
12748 rtx c_rtx, eq_rtx;
12749 enum insn_code ior_code;
12750 enum rtx_code new_code;
12751
37409796
NS
12752 switch (rcode)
12753 {
12754 case GE:
12755 new_code = GT;
12756 break;
12757
12758 case GEU:
12759 new_code = GTU;
12760 break;
12761
12762 case LE:
12763 new_code = LT;
12764 break;
12765
12766 case LEU:
12767 new_code = LTU;
12768 break;
12769
12770 default:
12771 gcc_unreachable ();
12772 }
21213b4c
DP
12773
12774 c_rtx = rs6000_emit_vector_compare (new_code,
12775 op0, op1, dest_mode);
12776 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12777 dest_mode);
12778
166cdb08 12779 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12780 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12781 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12782 if (dmode != dest_mode)
12783 {
12784 rtx temp = gen_reg_rtx (dest_mode);
12785 convert_move (temp, mask, 0);
12786 return temp;
12787 }
12788 return mask;
12789 }
12790 break;
12791 default:
37409796 12792 gcc_unreachable ();
21213b4c
DP
12793 }
12794
12795 if (try_again)
12796 {
12797 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12798 /* You only get two chances. */
12799 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12800 }
12801
12802 if (swap_operands)
12803 {
12804 rtx tmp;
12805 tmp = op0;
12806 op0 = op1;
12807 op1 = tmp;
12808 }
12809 }
12810
915167f5
GK
12811 emit_insn (gen_rtx_SET (VOIDmode, mask,
12812 gen_rtx_UNSPEC (dest_mode,
12813 gen_rtvec (2, op0, op1),
12814 vec_cmp_insn)));
21213b4c
DP
12815 if (dmode != dest_mode)
12816 {
12817 rtx temp = gen_reg_rtx (dest_mode);
12818 convert_move (temp, mask, 0);
12819 return temp;
12820 }
12821 return mask;
12822}
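/* Summary of the fallbacks above: only EQ, GT and GTU (plus GE for V4SF)
   map directly to vcmp* instructions.  LT/LTU are handled by swapping the
   operands of a GT/GTU compare, NE and the unordered codes by complementing
   the result of the reversed compare, and GE/GEU/LE/LEU by OR-ing the
   corresponding GT-style compare with EQ.  */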
12823
12824/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
12825   if no valid insn exists for the given mode.  */
12826
12827static int
12828get_vsel_insn (enum machine_mode mode)
12829{
12830 switch (mode)
12831 {
12832 case V4SImode:
12833 return UNSPEC_VSEL4SI;
12834 break;
12835 case V4SFmode:
12836 return UNSPEC_VSEL4SF;
12837 break;
12838 case V8HImode:
12839 return UNSPEC_VSEL8HI;
12840 break;
12841 case V16QImode:
12842 return UNSPEC_VSEL16QI;
12843 break;
12844 default:
12845 return INSN_NOT_AVAILABLE;
12846 break;
12847 }
12848 return INSN_NOT_AVAILABLE;
12849}
12850
12851/* Emit a vector select insn that sets DEST from
12852   operands OP1, OP2 and MASK.  */
12853
12854static void
12855rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12856{
12857 rtx t, temp;
12858 enum machine_mode dest_mode = GET_MODE (dest);
12859 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12860
12861 temp = gen_reg_rtx (dest_mode);
94ff898d 12862
bb8df8a6 12863 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 12864 select op2. */
915167f5
GK
12865 t = gen_rtx_SET (VOIDmode, temp,
12866 gen_rtx_UNSPEC (dest_mode,
12867 gen_rtvec (3, op2, op1, mask),
12868 vsel_insn_index));
21213b4c
DP
12869 emit_insn (t);
12870 emit_move_insn (dest, temp);
12871 return;
12872}
12873
94ff898d 12874/* Emit vector conditional expression.
21213b4c
DP
12875 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
12876 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
12877
12878int
12879rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12880 rtx cond, rtx cc_op0, rtx cc_op1)
12881{
12882 enum machine_mode dest_mode = GET_MODE (dest);
12883 enum rtx_code rcode = GET_CODE (cond);
12884 rtx mask;
12885
12886 if (!TARGET_ALTIVEC)
12887 return 0;
12888
12889 /* Get the vector mask for the given relational operations. */
12890 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12891
12892 rs6000_emit_vector_select (dest, op1, op2, mask);
12893
12894 return 1;
12895}
12896
50a0b056
GK
12897/* Emit a conditional move: move TRUE_COND to DEST if OP of the
12898 operands of the last comparison is nonzero/true, FALSE_COND if it
12899 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 12900
50a0b056 12901int
a2369ed3 12902rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
12903{
12904 enum rtx_code code = GET_CODE (op);
12905 rtx op0 = rs6000_compare_op0;
12906 rtx op1 = rs6000_compare_op1;
12907 REAL_VALUE_TYPE c1;
3148ad6d
DJ
12908 enum machine_mode compare_mode = GET_MODE (op0);
12909 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 12910 rtx temp;
add2402e 12911 bool is_against_zero;
50a0b056 12912
a3c9585f 12913 /* These modes should always match. */
a3170dc6
AH
12914 if (GET_MODE (op1) != compare_mode
12915 /* In the isel case however, we can use a compare immediate, so
12916 op1 may be a small constant. */
12917 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 12918 return 0;
178c3eff 12919 if (GET_MODE (true_cond) != result_mode)
3148ad6d 12920 return 0;
178c3eff 12921 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
12922 return 0;
12923
50a0b056 12924 /* First, work out if the hardware can do this at all, or
a3c9585f 12925 if it's too slow.... */
50a0b056 12926 if (! rs6000_compare_fp_p)
a3170dc6
AH
12927 {
12928 if (TARGET_ISEL)
12929 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
12930 return 0;
12931 }
8ef65e3d 12932 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 12933 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 12934 return 0;
50a0b056 12935
add2402e 12936 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 12937
add2402e
GK
12938 /* A floating-point subtract might overflow, underflow, or produce
12939 an inexact result, thus changing the floating-point flags, so it
12940 can't be generated if we care about that. It's safe if one side
12941 of the construct is zero, since then no subtract will be
12942 generated. */
ebb109ad 12943 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
12944 && flag_trapping_math && ! is_against_zero)
12945 return 0;
12946
50a0b056
GK
12947 /* Eliminate half of the comparisons by switching operands, this
12948 makes the remaining code simpler. */
12949 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 12950 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
12951 {
12952 code = reverse_condition_maybe_unordered (code);
12953 temp = true_cond;
12954 true_cond = false_cond;
12955 false_cond = temp;
12956 }
12957
12958 /* UNEQ and LTGT take four instructions for a comparison with zero,
12959 it'll probably be faster to use a branch here too. */
bc9ec0e0 12960 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 12961 return 0;
f676971a 12962
50a0b056
GK
12963 if (GET_CODE (op1) == CONST_DOUBLE)
12964 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 12965
b6d08ca1 12966	 /* We're going to try to implement comparisons by performing
50a0b056
GK
12967	    a subtract, then comparing against zero.  Unfortunately,
12968	    Inf - Inf is NaN, which is not zero, so if we don't
27d30956 12969	    know that the operand is finite and the comparison
50a0b056 12970	    would treat EQ differently from UNORDERED, we can't do it.  */
bc9ec0e0 12971 if (HONOR_INFINITIES (compare_mode)
50a0b056 12972 && code != GT && code != UNGE
045572c7 12973 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
12974 /* Constructs of the form (a OP b ? a : b) are safe. */
12975 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 12976 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
12977 && ! rtx_equal_p (op1, true_cond))))
12978 return 0;
add2402e 12979
50a0b056
GK
12980 /* At this point we know we can use fsel. */
12981
12982 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
12983 if (! is_against_zero)
12984 {
12985 temp = gen_reg_rtx (compare_mode);
12986 emit_insn (gen_rtx_SET (VOIDmode, temp,
12987 gen_rtx_MINUS (compare_mode, op0, op1)));
12988 op0 = temp;
12989 op1 = CONST0_RTX (compare_mode);
12990 }
50a0b056
GK
12991
12992 /* If we don't care about NaNs we can reduce some of the comparisons
12993 down to faster ones. */
bc9ec0e0 12994 if (! HONOR_NANS (compare_mode))
50a0b056
GK
12995 switch (code)
12996 {
12997 case GT:
12998 code = LE;
12999 temp = true_cond;
13000 true_cond = false_cond;
13001 false_cond = temp;
13002 break;
13003 case UNGE:
13004 code = GE;
13005 break;
13006 case UNEQ:
13007 code = EQ;
13008 break;
13009 default:
13010 break;
13011 }
13012
13013 /* Now, reduce everything down to a GE. */
13014 switch (code)
13015 {
13016 case GE:
13017 break;
13018
13019 case LE:
3148ad6d
DJ
13020 temp = gen_reg_rtx (compare_mode);
13021 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13022 op0 = temp;
13023 break;
13024
13025 case ORDERED:
3148ad6d
DJ
13026 temp = gen_reg_rtx (compare_mode);
13027 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13028 op0 = temp;
13029 break;
13030
13031 case EQ:
3148ad6d 13032 temp = gen_reg_rtx (compare_mode);
f676971a 13033 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13034 gen_rtx_NEG (compare_mode,
13035 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13036 op0 = temp;
13037 break;
13038
13039 case UNGE:
bc9ec0e0 13040 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13041 temp = gen_reg_rtx (result_mode);
50a0b056 13042 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13043 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13044 gen_rtx_GE (VOIDmode,
13045 op0, op1),
13046 true_cond, false_cond)));
bc9ec0e0
GK
13047 false_cond = true_cond;
13048 true_cond = temp;
50a0b056 13049
3148ad6d
DJ
13050 temp = gen_reg_rtx (compare_mode);
13051 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13052 op0 = temp;
13053 break;
13054
13055 case GT:
bc9ec0e0 13056 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13057 temp = gen_reg_rtx (result_mode);
50a0b056 13058 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13059 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13060 gen_rtx_GE (VOIDmode,
13061 op0, op1),
13062 true_cond, false_cond)));
bc9ec0e0
GK
13063 true_cond = false_cond;
13064 false_cond = temp;
50a0b056 13065
3148ad6d
DJ
13066 temp = gen_reg_rtx (compare_mode);
13067 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13068 op0 = temp;
13069 break;
13070
13071 default:
37409796 13072 gcc_unreachable ();
50a0b056
GK
13073 }
13074
13075 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13076 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13077 gen_rtx_GE (VOIDmode,
13078 op0, op1),
13079 true_cond, false_cond)));
13080 return 1;
13081}
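/* Illustrative expansion (register names are placeholders): under flags such
   as -ffast-math, (a >= b ? x : y) in DFmode reduces to a subtract plus fsel,
   roughly

	fsub	f0,fa,fb
	fsel	fd,f0,fx,fy

   since fsel selects its second source operand when the first is >= 0.0 and
   its third otherwise.  */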
13082
a3170dc6
AH
13083/* Same as above, but for ints (isel). */
13084
13085static int
a2369ed3 13086rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13087{
13088 rtx condition_rtx, cr;
13089
13090	 /* All isel implementations thus far are 32-bit.  */
13091 if (GET_MODE (rs6000_compare_op0) != SImode)
13092 return 0;
13093
13094 /* We still have to do the compare, because isel doesn't do a
13095 compare, it just looks at the CRx bits set by a previous compare
13096 instruction. */
13097 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13098 cr = XEXP (condition_rtx, 0);
13099
13100 if (GET_MODE (cr) == CCmode)
13101 emit_insn (gen_isel_signed (dest, condition_rtx,
13102 true_cond, false_cond, cr));
13103 else
13104 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13105 true_cond, false_cond, cr));
13106
13107 return 1;
13108}
13109
13110const char *
a2369ed3 13111output_isel (rtx *operands)
a3170dc6
AH
13112{
13113 enum rtx_code code;
13114
13115 code = GET_CODE (operands[1]);
13116 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13117 {
13118 PUT_CODE (operands[1], reverse_condition (code));
13119 return "isel %0,%3,%2,%j1";
13120 }
13121 else
13122 return "isel %0,%2,%3,%j1";
13123}
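/* For reference: the ISA defines isel rD,rA,rB,BC as rD = CR[BC] ? rA : rB,
   so for GE/GEU/LE/LEU/NE the template above reverses the condition and
   swaps the two data operands instead of materializing a complemented CR
   bit.  */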
13124
50a0b056 13125void
a2369ed3 13126rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13127{
13128 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13129 enum rtx_code c;
50a0b056 13130 rtx target;
5dc8d536
AH
13131
13132 if (code == SMAX || code == SMIN)
13133 c = GE;
13134 else
13135 c = GEU;
13136
50a0b056 13137 if (code == SMAX || code == UMAX)
f676971a 13138 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13139 op0, op1, mode, 0);
13140 else
f676971a 13141 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13142 op1, op0, mode, 0);
37409796 13143 gcc_assert (target);
50a0b056
GK
13144 if (target != dest)
13145 emit_move_insn (dest, target);
13146}
46c07df8 13147
915167f5
GK
13148/* Emit instructions to perform a load-reserved/store-conditional operation.
13149 The operation performed is an atomic
13150 (set M (CODE:MODE M OP))
13151 If not NULL, BEFORE is atomically set to M before the operation, and
13152 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13153 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13154 Either OP or M may be wrapped in a NOT operation. */
13155
13156void
13157rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13158 rtx m, rtx op, rtx before_param, rtx after_param,
13159 bool sync_p)
13160{
13161 enum machine_mode used_mode;
13162 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13163 rtx used_m;
13164 rtvec vec;
13165 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13166 rtx shift = NULL_RTX;
bb8df8a6 13167
915167f5
GK
13168 if (sync_p)
13169 emit_insn (gen_memory_barrier ());
bb8df8a6 13170
915167f5
GK
13171 if (GET_CODE (m) == NOT)
13172 used_m = XEXP (m, 0);
13173 else
13174 used_m = m;
13175
13176 /* If this is smaller than SImode, we'll have to use SImode with
13177 adjustments. */
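	 /* Sketch of the strategy: operate on the aligned SImode word that
	    contains the narrow operand.  Compute the bit offset of the QI/HI
	    value within that word, shift the operand and a mode-sized mask
	    into that lane, perform the atomic operation on the whole word,
	    and shift the BEFORE/AFTER results back down afterwards.  */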
13178 if (mode == QImode || mode == HImode)
13179 {
13180 rtx newop, oldop;
13181
13182 if (MEM_ALIGN (used_m) >= 32)
13183 {
13184 int ishift = 0;
13185 if (BYTES_BIG_ENDIAN)
13186 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13187
915167f5 13188 shift = GEN_INT (ishift);
c75c6d11 13189 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13190 }
13191 else
13192 {
13193 rtx addrSI, aligned_addr;
a9c9d3fa 13194 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13195
c75c6d11
JJ
13196 addrSI = gen_lowpart_common (SImode,
13197 force_reg (Pmode, XEXP (used_m, 0)));
13198 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13199 shift = gen_reg_rtx (SImode);
13200
13201 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13202 GEN_INT (shift_mask)));
13203 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13204
13205 aligned_addr = expand_binop (Pmode, and_optab,
13206 XEXP (used_m, 0),
13207 GEN_INT (-4), NULL_RTX,
13208 1, OPTAB_LIB_WIDEN);
13209 used_m = change_address (used_m, SImode, aligned_addr);
13210 set_mem_align (used_m, 32);
915167f5 13211 }
c75c6d11
JJ
13212 /* It's safe to keep the old alias set of USED_M, because
13213 the operation is atomic and only affects the original
13214 USED_M. */
13215 if (GET_CODE (m) == NOT)
13216 m = gen_rtx_NOT (SImode, used_m);
13217 else
13218 m = used_m;
915167f5
GK
13219
13220 if (GET_CODE (op) == NOT)
13221 {
13222 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13223 oldop = gen_rtx_NOT (SImode, oldop);
13224 }
13225 else
13226 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13227
915167f5
GK
13228 switch (code)
13229 {
13230 case IOR:
13231 case XOR:
13232 newop = expand_binop (SImode, and_optab,
13233 oldop, GEN_INT (imask), NULL_RTX,
13234 1, OPTAB_LIB_WIDEN);
13235 emit_insn (gen_ashlsi3 (newop, newop, shift));
13236 break;
13237
13238 case AND:
13239 newop = expand_binop (SImode, ior_optab,
13240 oldop, GEN_INT (~imask), NULL_RTX,
13241 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13242 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13243 break;
13244
13245 case PLUS:
9f0076e5 13246 case MINUS:
915167f5
GK
13247 {
13248 rtx mask;
bb8df8a6 13249
915167f5
GK
13250 newop = expand_binop (SImode, and_optab,
13251 oldop, GEN_INT (imask), NULL_RTX,
13252 1, OPTAB_LIB_WIDEN);
13253 emit_insn (gen_ashlsi3 (newop, newop, shift));
13254
13255 mask = gen_reg_rtx (SImode);
13256 emit_move_insn (mask, GEN_INT (imask));
13257 emit_insn (gen_ashlsi3 (mask, mask, shift));
13258
9f0076e5
DE
13259 if (code == PLUS)
13260 newop = gen_rtx_PLUS (SImode, m, newop);
13261 else
13262 newop = gen_rtx_MINUS (SImode, m, newop);
13263 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13264 newop = gen_rtx_IOR (SImode, newop,
13265 gen_rtx_AND (SImode,
13266 gen_rtx_NOT (SImode, mask),
13267 m));
13268 break;
13269 }
13270
13271 default:
13272 gcc_unreachable ();
13273 }
13274
a9c9d3fa
GK
13275 if (GET_CODE (m) == NOT)
13276 {
13277 rtx mask, xorm;
13278
13279 mask = gen_reg_rtx (SImode);
13280 emit_move_insn (mask, GEN_INT (imask));
13281 emit_insn (gen_ashlsi3 (mask, mask, shift));
13282
13283 xorm = gen_rtx_XOR (SImode, used_m, mask);
13284 /* Depending on the value of 'op', the XOR or the operation might
13285 be able to be simplified away. */
13286 newop = simplify_gen_binary (code, SImode, xorm, newop);
13287 }
915167f5
GK
13288 op = newop;
13289 used_mode = SImode;
13290 before = gen_reg_rtx (used_mode);
13291 after = gen_reg_rtx (used_mode);
13292 }
13293 else
13294 {
13295 used_mode = mode;
13296 before = before_param;
13297 after = after_param;
13298
13299 if (before == NULL_RTX)
13300 before = gen_reg_rtx (used_mode);
13301 if (after == NULL_RTX)
13302 after = gen_reg_rtx (used_mode);
13303 }
bb8df8a6 13304
9f0076e5
DE
13305 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13306 && used_mode != mode)
915167f5
GK
13307 the_op = op; /* Computed above. */
13308 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13309 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13310 else
13311 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13312
13313 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13314 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13315 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13316 gen_rtx_UNSPEC (used_mode,
13317 gen_rtvec (1, the_op),
13318 UNSPEC_SYNC_OP));
915167f5
GK
13319 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13320
9f0076e5 13321 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13322 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13323 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13324 else
13325 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13326 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13327
13328 /* Shift and mask the return values properly. */
13329 if (used_mode != mode && before_param)
13330 {
13331 emit_insn (gen_lshrsi3 (before, before, shift));
13332 convert_move (before_param, before, 1);
13333 }
13334
13335 if (used_mode != mode && after_param)
13336 {
13337 emit_insn (gen_lshrsi3 (after, after, shift));
13338 convert_move (after_param, after, 1);
13339 }
13340
13341 /* The previous sequence will end with a branch that's dependent on
13342 the conditional store, so placing an isync will ensure that no
13343	 other instructions (especially loads and stores)
13344 can start before the atomic operation completes. */
13345 if (sync_p)
13346 emit_insn (gen_isync ());
13347}
13348
b52110d4
DE
13349/* A subroutine of the atomic operation splitters. Jump to LABEL if
13350 COND is true. Mark the jump as unlikely to be taken. */
13351
13352static void
13353emit_unlikely_jump (rtx cond, rtx label)
13354{
13355 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13356 rtx x;
13357
13358 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13359 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13360 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13361}
13362
13363/* A subroutine of the atomic operation splitters. Emit a load-locked
13364 instruction in MODE. */
13365
13366static void
13367emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13368{
13369 rtx (*fn) (rtx, rtx) = NULL;
13370 if (mode == SImode)
13371 fn = gen_load_locked_si;
13372 else if (mode == DImode)
13373 fn = gen_load_locked_di;
13374 emit_insn (fn (reg, mem));
13375}
13376
13377/* A subroutine of the atomic operation splitters. Emit a store-conditional
13378 instruction in MODE. */
13379
13380static void
13381emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13382{
13383 rtx (*fn) (rtx, rtx, rtx) = NULL;
13384 if (mode == SImode)
13385 fn = gen_store_conditional_si;
13386 else if (mode == DImode)
13387 fn = gen_store_conditional_di;
13388
9f0076e5 13389 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13390 if (PPC405_ERRATUM77)
13391 emit_insn (gen_memory_barrier ());
13392
13393 emit_insn (fn (res, mem, val));
13394}
13395
ea2c620c 13396/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13397 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13398 operand of the binary operator. BEFORE and AFTER are optional locations to
13399   operand of the binary operator.  BEFORE and AFTER are optional locations to
13400 a scratch register. */
13401
13402void
13403rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13404 rtx before, rtx after, rtx scratch)
13405{
13406 enum machine_mode mode = GET_MODE (mem);
13407 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13408
13409 emit_insn (gen_memory_barrier ());
13410
13411 label = gen_label_rtx ();
13412 emit_label (label);
13413 label = gen_rtx_LABEL_REF (VOIDmode, label);
13414
13415 if (before == NULL_RTX)
13416 before = scratch;
13417 emit_load_locked (mode, before, mem);
13418
13419 if (code == NOT)
13420 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13421 else if (code == AND)
13422 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13423 else
13424 x = gen_rtx_fmt_ee (code, mode, before, val);
13425
13426 if (after != NULL_RTX)
13427 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13428 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13429
13430 emit_store_conditional (mode, cond, mem, scratch);
13431
13432 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13433 emit_unlikely_jump (x, label);
13434
13435 emit_insn (gen_isync ());
13436}
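
/* An illustrative sketch, not part of the actual rs6000 sources:
   ignoring the atomicity that the load-locked/store-conditional loop
   provides, the code generated by rs6000_split_atomic_op computes the
   following; '+' stands in for CODE, and for CODE == NOT the operation
   is (~old) & val instead.  The helper name is hypothetical.  */

static long
atomic_op_semantics (long *mem, long val, long *before, long *after)
{
  long old = *mem;              /* load-locked                  */
  long result = old + val;      /* the_op, with '+' for CODE    */

  if (before)
    *before = old;
  if (after)
    *after = result;
  *mem = result;                /* store-conditional            */
  return old;
}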
13437
b52110d4
DE
13438/* Expand an atomic compare and swap operation. MEM is the memory on which
13439 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13440 value to be stored. SCRATCH is a scratch GPR. */
13441
13442void
13443rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13444 rtx scratch)
13445{
13446 enum machine_mode mode = GET_MODE (mem);
13447 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13448
13449 emit_insn (gen_memory_barrier ());
13450
13451 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13452 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13453 emit_label (XEXP (label1, 0));
13454
13455 emit_load_locked (mode, retval, mem);
13456
13457 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13458 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13459
13460 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13461 emit_unlikely_jump (x, label2);
13462
13463 emit_move_insn (scratch, newval);
13464 emit_store_conditional (mode, cond, mem, scratch);
13465
13466 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13467 emit_unlikely_jump (x, label1);
13468
13469 emit_insn (gen_isync ());
13470 emit_label (XEXP (label2, 0));
13471}
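
/* An illustrative sketch, not part of the actual rs6000 sources: minus
   the atomicity of the load-locked/store-conditional pair, the loop
   emitted by rs6000_split_compare_and_swap has the semantics below.
   The helper name is hypothetical.  */

static int
cas_semantics (long *mem, long oldval, long newval, long *retval)
{
  *retval = *mem;               /* load-locked                  */
  if (*retval != oldval)
    return 0;                   /* mismatch: jump to label2     */
  *mem = newval;                /* store-conditional            */
  return 1;                     /* store succeeded              */
}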
13472
13473/* Expand an atomic test and set operation. MEM is the memory on which
13474   to operate.  VAL is the value to be stored.  SCRATCH is a scratch GPR.  */
13475
13476void
13477rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13478{
13479 enum machine_mode mode = GET_MODE (mem);
13480 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13481
13482 emit_insn (gen_memory_barrier ());
13483
13484 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13485 emit_label (XEXP (label, 0));
13486
13487 emit_load_locked (mode, retval, mem);
13488 emit_move_insn (scratch, val);
13489 emit_store_conditional (mode, cond, mem, scratch);
13490
13491 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13492 emit_unlikely_jump (x, label);
13493
13494 emit_insn (gen_isync ());
13495}
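
/* An illustrative sketch, not part of the actual rs6000 sources: the
   lock-test-and-set splitter above is an unconditional atomic exchange;
   its semantics, minus atomicity, are simply the following.  The helper
   name is hypothetical.  */

static long
xchg_semantics (long *mem, long val)
{
  long old = *mem;              /* load-locked, ends up in RETVAL */
  *mem = val;                   /* store-conditional              */
  return old;
}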
13496
9fc75b97
DE
13497void
13498rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13499{
13500 enum machine_mode mode = GET_MODE (mem);
13501 rtx addrSI, align, wdst, shift, mask;
13502 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13503 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13504
13505 /* Shift amount for subword relative to aligned word. */
13506 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13507 shift = gen_reg_rtx (SImode);
13508 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13509 GEN_INT (shift_mask)));
13510 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13511
13512 /* Shift and mask old value into position within word. */
13513 oldval = convert_modes (SImode, mode, oldval, 1);
13514 oldval = expand_binop (SImode, and_optab,
13515 oldval, GEN_INT (imask), NULL_RTX,
13516 1, OPTAB_LIB_WIDEN);
13517 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13518
13519 /* Shift and mask new value into position within word. */
13520 newval = convert_modes (SImode, mode, newval, 1);
13521 newval = expand_binop (SImode, and_optab,
13522 newval, GEN_INT (imask), NULL_RTX,
13523 1, OPTAB_LIB_WIDEN);
13524 emit_insn (gen_ashlsi3 (newval, newval, shift));
13525
13526 /* Mask for insertion. */
13527 mask = gen_reg_rtx (SImode);
13528 emit_move_insn (mask, GEN_INT (imask));
13529 emit_insn (gen_ashlsi3 (mask, mask, shift));
13530
13531 /* Address of aligned word containing subword. */
13532 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13533 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13534 mem = change_address (mem, SImode, align);
13535 set_mem_align (mem, 32);
13536 MEM_VOLATILE_P (mem) = 1;
13537
13538 wdst = gen_reg_rtx (SImode);
13539 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13540 oldval, newval, mem));
13541
13542 emit_move_insn (dst, gen_lowpart (mode, wdst));
13543}
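
/* An illustrative sketch, not part of the actual rs6000 sources: the
   expander above reduces a subword compare-and-swap to an SImode one on
   the aligned word containing the subword.  The C below redoes the same
   address arithmetic for the QImode case (shift_mask 0x18), using
   PowerPC's big-endian byte numbering; struct subword_loc and
   locate_byte are hypothetical names.  */

#include <stdint.h>

struct subword_loc
{
  uint32_t *word;       /* aligned word containing the byte   */
  unsigned shift;       /* big-endian bit offset of the byte  */
  uint32_t mask;        /* selects the byte within the word   */
};

static struct subword_loc
locate_byte (uint8_t *p)
{
  uintptr_t addr = (uintptr_t) p;
  struct subword_loc loc;

  loc.word  = (uint32_t *) (addr & ~(uintptr_t) 3);      /* addr & -4       */
  loc.shift = (((unsigned) addr << 3) & 0x18) ^ 0x18;    /* 24, 16, 8, 0    */
  loc.mask  = (uint32_t) 0xff << loc.shift;              /* imask << shift  */
  return loc;
}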
13544
13545void
13546rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13547 rtx oldval, rtx newval, rtx mem,
13548 rtx scratch)
13549{
13550 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13551
13552 emit_insn (gen_memory_barrier ());
13553 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13554 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13555 emit_label (XEXP (label1, 0));
13556
13557 emit_load_locked (SImode, scratch, mem);
13558
13559 /* Mask subword within loaded value for comparison with oldval.
13560 	     Use UNSPEC_AND to avoid clobber.  */
13561 emit_insn (gen_rtx_SET (SImode, dest,
13562 gen_rtx_UNSPEC (SImode,
13563 gen_rtvec (2, scratch, mask),
13564 UNSPEC_AND)));
13565
13566 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13567 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13568
13569 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13570 emit_unlikely_jump (x, label2);
13571
13572 /* Clear subword within loaded value for insertion of new value. */
13573 emit_insn (gen_rtx_SET (SImode, scratch,
13574 gen_rtx_AND (SImode,
13575 gen_rtx_NOT (SImode, mask), scratch)));
13576 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13577 emit_store_conditional (SImode, cond, mem, scratch);
13578
13579 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13580 emit_unlikely_jump (x, label1);
13581
13582 emit_insn (gen_isync ());
13583 emit_label (XEXP (label2, 0));
13584}
13585
13586
b52110d4 13587 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13588 multi-register moves. It will emit at most one instruction for
13589 each register that is accessed; that is, it won't emit li/lis pairs
13590 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13591 register. */
46c07df8 13592
46c07df8 13593void
a9baceb1 13594rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13595{
a9baceb1
GK
13596 /* The register number of the first register being moved. */
13597 int reg;
13598 /* The mode that is to be moved. */
13599 enum machine_mode mode;
13600 /* The mode that the move is being done in, and its size. */
13601 enum machine_mode reg_mode;
13602 int reg_mode_size;
13603 /* The number of registers that will be moved. */
13604 int nregs;
13605
13606 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13607 mode = GET_MODE (dst);
c8b622ff 13608 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13609 if (FP_REGNO_P (reg))
7393f7f8 13610 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13611 else if (ALTIVEC_REGNO_P (reg))
13612 reg_mode = V16QImode;
8521c414
JM
13613 else if (TARGET_E500_DOUBLE && mode == TFmode)
13614 reg_mode = DFmode;
a9baceb1
GK
13615 else
13616 reg_mode = word_mode;
13617 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13618
37409796 13619 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13620
a9baceb1
GK
13621 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13622 {
13623 /* Move register range backwards, if we might have destructive
13624 overlap. */
13625 int i;
13626 for (i = nregs - 1; i >= 0; i--)
f676971a 13627 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13628 simplify_gen_subreg (reg_mode, dst, mode,
13629 i * reg_mode_size),
13630 simplify_gen_subreg (reg_mode, src, mode,
13631 i * reg_mode_size)));
13632 }
46c07df8
HP
13633 else
13634 {
a9baceb1
GK
13635 int i;
13636 int j = -1;
13637 bool used_update = false;
46c07df8 13638
c1e55850 13639 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13640 {
13641 rtx breg;
3a1f863f 13642
a9baceb1
GK
13643 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13644 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13645 {
13646 rtx delta_rtx;
a9baceb1 13647 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13648 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13649 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13650 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13651 emit_insn (TARGET_32BIT
13652 ? gen_addsi3 (breg, breg, delta_rtx)
13653 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13654 src = replace_equiv_address (src, breg);
3a1f863f 13655 }
d04b6e6e 13656 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13657 {
13e2e16e 13658 rtx basereg;
c1e55850
GK
13659 basereg = gen_rtx_REG (Pmode, reg);
13660 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13661 src = replace_equiv_address (src, basereg);
c1e55850 13662 }
3a1f863f 13663
0423421f
AM
13664 breg = XEXP (src, 0);
13665 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13666 breg = XEXP (breg, 0);
13667
13668 /* If the base register we are using to address memory is
13669 also a destination reg, then change that register last. */
13670 if (REG_P (breg)
13671 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13672 && REGNO (breg) < REGNO (dst) + nregs)
13673 j = REGNO (breg) - REGNO (dst);
c4ad648e 13674 }
46c07df8 13675
a9baceb1 13676 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13677 {
13678 rtx breg;
13679
a9baceb1
GK
13680 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13681 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13682 {
13683 rtx delta_rtx;
a9baceb1 13684 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13685 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13686 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13687 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13688
13689 /* We have to update the breg before doing the store.
13690 Use store with update, if available. */
13691
13692 if (TARGET_UPDATE)
13693 {
a9baceb1 13694 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13695 emit_insn (TARGET_32BIT
13696 ? (TARGET_POWERPC64
13697 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13698 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13699 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13700 used_update = true;
3a1f863f
DE
13701 }
13702 else
a9baceb1
GK
13703 emit_insn (TARGET_32BIT
13704 ? gen_addsi3 (breg, breg, delta_rtx)
13705 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13706 dst = replace_equiv_address (dst, breg);
3a1f863f 13707 }
37409796 13708 else
d04b6e6e 13709 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13710 }
13711
46c07df8 13712 for (i = 0; i < nregs; i++)
f676971a 13713 {
3a1f863f
DE
13714 /* Calculate index to next subword. */
13715 ++j;
f676971a 13716 if (j == nregs)
3a1f863f 13717 j = 0;
46c07df8 13718
112cdef5 13719 	  /* If the compiler has already emitted the move of the first
a9baceb1 13720 	     word by a store with update, there is no need to do anything.  */
3a1f863f 13721 if (j == 0 && used_update)
a9baceb1 13722 continue;
f676971a 13723
a9baceb1
GK
13724 emit_insn (gen_rtx_SET (VOIDmode,
13725 simplify_gen_subreg (reg_mode, dst, mode,
13726 j * reg_mode_size),
13727 simplify_gen_subreg (reg_mode, src, mode,
13728 j * reg_mode_size)));
3a1f863f 13729 }
46c07df8
HP
13730 }
13731}
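
/* An illustrative sketch, not part of the actual rs6000 sources:
   rs6000_split_multireg_move above chunks a multi-register move into
   one register-sized move per register, walking backwards when the
   destination range overlaps the source from above so no chunk is
   clobbered before it is read.  The same idea in plain C (hypothetical,
   memmove-like helper):  */

static void
move_in_words (unsigned long *dst, const unsigned long *src, int nwords)
{
  int i;

  if (dst > src && dst < src + nwords)
    for (i = nwords - 1; i >= 0; i--)   /* destructive overlap: go backwards */
      dst[i] = src[i];
  else
    for (i = 0; i < nwords; i++)        /* ordinary forward copy             */
      dst[i] = src[i];
}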
13732
12a4e8c5 13733\f
a4f6c312
SS
13734/* This page contains routines that are used to determine what the
13735 function prologue and epilogue code will do and write them out. */
9878760c 13736
a4f6c312
SS
13737/* Return the first fixed-point register that is required to be
13738 saved. 32 if none. */
9878760c
RK
13739
13740int
863d938c 13741first_reg_to_save (void)
9878760c
RK
13742{
13743 int first_reg;
13744
13745 /* Find lowest numbered live register. */
13746 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13747 if (df_regs_ever_live_p (first_reg)
a38d360d 13748 && (! call_used_regs[first_reg]
1db02437 13749 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13750 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13751 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13752 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13753 break;
13754
ee890fe2 13755#if TARGET_MACHO
93638d7a
AM
13756 if (flag_pic
13757 && current_function_uses_pic_offset_table
13758 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13759 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13760#endif
13761
9878760c
RK
13762 return first_reg;
13763}
13764
13765/* Similar, for FP regs. */
13766
13767int
863d938c 13768first_fp_reg_to_save (void)
9878760c
RK
13769{
13770 int first_reg;
13771
13772 /* Find lowest numbered live register. */
13773 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13774 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13775 break;
13776
13777 return first_reg;
13778}
00b960c7
AH
13779
13780/* Similar, for AltiVec regs. */
13781
13782static int
863d938c 13783first_altivec_reg_to_save (void)
00b960c7
AH
13784{
13785 int i;
13786
13787 /* Stack frame remains as is unless we are in AltiVec ABI. */
13788 if (! TARGET_ALTIVEC_ABI)
13789 return LAST_ALTIVEC_REGNO + 1;
13790
22fa69da 13791 /* On Darwin, the unwind routines are compiled without
982afe02 13792 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13793 altivec registers when necessary. */
13794 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13795 && ! TARGET_ALTIVEC)
13796 return FIRST_ALTIVEC_REGNO + 20;
13797
00b960c7
AH
13798 /* Find lowest numbered live register. */
13799 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13800 if (df_regs_ever_live_p (i))
00b960c7
AH
13801 break;
13802
13803 return i;
13804}
13805
13806/* Return a 32-bit mask of the AltiVec registers we need to set in
13807   VRSAVE.  Bit n of the return value is 1 if Vn is live, using the
13808   PowerPC numbering in which the MSB of the 32-bit word is bit 0.  */
13809
13810static unsigned int
863d938c 13811compute_vrsave_mask (void)
00b960c7
AH
13812{
13813 unsigned int i, mask = 0;
13814
22fa69da 13815 /* On Darwin, the unwind routines are compiled without
982afe02 13816 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13817 call-saved altivec registers when necessary. */
13818 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13819 && ! TARGET_ALTIVEC)
13820 mask |= 0xFFF;
13821
00b960c7
AH
13822 /* First, find out if we use _any_ altivec registers. */
13823 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13824 if (df_regs_ever_live_p (i))
00b960c7
AH
13825 mask |= ALTIVEC_REG_BIT (i);
13826
13827 if (mask == 0)
13828 return mask;
13829
00b960c7
AH
13830 /* Next, remove the argument registers from the set. These must
13831 be in the VRSAVE mask set by the caller, so we don't need to add
13832 them in again. More importantly, the mask we compute here is
13833 used to generate CLOBBERs in the set_vrsave insn, and we do not
13834 wish the argument registers to die. */
a6cf80f2 13835 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13836 mask &= ~ALTIVEC_REG_BIT (i);
13837
13838 /* Similarly, remove the return value from the set. */
13839 {
13840 bool yes = false;
13841 diddle_return_value (is_altivec_return_reg, &yes);
13842 if (yes)
13843 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13844 }
13845
13846 return mask;
13847}
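
/* An illustrative sketch, not part of the actual rs6000 sources: the
   VRSAVE mask above uses the PowerPC bit numbering in which bit 0 is
   the MSB, so Vn is represented by the bit (0x80000000 >> n).  vr_bit
   is a hypothetical helper showing that mapping; ALTIVEC_REG_BIT in
   rs6000.h computes essentially the same thing from the hard register
   number.  */

static unsigned int
vr_bit (int vn)                 /* vn = 0 .. 31, AltiVec register number */
{
  return 0x80000000u >> vn;
}

/* For example, a function in which exactly V20..V31 are live gets the
   mask 0x00000fff, matching the 0xFFF used for the Darwin save_world
   case above.  */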
13848
d62294f5 13849/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13850 size of prologues/epilogues by calling our own save/restore-the-world
13851 routines. */
d62294f5
FJ
13852
13853static void
f57fe068
AM
13854compute_save_world_info (rs6000_stack_t *info_ptr)
13855{
13856 info_ptr->world_save_p = 1;
13857 info_ptr->world_save_p
13858 = (WORLD_SAVE_P (info_ptr)
13859 && DEFAULT_ABI == ABI_DARWIN
13860 && ! (current_function_calls_setjmp && flag_exceptions)
13861 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13862 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13863 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13864 && info_ptr->cr_save_p);
f676971a 13865
d62294f5
FJ
13866 /* This will not work in conjunction with sibcalls. Make sure there
13867 are none. (This check is expensive, but seldom executed.) */
f57fe068 13868 if (WORLD_SAVE_P (info_ptr))
f676971a 13869 {
d62294f5
FJ
13870 rtx insn;
13871 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13872 if ( GET_CODE (insn) == CALL_INSN
13873 && SIBLING_CALL_P (insn))
13874 {
13875 info_ptr->world_save_p = 0;
13876 break;
13877 }
d62294f5 13878 }
f676971a 13879
f57fe068 13880 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13881 {
13882 /* Even if we're not touching VRsave, make sure there's room on the
13883 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13884 will attempt to save it. */
d62294f5
FJ
13885 info_ptr->vrsave_size = 4;
13886
13887 /* "Save" the VRsave register too if we're saving the world. */
13888 if (info_ptr->vrsave_mask == 0)
c4ad648e 13889 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13890
13891 /* Because the Darwin register save/restore routines only handle
c4ad648e 13892 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 13893 check. */
37409796
NS
13894 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
13895 && (info_ptr->first_altivec_reg_save
13896 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 13897 }
f676971a 13898 return;
d62294f5
FJ
13899}
13900
13901
00b960c7 13902static void
a2369ed3 13903is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
13904{
13905 bool *yes = (bool *) xyes;
13906 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
13907 *yes = true;
13908}
13909
4697a36c
MM
13910\f
13911/* Calculate the stack information for the current function. This is
13912 complicated by having two separate calling sequences, the AIX calling
13913 sequence and the V.4 calling sequence.
13914
592696dd 13915 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 13916 32-bit 64-bit
4697a36c 13917 SP----> +---------------------------------------+
a260abc9 13918 | back chain to caller | 0 0
4697a36c 13919 +---------------------------------------+
a260abc9 13920 | saved CR | 4 8 (8-11)
4697a36c 13921 +---------------------------------------+
a260abc9 13922 | saved LR | 8 16
4697a36c 13923 +---------------------------------------+
a260abc9 13924 | reserved for compilers | 12 24
4697a36c 13925 +---------------------------------------+
a260abc9 13926 | reserved for binders | 16 32
4697a36c 13927 +---------------------------------------+
a260abc9 13928 | saved TOC pointer | 20 40
4697a36c 13929 +---------------------------------------+
a260abc9 13930 | Parameter save area (P) | 24 48
4697a36c 13931 +---------------------------------------+
a260abc9 13932 | Alloca space (A) | 24+P etc.
802a0058 13933 +---------------------------------------+
a7df97e6 13934 | Local variable space (L) | 24+P+A
4697a36c 13935 +---------------------------------------+
a7df97e6 13936 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 13937 +---------------------------------------+
00b960c7
AH
13938 | Save area for AltiVec registers (W) | 24+P+A+L+X
13939 +---------------------------------------+
13940 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
13941 +---------------------------------------+
13942 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 13943 +---------------------------------------+
00b960c7
AH
13944 	 | Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
13945 +---------------------------------------+
13946 	 | Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
13947 +---------------------------------------+
13948 old SP->| back chain to caller's caller |
13949 +---------------------------------------+
13950
5376a30c
KR
13951 The required alignment for AIX configurations is two words (i.e., 8
13952 or 16 bytes).
13953
13954
4697a36c
MM
13955 V.4 stack frames look like:
13956
13957 SP----> +---------------------------------------+
13958 | back chain to caller | 0
13959 +---------------------------------------+
5eb387b8 13960 | caller's saved LR | 4
4697a36c
MM
13961 +---------------------------------------+
13962 | Parameter save area (P) | 8
13963 +---------------------------------------+
a7df97e6 13964 | Alloca space (A) | 8+P
f676971a 13965 +---------------------------------------+
a7df97e6 13966 | Varargs save area (V) | 8+P+A
f676971a 13967 +---------------------------------------+
a7df97e6 13968 | Local variable space (L) | 8+P+A+V
f676971a 13969 +---------------------------------------+
a7df97e6 13970 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 13971 +---------------------------------------+
00b960c7
AH
13972 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
13973 +---------------------------------------+
13974 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
13975 +---------------------------------------+
13976 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
13977 +---------------------------------------+
c4ad648e
AM
13978 | SPE: area for 64-bit GP registers |
13979 +---------------------------------------+
13980 | SPE alignment padding |
13981 +---------------------------------------+
00b960c7 13982 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 13983 +---------------------------------------+
00b960c7 13984 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 13985 +---------------------------------------+
00b960c7 13986 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
13987 +---------------------------------------+
13988 old SP->| back chain to caller's caller |
13989 +---------------------------------------+
b6c9286a 13990
5376a30c
KR
13991 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13992 given. (But note below and in sysv4.h that we require only 8 and
13993 	 may round up the size of our stack frame anyway.  The historical
13994 reason is early versions of powerpc-linux which didn't properly
13995 align the stack at program startup. A happy side-effect is that
13996 -mno-eabi libraries can be used with -meabi programs.)
13997
50d440bc 13998 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
13999 the stack alignment requirements may differ. If -mno-eabi is not
14000 given, the required stack alignment is 8 bytes; if -mno-eabi is
14001 given, the required alignment is 16 bytes. (But see V.4 comment
14002 above.) */
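
/* An illustrative sketch, not part of the actual rs6000 sources: the
   frame sizes in the layouts above are repeatedly rounded up to the
   ABI stack boundary (8 or 16 bytes).  For power-of-two alignments
   that rounding amounts to the following; round_up_to is a
   hypothetical name for what the RS6000_ALIGN uses in this file
   compute.  */

static long
round_up_to (long n, long align)        /* ALIGN must be a power of two */
{
  return (n + align - 1) & ~(align - 1);
}

/* E.g. 20 bytes of locals with a 16-byte boundary round up to
   round_up_to (20, 16) == 32.  */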
4697a36c 14003
61b2fbe7
MM
14004#ifndef ABI_STACK_BOUNDARY
14005#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14006#endif
14007
d1d0c603 14008static rs6000_stack_t *
863d938c 14009rs6000_stack_info (void)
4697a36c 14010{
022123e6 14011 static rs6000_stack_t info;
4697a36c 14012 rs6000_stack_t *info_ptr = &info;
327e5343 14013 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14014 int ehrd_size;
64045029 14015 int save_align;
8070c91a 14016 int first_gp;
44688022 14017 HOST_WIDE_INT non_fixed_size;
4697a36c 14018
022123e6 14019 memset (&info, 0, sizeof (info));
4697a36c 14020
c19de7aa
AH
14021 if (TARGET_SPE)
14022 {
14023 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14024 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14025 cfun->machine->insn_chain_scanned_p
14026 = spe_func_has_64bit_regs_p () + 1;
14027 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14028 }
14029
a4f6c312 14030 /* Select which calling sequence. */
178274da 14031 info_ptr->abi = DEFAULT_ABI;
9878760c 14032
a4f6c312 14033 /* Calculate which registers need to be saved & save area size. */
4697a36c 14034 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14035 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14036 even if it currently looks like we won't. Reload may need it to
14037 get at a constant; if so, it will have already created a constant
14038 pool entry for it. */
2bfcf297 14039 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14040 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14041 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
8070c91a 14042 && current_function_uses_const_pool
1db02437 14043 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14044 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14045 else
8070c91a
DJ
14046 first_gp = info_ptr->first_gp_reg_save;
14047
14048 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14049
a3170dc6
AH
14050 /* For the SPE, we have an additional upper 32-bits on each GPR.
14051 Ideally we should save the entire 64-bits only when the upper
14052 half is used in SIMD instructions. Since we only record
14053 registers live (not the size they are used in), this proves
14054 difficult because we'd have to traverse the instruction chain at
14055 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14056 	 so we opt to save all the GPRs in 64-bits whenever even one
14057 	 register gets used in 64-bits.  Otherwise, all the registers in
14058 	 the frame get saved in 32-bits.
a3170dc6 14059
c19de7aa 14060 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14061 traditional GP save area will be empty. */
c19de7aa 14062 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14063 info_ptr->gp_size = 0;
14064
4697a36c
MM
14065 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14066 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14067
00b960c7
AH
14068 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14069 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14070 - info_ptr->first_altivec_reg_save);
14071
592696dd 14072 /* Does this function call anything? */
71f123ca
FS
14073 info_ptr->calls_p = (! current_function_is_leaf
14074 || cfun->machine->ra_needs_full_frame);
b6c9286a 14075
a4f6c312 14076 /* Determine if we need to save the link register. */
022123e6
AM
14077 if ((DEFAULT_ABI == ABI_AIX
14078 && current_function_profile
14079 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14080#ifdef TARGET_RELOCATABLE
14081 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14082#endif
14083 || (info_ptr->first_fp_reg_save != 64
14084 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 14085 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 14086 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
14087 || info_ptr->calls_p
14088 || rs6000_ra_ever_killed ())
4697a36c
MM
14089 {
14090 info_ptr->lr_save_p = 1;
1de43f85 14091 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14092 }
14093
9ebbca7d 14094 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14095 if (df_regs_ever_live_p (CR2_REGNO)
14096 || df_regs_ever_live_p (CR3_REGNO)
14097 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14098 {
14099 info_ptr->cr_save_p = 1;
178274da 14100 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14101 info_ptr->cr_size = reg_size;
14102 }
14103
83720594
RH
14104 /* If the current function calls __builtin_eh_return, then we need
14105 to allocate stack space for registers that will hold data for
14106 the exception handler. */
14107 if (current_function_calls_eh_return)
14108 {
14109 unsigned int i;
14110 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14111 continue;
a3170dc6
AH
14112
14113 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14114 ehrd_size = i * (TARGET_SPE_ABI
14115 && info_ptr->spe_64bit_regs_used != 0
14116 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14117 }
14118 else
14119 ehrd_size = 0;
14120
592696dd 14121 /* Determine various sizes. */
4697a36c
MM
14122 info_ptr->reg_size = reg_size;
14123 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14124 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 14125 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 14126 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14127 if (FRAME_GROWS_DOWNWARD)
14128 info_ptr->vars_size
5b667039
JJ
14129 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14130 + info_ptr->parm_size,
7d5175e1 14131 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14132 - (info_ptr->fixed_size + info_ptr->vars_size
14133 + info_ptr->parm_size);
00b960c7 14134
c19de7aa 14135 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14136 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14137 else
14138 info_ptr->spe_gp_size = 0;
14139
4d774ff8
HP
14140 if (TARGET_ALTIVEC_ABI)
14141 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14142 else
4d774ff8
HP
14143 info_ptr->vrsave_mask = 0;
14144
14145 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14146 info_ptr->vrsave_size = 4;
14147 else
14148 info_ptr->vrsave_size = 0;
b6c9286a 14149
d62294f5
FJ
14150 compute_save_world_info (info_ptr);
14151
592696dd 14152 /* Calculate the offsets. */
178274da 14153 switch (DEFAULT_ABI)
4697a36c 14154 {
b6c9286a 14155 case ABI_NONE:
24d304eb 14156 default:
37409796 14157 gcc_unreachable ();
b6c9286a
MM
14158
14159 case ABI_AIX:
ee890fe2 14160 case ABI_DARWIN:
b6c9286a
MM
14161 info_ptr->fp_save_offset = - info_ptr->fp_size;
14162 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14163
14164 if (TARGET_ALTIVEC_ABI)
14165 {
14166 info_ptr->vrsave_save_offset
14167 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14168
982afe02 14169 /* Align stack so vector save area is on a quadword boundary.
9278121c 14170 The padding goes above the vectors. */
00b960c7
AH
14171 if (info_ptr->altivec_size != 0)
14172 info_ptr->altivec_padding_size
9278121c 14173 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14174 else
14175 info_ptr->altivec_padding_size = 0;
14176
14177 info_ptr->altivec_save_offset
14178 = info_ptr->vrsave_save_offset
14179 - info_ptr->altivec_padding_size
14180 - info_ptr->altivec_size;
9278121c
GK
14181 gcc_assert (info_ptr->altivec_size == 0
14182 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14183
14184 /* Adjust for AltiVec case. */
14185 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14186 }
14187 else
14188 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14189 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14190 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14191 break;
14192
14193 case ABI_V4:
b6c9286a
MM
14194 info_ptr->fp_save_offset = - info_ptr->fp_size;
14195 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14196 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14197
c19de7aa 14198 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14199 {
14200 /* Align stack so SPE GPR save area is aligned on a
14201 double-word boundary. */
14202 if (info_ptr->spe_gp_size != 0)
14203 info_ptr->spe_padding_size
14204 = 8 - (-info_ptr->cr_save_offset % 8);
14205 else
14206 info_ptr->spe_padding_size = 0;
14207
14208 info_ptr->spe_gp_save_offset
14209 = info_ptr->cr_save_offset
14210 - info_ptr->spe_padding_size
14211 - info_ptr->spe_gp_size;
14212
14213 /* Adjust for SPE case. */
022123e6 14214 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14215 }
a3170dc6 14216 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14217 {
14218 info_ptr->vrsave_save_offset
14219 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14220
14221 /* Align stack so vector save area is on a quadword boundary. */
14222 if (info_ptr->altivec_size != 0)
14223 info_ptr->altivec_padding_size
14224 = 16 - (-info_ptr->vrsave_save_offset % 16);
14225 else
14226 info_ptr->altivec_padding_size = 0;
14227
14228 info_ptr->altivec_save_offset
14229 = info_ptr->vrsave_save_offset
14230 - info_ptr->altivec_padding_size
14231 - info_ptr->altivec_size;
14232
14233 /* Adjust for AltiVec case. */
022123e6 14234 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14235 }
14236 else
022123e6
AM
14237 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14238 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14239 info_ptr->lr_save_offset = reg_size;
14240 break;
4697a36c
MM
14241 }
14242
64045029 14243 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14244 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14245 + info_ptr->gp_size
14246 + info_ptr->altivec_size
14247 + info_ptr->altivec_padding_size
a3170dc6
AH
14248 + info_ptr->spe_gp_size
14249 + info_ptr->spe_padding_size
00b960c7
AH
14250 + ehrd_size
14251 + info_ptr->cr_size
022123e6 14252 + info_ptr->vrsave_size,
64045029 14253 save_align);
00b960c7 14254
44688022 14255 non_fixed_size = (info_ptr->vars_size
ff381587 14256 + info_ptr->parm_size
5b667039 14257 + info_ptr->save_size);
ff381587 14258
44688022
AM
14259 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14260 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14261
14262 /* Determine if we need to allocate any stack frame:
14263
a4f6c312
SS
14264 For AIX we need to push the stack if a frame pointer is needed
14265 (because the stack might be dynamically adjusted), if we are
14266 debugging, if we make calls, or if the sum of fp_save, gp_save,
14267     and local variables is more than the space needed to save all
14268 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14269 + 18*8 = 288 (GPR13 reserved).
ff381587 14270
a4f6c312
SS
14271 For V.4 we don't have the stack cushion that AIX uses, but assume
14272 that the debugger can handle stackless frames. */
ff381587
MM
14273
14274 if (info_ptr->calls_p)
14275 info_ptr->push_p = 1;
14276
178274da 14277 else if (DEFAULT_ABI == ABI_V4)
44688022 14278 info_ptr->push_p = non_fixed_size != 0;
ff381587 14279
178274da
AM
14280 else if (frame_pointer_needed)
14281 info_ptr->push_p = 1;
14282
14283 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14284 info_ptr->push_p = 1;
14285
ff381587 14286 else
44688022 14287 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14288
a4f6c312 14289 /* Zero offsets if we're not saving those registers. */
8dda1a21 14290 if (info_ptr->fp_size == 0)
4697a36c
MM
14291 info_ptr->fp_save_offset = 0;
14292
8dda1a21 14293 if (info_ptr->gp_size == 0)
4697a36c
MM
14294 info_ptr->gp_save_offset = 0;
14295
00b960c7
AH
14296 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14297 info_ptr->altivec_save_offset = 0;
14298
14299 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14300 info_ptr->vrsave_save_offset = 0;
14301
c19de7aa
AH
14302 if (! TARGET_SPE_ABI
14303 || info_ptr->spe_64bit_regs_used == 0
14304 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14305 info_ptr->spe_gp_save_offset = 0;
14306
c81fc13e 14307 if (! info_ptr->lr_save_p)
4697a36c
MM
14308 info_ptr->lr_save_offset = 0;
14309
c81fc13e 14310 if (! info_ptr->cr_save_p)
4697a36c
MM
14311 info_ptr->cr_save_offset = 0;
14312
14313 return info_ptr;
14314}
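
/* An illustrative sketch, not part of the actual rs6000 sources: in the
   AIX/Darwin case above, each save area is placed immediately below the
   previous one at a negative offset from the frame top, while CR and LR
   live in the caller-allocated words above it.  A hypothetical helper
   showing just that offset chain (AltiVec, SPE and EH data ignored):  */

struct frame_offsets
{
  int fp_save_offset;
  int gp_save_offset;
  int cr_save_offset;
  int lr_save_offset;
};

static struct frame_offsets
aix_offsets (int fp_size, int gp_size, int reg_size)
{
  struct frame_offsets o;

  o.fp_save_offset = -fp_size;                   /* just below the frame top */
  o.gp_save_offset = o.fp_save_offset - gp_size; /* below the FP save area   */
  o.cr_save_offset = reg_size;                   /* caller frame: 4 or 8     */
  o.lr_save_offset = 2 * reg_size;               /* caller frame: 8 or 16    */
  return o;
}

/* E.g. fp_size == 16, gp_size == 24 and reg_size == 8 give offsets of
   -16, -40, 8 and 16 respectively.  */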
14315
c19de7aa
AH
14316/* Return true if the current function uses any GPRs in 64-bit SIMD
14317 mode. */
14318
14319static bool
863d938c 14320spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14321{
14322 rtx insns, insn;
14323
14324 /* Functions that save and restore all the call-saved registers will
14325 need to save/restore the registers in 64-bits. */
14326 if (current_function_calls_eh_return
14327 || current_function_calls_setjmp
14328 || current_function_has_nonlocal_goto)
14329 return true;
14330
14331 insns = get_insns ();
14332
14333 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14334 {
14335 if (INSN_P (insn))
14336 {
14337 rtx i;
14338
b5a5beb9
AH
14339 /* FIXME: This should be implemented with attributes...
14340
14341 (set_attr "spe64" "true")....then,
14342 if (get_spe64(insn)) return true;
14343
14344 It's the only reliable way to do the stuff below. */
14345
c19de7aa 14346 i = PATTERN (insn);
f82f556d
AH
14347 if (GET_CODE (i) == SET)
14348 {
14349 enum machine_mode mode = GET_MODE (SET_SRC (i));
14350
14351 if (SPE_VECTOR_MODE (mode))
14352 return true;
17caeff2 14353 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14354 return true;
14355 }
c19de7aa
AH
14356 }
14357 }
14358
14359 return false;
14360}
14361
d1d0c603 14362static void
a2369ed3 14363debug_stack_info (rs6000_stack_t *info)
9878760c 14364{
d330fd93 14365 const char *abi_string;
24d304eb 14366
c81fc13e 14367 if (! info)
4697a36c
MM
14368 info = rs6000_stack_info ();
14369
14370 fprintf (stderr, "\nStack information for function %s:\n",
14371 ((current_function_decl && DECL_NAME (current_function_decl))
14372 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14373 : "<unknown>"));
14374
24d304eb
RK
14375 switch (info->abi)
14376 {
b6c9286a
MM
14377 default: abi_string = "Unknown"; break;
14378 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14379 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14380 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14381 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14382 }
14383
14384 fprintf (stderr, "\tABI = %5s\n", abi_string);
14385
00b960c7
AH
14386 if (TARGET_ALTIVEC_ABI)
14387 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14388
a3170dc6
AH
14389 if (TARGET_SPE_ABI)
14390 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14391
4697a36c
MM
14392 if (info->first_gp_reg_save != 32)
14393 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14394
14395 if (info->first_fp_reg_save != 64)
14396 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14397
00b960c7
AH
14398 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14399 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14400 info->first_altivec_reg_save);
14401
4697a36c
MM
14402 if (info->lr_save_p)
14403 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14404
4697a36c
MM
14405 if (info->cr_save_p)
14406 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14407
00b960c7
AH
14408 if (info->vrsave_mask)
14409 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14410
4697a36c
MM
14411 if (info->push_p)
14412 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14413
14414 if (info->calls_p)
14415 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14416
4697a36c
MM
14417 if (info->gp_save_offset)
14418 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14419
14420 if (info->fp_save_offset)
14421 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14422
00b960c7
AH
14423 if (info->altivec_save_offset)
14424 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14425 info->altivec_save_offset);
14426
a3170dc6
AH
14427 if (info->spe_gp_save_offset)
14428 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14429 info->spe_gp_save_offset);
14430
00b960c7
AH
14431 if (info->vrsave_save_offset)
14432 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14433 info->vrsave_save_offset);
14434
4697a36c
MM
14435 if (info->lr_save_offset)
14436 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14437
14438 if (info->cr_save_offset)
14439 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14440
14441 if (info->varargs_save_offset)
14442 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14443
14444 if (info->total_size)
d1d0c603
JJ
14445 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14446 info->total_size);
4697a36c 14447
4697a36c 14448 if (info->vars_size)
d1d0c603
JJ
14449 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14450 info->vars_size);
4697a36c
MM
14451
14452 if (info->parm_size)
14453 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14454
14455 if (info->fixed_size)
14456 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14457
14458 if (info->gp_size)
14459 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14460
a3170dc6
AH
14461 if (info->spe_gp_size)
14462 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14463
4697a36c
MM
14464 if (info->fp_size)
14465 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14466
00b960c7
AH
14467 if (info->altivec_size)
14468 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14469
14470 if (info->vrsave_size)
14471 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14472
14473 if (info->altivec_padding_size)
14474 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14475 info->altivec_padding_size);
14476
a3170dc6
AH
14477 if (info->spe_padding_size)
14478 fprintf (stderr, "\tspe_padding_size = %5d\n",
14479 info->spe_padding_size);
14480
4697a36c
MM
14481 if (info->cr_size)
14482 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14483
14484 if (info->save_size)
14485 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14486
14487 if (info->reg_size != 4)
14488 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14489
14490 fprintf (stderr, "\n");
9878760c 14491}
71f123ca
FS
14492
14493rtx
a2369ed3 14494rs6000_return_addr (int count, rtx frame)
71f123ca 14495{
a4f6c312
SS
14496 /* Currently we don't optimize very well between prolog and body
14497      code, and for PIC the generated code can actually be quite bad, so
14498 don't try to be too clever here. */
f1384257 14499 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14500 {
14501 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14502
14503 return
14504 gen_rtx_MEM
14505 (Pmode,
14506 memory_address
14507 (Pmode,
14508 plus_constant (copy_to_reg
14509 (gen_rtx_MEM (Pmode,
14510 memory_address (Pmode, frame))),
14511 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14512 }
14513
8c29550d 14514 cfun->machine->ra_need_lr = 1;
1de43f85 14515 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14516}
14517
5e1bf043
DJ
14518/* Say whether a function is a candidate for sibcall handling or not.
14519 We do not allow indirect calls to be optimized into sibling calls.
14520   Also, we can't do it if there are any vector parameters; there's
14521   no place to put the VRsave code where it would work; note that functions with
14522 vector parameters are required to have a prototype, so the argument
14523 type info must be available here. (The tail recursion case can work
14524 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14525static bool
a2369ed3 14526rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14527{
14528 tree type;
4977bab6 14529 if (decl)
5e1bf043
DJ
14530 {
14531 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14532 {
4977bab6 14533 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14534 type; type = TREE_CHAIN (type))
14535 {
c15b529f 14536 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14537 return false;
5e1bf043 14538 }
c4ad648e 14539 }
5e1bf043 14540 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14541 || ((*targetm.binds_local_p) (decl)
14542 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14543 {
4977bab6 14544 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14545
14546 if (!lookup_attribute ("longcall", attr_list)
14547 || lookup_attribute ("shortcall", attr_list))
4977bab6 14548 return true;
2bcc50d0 14549 }
5e1bf043 14550 }
4977bab6 14551 return false;
5e1bf043
DJ
14552}
14553
e7e64a25
AS
14554/* NULL if INSN is valid within a low-overhead loop.
14555   Otherwise return a string describing why doloop cannot be applied.
9419649c
DE
14556 PowerPC uses the COUNT register for branch on table instructions. */
14557
e7e64a25 14558static const char *
3101faab 14559rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
14560{
14561 if (CALL_P (insn))
e7e64a25 14562 return "Function call in the loop.";
9419649c
DE
14563
14564 if (JUMP_P (insn)
14565 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14566 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14567 return "Computed branch in the loop.";
9419649c 14568
e7e64a25 14569 return NULL;
9419649c
DE
14570}
14571
71f123ca 14572static int
863d938c 14573rs6000_ra_ever_killed (void)
71f123ca
FS
14574{
14575 rtx top;
5e1bf043
DJ
14576 rtx reg;
14577 rtx insn;
71f123ca 14578
dd292d0a 14579 if (current_function_is_thunk)
71f123ca 14580 return 0;
eb0424da 14581
36f7e964
AH
14582 /* regs_ever_live has LR marked as used if any sibcalls are present,
14583 but this should not force saving and restoring in the
14584 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14585 clobbers LR, so that is inappropriate. */
36f7e964 14586
5e1bf043
DJ
14587 /* Also, the prologue can generate a store into LR that
14588 doesn't really count, like this:
36f7e964 14589
5e1bf043
DJ
14590 move LR->R0
14591 bcl to set PIC register
14592 move LR->R31
14593 move R0->LR
36f7e964
AH
14594
14595 When we're called from the epilogue, we need to avoid counting
14596 this as a store. */
f676971a 14597
71f123ca
FS
14598 push_topmost_sequence ();
14599 top = get_insns ();
14600 pop_topmost_sequence ();
1de43f85 14601 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14602
5e1bf043
DJ
14603 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14604 {
14605 if (INSN_P (insn))
14606 {
022123e6
AM
14607 if (CALL_P (insn))
14608 {
14609 if (!SIBLING_CALL_P (insn))
14610 return 1;
14611 }
1de43f85 14612 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14613 return 1;
36f7e964
AH
14614 else if (set_of (reg, insn) != NULL_RTX
14615 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14616 return 1;
14617 }
14618 }
14619 return 0;
71f123ca 14620}
4697a36c 14621\f
9ebbca7d 14622/* Emit instructions needed to load the TOC register.
c7ca610e 14623 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14624 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14625
14626void
a2369ed3 14627rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14628{
6fb5fa3c 14629 rtx dest;
1db02437 14630 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14631
7f970b70 14632 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14633 {
7f970b70 14634 char buf[30];
e65a3857 14635 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14636
14637 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14638 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14639 if (flag_pic == 2)
14640 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14641 else
14642 got = rs6000_got_sym ();
14643 tmp1 = tmp2 = dest;
14644 if (!fromprolog)
14645 {
14646 tmp1 = gen_reg_rtx (Pmode);
14647 tmp2 = gen_reg_rtx (Pmode);
14648 }
6fb5fa3c
DB
14649 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14650 emit_move_insn (tmp1,
1de43f85 14651 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14652 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14653 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14654 }
14655 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14656 {
6fb5fa3c 14657 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14658 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14659 }
14660 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14661 {
14662 char buf[30];
20b71b17
AM
14663 rtx temp0 = (fromprolog
14664 ? gen_rtx_REG (Pmode, 0)
14665 : gen_reg_rtx (Pmode));
20b71b17 14666
20b71b17
AM
14667 if (fromprolog)
14668 {
ccbca5e4 14669 rtx symF, symL;
38c1f2d7 14670
20b71b17
AM
14671 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14672 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14673
20b71b17
AM
14674 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14675 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14676
6fb5fa3c
DB
14677 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14678 emit_move_insn (dest,
1de43f85 14679 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14680 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14681 }
14682 else
20b71b17
AM
14683 {
14684 rtx tocsym;
20b71b17
AM
14685
14686 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14687 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14688 emit_move_insn (dest,
1de43f85 14689 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14690 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14691 }
6fb5fa3c 14692 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14693 }
20b71b17
AM
14694 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14695 {
14696 /* This is for AIX code running in non-PIC ELF32. */
14697 char buf[30];
14698 rtx realsym;
14699 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14700 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14701
6fb5fa3c
DB
14702 emit_insn (gen_elf_high (dest, realsym));
14703 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14704 }
37409796 14705 else
9ebbca7d 14706 {
37409796 14707 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14708
9ebbca7d 14709 if (TARGET_32BIT)
6fb5fa3c 14710 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14711 else
6fb5fa3c 14712 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14713 }
14714}
14715
d1d0c603
JJ
14716/* Emit instructions to restore the link register after determining where
14717 its value has been stored. */
14718
14719void
14720rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14721{
14722 rs6000_stack_t *info = rs6000_stack_info ();
14723 rtx operands[2];
14724
14725 operands[0] = source;
14726 operands[1] = scratch;
14727
14728 if (info->lr_save_p)
14729 {
14730 rtx frame_rtx = stack_pointer_rtx;
14731 HOST_WIDE_INT sp_offset = 0;
14732 rtx tmp;
14733
14734 if (frame_pointer_needed
14735 || current_function_calls_alloca
14736 || info->total_size > 32767)
14737 {
0be76840 14738 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14739 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14740 frame_rtx = operands[1];
14741 }
14742 else if (info->push_p)
14743 sp_offset = info->total_size;
14744
14745 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14746 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14747 emit_move_insn (tmp, operands[0]);
14748 }
14749 else
1de43f85 14750 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14751}
14752
4862826d 14753static GTY(()) alias_set_type set = -1;
f103e34d 14754
4862826d 14755alias_set_type
863d938c 14756get_TOC_alias_set (void)
9ebbca7d 14757{
f103e34d
GK
14758 if (set == -1)
14759 set = new_alias_set ();
14760 return set;
f676971a 14761}
9ebbca7d 14762
c1207243 14763/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14764 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14765 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14766#if TARGET_ELF
3c9eb5f4 14767static int
f676971a 14768uses_TOC (void)
9ebbca7d 14769{
c4501e62 14770 rtx insn;
38c1f2d7 14771
c4501e62
JJ
14772 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14773 if (INSN_P (insn))
14774 {
14775 rtx pat = PATTERN (insn);
14776 int i;
9ebbca7d 14777
f676971a 14778 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14779 for (i = 0; i < XVECLEN (pat, 0); i++)
14780 {
14781 rtx sub = XVECEXP (pat, 0, i);
14782 if (GET_CODE (sub) == USE)
14783 {
14784 sub = XEXP (sub, 0);
14785 if (GET_CODE (sub) == UNSPEC
14786 && XINT (sub, 1) == UNSPEC_TOC)
14787 return 1;
14788 }
14789 }
14790 }
14791 return 0;
9ebbca7d 14792}
c954844a 14793#endif
38c1f2d7 14794
9ebbca7d 14795rtx
f676971a 14796create_TOC_reference (rtx symbol)
9ebbca7d 14797{
b3a13419 14798 if (!can_create_pseudo_p ())
6fb5fa3c 14799 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14800 return gen_rtx_PLUS (Pmode,
a8a05998 14801 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14802 gen_rtx_CONST (Pmode,
14803 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14804 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14805}
38c1f2d7 14806
fc4767bb
JJ
14807/* If _Unwind_* has been called from within the same module,
14808   the TOC register is not guaranteed to be saved to 40(1) on function
14809 entry. Save it there in that case. */
c7ca610e 14810
9ebbca7d 14811void
863d938c 14812rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14813{
14814 rtx mem;
14815 rtx stack_top = gen_reg_rtx (Pmode);
14816 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14817 rtx opcode = gen_reg_rtx (SImode);
14818 rtx tocompare = gen_reg_rtx (SImode);
14819 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14820
8308679f 14821 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14822 emit_move_insn (stack_top, mem);
14823
8308679f
DE
14824 mem = gen_frame_mem (Pmode,
14825 gen_rtx_PLUS (Pmode, stack_top,
14826 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14827 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14828 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14829 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14830 : 0xE8410028, SImode));
9ebbca7d 14831
fc4767bb 14832 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14833 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14834 no_toc_save_needed);
9ebbca7d 14835
8308679f
DE
14836 mem = gen_frame_mem (Pmode,
14837 gen_rtx_PLUS (Pmode, stack_top,
14838 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14839 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14840 emit_label (no_toc_save_needed);
9ebbca7d 14841}
38c1f2d7 14842\f
0be76840
DE
14843/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14844 and the change to the stack pointer. */
ba4828e0 14845
9ebbca7d 14846static void
863d938c 14847rs6000_emit_stack_tie (void)
9ebbca7d 14848{
0be76840
DE
14849 rtx mem = gen_frame_mem (BLKmode,
14850 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14851
9ebbca7d
GK
14852 emit_insn (gen_stack_tie (mem));
14853}
38c1f2d7 14854
9ebbca7d
GK
14855/* Emit the correct code for allocating stack space, as insns.
14856 If COPY_R12, make sure a copy of the old frame is left in r12.
14857 The generated code may use hard register 0 as a temporary. */
14858
14859static void
a2369ed3 14860rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14861{
9ebbca7d
GK
14862 rtx insn;
14863 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14864 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14865 rtx todec = gen_int_mode (-size, Pmode);
14866
14867 if (INTVAL (todec) != -size)
14868 {
d4ee4d25 14869 warning (0, "stack frame too large");
61168ff1
RS
14870 emit_insn (gen_trap ());
14871 return;
14872 }
a157febd
GK
14873
14874 if (current_function_limit_stack)
14875 {
14876 if (REG_P (stack_limit_rtx)
f676971a 14877 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14878 && REGNO (stack_limit_rtx) <= 31)
14879 {
5b71a4e7 14880 emit_insn (TARGET_32BIT
9ebbca7d
GK
14881 ? gen_addsi3 (tmp_reg,
14882 stack_limit_rtx,
14883 GEN_INT (size))
14884 : gen_adddi3 (tmp_reg,
14885 stack_limit_rtx,
14886 GEN_INT (size)));
5b71a4e7 14887
9ebbca7d
GK
14888 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14889 const0_rtx));
a157febd
GK
14890 }
14891 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 14892 && TARGET_32BIT
f607bc57 14893 && DEFAULT_ABI == ABI_V4)
a157febd 14894 {
9ebbca7d 14895 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
14896 gen_rtx_PLUS (Pmode,
14897 stack_limit_rtx,
9ebbca7d 14898 GEN_INT (size)));
5b71a4e7 14899
9ebbca7d
GK
14900 emit_insn (gen_elf_high (tmp_reg, toload));
14901 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
14902 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14903 const0_rtx));
a157febd
GK
14904 }
14905 else
d4ee4d25 14906 warning (0, "stack limit expression is not supported");
a157febd
GK
14907 }
14908
9ebbca7d
GK
14909 if (copy_r12 || ! TARGET_UPDATE)
14910 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
14911
38c1f2d7
MM
14912 if (TARGET_UPDATE)
14913 {
9ebbca7d 14914 if (size > 32767)
38c1f2d7 14915 {
9ebbca7d 14916 /* Need a note here so that try_split doesn't get confused. */
9390387d 14917 if (get_last_insn () == NULL_RTX)
2e040219 14918 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
14919 insn = emit_move_insn (tmp_reg, todec);
14920 try_split (PATTERN (insn), insn, 0);
14921 todec = tmp_reg;
38c1f2d7 14922 }
5b71a4e7
DE
14923
14924 insn = emit_insn (TARGET_32BIT
14925 ? gen_movsi_update (stack_reg, stack_reg,
14926 todec, stack_reg)
c4ad648e 14927 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 14928 todec, stack_reg));
38c1f2d7
MM
14929 }
14930 else
14931 {
5b71a4e7
DE
14932 insn = emit_insn (TARGET_32BIT
14933 ? gen_addsi3 (stack_reg, stack_reg, todec)
14934 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
14935 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
14936 gen_rtx_REG (Pmode, 12));
14937 }
f676971a 14938
9ebbca7d 14939 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 14940 REG_NOTES (insn) =
9ebbca7d 14941 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 14942 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
14943 gen_rtx_PLUS (Pmode, stack_reg,
14944 GEN_INT (-size))),
14945 REG_NOTES (insn));
14946}
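/* A rough sketch of what the code above tends to expand to (not
   authoritative; the exact sequence depends on -m32/-m64, -mno-update
   and -fstack-limit-*; 32-bit mnemonics shown):

     small frame, TARGET_UPDATE:   stwu r1,-SIZE(r1)
     large frame, TARGET_UPDATE:   <load -SIZE into r0>; stwux r1,r1,r0
     without update forms:         mr r12,r1; addi r1,r1,-SIZE;
				   stw r12,0(r1)
     -fstack-limit-register=rN:    addi r0,rN,SIZE; twllt r1,r0, emitted
				   before the adjustment

   In every case the allocating insn is marked RTX_FRAME_RELATED_P with
   a note describing the stack adjustment for the unwinder.  */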
14947
a4f6c312
SS
14948/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14949 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14950 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
14951 deduce these equivalences by itself so it wasn't necessary to hold
14952 its hand so much. */
9ebbca7d
GK
14953
14954static void
f676971a 14955rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 14956 rtx reg2, rtx rreg)
9ebbca7d
GK
14957{
14958 rtx real, temp;
14959
e56c4463
JL
14960 /* copy_rtx will not make unique copies of registers, so we need to
14961 ensure we don't have unwanted sharing here. */
14962 if (reg == reg2)
14963 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14964
14965 if (reg == rreg)
14966 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14967
9ebbca7d
GK
14968 real = copy_rtx (PATTERN (insn));
14969
89e7058f
AH
14970 if (reg2 != NULL_RTX)
14971 real = replace_rtx (real, reg2, rreg);
f676971a
EC
14972
14973 real = replace_rtx (real, reg,
9ebbca7d
GK
14974 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14975 STACK_POINTER_REGNUM),
14976 GEN_INT (val)));
f676971a 14977
9ebbca7d
GK
14978 /* We expect that 'real' is either a SET or a PARALLEL containing
14979 SETs (and possibly other stuff). In a PARALLEL, all the SETs
14980 are important so they all have to be marked RTX_FRAME_RELATED_P. */
14981
14982 if (GET_CODE (real) == SET)
14983 {
14984 rtx set = real;
f676971a 14985
9ebbca7d
GK
14986 temp = simplify_rtx (SET_SRC (set));
14987 if (temp)
14988 SET_SRC (set) = temp;
14989 temp = simplify_rtx (SET_DEST (set));
14990 if (temp)
14991 SET_DEST (set) = temp;
14992 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 14993 {
9ebbca7d
GK
14994 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14995 if (temp)
14996 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 14997 }
38c1f2d7 14998 }
37409796 14999 else
9ebbca7d
GK
15000 {
15001 int i;
37409796
NS
15002
15003 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15004 for (i = 0; i < XVECLEN (real, 0); i++)
15005 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15006 {
15007 rtx set = XVECEXP (real, 0, i);
f676971a 15008
9ebbca7d
GK
15009 temp = simplify_rtx (SET_SRC (set));
15010 if (temp)
15011 SET_SRC (set) = temp;
15012 temp = simplify_rtx (SET_DEST (set));
15013 if (temp)
15014 SET_DEST (set) = temp;
15015 if (GET_CODE (SET_DEST (set)) == MEM)
15016 {
15017 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15018 if (temp)
15019 XEXP (SET_DEST (set), 0) = temp;
15020 }
15021 RTX_FRAME_RELATED_P (set) = 1;
15022 }
15023 }
c19de7aa
AH
15024
15025 if (TARGET_SPE)
15026 real = spe_synthesize_frame_save (real);
15027
9ebbca7d
GK
15028 RTX_FRAME_RELATED_P (insn) = 1;
15029 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15030 real,
15031 REG_NOTES (insn));
38c1f2d7
MM
15032}
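/* For example (a sketch, not taken from an actual RTL dump): if a save
   was emitted through the scratch frame pointer r12, say

     PATTERN (insn):  (set (mem (plus (reg 12) (const_int -8))) (reg 30))

   then with VAL == total_size the attached REG_FRAME_RELATED_EXPR note
   becomes, after replace_rtx and simplify_rtx,

     (set (mem (plus (reg 1) (const_int total_size - 8))) (reg 30))

   which lets dwarf2out_frame_debug_expr describe the save slot relative
   to the stack pointer.  */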
15033
c19de7aa
AH
15034/* Given an SPE frame note, return a PARALLEL of SETs with the
15035 original note, plus a synthetic register save. */
15036
15037static rtx
a2369ed3 15038spe_synthesize_frame_save (rtx real)
c19de7aa
AH
15039{
15040 rtx synth, offset, reg, real2;
15041
15042 if (GET_CODE (real) != SET
15043 || GET_MODE (SET_SRC (real)) != V2SImode)
15044 return real;
15045
15046 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
41f3a930 15047 frame-related note. The parallel contains a set of the register
41f3a930 15048 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
15049 This is so we can differentiate between 64-bit and 32-bit saves.
15050 Words cannot describe this nastiness. */
15051
37409796
NS
15052 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
15053 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
15054 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
15055
15056 /* Transform:
15057 (set (mem (plus (reg x) (const y)))
15058 (reg z))
15059 into:
15060 (set (mem (plus (reg x) (const y+4)))
41f3a930 15061 (reg z+1200))
c19de7aa
AH
15062 */
15063
15064 real2 = copy_rtx (real);
15065 PUT_MODE (SET_DEST (real2), SImode);
15066 reg = SET_SRC (real2);
15067 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
15068 synth = copy_rtx (real2);
15069
15070 if (BYTES_BIG_ENDIAN)
15071 {
15072 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
15073 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
15074 }
15075
15076 reg = SET_SRC (synth);
41f3a930 15077
c19de7aa 15078 synth = replace_rtx (synth, reg,
41f3a930 15079 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
15080
15081 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
15082 synth = replace_rtx (synth, offset,
15083 GEN_INT (INTVAL (offset)
15084 + (BYTES_BIG_ENDIAN ? 0 : 4)));
15085
15086 RTX_FRAME_RELATED_P (synth) = 1;
15087 RTX_FRAME_RELATED_P (real2) = 1;
15088 if (BYTES_BIG_ENDIAN)
15089 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
15090 else
15091 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
15092
15093 return real;
15094}
15095
00b960c7
AH
15096/* Returns an insn that has a vrsave set operation with the
15097 appropriate CLOBBERs. */
15098
15099static rtx
a2369ed3 15100generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15101{
15102 int nclobs, i;
15103 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15104 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15105
a004eb82
AH
15106 clobs[0]
15107 = gen_rtx_SET (VOIDmode,
15108 vrsave,
15109 gen_rtx_UNSPEC_VOLATILE (SImode,
15110 gen_rtvec (2, reg, vrsave),
3aca4bff 15111 UNSPECV_SET_VRSAVE));
00b960c7
AH
15112
15113 nclobs = 1;
15114
9aa86737
AH
15115 /* We need to clobber the registers in the mask so the scheduler
15116 does not move sets to VRSAVE before sets of AltiVec registers.
15117
15118 However, if the function receives nonlocal gotos, reload will set
15119 all call saved registers live. We will end up with:
15120
15121 (set (reg 999) (mem))
15122 (parallel [ (set (reg vrsave) (unspec blah))
15123 (clobber (reg 999))])
15124
15125 The clobber will cause the store into reg 999 to be dead, and
15126 flow will attempt to delete an epilogue insn. In this case, we
15127 need an unspec use/set of the register. */
00b960c7
AH
15128
15129 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15130 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15131 {
15132 if (!epiloguep || call_used_regs [i])
15133 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15134 gen_rtx_REG (V4SImode, i));
15135 else
15136 {
15137 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15138
15139 clobs[nclobs++]
a004eb82
AH
15140 = gen_rtx_SET (VOIDmode,
15141 reg,
15142 gen_rtx_UNSPEC (V4SImode,
15143 gen_rtvec (1, reg), 27));
9aa86737
AH
15144 }
15145 }
00b960c7
AH
15146
15147 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15148
15149 for (i = 0; i < nclobs; ++i)
15150 XVECEXP (insn, 0, i) = clobs[i];
15151
15152 return insn;
15153}
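/* The PARALLEL built above looks roughly like this (a sketch derived
   from the code, with MASK standing for the GPR holding the new mask):

     (parallel [(set (reg:SI VRSAVE)
		     (unspec_volatile:SI [(reg:SI MASK) (reg:SI VRSAVE)]
					 UNSPECV_SET_VRSAVE))
		(clobber (reg:V4SI v20))
		(clobber (reg:V4SI v21))
		...])

   In the epilogue, call-saved vector registers get an UNSPEC set of the
   register instead of a plain clobber, for the reason explained in the
   comment above.  */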
15154
89e7058f
AH
15155/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15156 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15157
15158static void
f676971a 15159emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15160 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15161{
15162 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15163 rtx replacea, replaceb;
15164
15165 int_rtx = GEN_INT (offset);
15166
15167 /* Some cases that need register indexed addressing. */
15168 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 15169 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15170 || (TARGET_SPE_ABI
15171 && SPE_VECTOR_MODE (mode)
15172 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15173 {
15174 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15175 flow path of instructions in the prologue. */
89e7058f
AH
15176 offset_rtx = gen_rtx_REG (Pmode, 11);
15177 emit_move_insn (offset_rtx, int_rtx);
15178
15179 replacea = offset_rtx;
15180 replaceb = int_rtx;
15181 }
15182 else
15183 {
15184 offset_rtx = int_rtx;
15185 replacea = NULL_RTX;
15186 replaceb = NULL_RTX;
15187 }
15188
15189 reg = gen_rtx_REG (mode, regno);
15190 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15191 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15192
15193 insn = emit_move_insn (mem, reg);
15194
15195 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15196}
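/* Typical use, mirroring the first FPR save made from
   rs6000_emit_prologue below:

     emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
		      info->first_fp_reg_save,
		      info->fp_save_offset + sp_offset,
		      info->total_size);

   which emits the store and attaches the frame-related note via
   rs6000_frame_related.  */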
15197
a3170dc6
AH
15198/* Emit an offset memory reference suitable for a frame store, while
15199 converting to a valid addressing mode. */
15200
15201static rtx
a2369ed3 15202gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15203{
15204 rtx int_rtx, offset_rtx;
15205
15206 int_rtx = GEN_INT (offset);
15207
4d4cbc0e
AH
15208 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
15209 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15210 {
15211 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15212 emit_move_insn (offset_rtx, int_rtx);
15213 }
15214 else
15215 offset_rtx = int_rtx;
15216
0be76840 15217 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15218}
15219
6d0a8091
DJ
15220/* Look for user-defined global regs. We should not save and restore these,
15221 and cannot use stmw/lmw if any of them lie in the range being saved. */
15222
15223static bool
15224no_global_regs_above (int first_greg)
15225{
15226 int i;
15227 for (i = 0; i < 32 - first_greg; i++)
15228 if (global_regs[first_greg + i])
15229 return false;
15230 return true;
15231}
15232
699c914a
MS
15233#ifndef TARGET_FIX_AND_CONTINUE
15234#define TARGET_FIX_AND_CONTINUE 0
15235#endif
15236
52ff33d0
NF
15237/* Determine whether the gp REG is really used. */
15238
15239static bool
15240rs6000_reg_live_or_pic_offset_p (int reg)
15241{
6fb5fa3c 15242 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15243 && (!call_used_regs[reg]
15244 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15245 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15246 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15247 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15248 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15249}
15250
9ebbca7d
GK
15251/* Emit function prologue as insns. */
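/* A rough summary of the order of events below, derived from this
   function rather than from any ABI document:

     1) for V.4 and eh_return frames, allocate the stack first;
     2) on Darwin, handle save_world as one big PARALLEL;
     3) copy LR into r0 and store it; copy CR into r12 (or r0) if needed;
     4) save FPRs inline or via the out-of-line save routine;
     5) save GPRs with a store-multiple, SPE stores, or one store each;
     6) save the eh_return data registers and CR;
     7) for the remaining ABIs, allocate the stack, then set the hard
	frame pointer if one is needed;
     8) save AltiVec registers and VRSAVE (they are not in the red zone);
     9) set up the TOC / GOT / Mach-O PIC base register.  */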
15252
9878760c 15253void
863d938c 15254rs6000_emit_prologue (void)
9878760c 15255{
4697a36c 15256 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15257 enum machine_mode reg_mode = Pmode;
327e5343 15258 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15259 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15260 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15261 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15262 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
15263 rtx insn;
15264 int saving_FPRs_inline;
15265 int using_store_multiple;
15266 HOST_WIDE_INT sp_offset = 0;
f676971a 15267
699c914a
MS
15268 if (TARGET_FIX_AND_CONTINUE)
15269 {
15270 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15271 address by modifying the first 5 instructions of the function
699c914a
MS
15272 to branch to the overriding function. This is necessary to
15273 permit function pointers that point to the old function to
15274 actually forward to the new function. */
15275 emit_insn (gen_nop ());
15276 emit_insn (gen_nop ());
de2ab0ca 15277 emit_insn (gen_nop ());
699c914a
MS
15278 emit_insn (gen_nop ());
15279 emit_insn (gen_nop ());
15280 }
15281
15282 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15283 {
15284 reg_mode = V2SImode;
15285 reg_size = 8;
15286 }
a3170dc6 15287
9ebbca7d 15288 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15289 && (!TARGET_SPE_ABI
15290 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15291 && info->first_gp_reg_save < 31
15292 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15293 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 15294 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 15295 || current_function_calls_eh_return
8c29550d 15296 || cfun->machine->ra_need_lr);
9ebbca7d
GK
15297
15298 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15299 if (! WORLD_SAVE_P (info)
15300 && info->push_p
acd0b319
AM
15301 && (DEFAULT_ABI == ABI_V4
15302 || current_function_calls_eh_return))
9ebbca7d
GK
15303 {
15304 if (info->total_size < 32767)
15305 sp_offset = info->total_size;
15306 else
15307 frame_reg_rtx = frame_ptr_rtx;
f676971a 15308 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15309 (frame_reg_rtx != sp_reg_rtx
15310 && (info->cr_save_p
15311 || info->lr_save_p
15312 || info->first_fp_reg_save < 64
15313 || info->first_gp_reg_save < 32
15314 )));
15315 if (frame_reg_rtx != sp_reg_rtx)
15316 rs6000_emit_stack_tie ();
15317 }
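  /* When the frame has already been allocated here, the saves below are
     addressed through the new stack pointer using sp_offset
     (== total_size), or through r12 (frame_ptr_rtx, still holding the
     old stack top) when total_size does not fit a 16-bit displacement.
     For the other ABIs sp_offset stays 0 and the stores use negative
     offsets from the not-yet-adjusted stack pointer.  */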
15318
d62294f5 15319 /* Handle world saves specially here. */
f57fe068 15320 if (WORLD_SAVE_P (info))
d62294f5
FJ
15321 {
15322 int i, j, sz;
15323 rtx treg;
15324 rtvec p;
22fa69da 15325 rtx reg0;
d62294f5
FJ
15326
15327 /* save_world expects lr in r0. */
22fa69da 15328 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15329 if (info->lr_save_p)
c4ad648e 15330 {
22fa69da 15331 insn = emit_move_insn (reg0,
1de43f85 15332 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15333 RTX_FRAME_RELATED_P (insn) = 1;
15334 }
d62294f5
FJ
15335
15336 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15337 assumptions about the offsets of various bits of the stack
992d08b1 15338 frame. */
37409796
NS
15339 gcc_assert (info->gp_save_offset == -220
15340 && info->fp_save_offset == -144
15341 && info->lr_save_offset == 8
15342 && info->cr_save_offset == 4
15343 && info->push_p
15344 && info->lr_save_p
15345 && (!current_function_calls_eh_return
15346 || info->ehrd_offset == -432)
15347 && info->vrsave_save_offset == -224
22fa69da 15348 && info->altivec_save_offset == -416);
d62294f5
FJ
15349
15350 treg = gen_rtx_REG (SImode, 11);
15351 emit_move_insn (treg, GEN_INT (-info->total_size));
15352
15353 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15354 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15355
15356 /* Preserve CR2 for save_world prologues */
22fa69da 15357 sz = 5;
d62294f5
FJ
15358 sz += 32 - info->first_gp_reg_save;
15359 sz += 64 - info->first_fp_reg_save;
15360 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15361 p = rtvec_alloc (sz);
15362 j = 0;
15363 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15364 gen_rtx_REG (SImode,
1de43f85 15365 LR_REGNO));
d62294f5 15366 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15367 gen_rtx_SYMBOL_REF (Pmode,
15368 "*save_world"));
d62294f5 15369 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
15370 properly. */
15371 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15372 {
15373 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15374 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15375 GEN_INT (info->fp_save_offset
15376 + sp_offset + 8 * i));
0be76840 15377 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15378
15379 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15380 }
d62294f5 15381 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15382 {
15383 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15384 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15385 GEN_INT (info->altivec_save_offset
15386 + sp_offset + 16 * i));
0be76840 15387 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15388
15389 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15390 }
d62294f5 15391 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15392 {
15393 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15394 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15395 GEN_INT (info->gp_save_offset
15396 + sp_offset + reg_size * i));
0be76840 15397 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15398
15399 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15400 }
15401
15402 {
15403 /* CR register traditionally saved as CR2. */
15404 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15405 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15406 GEN_INT (info->cr_save_offset
15407 + sp_offset));
0be76840 15408 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15409
15410 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
15411 }
22fa69da
GK
15412 /* Explain about use of R0. */
15413 if (info->lr_save_p)
15414 {
15415 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15416 GEN_INT (info->lr_save_offset
15417 + sp_offset));
15418 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 15419
22fa69da
GK
15420 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
15421 }
15422 /* Explain what happens to the stack pointer. */
15423 {
15424 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
15425 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
15426 }
d62294f5
FJ
15427
15428 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
15429 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
15430 treg, GEN_INT (-info->total_size));
15431 sp_offset = info->total_size;
d62294f5
FJ
15432 }
15433
9ebbca7d 15434 /* If we use the link register, get it into r0. */
f57fe068 15435 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 15436 {
52ff33d0
NF
15437 rtx addr, reg, mem;
15438
f8a57be8 15439 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15440 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15441 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15442
15443 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15444 GEN_INT (info->lr_save_offset + sp_offset));
15445 reg = gen_rtx_REG (Pmode, 0);
15446 mem = gen_rtx_MEM (Pmode, addr);
15447 /* This should not be of rs6000_sr_alias_set, because of
15448 __builtin_return_address. */
15449
15450 insn = emit_move_insn (mem, reg);
15451 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15452 NULL_RTX, NULL_RTX);
f8a57be8 15453 }
9ebbca7d
GK
15454
15455 /* If we need to save CR, put it into r12. */
f57fe068 15456 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15457 {
f8a57be8 15458 rtx set;
f676971a 15459
9ebbca7d 15460 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15461 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15462 RTX_FRAME_RELATED_P (insn) = 1;
15463 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15464 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15465 But that's OK. All we have to do is specify that _one_ condition
15466 code register is saved in this stack slot. The thrower's epilogue
15467 will then restore all the call-saved registers.
15468 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15469 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15470 gen_rtx_REG (SImode, CR2_REGNO));
15471 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15472 set,
15473 REG_NOTES (insn));
9ebbca7d
GK
15474 }
15475
a4f6c312
SS
15476 /* Do any required saving of FPRs. If only one or two to save, do
15477 it ourselves. Otherwise, call a function. */
f57fe068 15478 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15479 {
15480 int i;
15481 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15482 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15483 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15484 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15485 info->first_fp_reg_save + i,
15486 info->fp_save_offset + sp_offset + 8 * i,
15487 info->total_size);
9ebbca7d 15488 }
f57fe068 15489 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15490 {
15491 int i;
15492 char rname[30];
520a57c8 15493 const char *alloc_rname;
9ebbca7d
GK
15494 rtvec p;
15495 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15496
15497 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15498 gen_rtx_REG (Pmode,
1de43f85 15499 LR_REGNO));
9ebbca7d
GK
15500 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15501 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15502 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15503 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15504 gen_rtx_SYMBOL_REF (Pmode,
15505 alloc_rname));
15506 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15507 {
15508 rtx addr, reg, mem;
15509 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15510 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15511 GEN_INT (info->fp_save_offset
9ebbca7d 15512 + sp_offset + 8*i));
0be76840 15513 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15514
15515 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15516 }
15517 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15518 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15519 NULL_RTX, NULL_RTX);
15520 }
b6c9286a 15521
9ebbca7d
GK
15522 /* Save GPRs. This is done as a PARALLEL if we are using
15523 the store-multiple instructions. */
f57fe068 15524 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15525 {
308c142a 15526 rtvec p;
9ebbca7d
GK
15527 int i;
15528 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15529 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15530 {
15531 rtx addr, reg, mem;
15532 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15533 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15534 GEN_INT (info->gp_save_offset
15535 + sp_offset
9ebbca7d 15536 + reg_size * i));
0be76840 15537 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15538
15539 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15540 }
15541 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15542 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15543 NULL_RTX, NULL_RTX);
b6c9286a 15544 }
52ff33d0
NF
15545 else if (!WORLD_SAVE_P (info)
15546 && TARGET_SPE_ABI
15547 && info->spe_64bit_regs_used != 0
15548 && info->first_gp_reg_save != 32)
15549 {
15550 int i;
15551 rtx spe_save_area_ptr;
15552 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15553 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15554 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15555
15556 /* Determine whether we can address all of the registers that need
15557 to be saved with an offset from the stack pointer that fits in
15558 the small const field for SPE memory instructions. */
15559 int spe_regs_addressable_via_sp
15560 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15561 + (32 - info->first_gp_reg_save - 1) * reg_size);
15562 int spe_offset;
15563
15564 if (spe_regs_addressable_via_sp)
15565 {
15566 spe_save_area_ptr = sp_reg_rtx;
15567 spe_offset = info->spe_gp_save_offset + sp_offset;
15568 }
15569 else
15570 {
15571 /* Make r11 point to the start of the SPE save area. We need
15572 to be careful here if r11 is holding the static chain. If
15573 it is, then temporarily save it in r0. We would use r0 as
15574 our base register here, but using r0 as a base register in
15575 loads and stores means something different from what we
15576 would like. */
15577 if (using_static_chain_p)
15578 {
15579 rtx r0 = gen_rtx_REG (Pmode, 0);
15580
15581 gcc_assert (info->first_gp_reg_save > 11);
15582
15583 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15584 }
15585
15586 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15587 emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
15588 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15589
15590 spe_offset = 0;
15591 }
15592
15593 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15594 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15595 {
15596 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15597 rtx offset, addr, mem;
15598
15599 /* We're doing all this to ensure that the offset fits into
15600 the immediate offset of 'evstdd'. */
15601 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15602
15603 offset = GEN_INT (reg_size * i + spe_offset);
15604 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15605 mem = gen_rtx_MEM (V2SImode, addr);
15606
15607 insn = emit_move_insn (mem, reg);
15608
15609 rs6000_frame_related (insn, spe_save_area_ptr,
15610 info->spe_gp_save_offset
15611 + sp_offset + reg_size * i,
15612 offset, const0_rtx);
15613 }
15614
15615 /* Move the static chain pointer back. */
15616 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15617 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15618 }
f57fe068 15619 else if (!WORLD_SAVE_P (info))
b6c9286a 15620 {
9ebbca7d
GK
15621 int i;
15622 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15623 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15624 {
15625 rtx addr, reg, mem;
15626 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15627
52ff33d0
NF
15628 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15629 GEN_INT (info->gp_save_offset
15630 + sp_offset
15631 + reg_size * i));
15632 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15633
52ff33d0
NF
15634 insn = emit_move_insn (mem, reg);
15635 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15636 NULL_RTX, NULL_RTX);
15637 }
9ebbca7d
GK
15638 }
15639
83720594
RH
15640 /* ??? There's no need to emit actual instructions here, but it's the
15641 easiest way to get the frame unwind information emitted. */
22fa69da 15642 if (current_function_calls_eh_return)
83720594 15643 {
78e1b90d
DE
15644 unsigned int i, regno;
15645
fc4767bb
JJ
15646 /* In the AIX ABI we need to pretend we save r2 here. */
15647 if (TARGET_AIX)
15648 {
15649 rtx addr, reg, mem;
15650
15651 reg = gen_rtx_REG (reg_mode, 2);
15652 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15653 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15654 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15655
15656 insn = emit_move_insn (mem, reg);
f676971a 15657 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15658 NULL_RTX, NULL_RTX);
15659 PATTERN (insn) = gen_blockage ();
15660 }
15661
83720594
RH
15662 for (i = 0; ; ++i)
15663 {
83720594
RH
15664 regno = EH_RETURN_DATA_REGNO (i);
15665 if (regno == INVALID_REGNUM)
15666 break;
15667
89e7058f
AH
15668 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15669 info->ehrd_offset + sp_offset
15670 + reg_size * (int) i,
15671 info->total_size);
83720594
RH
15672 }
15673 }
15674
9ebbca7d 15675 /* Save CR if we use any that must be preserved. */
f57fe068 15676 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15677 {
15678 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15679 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15680 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15681 /* See the large comment above about why CR2_REGNO is used. */
15682 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15683
9ebbca7d
GK
15684 /* If r12 was used to hold the original sp, copy cr into r0 now
15685 that it's free. */
15686 if (REGNO (frame_reg_rtx) == 12)
15687 {
f8a57be8
GK
15688 rtx set;
15689
9ebbca7d 15690 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15691 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15692 RTX_FRAME_RELATED_P (insn) = 1;
15693 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15694 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15695 set,
15696 REG_NOTES (insn));
f676971a 15697
9ebbca7d
GK
15698 }
15699 insn = emit_move_insn (mem, cr_save_rtx);
15700
f676971a 15701 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15702 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15703 }
15704
f676971a 15705 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15706 for which it was done previously. */
f57fe068 15707 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15708 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15709 {
bcb2d701 15710 if (info->total_size < 32767)
2b2c2fe5 15711 sp_offset = info->total_size;
bcb2d701
EC
15712 else
15713 frame_reg_rtx = frame_ptr_rtx;
15714 rs6000_emit_allocate_stack (info->total_size,
15715 (frame_reg_rtx != sp_reg_rtx
15716 && ((info->altivec_size != 0)
15717 || (info->vrsave_mask != 0)
15718 )));
15719 if (frame_reg_rtx != sp_reg_rtx)
15720 rs6000_emit_stack_tie ();
2b2c2fe5 15721 }
9ebbca7d
GK
15722
15723 /* Set frame pointer, if needed. */
15724 if (frame_pointer_needed)
15725 {
7d5175e1 15726 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15727 sp_reg_rtx);
15728 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15729 }
9878760c 15730
2b2c2fe5
EC
15731 /* Save AltiVec registers if needed. Save here because the red zone does
15732 not include AltiVec registers. */
15733 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15734 {
15735 int i;
15736
15737 /* There should be a non-inline version of this, for when we
15738 are saving lots of vector registers. */
15739 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15740 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15741 {
15742 rtx areg, savereg, mem;
15743 int offset;
15744
15745 offset = info->altivec_save_offset + sp_offset
15746 + 16 * (i - info->first_altivec_reg_save);
15747
15748 savereg = gen_rtx_REG (V4SImode, i);
15749
15750 areg = gen_rtx_REG (Pmode, 0);
15751 emit_move_insn (areg, GEN_INT (offset));
15752
15753 /* AltiVec addressing mode is [reg+reg]. */
15754 mem = gen_frame_mem (V4SImode,
15755 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15756
15757 insn = emit_move_insn (mem, savereg);
15758
15759 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15760 areg, GEN_INT (offset));
15761 }
15762 }
15763
15764 /* VRSAVE is a bit vector representing which AltiVec registers
15765 are used. The OS uses this to determine which vector
15766 registers to save on a context switch. We need to save
15767 VRSAVE on the stack frame, add whatever AltiVec registers we
15768 used in this function, and do the corresponding magic in the
15769 epilogue. */
15770
15771 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15772 && info->vrsave_mask != 0)
15773 {
15774 rtx reg, mem, vrsave;
15775 int offset;
15776
15777 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15778 as frame_reg_rtx and r11 as the static chain pointer for
15779 nested functions. */
15780 reg = gen_rtx_REG (SImode, 0);
15781 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15782 if (TARGET_MACHO)
15783 emit_insn (gen_get_vrsave_internal (reg));
15784 else
15785 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15786
15787 if (!WORLD_SAVE_P (info))
15788 {
15789 /* Save VRSAVE. */
15790 offset = info->vrsave_save_offset + sp_offset;
15791 mem = gen_frame_mem (SImode,
15792 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15793 GEN_INT (offset)));
15794 insn = emit_move_insn (mem, reg);
15795 }
15796
15797 /* Include the registers in the mask. */
15798 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15799
15800 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15801 }
15802
1db02437 15803 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15804 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15805 || (DEFAULT_ABI == ABI_V4
15806 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15807 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15808 {
15809 /* If emit_load_toc_table will use the link register, we need to save
15810 it. We use R12 for this purpose because emit_load_toc_table
15811 can use register 0. This allows us to use a plain 'blr' to return
15812 from the procedure more often. */
15813 int save_LR_around_toc_setup = (TARGET_ELF
15814 && DEFAULT_ABI != ABI_AIX
15815 && flag_pic
15816 && ! info->lr_save_p
15817 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15818 if (save_LR_around_toc_setup)
15819 {
1de43f85 15820 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15821
c4ad648e 15822 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15823 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15824
c4ad648e 15825 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15826
c4ad648e 15827 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15828 RTX_FRAME_RELATED_P (insn) = 1;
15829 }
15830 else
15831 rs6000_emit_load_toc_table (TRUE);
15832 }
ee890fe2 15833
fcce224d 15834#if TARGET_MACHO
ee890fe2
SS
15835 if (DEFAULT_ABI == ABI_DARWIN
15836 && flag_pic && current_function_uses_pic_offset_table)
15837 {
1de43f85 15838 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15839 rtx src = machopic_function_base_sym ();
ee890fe2 15840
6d0a8091
DJ
15841 /* Save and restore LR locally around this call (in R0). */
15842 if (!info->lr_save_p)
6fb5fa3c 15843 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15844
6fb5fa3c 15845 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15846
6fb5fa3c
DB
15847 emit_move_insn (gen_rtx_REG (Pmode,
15848 RS6000_PIC_OFFSET_TABLE_REGNUM),
15849 lr);
6d0a8091
DJ
15850
15851 if (!info->lr_save_p)
6fb5fa3c 15852 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15853 }
fcce224d 15854#endif
9ebbca7d
GK
15855}
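/* For concreteness: a small 32-bit SVR4 frame that only needs r31 and
   the link register typically comes out as something like (a sketch;
   the offsets come from rs6000_stack_info and the scheduler may reorder
   the stores):

	stwu 1,-32(1)
	mflr 0
	stw 0,36(1)
	stw 31,28(1)

   with the matching reloads emitted by rs6000_emit_epilogue below.  */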
15856
9ebbca7d 15857/* Write function prologue. */
a4f6c312 15858
08c148a8 15859static void
f676971a 15860rs6000_output_function_prologue (FILE *file,
a2369ed3 15861 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15862{
15863 rs6000_stack_t *info = rs6000_stack_info ();
15864
4697a36c
MM
15865 if (TARGET_DEBUG_STACK)
15866 debug_stack_info (info);
9878760c 15867
a4f6c312
SS
15868 /* Write .extern for any function we will call to save and restore
15869 fp values. */
15870 if (info->first_fp_reg_save < 64
15871 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15872 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15873 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15874 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15875 RESTORE_FP_SUFFIX);
9878760c 15876
c764f757
RK
15877 /* Write .extern for AIX common mode routines, if needed. */
15878 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15879 {
f6709c70
JW
15880 fputs ("\t.extern __mulh\n", file);
15881 fputs ("\t.extern __mull\n", file);
15882 fputs ("\t.extern __divss\n", file);
15883 fputs ("\t.extern __divus\n", file);
15884 fputs ("\t.extern __quoss\n", file);
15885 fputs ("\t.extern __quous\n", file);
c764f757
RK
15886 common_mode_defined = 1;
15887 }
9878760c 15888
9ebbca7d 15889 if (! HAVE_prologue)
979721f8 15890 {
9ebbca7d 15891 start_sequence ();
9dda4cc8 15892
a4f6c312
SS
15893 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
15894 the "toplevel" insn chain. */
2e040219 15895 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15896 rs6000_emit_prologue ();
2e040219 15897 emit_note (NOTE_INSN_DELETED);
178c3eff 15898
a3c9585f 15899 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15900 {
15901 rtx insn;
15902 unsigned addr = 0;
15903 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15904 {
15905 INSN_ADDRESSES_NEW (insn, addr);
15906 addr += 4;
15907 }
15908 }
9dda4cc8 15909
9ebbca7d 15910 if (TARGET_DEBUG_STACK)
a4f6c312 15911 debug_rtx_list (get_insns (), 100);
c9d691e9 15912 final (get_insns (), file, FALSE);
9ebbca7d 15913 end_sequence ();
979721f8
MM
15914 }
15915
9ebbca7d
GK
15916 rs6000_pic_labelno++;
15917}
f676971a 15918
9ebbca7d 15919/* Emit function epilogue as insns.
9878760c 15920
9ebbca7d
GK
15921 At present, dwarf2out_frame_debug_expr doesn't understand
15922 register restores, so we don't bother setting RTX_FRAME_RELATED_P
15923 anywhere in the epilogue. Most of the insns below would in any case
15924 need special notes to explain where r11 is in relation to the stack. */
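/* A rough sketch of the order of events below, derived from the code
   itself: handle the Darwin rest_world case; restore AltiVec registers
   and VRSAVE; recover the frame top either through the backchain or by
   re-adding total_size; reload LR and CR into r0 and r12, moving LR
   back early so it overlaps the register reloads; reload the eh_return
   data registers, GPRs and FPRs; restore the CR fields from r12; pop
   the frame (for V.4, only after the loads); and finally emit the
   return, or a branch to the out-of-line FP restore routine, which
   returns on our behalf.  */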
9878760c 15925
9ebbca7d 15926void
a2369ed3 15927rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
15928{
15929 rs6000_stack_t *info;
15930 int restoring_FPRs_inline;
15931 int using_load_multiple;
d296e02e 15932 int using_mtcr_multiple;
9ebbca7d
GK
15933 int use_backchain_to_restore_sp;
15934 int sp_offset = 0;
15935 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
15936 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 15937 enum machine_mode reg_mode = Pmode;
327e5343 15938 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15939 int i;
15940
c19de7aa
AH
15941 info = rs6000_stack_info ();
15942
15943 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15944 {
15945 reg_mode = V2SImode;
15946 reg_size = 8;
15947 }
15948
9ebbca7d 15949 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15950 && (!TARGET_SPE_ABI
15951 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15952 && info->first_gp_reg_save < 31
15953 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15954 restoring_FPRs_inline = (sibcall
83720594 15955 || current_function_calls_eh_return
9ebbca7d
GK
15956 || info->first_fp_reg_save == 64
15957 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 15958 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
15959 || current_function_calls_alloca
15960 || info->total_size > 32767);
d296e02e 15961 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
15962 || rs6000_cpu == PROCESSOR_PPC603
15963 || rs6000_cpu == PROCESSOR_PPC750
15964 || optimize_size);
15965
f57fe068 15966 if (WORLD_SAVE_P (info))
d62294f5
FJ
15967 {
15968 int i, j;
15969 char rname[30];
15970 const char *alloc_rname;
15971 rtvec p;
15972
15973 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
15974 stack slot (which is not likely to be our caller).
15975 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
15976 rest_world is similar, except any R10 parameter is ignored.
15977 The exception-handling stuff that was here in 2.95 is no
15978 longer necessary. */
d62294f5
FJ
15979
15980 p = rtvec_alloc (9
15981 + 1
f676971a 15982 + 32 - info->first_gp_reg_save
c4ad648e
AM
15983 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
15984 + 63 + 1 - info->first_fp_reg_save);
d62294f5 15985
c4ad648e
AM
15986 strcpy (rname, ((current_function_calls_eh_return) ?
15987 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
15988 alloc_rname = ggc_strdup (rname);
15989
15990 j = 0;
15991 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
15992 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 15993 gen_rtx_REG (Pmode,
1de43f85 15994 LR_REGNO));
d62294f5 15995 RTVEC_ELT (p, j++)
c4ad648e 15996 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 15997 /* The instruction pattern requires a clobber here;
c4ad648e 15998 it is shared with the restVEC helper. */
d62294f5 15999 RTVEC_ELT (p, j++)
c4ad648e 16000 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16001
16002 {
c4ad648e
AM
16003 /* CR register traditionally saved as CR2. */
16004 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16005 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16006 GEN_INT (info->cr_save_offset));
0be76840 16007 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16008
16009 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16010 }
16011
16012 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16013 {
16014 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16015 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16016 GEN_INT (info->gp_save_offset
16017 + reg_size * i));
0be76840 16018 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16019
16020 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16021 }
d62294f5 16022 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16023 {
16024 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16025 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16026 GEN_INT (info->altivec_save_offset
16027 + 16 * i));
0be76840 16028 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16029
16030 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16031 }
d62294f5 16032 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16033 {
16034 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16035 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16036 GEN_INT (info->fp_save_offset
16037 + 8 * i));
0be76840 16038 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16039
16040 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16041 }
d62294f5 16042 RTVEC_ELT (p, j++)
c4ad648e 16043 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16044 RTVEC_ELT (p, j++)
c4ad648e 16045 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16046 RTVEC_ELT (p, j++)
c4ad648e 16047 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16048 RTVEC_ELT (p, j++)
c4ad648e 16049 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16050 RTVEC_ELT (p, j++)
c4ad648e 16051 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16052 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16053
16054 return;
16055 }
16056
45b194f8
AM
16057 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16058 if (info->push_p)
2b2c2fe5 16059 sp_offset = info->total_size;
f676971a 16060
9aa86737
AH
16061 /* Restore AltiVec registers if needed. */
16062 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16063 {
16064 int i;
16065
16066 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16067 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16068 {
16069 rtx addr, areg, mem;
16070
16071 areg = gen_rtx_REG (Pmode, 0);
16072 emit_move_insn
16073 (areg, GEN_INT (info->altivec_save_offset
16074 + sp_offset
16075 + 16 * (i - info->first_altivec_reg_save)));
16076
16077 /* AltiVec addressing mode is [reg+reg]. */
16078 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16079 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16080
16081 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16082 }
16083 }
16084
16085 /* Restore VRSAVE if needed. */
44688022 16086 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 16087 && info->vrsave_mask != 0)
9aa86737
AH
16088 {
16089 rtx addr, mem, reg;
16090
16091 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16092 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 16093 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
16094 reg = gen_rtx_REG (SImode, 12);
16095 emit_move_insn (reg, mem);
16096
16097 emit_insn (generate_set_vrsave (reg, info, 1));
16098 }
16099
2b2c2fe5
EC
16100 /* If we have a frame pointer, a call to alloca, or a large stack
16101 frame, restore the old stack pointer using the backchain. Otherwise,
16102 we know what size to update it with. */
16103 if (use_backchain_to_restore_sp)
16104 {
16105 /* Under V.4, don't reset the stack pointer until after we're done
16106 loading the saved registers. */
16107 if (DEFAULT_ABI == ABI_V4)
16108 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16109
16110 emit_move_insn (frame_reg_rtx,
16111 gen_rtx_MEM (Pmode, sp_reg_rtx));
45b194f8 16112 sp_offset = 0;
2b2c2fe5 16113 }
45b194f8
AM
16114 else if (info->push_p
16115 && DEFAULT_ABI != ABI_V4
16116 && !current_function_calls_eh_return)
2b2c2fe5 16117 {
45b194f8
AM
16118 emit_insn (TARGET_32BIT
16119 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16120 GEN_INT (info->total_size))
16121 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16122 GEN_INT (info->total_size)));
16123 sp_offset = 0;
2b2c2fe5
EC
16124 }
16125
9ebbca7d
GK
16126 /* Get the old lr if we saved it. */
16127 if (info->lr_save_p)
b6c9286a 16128 {
a3170dc6
AH
16129 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16130 info->lr_save_offset + sp_offset);
ba4828e0 16131
9ebbca7d 16132 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16133 }
f676971a 16134
9ebbca7d
GK
16135 /* Get the old cr if we saved it. */
16136 if (info->cr_save_p)
16137 {
16138 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16139 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16140 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16141
9ebbca7d
GK
16142 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16143 }
f676971a 16144
9ebbca7d 16145 /* Set LR here to try to overlap restores below. */
4697a36c 16146 if (info->lr_save_p)
1de43f85 16147 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16148 gen_rtx_REG (Pmode, 0));
f676971a 16149
83720594
RH
16150 /* Load exception handler data registers, if needed. */
16151 if (current_function_calls_eh_return)
16152 {
78e1b90d
DE
16153 unsigned int i, regno;
16154
fc4767bb
JJ
16155 if (TARGET_AIX)
16156 {
16157 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16158 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16159 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16160
16161 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16162 }
16163
83720594
RH
16164 for (i = 0; ; ++i)
16165 {
a3170dc6 16166 rtx mem;
83720594
RH
16167
16168 regno = EH_RETURN_DATA_REGNO (i);
16169 if (regno == INVALID_REGNUM)
16170 break;
16171
a3170dc6
AH
16172 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
16173 info->ehrd_offset + sp_offset
16174 + reg_size * (int) i);
83720594
RH
16175
16176 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
16177 }
16178 }
f676971a 16179
9ebbca7d
GK
16180 /* Restore GPRs. This is done as a PARALLEL if we are using
16181 the load-multiple instructions. */
16182 if (using_load_multiple)
979721f8 16183 {
9ebbca7d
GK
16184 rtvec p;
16185 p = rtvec_alloc (32 - info->first_gp_reg_save);
16186 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 16187 {
f676971a
EC
16188 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16189 GEN_INT (info->gp_save_offset
16190 + sp_offset
9ebbca7d 16191 + reg_size * i));
0be76840 16192 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 16193
f676971a 16194 RTVEC_ELT (p, i) =
9ebbca7d
GK
16195 gen_rtx_SET (VOIDmode,
16196 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16197 mem);
979721f8 16198 }
9ebbca7d 16199 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 16200 }
52ff33d0
NF
16201 else if (TARGET_SPE_ABI
16202 && info->spe_64bit_regs_used != 0
16203 && info->first_gp_reg_save != 32)
16204 {
52ff33d0
NF
16205 /* Determine whether we can address all of the registers that need
16206 to be saved with an offset from the stack pointer that fits in
16207 the small const field for SPE memory instructions. */
16208 int spe_regs_addressable_via_sp
16209 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16210 + (32 - info->first_gp_reg_save - 1) * reg_size);
16211 int spe_offset;
16212
16213 if (spe_regs_addressable_via_sp)
45b194f8 16214 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
16215 else
16216 {
45b194f8 16217 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 16218 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 16219 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
16220 There's no need to worry here because the static chain is passed
16221 anew to every function. */
45b194f8
AM
16222 if (frame_reg_rtx == sp_reg_rtx)
16223 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16224 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
52ff33d0 16225 GEN_INT (info->spe_gp_save_offset + sp_offset)));
45b194f8
AM
16226 /* Keep the invariant that frame_reg_rtx + sp_offset points
16227 at the top of the stack frame. */
16228 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
16229
16230 spe_offset = 0;
16231 }
16232
16233 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16234 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16235 {
16236 rtx offset, addr, mem;
16237
16238 /* We're doing all this to ensure that the immediate offset
16239 fits into the immediate field of 'evldd'. */
16240 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
16241
16242 offset = GEN_INT (spe_offset + reg_size * i);
45b194f8 16243 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
52ff33d0
NF
16244 mem = gen_rtx_MEM (V2SImode, addr);
16245
16246 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
16247 mem);
16248 }
16249 }
9ebbca7d
GK
16250 else
16251 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 16252 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 16253 {
f676971a
EC
16254 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16255 GEN_INT (info->gp_save_offset
16256 + sp_offset
9ebbca7d 16257 + reg_size * i));
0be76840 16258 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 16259
f676971a 16260 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 16261 info->first_gp_reg_save + i), mem);
9ebbca7d 16262 }
9878760c 16263
9ebbca7d
GK
16264 /* Restore fpr's if we need to do it without calling a function. */
16265 if (restoring_FPRs_inline)
16266 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16267 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
16268 && ! call_used_regs[info->first_fp_reg_save+i]))
16269 {
16270 rtx addr, mem;
16271 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
16272 GEN_INT (info->fp_save_offset
16273 + sp_offset
a4f6c312 16274 + 8 * i));
0be76840 16275 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16276
f676971a 16277 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
16278 info->first_fp_reg_save + i),
16279 mem);
16280 }
8d30c4ee 16281
9ebbca7d
GK
16282 /* If we saved cr, restore it here. Just those that were used. */
16283 if (info->cr_save_p)
979721f8 16284 {
9ebbca7d 16285 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 16286 int count = 0;
f676971a 16287
d296e02e 16288 if (using_mtcr_multiple)
979721f8 16289 {
9ebbca7d 16290 for (i = 0; i < 8; i++)
6fb5fa3c 16291 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 16292 count++;
37409796 16293 gcc_assert (count);
e35b9579
GK
16294 }
16295
d296e02e 16296 if (using_mtcr_multiple && count > 1)
e35b9579
GK
16297 {
16298 rtvec p;
16299 int ndx;
f676971a 16300
e35b9579 16301 p = rtvec_alloc (count);
9ebbca7d 16302
e35b9579 16303 ndx = 0;
9ebbca7d 16304 for (i = 0; i < 8; i++)
6fb5fa3c 16305 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
16306 {
16307 rtvec r = rtvec_alloc (2);
16308 RTVEC_ELT (r, 0) = r12_rtx;
16309 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 16310 RTVEC_ELT (p, ndx) =
f676971a 16311 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 16312 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 16313 ndx++;
9ebbca7d
GK
16314 }
16315 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 16316 gcc_assert (ndx == count);
979721f8
MM
16317 }
16318 else
9ebbca7d 16319 for (i = 0; i < 8; i++)
6fb5fa3c 16320 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 16321 {
f676971a 16322 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
16323 CR0_REGNO+i),
16324 r12_rtx));
979721f8 16325 }
979721f8
MM
16326 }
16327
9ebbca7d 16328 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
16329 have been done. */
16330 if (frame_reg_rtx != sp_reg_rtx)
16331 {
16332 /* This blockage is needed so that sched doesn't decide to move
16333 the sp change before the register restores. */
16334 rs6000_emit_stack_tie ();
45b194f8
AM
16335 if (sp_offset != 0)
16336 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
16337 GEN_INT (sp_offset)));
52ff33d0
NF
16338 else
16339 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
16340 }
16341 else if (sp_offset != 0)
16342 emit_insn (TARGET_32BIT
16343 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16344 GEN_INT (sp_offset))
16345 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16346 GEN_INT (sp_offset)));
b6c9286a 16347
83720594
RH
16348 if (current_function_calls_eh_return)
16349 {
16350 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 16351 emit_insn (TARGET_32BIT
83720594
RH
16352 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
16353 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
16354 }
16355
9ebbca7d
GK
16356 if (!sibcall)
16357 {
16358 rtvec p;
16359 if (! restoring_FPRs_inline)
16360 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
16361 else
16362 p = rtvec_alloc (2);
b6c9286a 16363
e35b9579 16364 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
16365 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
16366 gen_rtx_REG (Pmode,
1de43f85 16367 LR_REGNO));
9ebbca7d
GK
16368
16369 /* If we have to restore more than two FP registers, branch to the
16370 restore function. It will return to our caller. */
16371 if (! restoring_FPRs_inline)
16372 {
16373 int i;
16374 char rname[30];
520a57c8 16375 const char *alloc_rname;
979721f8 16376
f676971a 16377 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 16378 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 16379 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
16380 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
16381 gen_rtx_SYMBOL_REF (Pmode,
16382 alloc_rname));
b6c9286a 16383
9ebbca7d
GK
16384 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16385 {
16386 rtx addr, mem;
16387 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
16388 GEN_INT (info->fp_save_offset + 8*i));
0be76840 16389 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 16390
f676971a 16391 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
16392 gen_rtx_SET (VOIDmode,
16393 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
16394 mem);
b6c9286a
MM
16395 }
16396 }
f676971a 16397
9ebbca7d 16398 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 16399 }
9878760c
RK
16400}
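/* The epilogue matching the small SVR4 example sketched after
   rs6000_emit_prologue would then be roughly

	lwz 0,36(1)
	lwz 31,28(1)
	mtlr 0
	addi 1,1,32
	blr

   again with the offsets supplied by rs6000_stack_info.  */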
16401
16402/* Write function epilogue. */
16403
08c148a8 16404static void
f676971a 16405rs6000_output_function_epilogue (FILE *file,
a2369ed3 16406 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 16407{
9ebbca7d 16408 if (! HAVE_epilogue)
9878760c 16409 {
9ebbca7d
GK
16410 rtx insn = get_last_insn ();
16411 /* If the last insn was a BARRIER, we don't have to write anything except
16412 the trace table. */
16413 if (GET_CODE (insn) == NOTE)
16414 insn = prev_nonnote_insn (insn);
16415 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 16416 {
9ebbca7d
GK
16417 /* This is slightly ugly, but at least we don't have two
16418 copies of the epilogue-emitting code. */
16419 start_sequence ();
16420
16421 /* A NOTE_INSN_DELETED is supposed to be at the start
16422 and end of the "toplevel" insn chain. */
2e040219 16423 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16424 rs6000_emit_epilogue (FALSE);
2e040219 16425 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16426
a3c9585f 16427 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16428 {
16429 rtx insn;
16430 unsigned addr = 0;
16431 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16432 {
16433 INSN_ADDRESSES_NEW (insn, addr);
16434 addr += 4;
16435 }
16436 }
16437
9ebbca7d 16438 if (TARGET_DEBUG_STACK)
a4f6c312 16439 debug_rtx_list (get_insns (), 100);
c9d691e9 16440 final (get_insns (), file, FALSE);
9ebbca7d 16441 end_sequence ();
4697a36c 16442 }
9878760c 16443 }
b4ac57ab 16444
efdba735
SH
16445#if TARGET_MACHO
16446 macho_branch_islands ();
0e5da0be
GK
16447 /* Mach-O doesn't support labels at the end of objects, so if
16448 it looks like we might want one, insert a NOP. */
16449 {
16450 rtx insn = get_last_insn ();
16451 while (insn
16452 && NOTE_P (insn)
a38e7aa5 16453 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16454 insn = PREV_INSN (insn);
f676971a
EC
16455 if (insn
16456 && (LABEL_P (insn)
0e5da0be 16457 || (NOTE_P (insn)
a38e7aa5 16458 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16459 fputs ("\tnop\n", file);
16460 }
16461#endif
16462
9b30bae2 16463 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16464 on its format.
16465
16466 We don't output a traceback table if -finhibit-size-directive was
16467 used. The documentation for -finhibit-size-directive reads
16468 ``don't output a @code{.size} assembler directive, or anything
16469 else that would cause trouble if the function is split in the
16470 middle, and the two halves are placed at locations far apart in
16471 memory.'' The traceback table has this property, since it
16472 includes the offset from the start of the function to the
4d30c363
MM
16473 traceback table itself.
16474
16475 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16476 different traceback table. */
57ac7be9 16477 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16478 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16479 {
69c75916 16480 const char *fname = NULL;
3ac88239 16481 const char *language_string = lang_hooks.name;
6041bf2f 16482 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16483 int i;
57ac7be9 16484 int optional_tbtab;
8097c268 16485 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16486
16487 if (rs6000_traceback == traceback_full)
16488 optional_tbtab = 1;
16489 else if (rs6000_traceback == traceback_part)
16490 optional_tbtab = 0;
16491 else
16492 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16493
69c75916
AM
16494 if (optional_tbtab)
16495 {
16496 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16497 while (*fname == '.') /* V.4 encodes . in the name */
16498 fname++;
16499
16500 /* Need label immediately before tbtab, so we can compute
16501 its offset from the function start. */
16502 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16503 ASM_OUTPUT_LABEL (file, fname);
16504 }
314fc5a9
ILT
16505
16506 /* The .tbtab pseudo-op can only be used for the first eight
16507 expressions, since it can't handle the possibly variable
16508 length fields that follow. However, if you omit the optional
16509 fields, the assembler outputs zeros for all optional fields
16510 anyway, giving each variable length field its minimum length
16511 (as defined in sys/debug.h). Thus we cannot use the .tbtab
16512 pseudo-op at all. */
16513
16514 /* An all-zero word flags the start of the tbtab, for debuggers
16515 that have to find it by searching forward from the entry
16516 point or from the current pc. */
19d2d16f 16517 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16518
16519 /* Tbtab format type. Use format type 0. */
19d2d16f 16520 fputs ("\t.byte 0,", file);
314fc5a9 16521
5fc921c1
DE
16522 /* Language type. Unfortunately, there does not seem to be any
16523 official way to discover the language being compiled, so we
16524 use language_string.
16525 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16526 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16527 a number, so for now use 9. */
5fc921c1 16528 if (! strcmp (language_string, "GNU C"))
314fc5a9 16529 i = 0;
6de9cd9a
DN
16530 else if (! strcmp (language_string, "GNU F77")
16531 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16532 i = 1;
8b83775b 16533 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16534 i = 2;
5fc921c1
DE
16535 else if (! strcmp (language_string, "GNU Ada"))
16536 i = 3;
56438901
AM
16537 else if (! strcmp (language_string, "GNU C++")
16538 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16539 i = 9;
9517ead8
AG
16540 else if (! strcmp (language_string, "GNU Java"))
16541 i = 13;
5fc921c1
DE
16542 else if (! strcmp (language_string, "GNU Objective-C"))
16543 i = 14;
314fc5a9 16544 else
37409796 16545 gcc_unreachable ();
314fc5a9
ILT
16546 fprintf (file, "%d,", i);
16547
16548 /* 8 single bit fields: global linkage (not set for C extern linkage,
16549 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16550 from start of procedure stored in tbtab, internal function, function
16551 has controlled storage, function has no toc, function uses fp,
16552 function logs/aborts fp operations. */
16553 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16554 fprintf (file, "%d,",
16555 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16556
16557 /* 6 bitfields: function is interrupt handler, name present in
16558 proc table, function calls alloca, on condition directives
16559 (controls stack walks, 3 bits), saves condition reg, saves
16560 link reg. */
16561 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16562 set up as a frame pointer, even when there is no alloca call. */
16563 fprintf (file, "%d,",
6041bf2f
DE
16564 ((optional_tbtab << 6)
16565 | ((optional_tbtab & frame_pointer_needed) << 5)
16566 | (info->cr_save_p << 1)
16567 | (info->lr_save_p)));
314fc5a9 16568
6041bf2f 16569 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16570 (6 bits). */
16571 fprintf (file, "%d,",
4697a36c 16572 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16573
16574 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16575 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16576
6041bf2f
DE
16577 if (optional_tbtab)
16578 {
16579 /* Compute the parameter info from the function decl argument
16580 list. */
16581 tree decl;
16582 int next_parm_info_bit = 31;
314fc5a9 16583
6041bf2f
DE
16584 for (decl = DECL_ARGUMENTS (current_function_decl);
16585 decl; decl = TREE_CHAIN (decl))
16586 {
16587 rtx parameter = DECL_INCOMING_RTL (decl);
16588 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16589
6041bf2f
DE
16590 if (GET_CODE (parameter) == REG)
16591 {
ebb109ad 16592 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16593 {
16594 int bits;
16595
16596 float_parms++;
16597
37409796
NS
16598 switch (mode)
16599 {
16600 case SFmode:
16601 bits = 0x2;
16602 break;
16603
16604 case DFmode:
7393f7f8 16605 case DDmode:
37409796 16606 case TFmode:
7393f7f8 16607 case TDmode:
37409796
NS
16608 bits = 0x3;
16609 break;
16610
16611 default:
16612 gcc_unreachable ();
16613 }
6041bf2f
DE
16614
16615 /* If only one bit will fit, don't or in this entry. */
16616 if (next_parm_info_bit > 0)
16617 parm_info |= (bits << (next_parm_info_bit - 1));
16618 next_parm_info_bit -= 2;
16619 }
16620 else
16621 {
16622 fixed_parms += ((GET_MODE_SIZE (mode)
16623 + (UNITS_PER_WORD - 1))
16624 / UNITS_PER_WORD);
16625 next_parm_info_bit -= 1;
16626 }
16627 }
16628 }
16629 }
314fc5a9
ILT
16630
16631 /* Number of fixed point parameters. */
16632 /* This is actually the number of words of fixed point parameters; thus
16633 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16634 fprintf (file, "%d,", fixed_parms);
16635
16636 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16637 all on stack. */
16638 /* This is actually the number of fp registers that hold parameters;
16639 and thus the maximum value is 13. */
16640 /* Set parameters on stack bit if parameters are not in their original
16641 registers, regardless of whether they are on the stack? Xlc
16642 seems to set the bit when not optimizing. */
16643 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16644
6041bf2f
DE
16645 if (! optional_tbtab)
16646 return;
16647
314fc5a9
ILT
16648 /* Optional fields follow. Some are variable length. */
16649
16650 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16651 11 double float. */
16652 /* There is an entry for each parameter in a register, in the order that
16653 they occur in the parameter list. Any intervening arguments on the
16654 stack are ignored. If the list overflows a long (max possible length
16655 34 bits) then completely leave off all elements that don't fit. */
16656 /* Only emit this long if there was at least one parameter. */
16657 if (fixed_parms || float_parms)
16658 fprintf (file, "\t.long %d\n", parm_info);
16659
16660 /* Offset from start of code to tb table. */
19d2d16f 16661 fputs ("\t.long ", file);
314fc5a9 16662 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16663 if (TARGET_AIX)
16664 RS6000_OUTPUT_BASENAME (file, fname);
16665 else
16666 assemble_name (file, fname);
16667 putc ('-', file);
16668 rs6000_output_function_entry (file, fname);
19d2d16f 16669 putc ('\n', file);
314fc5a9
ILT
16670
16671 /* Interrupt handler mask. */
16672 /* Omit this long, since we never set the interrupt handler bit
16673 above. */
16674
16675 /* Number of CTL (controlled storage) anchors. */
16676 /* Omit this long, since the has_ctl bit is never set above. */
16677
16678 /* Displacement into stack of each CTL anchor. */
16679 /* Omit this list of longs, because there are no CTL anchors. */
16680
16681 /* Length of function name. */
69c75916
AM
16682 if (*fname == '*')
16683 ++fname;
296b8152 16684 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16685
16686 /* Function name. */
16687 assemble_string (fname, strlen (fname));
16688
16689 /* Register for alloca automatic storage; this is always reg 31.
16690 Only emit this if the alloca bit was set above. */
16691 if (frame_pointer_needed)
19d2d16f 16692 fputs ("\t.byte 31\n", file);
b1765bde
DE
16693
16694 fputs ("\t.align 2\n", file);
9b30bae2 16695 }
9878760c 16696}
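/* Illustrative worked example, not part of the original rs6000.c: how the
   parameter-info loop in rs6000_output_function_epilogue above would pack
   parm_info for a hypothetical `void f (double a, int b, float c)' whose
   three arguments all arrive in registers.  The double stores the two-bit
   code 0x3 with a shift of 30, the int consumes one bit (and one word of
   fixed_parms), and the float stores 0x2 with a shift of 27.  */
#if 0
unsigned int parm_info_example = (0x3u << 30)   /* double -> bits "11" */
                               | (0x2u << 27);  /* float  -> bits "10" */
/* parm_info_example == 0xd0000000, with fixed_parms == 1 and
   float_parms == 2 for this hypothetical signature.  Never compiled,
   illustration only.  */
#endif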
17167fd8 16697\f
a4f6c312
SS
16698/* A C compound statement that outputs the assembler code for a thunk
16699 function, used to implement C++ virtual function calls with
16700 multiple inheritance. The thunk acts as a wrapper around a virtual
16701 function, adjusting the implicit object parameter before handing
16702 control off to the real function.
16703
16704 First, emit code to add the integer DELTA to the location that
16705 contains the incoming first argument. Assume that this argument
16706 contains a pointer, and is the one used to pass the `this' pointer
16707 in C++. This is the incoming argument *before* the function
16708 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16709 values of all other incoming arguments.
17167fd8
MM
16710
16711 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16712 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16713 not touch the return address. Hence returning from FUNCTION will
16714 return to whoever called the current `thunk'.
17167fd8 16715
a4f6c312
SS
16716 The effect must be as if FUNCTION had been called directly with the
16717 adjusted first argument. This macro is responsible for emitting
16718 all of the code for a thunk function; output_function_prologue()
16719 and output_function_epilogue() are not invoked.
17167fd8 16720
a4f6c312
SS
16721 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16722 been extracted from it.) It might possibly be useful on some
16723 targets, but probably not.
17167fd8 16724
a4f6c312
SS
16725 If you do not define this macro, the target-independent code in the
16726 C++ frontend will generate a less efficient heavyweight thunk that
16727 calls FUNCTION instead of jumping to it. The generic approach does
16728 not support varargs. */
17167fd8 16729
3961e8fe 16730static void
f676971a
EC
16731rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16732 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16733 tree function)
17167fd8 16734{
5b71a4e7 16735 rtx this, insn, funexp;
17167fd8 16736
5b71a4e7 16737 reload_completed = 1;
fe3ad572 16738 epilogue_completed = 1;
56a7189a 16739
5b71a4e7 16740 /* Mark the end of the (empty) prologue. */
2e040219 16741 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16742
5b71a4e7
DE
16743 /* Find the "this" pointer. If the function returns a structure,
16744 the structure return pointer is in r3. */
61f71b34 16745 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16746 this = gen_rtx_REG (Pmode, 4);
56a7189a 16747 else
5b71a4e7 16748 this = gen_rtx_REG (Pmode, 3);
17167fd8 16749
5b71a4e7
DE
16750 /* Apply the constant offset, if required. */
16751 if (delta)
16752 {
16753 rtx delta_rtx = GEN_INT (delta);
16754 emit_insn (TARGET_32BIT
16755 ? gen_addsi3 (this, this, delta_rtx)
16756 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16757 }
16758
5b71a4e7
DE
16759 /* Apply the offset from the vtable, if required. */
16760 if (vcall_offset)
17167fd8 16761 {
5b71a4e7
DE
16762 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16763 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16764
5b71a4e7 16765 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16766 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16767 {
16768 emit_insn (TARGET_32BIT
16769 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16770 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16771 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16772 }
16773 else
16774 {
16775 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16776
16777 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16778 }
5b71a4e7
DE
16779 emit_insn (TARGET_32BIT
16780 ? gen_addsi3 (this, this, tmp)
16781 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16782 }
16783
5b71a4e7
DE
16784 /* Generate a tail call to the target function. */
16785 if (!TREE_USED (function))
16786 {
16787 assemble_external (function);
16788 TREE_USED (function) = 1;
16789 }
16790 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16791 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16792
16793#if TARGET_MACHO
ab82a49f 16794 if (MACHOPIC_INDIRECT)
5b71a4e7 16795 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16796#endif
5b71a4e7
DE
16797
16798 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16799 generate sibcall RTL explicitly. */
5b71a4e7
DE
16800 insn = emit_call_insn (
16801 gen_rtx_PARALLEL (VOIDmode,
16802 gen_rtvec (4,
16803 gen_rtx_CALL (VOIDmode,
16804 funexp, const0_rtx),
16805 gen_rtx_USE (VOIDmode, const0_rtx),
16806 gen_rtx_USE (VOIDmode,
16807 gen_rtx_REG (SImode,
1de43f85 16808 LR_REGNO)),
5b71a4e7
DE
16809 gen_rtx_RETURN (VOIDmode))));
16810 SIBLING_CALL_P (insn) = 1;
16811 emit_barrier ();
16812
16813 /* Run just enough of rest_of_compilation to get the insns emitted.
16814 There's not really enough bulk here to make other passes such as
16815 instruction scheduling worthwhile. Note that use_thunk calls
16816 assemble_start_function and assemble_end_function. */
16817 insn = get_insns ();
55e092c4 16818 insn_locators_alloc ();
5b71a4e7
DE
16819 shorten_branches (insn);
16820 final_start_function (insn, file, 1);
c9d691e9 16821 final (insn, file, 1);
5b71a4e7
DE
16822 final_end_function ();
16823
16824 reload_completed = 0;
fe3ad572 16825 epilogue_completed = 0;
9ebbca7d 16826}
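/* Illustrative sketch, not part of the original rs6000.c: in C terms the
   RTL emitted by rs6000_output_mi_thunk above behaves roughly like the
   wrapper below, where `real_fn', DELTA and VCALL_OFFSET are stand-ins for
   the target FUNCTION and the two adjustments.  The real thunk does the
   same arithmetic on the incoming `this' register and ends in a sibcall,
   so no frame is created and the return goes straight to our caller.  */
#if 0
static void *
thunk_sketch (char *this_ptr)
{
  char *p = this_ptr + DELTA;                      /* constant offset    */
  if (VCALL_OFFSET)
    p += *(long *) (*(char **) p + VCALL_OFFSET);  /* offset via vtable  */
  return real_fn (p);                              /* tail call          */
}
#endif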
9ebbca7d
GK
16827\f
16828/* A quick summary of the various types of 'constant-pool tables'
16829 under PowerPC:
16830
f676971a 16831 Target      Flags           Name             One table per
9ebbca7d
GK
16832 AIX         (none)          AIX TOC          object file
16833 AIX         -mfull-toc      AIX TOC          object file
16834 AIX         -mminimal-toc   AIX minimal TOC  translation unit
16835 SVR4/EABI   (none)          SVR4 SDATA       object file
16836 SVR4/EABI   -fpic           SVR4 pic         object file
16837 SVR4/EABI   -fPIC           SVR4 PIC         translation unit
16838 SVR4/EABI   -mrelocatable   EABI TOC         function
16839 SVR4/EABI   -maix           AIX TOC          object file
f676971a 16840 SVR4/EABI   -maix -mminimal-toc
9ebbca7d
GK
16841                             AIX minimal TOC  translation unit
16842
16843 Name             Reg.  Set by  entries  contains:
16844                                made by  addrs?     fp?      sum?
16845
16846 AIX TOC          2     crt0    as       Y          option   option
16847 AIX minimal TOC  30    prolog  gcc      Y          Y        option
16848 SVR4 SDATA       13    crt0    gcc      N          Y        N
16849 SVR4 pic         30    prolog  ld       Y          not yet  N
16850 SVR4 PIC         30    prolog  gcc      Y          option   option
16851 EABI TOC         30    prolog  gcc      Y          option   option
16852
16853*/
16854
9ebbca7d
GK
16855/* Hash functions for the hash table. */
16856
16857static unsigned
a2369ed3 16858rs6000_hash_constant (rtx k)
9ebbca7d 16859{
46b33600
RH
16860 enum rtx_code code = GET_CODE (k);
16861 enum machine_mode mode = GET_MODE (k);
16862 unsigned result = (code << 3) ^ mode;
16863 const char *format;
16864 int flen, fidx;
f676971a 16865
46b33600
RH
16866 format = GET_RTX_FORMAT (code);
16867 flen = strlen (format);
16868 fidx = 0;
9ebbca7d 16869
46b33600
RH
16870 switch (code)
16871 {
16872 case LABEL_REF:
16873 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16874
16875 case CONST_DOUBLE:
16876 if (mode != VOIDmode)
16877 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16878 flen = 2;
16879 break;
16880
16881 case CODE_LABEL:
16882 fidx = 3;
16883 break;
16884
16885 default:
16886 break;
16887 }
9ebbca7d
GK
16888
16889 for (; fidx < flen; fidx++)
16890 switch (format[fidx])
16891 {
16892 case 's':
16893 {
16894 unsigned i, len;
16895 const char *str = XSTR (k, fidx);
16896 len = strlen (str);
16897 result = result * 613 + len;
16898 for (i = 0; i < len; i++)
16899 result = result * 613 + (unsigned) str[i];
17167fd8
MM
16900 break;
16901 }
9ebbca7d
GK
16902 case 'u':
16903 case 'e':
16904 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
16905 break;
16906 case 'i':
16907 case 'n':
16908 result = result * 613 + (unsigned) XINT (k, fidx);
16909 break;
16910 case 'w':
16911 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
16912 result = result * 613 + (unsigned) XWINT (k, fidx);
16913 else
16914 {
16915 size_t i;
9390387d 16916 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
16917 result = result * 613 + (unsigned) (XWINT (k, fidx)
16918 >> CHAR_BIT * i);
16919 }
16920 break;
09501938
DE
16921 case '0':
16922 break;
9ebbca7d 16923 default:
37409796 16924 gcc_unreachable ();
9ebbca7d 16925 }
46b33600 16926
9ebbca7d
GK
16927 return result;
16928}
16929
16930static unsigned
a2369ed3 16931toc_hash_function (const void *hash_entry)
9ebbca7d 16932{
f676971a 16933 const struct toc_hash_struct *thc =
a9098fd0
GK
16934 (const struct toc_hash_struct *) hash_entry;
16935 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
16936}
16937
16938/* Compare H1 and H2 for equivalence. */
16939
16940static int
a2369ed3 16941toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
16942{
16943 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
16944 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
16945
a9098fd0
GK
16946 if (((const struct toc_hash_struct *) h1)->key_mode
16947 != ((const struct toc_hash_struct *) h2)->key_mode)
16948 return 0;
16949
5692c7bc 16950 return rtx_equal_p (r1, r2);
9ebbca7d
GK
16951}
16952
28e510bd
MM
16953/* These are the names given by the C++ front-end to vtables, and
16954 vtable-like objects. Ideally, this logic should not be here;
16955 instead, there should be some programmatic way of inquiring as
16956 to whether or not an object is a vtable. */
16957
16958#define VTABLE_NAME_P(NAME) \
9390387d 16959 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
16960 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
16961 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 16962 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 16963 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
16964
16965void
a2369ed3 16966rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
16967{
16968 /* Currently C++ toc references to vtables can be emitted before it
16969 is decided whether the vtable is public or private. If this is
16970 the case, then the linker will eventually complain that there is
f676971a 16971 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
16972 we emit the TOC reference to reference the symbol and not the
16973 section. */
16974 const char *name = XSTR (x, 0);
54ee9799 16975
f676971a 16976 if (VTABLE_NAME_P (name))
54ee9799
DE
16977 {
16978 RS6000_OUTPUT_BASENAME (file, name);
16979 }
16980 else
16981 assemble_name (file, name);
28e510bd
MM
16982}
16983
a4f6c312
SS
16984/* Output a TOC entry. We derive the entry name from what is being
16985 written. */
9878760c
RK
16986
16987void
a2369ed3 16988output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
16989{
16990 char buf[256];
3cce094d 16991 const char *name = buf;
ec940faa 16992 const char *real_name;
9878760c 16993 rtx base = x;
16fdeb48 16994 HOST_WIDE_INT offset = 0;
9878760c 16995
37409796 16996 gcc_assert (!TARGET_NO_TOC);
4697a36c 16997
9ebbca7d
GK
16998 /* When the linker won't eliminate them, don't output duplicate
16999 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17000 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17001 CODE_LABELs. */
17002 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17003 {
17004 struct toc_hash_struct *h;
17005 void * * found;
f676971a 17006
17211ab5 17007 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17008 time because GGC is not initialized at that point. */
17211ab5 17009 if (toc_hash_table == NULL)
f676971a 17010 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17011 toc_hash_eq, NULL);
17012
9ebbca7d
GK
17013 h = ggc_alloc (sizeof (*h));
17014 h->key = x;
a9098fd0 17015 h->key_mode = mode;
9ebbca7d 17016 h->labelno = labelno;
f676971a 17017
9ebbca7d
GK
17018 found = htab_find_slot (toc_hash_table, h, 1);
17019 if (*found == NULL)
17020 *found = h;
f676971a 17021 else /* This is indeed a duplicate.
9ebbca7d
GK
17022 Set this label equal to that label. */
17023 {
17024 fputs ("\t.set ", file);
17025 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17026 fprintf (file, "%d,", labelno);
17027 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17028 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17029 found)->labelno));
17030 return;
17031 }
17032 }
17033
17034 /* If we're going to put a double constant in the TOC, make sure it's
17035 aligned properly when strict alignment is on. */
ff1720ed
RK
17036 if (GET_CODE (x) == CONST_DOUBLE
17037 && STRICT_ALIGNMENT
a9098fd0 17038 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17039 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17040 ASM_OUTPUT_ALIGN (file, 3);
17041 }
17042
4977bab6 17043 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17044
37c37a57
RK
17045 /* Handle FP constants specially. Note that if we have a minimal
17046 TOC, things we put here aren't actually in the TOC, so we can allow
17047 FP constants. */
00b79d54
BE
17048 if (GET_CODE (x) == CONST_DOUBLE &&
17049 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17050 {
17051 REAL_VALUE_TYPE rv;
17052 long k[4];
17053
17054 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17055 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17056 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17057 else
17058 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17059
17060 if (TARGET_64BIT)
17061 {
17062 if (TARGET_MINIMAL_TOC)
17063 fputs (DOUBLE_INT_ASM_OP, file);
17064 else
17065 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17066 k[0] & 0xffffffff, k[1] & 0xffffffff,
17067 k[2] & 0xffffffff, k[3] & 0xffffffff);
17068 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17069 k[0] & 0xffffffff, k[1] & 0xffffffff,
17070 k[2] & 0xffffffff, k[3] & 0xffffffff);
17071 return;
17072 }
17073 else
17074 {
17075 if (TARGET_MINIMAL_TOC)
17076 fputs ("\t.long ", file);
17077 else
17078 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17079 k[0] & 0xffffffff, k[1] & 0xffffffff,
17080 k[2] & 0xffffffff, k[3] & 0xffffffff);
17081 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17082 k[0] & 0xffffffff, k[1] & 0xffffffff,
17083 k[2] & 0xffffffff, k[3] & 0xffffffff);
17084 return;
17085 }
17086 }
00b79d54
BE
17087 else if (GET_CODE (x) == CONST_DOUBLE &&
17088 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17089 {
042259f2
DE
17090 REAL_VALUE_TYPE rv;
17091 long k[2];
0adc764e 17092
042259f2 17093 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17094
17095 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17096 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17097 else
17098 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17099
13ded975
DE
17100 if (TARGET_64BIT)
17101 {
17102 if (TARGET_MINIMAL_TOC)
2bfcf297 17103 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17104 else
2f0552b6
AM
17105 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17106 k[0] & 0xffffffff, k[1] & 0xffffffff);
17107 fprintf (file, "0x%lx%08lx\n",
17108 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17109 return;
17110 }
1875cc88 17111 else
13ded975
DE
17112 {
17113 if (TARGET_MINIMAL_TOC)
2bfcf297 17114 fputs ("\t.long ", file);
13ded975 17115 else
2f0552b6
AM
17116 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17117 k[0] & 0xffffffff, k[1] & 0xffffffff);
17118 fprintf (file, "0x%lx,0x%lx\n",
17119 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17120 return;
17121 }
9878760c 17122 }
00b79d54
BE
17123 else if (GET_CODE (x) == CONST_DOUBLE &&
17124 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17125 {
042259f2
DE
17126 REAL_VALUE_TYPE rv;
17127 long l;
9878760c 17128
042259f2 17129 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17130 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17131 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17132 else
17133 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17134
31bfaa0b
DE
17135 if (TARGET_64BIT)
17136 {
17137 if (TARGET_MINIMAL_TOC)
2bfcf297 17138 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17139 else
2f0552b6
AM
17140 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17141 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17142 return;
17143 }
042259f2 17144 else
31bfaa0b
DE
17145 {
17146 if (TARGET_MINIMAL_TOC)
2bfcf297 17147 fputs ("\t.long ", file);
31bfaa0b 17148 else
2f0552b6
AM
17149 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17150 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17151 return;
17152 }
042259f2 17153 }
f176e826 17154 else if (GET_MODE (x) == VOIDmode
a9098fd0 17155 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17156 {
e2c953b6 17157 unsigned HOST_WIDE_INT low;
042259f2
DE
17158 HOST_WIDE_INT high;
17159
17160 if (GET_CODE (x) == CONST_DOUBLE)
17161 {
17162 low = CONST_DOUBLE_LOW (x);
17163 high = CONST_DOUBLE_HIGH (x);
17164 }
17165 else
17166#if HOST_BITS_PER_WIDE_INT == 32
17167 {
17168 low = INTVAL (x);
0858c623 17169 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
17170 }
17171#else
17172 {
c4ad648e
AM
17173 low = INTVAL (x) & 0xffffffff;
17174 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
17175 }
17176#endif
9878760c 17177
a9098fd0
GK
17178 /* TOC entries are always Pmode-sized, but since this
17179 is a bigendian machine then if we're putting smaller
17180 integer constants in the TOC we have to pad them.
17181 (This is still a win over putting the constants in
17182 a separate constant pool, because then we'd have
02a4ec28
FS
17183 to have both a TOC entry _and_ the actual constant.)
17184
17185 For a 32-bit target, CONST_INT values are loaded and shifted
17186 entirely within `low' and can be stored in one TOC entry. */
17187
37409796
NS
17188 /* It would be easy to make this work, but it doesn't now. */
17189 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
17190
17191 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
17192 {
17193#if HOST_BITS_PER_WIDE_INT == 32
17194 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
17195 POINTER_SIZE, &low, &high, 0);
17196#else
17197 low |= high << 32;
17198 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
17199 high = (HOST_WIDE_INT) low >> 32;
17200 low &= 0xffffffff;
17201#endif
17202 }
a9098fd0 17203
13ded975
DE
17204 if (TARGET_64BIT)
17205 {
17206 if (TARGET_MINIMAL_TOC)
2bfcf297 17207 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17208 else
2f0552b6
AM
17209 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
17210 (long) high & 0xffffffff, (long) low & 0xffffffff);
17211 fprintf (file, "0x%lx%08lx\n",
17212 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
17213 return;
17214 }
1875cc88 17215 else
13ded975 17216 {
02a4ec28
FS
17217 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
17218 {
17219 if (TARGET_MINIMAL_TOC)
2bfcf297 17220 fputs ("\t.long ", file);
02a4ec28 17221 else
2bfcf297 17222 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
17223 (long) high & 0xffffffff, (long) low & 0xffffffff);
17224 fprintf (file, "0x%lx,0x%lx\n",
17225 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 17226 }
13ded975 17227 else
02a4ec28
FS
17228 {
17229 if (TARGET_MINIMAL_TOC)
2bfcf297 17230 fputs ("\t.long ", file);
02a4ec28 17231 else
2f0552b6
AM
17232 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
17233 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 17234 }
13ded975
DE
17235 return;
17236 }
9878760c
RK
17237 }
17238
17239 if (GET_CODE (x) == CONST)
17240 {
37409796 17241 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 17242
9878760c
RK
17243 base = XEXP (XEXP (x, 0), 0);
17244 offset = INTVAL (XEXP (XEXP (x, 0), 1));
17245 }
f676971a 17246
37409796
NS
17247 switch (GET_CODE (base))
17248 {
17249 case SYMBOL_REF:
17250 name = XSTR (base, 0);
17251 break;
17252
17253 case LABEL_REF:
17254 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
17255 CODE_LABEL_NUMBER (XEXP (base, 0)));
17256 break;
17257
17258 case CODE_LABEL:
17259 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
17260 break;
17261
17262 default:
17263 gcc_unreachable ();
17264 }
9878760c 17265
772c5265 17266 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 17267 if (TARGET_MINIMAL_TOC)
2bfcf297 17268 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
17269 else
17270 {
b6c9286a 17271 fprintf (file, "\t.tc %s", real_name);
9878760c 17272
1875cc88 17273 if (offset < 0)
16fdeb48 17274 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 17275 else if (offset)
16fdeb48 17276 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 17277
19d2d16f 17278 fputs ("[TC],", file);
1875cc88 17279 }
581bc4de
MM
17280
17281 /* Currently C++ toc references to vtables can be emitted before it
17282 is decided whether the vtable is public or private. If this is
17283 the case, then the linker will eventually complain that there is
17284 a TOC reference to an unknown section. Thus, for vtables only,
17285 we emit the TOC reference to reference the symbol and not the
17286 section. */
28e510bd 17287 if (VTABLE_NAME_P (name))
581bc4de 17288 {
54ee9799 17289 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 17290 if (offset < 0)
16fdeb48 17291 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 17292 else if (offset > 0)
16fdeb48 17293 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
17294 }
17295 else
17296 output_addr_const (file, x);
19d2d16f 17297 putc ('\n', file);
9878760c
RK
17298}
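/* Illustrative example, not from the original source: for the DFmode
   constant 1.0 on a 64-bit target with a full TOC (no -mminimal-toc),
   REAL_VALUE_TO_TARGET_DOUBLE yields k[0] == 0x3ff00000 and k[1] == 0,
   so output_toc above emits roughly

	LC..7:
		.tc FD_3ff00000_0[TC],0x3ff0000000000000

   where "7" stands for a hypothetical label number and the exact spelling
   of the internal label depends on the assembler conventions in use.  */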
17299\f
17300/* Output an assembler pseudo-op to write an ASCII string of N characters
17301 starting at P to FILE.
17302
17303 On the RS/6000, we have to do this using the .byte operation and
17304 write out special characters outside the quoted string.
17305 Also, the assembler is broken; very long strings are truncated,
a4f6c312 17306 so we must artificially break them up early. */
9878760c
RK
17307
17308void
a2369ed3 17309output_ascii (FILE *file, const char *p, int n)
9878760c
RK
17310{
17311 char c;
17312 int i, count_string;
d330fd93
KG
17313 const char *for_string = "\t.byte \"";
17314 const char *for_decimal = "\t.byte ";
17315 const char *to_close = NULL;
9878760c
RK
17316
17317 count_string = 0;
17318 for (i = 0; i < n; i++)
17319 {
17320 c = *p++;
17321 if (c >= ' ' && c < 0177)
17322 {
17323 if (for_string)
17324 fputs (for_string, file);
17325 putc (c, file);
17326
17327 /* Write two quotes to get one. */
17328 if (c == '"')
17329 {
17330 putc (c, file);
17331 ++count_string;
17332 }
17333
17334 for_string = NULL;
17335 for_decimal = "\"\n\t.byte ";
17336 to_close = "\"\n";
17337 ++count_string;
17338
17339 if (count_string >= 512)
17340 {
17341 fputs (to_close, file);
17342
17343 for_string = "\t.byte \"";
17344 for_decimal = "\t.byte ";
17345 to_close = NULL;
17346 count_string = 0;
17347 }
17348 }
17349 else
17350 {
17351 if (for_decimal)
17352 fputs (for_decimal, file);
17353 fprintf (file, "%d", c);
17354
17355 for_string = "\n\t.byte \"";
17356 for_decimal = ", ";
17357 to_close = "\n";
17358 count_string = 0;
17359 }
17360 }
17361
17362 /* Now close the string if we have written one. Then end the line. */
17363 if (to_close)
9ebbca7d 17364 fputs (to_close, file);
9878760c
RK
17365}
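/* Illustrative example, not from the original source: for the five input
   characters  a  b  "  c  newline,  output_ascii above writes

	.byte "ab""c"
	.byte 10

   Printable characters go into a quoted .byte string (with the quote
   doubled to escape it), while the newline falls back to its decimal
   value on a separate .byte line.  */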
17366\f
17367/* Generate a unique section name for FILENAME for a section type
17368 represented by SECTION_DESC. Output goes into BUF.
17369
17370 SECTION_DESC can be any string, as long as it is different for each
17371 possible section type.
17372
17373 We name the section in the same manner as xlc. The name begins with an
17374 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
17375 names) with the last period replaced by the string SECTION_DESC. If
17376 FILENAME does not contain a period, SECTION_DESC is appended to the end of
17377 the name. */
9878760c
RK
17378
17379void
f676971a 17380rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 17381 const char *section_desc)
9878760c 17382{
9ebbca7d 17383 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
17384 char *p;
17385 int len;
9878760c
RK
17386
17387 after_last_slash = filename;
17388 for (q = filename; *q; q++)
11e5fe42
RK
17389 {
17390 if (*q == '/')
17391 after_last_slash = q + 1;
17392 else if (*q == '.')
17393 last_period = q;
17394 }
9878760c 17395
11e5fe42 17396 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 17397 *buf = (char *) xmalloc (len);
9878760c
RK
17398
17399 p = *buf;
17400 *p++ = '_';
17401
17402 for (q = after_last_slash; *q; q++)
17403 {
11e5fe42 17404 if (q == last_period)
c4ad648e 17405 {
9878760c
RK
17406 strcpy (p, section_desc);
17407 p += strlen (section_desc);
e3981aab 17408 break;
c4ad648e 17409 }
9878760c 17410
e9a780ec 17411 else if (ISALNUM (*q))
c4ad648e 17412 *p++ = *q;
9878760c
RK
17413 }
17414
11e5fe42 17415 if (last_period == 0)
9878760c
RK
17416 strcpy (p, section_desc);
17417 else
17418 *p = '\0';
17419}
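/* Illustrative example, not from the original source: with FILENAME
   "src/bar.c" and a hypothetical SECTION_DESC of ".ro_", the code above
   produces "_bar.ro_" -- the directory prefix is dropped, a leading '_'
   is emitted, the alphanumeric characters of "bar" are copied, and
   everything from the last period onward is replaced by SECTION_DESC.  */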
e165f3f0 17420\f
a4f6c312 17421/* Emit profile function. */
411707f4 17422
411707f4 17423void
a2369ed3 17424output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 17425{
858081ad
AH
17426 /* Non-standard profiling for kernels, which just saves LR then calls
17427 _mcount without worrying about arg saves. The idea is to change
17428 the function prologue as little as possible as it isn't easy to
17429 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
17430 if (TARGET_PROFILE_KERNEL)
17431 return;
17432
8480e480
CC
17433 if (DEFAULT_ABI == ABI_AIX)
17434 {
9739c90c
JJ
17435#ifndef NO_PROFILE_COUNTERS
17436# define NO_PROFILE_COUNTERS 0
17437#endif
f676971a 17438 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17439 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17440 else
17441 {
17442 char buf[30];
17443 const char *label_name;
17444 rtx fun;
411707f4 17445
9739c90c
JJ
17446 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17447 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17448 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17449
9739c90c
JJ
17450 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17451 fun, Pmode);
17452 }
8480e480 17453 }
ee890fe2
SS
17454 else if (DEFAULT_ABI == ABI_DARWIN)
17455 {
d5fa86ba 17456 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17457 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17458
17459 /* Be conservative and always set this, at least for now. */
17460 current_function_uses_pic_offset_table = 1;
17461
17462#if TARGET_MACHO
17463 /* For PIC code, set up a stub and collect the caller's address
17464 from r0, which is where the prologue puts it. */
11abc112
MM
17465 if (MACHOPIC_INDIRECT
17466 && current_function_uses_pic_offset_table)
17467 caller_addr_regno = 0;
ee890fe2
SS
17468#endif
17469 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17470 0, VOIDmode, 1,
17471 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17472 }
411707f4
CC
17473}
17474
a4f6c312 17475/* Write function profiler code. */
e165f3f0
RK
17476
17477void
a2369ed3 17478output_function_profiler (FILE *file, int labelno)
e165f3f0 17479{
3daf36a4 17480 char buf[100];
e165f3f0 17481
38c1f2d7 17482 switch (DEFAULT_ABI)
3daf36a4 17483 {
38c1f2d7 17484 default:
37409796 17485 gcc_unreachable ();
38c1f2d7
MM
17486
17487 case ABI_V4:
09eeeacb
AM
17488 if (!TARGET_32BIT)
17489 {
d4ee4d25 17490 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17491 return;
17492 }
ffcfcb5f 17493 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17494 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17495 if (NO_PROFILE_COUNTERS)
17496 {
17497 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17498 reg_names[0], reg_names[1]);
17499 }
17500 else if (TARGET_SECURE_PLT && flag_pic)
17501 {
17502 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17503 reg_names[0], reg_names[1]);
17504 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17505 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17506 reg_names[12], reg_names[12]);
17507 assemble_name (file, buf);
17508 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17509 assemble_name (file, buf);
17510 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17511 }
17512 else if (flag_pic == 1)
38c1f2d7 17513 {
dfdfa60f 17514 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17515 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17516 reg_names[0], reg_names[1]);
17167fd8 17517 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17518 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17519 assemble_name (file, buf);
17167fd8 17520 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17521 }
9ebbca7d 17522 else if (flag_pic > 1)
38c1f2d7 17523 {
71625f3d
AM
17524 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17525 reg_names[0], reg_names[1]);
9ebbca7d 17526 /* Now, we need to get the address of the label. */
71625f3d 17527 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17528 assemble_name (file, buf);
9ebbca7d
GK
17529 fputs ("-.\n1:", file);
17530 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17531 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17532 reg_names[0], reg_names[11]);
17533 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17534 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17535 }
38c1f2d7
MM
17536 else
17537 {
17167fd8 17538 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17539 assemble_name (file, buf);
dfdfa60f 17540 fputs ("@ha\n", file);
71625f3d
AM
17541 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17542 reg_names[0], reg_names[1]);
a260abc9 17543 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17544 assemble_name (file, buf);
17167fd8 17545 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17546 }
17547
50d440bc 17548 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17549 fprintf (file, "\tbl %s%s\n",
17550 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17551 break;
17552
17553 case ABI_AIX:
ee890fe2 17554 case ABI_DARWIN:
ffcfcb5f
AM
17555 if (!TARGET_PROFILE_KERNEL)
17556 {
a3c9585f 17557 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17558 }
17559 else
17560 {
37409796 17561 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17562
17563 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17564 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17565
6de9cd9a 17566 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17567 {
17568 asm_fprintf (file, "\tstd %s,24(%s)\n",
17569 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17570 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17571 asm_fprintf (file, "\tld %s,24(%s)\n",
17572 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17573 }
17574 else
17575 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17576 }
38c1f2d7
MM
17577 break;
17578 }
e165f3f0 17579}
a251ffd0 17580
b54cf83a 17581\f
44cd321e
PS
17582
17583/* The following variable value is the last issued insn. */
17584
17585static rtx last_scheduled_insn;
17586
17587/* The following variable helps to balance issuing of load and
17588 store instructions. */
17589
17590static int load_store_pendulum;
17591
b54cf83a
DE
17592/* Power4 load update and store update instructions are cracked into a
17593 load or store and an integer insn which are executed in the same cycle.
17594 Branches have their own dispatch slot which does not count against the
17595 GCC issue rate, but it changes the program flow so there are no other
17596 instructions to issue in this cycle. */
17597
17598static int
f676971a
EC
17599rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17600 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17601 rtx insn, int more)
b54cf83a 17602{
44cd321e 17603 last_scheduled_insn = insn;
b54cf83a
DE
17604 if (GET_CODE (PATTERN (insn)) == USE
17605 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17606 {
17607 cached_can_issue_more = more;
17608 return cached_can_issue_more;
17609 }
17610
17611 if (insn_terminates_group_p (insn, current_group))
17612 {
17613 cached_can_issue_more = 0;
17614 return cached_can_issue_more;
17615 }
b54cf83a 17616
d296e02e
AP
17617 /* If no reservation, but reach here */
17618 if (recog_memoized (insn) < 0)
17619 return more;
17620
ec507f2d 17621 if (rs6000_sched_groups)
b54cf83a 17622 {
cbe26ab8 17623 if (is_microcoded_insn (insn))
44cd321e 17624 cached_can_issue_more = 0;
cbe26ab8 17625 else if (is_cracked_insn (insn))
44cd321e
PS
17626 cached_can_issue_more = more > 2 ? more - 2 : 0;
17627 else
17628 cached_can_issue_more = more - 1;
17629
17630 return cached_can_issue_more;
b54cf83a 17631 }
165b263e 17632
d296e02e
AP
17633 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17634 return 0;
17635
44cd321e
PS
17636 cached_can_issue_more = more - 1;
17637 return cached_can_issue_more;
b54cf83a
DE
17638}
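/* Illustrative example, not from the original source: on a dispatch-group
   target (rs6000_sched_groups, e.g. POWER4/POWER5), a cracked insn uses
   two issue slots, so with `more' == 4 the hook above caches and returns
   2, while a microcoded insn closes the group and returns 0.  */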
17639
a251ffd0
TG
17640/* Adjust the cost of a scheduling dependency. Return the new cost of
17641 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17642
c237e94a 17643static int
0a4f0294 17644rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17645{
44cd321e 17646 enum attr_type attr_type;
a251ffd0 17647
44cd321e 17648 if (! recog_memoized (insn))
a251ffd0
TG
17649 return 0;
17650
44cd321e 17651 switch (REG_NOTE_KIND (link))
a251ffd0 17652 {
44cd321e
PS
17653 case REG_DEP_TRUE:
17654 {
17655 /* Data dependency; DEP_INSN writes a register that INSN reads
17656 some cycles later. */
17657
17658 /* Separate a load from a narrower, dependent store. */
17659 if (rs6000_sched_groups
17660 && GET_CODE (PATTERN (insn)) == SET
17661 && GET_CODE (PATTERN (dep_insn)) == SET
17662 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17663 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17664 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17665 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17666 return cost + 14;
17667
17668 attr_type = get_attr_type (insn);
17669
17670 switch (attr_type)
17671 {
17672 case TYPE_JMPREG:
17673 /* Tell the first scheduling pass about the latency between
17674 a mtctr and bctr (and mtlr and br/blr). The first
17675 scheduling pass will not know about this latency since
17676 the mtctr instruction, which has the latency associated
17677 to it, will be generated by reload. */
17678 return TARGET_POWER ? 5 : 4;
17679 case TYPE_BRANCH:
17680 /* Leave some extra cycles between a compare and its
17681 dependent branch, to inhibit expensive mispredicts. */
17682 if ((rs6000_cpu_attr == CPU_PPC603
17683 || rs6000_cpu_attr == CPU_PPC604
17684 || rs6000_cpu_attr == CPU_PPC604E
17685 || rs6000_cpu_attr == CPU_PPC620
17686 || rs6000_cpu_attr == CPU_PPC630
17687 || rs6000_cpu_attr == CPU_PPC750
17688 || rs6000_cpu_attr == CPU_PPC7400
17689 || rs6000_cpu_attr == CPU_PPC7450
17690 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17691 || rs6000_cpu_attr == CPU_POWER5
17692 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17693 && recog_memoized (dep_insn)
17694 && (INSN_CODE (dep_insn) >= 0))
982afe02 17695
44cd321e
PS
17696 switch (get_attr_type (dep_insn))
17697 {
17698 case TYPE_CMP:
17699 case TYPE_COMPARE:
17700 case TYPE_DELAYED_COMPARE:
17701 case TYPE_IMUL_COMPARE:
17702 case TYPE_LMUL_COMPARE:
17703 case TYPE_FPCOMPARE:
17704 case TYPE_CR_LOGICAL:
17705 case TYPE_DELAYED_CR:
17706 return cost + 2;
17707 default:
17708 break;
17709 }
17710 break;
17711
17712 case TYPE_STORE:
17713 case TYPE_STORE_U:
17714 case TYPE_STORE_UX:
17715 case TYPE_FPSTORE:
17716 case TYPE_FPSTORE_U:
17717 case TYPE_FPSTORE_UX:
17718 if ((rs6000_cpu == PROCESSOR_POWER6)
17719 && recog_memoized (dep_insn)
17720 && (INSN_CODE (dep_insn) >= 0))
17721 {
17722
17723 if (GET_CODE (PATTERN (insn)) != SET)
17724 /* If this happens, we have to extend this to schedule
17725 optimally. Return default for now. */
17726 return cost;
17727
17728 /* Adjust the cost for the case where the value written
17729 by a fixed point operation is used as the address
17730 gen value on a store. */
17731 switch (get_attr_type (dep_insn))
17732 {
17733 case TYPE_LOAD:
17734 case TYPE_LOAD_U:
17735 case TYPE_LOAD_UX:
17736 case TYPE_CNTLZ:
17737 {
17738 if (! store_data_bypass_p (dep_insn, insn))
17739 return 4;
17740 break;
17741 }
17742 case TYPE_LOAD_EXT:
17743 case TYPE_LOAD_EXT_U:
17744 case TYPE_LOAD_EXT_UX:
17745 case TYPE_VAR_SHIFT_ROTATE:
17746 case TYPE_VAR_DELAYED_COMPARE:
17747 {
17748 if (! store_data_bypass_p (dep_insn, insn))
17749 return 6;
17750 break;
17751 }
17752 case TYPE_INTEGER:
17753 case TYPE_COMPARE:
17754 case TYPE_FAST_COMPARE:
17755 case TYPE_EXTS:
17756 case TYPE_SHIFT:
17757 case TYPE_INSERT_WORD:
17758 case TYPE_INSERT_DWORD:
17759 case TYPE_FPLOAD_U:
17760 case TYPE_FPLOAD_UX:
17761 case TYPE_STORE_U:
17762 case TYPE_STORE_UX:
17763 case TYPE_FPSTORE_U:
17764 case TYPE_FPSTORE_UX:
17765 {
17766 if (! store_data_bypass_p (dep_insn, insn))
17767 return 3;
17768 break;
17769 }
17770 case TYPE_IMUL:
17771 case TYPE_IMUL2:
17772 case TYPE_IMUL3:
17773 case TYPE_LMUL:
17774 case TYPE_IMUL_COMPARE:
17775 case TYPE_LMUL_COMPARE:
17776 {
17777 if (! store_data_bypass_p (dep_insn, insn))
17778 return 17;
17779 break;
17780 }
17781 case TYPE_IDIV:
17782 {
17783 if (! store_data_bypass_p (dep_insn, insn))
17784 return 45;
17785 break;
17786 }
17787 case TYPE_LDIV:
17788 {
17789 if (! store_data_bypass_p (dep_insn, insn))
17790 return 57;
17791 break;
17792 }
17793 default:
17794 break;
17795 }
17796 }
17797 break;
17798
17799 case TYPE_LOAD:
17800 case TYPE_LOAD_U:
17801 case TYPE_LOAD_UX:
17802 case TYPE_LOAD_EXT:
17803 case TYPE_LOAD_EXT_U:
17804 case TYPE_LOAD_EXT_UX:
17805 if ((rs6000_cpu == PROCESSOR_POWER6)
17806 && recog_memoized (dep_insn)
17807 && (INSN_CODE (dep_insn) >= 0))
17808 {
17809
17810 /* Adjust the cost for the case where the value written
17811 by a fixed point instruction is used within the address
17812 gen portion of a subsequent load(u)(x) */
17813 switch (get_attr_type (dep_insn))
17814 {
17815 case TYPE_LOAD:
17816 case TYPE_LOAD_U:
17817 case TYPE_LOAD_UX:
17818 case TYPE_CNTLZ:
17819 {
17820 if (set_to_load_agen (dep_insn, insn))
17821 return 4;
17822 break;
17823 }
17824 case TYPE_LOAD_EXT:
17825 case TYPE_LOAD_EXT_U:
17826 case TYPE_LOAD_EXT_UX:
17827 case TYPE_VAR_SHIFT_ROTATE:
17828 case TYPE_VAR_DELAYED_COMPARE:
17829 {
17830 if (set_to_load_agen (dep_insn, insn))
17831 return 6;
17832 break;
17833 }
17834 case TYPE_INTEGER:
17835 case TYPE_COMPARE:
17836 case TYPE_FAST_COMPARE:
17837 case TYPE_EXTS:
17838 case TYPE_SHIFT:
17839 case TYPE_INSERT_WORD:
17840 case TYPE_INSERT_DWORD:
17841 case TYPE_FPLOAD_U:
17842 case TYPE_FPLOAD_UX:
17843 case TYPE_STORE_U:
17844 case TYPE_STORE_UX:
17845 case TYPE_FPSTORE_U:
17846 case TYPE_FPSTORE_UX:
17847 {
17848 if (set_to_load_agen (dep_insn, insn))
17849 return 3;
17850 break;
17851 }
17852 case TYPE_IMUL:
17853 case TYPE_IMUL2:
17854 case TYPE_IMUL3:
17855 case TYPE_LMUL:
17856 case TYPE_IMUL_COMPARE:
17857 case TYPE_LMUL_COMPARE:
17858 {
17859 if (set_to_load_agen (dep_insn, insn))
17860 return 17;
17861 break;
17862 }
17863 case TYPE_IDIV:
17864 {
17865 if (set_to_load_agen (dep_insn, insn))
17866 return 45;
17867 break;
17868 }
17869 case TYPE_LDIV:
17870 {
17871 if (set_to_load_agen (dep_insn, insn))
17872 return 57;
17873 break;
17874 }
17875 default:
17876 break;
17877 }
17878 }
17879 break;
17880
17881 case TYPE_FPLOAD:
17882 if ((rs6000_cpu == PROCESSOR_POWER6)
17883 && recog_memoized (dep_insn)
17884 && (INSN_CODE (dep_insn) >= 0)
17885 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17886 return 2;
17887
17888 default:
17889 break;
17890 }
c9dbf840 17891
a251ffd0 17892 /* Fall out to return default cost. */
44cd321e
PS
17893 }
17894 break;
17895
17896 case REG_DEP_OUTPUT:
17897 /* Output dependency; DEP_INSN writes a register that INSN writes some
17898 cycles later. */
17899 if ((rs6000_cpu == PROCESSOR_POWER6)
17900 && recog_memoized (dep_insn)
17901 && (INSN_CODE (dep_insn) >= 0))
17902 {
17903 attr_type = get_attr_type (insn);
17904
17905 switch (attr_type)
17906 {
17907 case TYPE_FP:
17908 if (get_attr_type (dep_insn) == TYPE_FP)
17909 return 1;
17910 break;
17911 case TYPE_FPLOAD:
17912 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
17913 return 2;
17914 break;
17915 default:
17916 break;
17917 }
17918 }
17919 case REG_DEP_ANTI:
17920 /* Anti dependency; DEP_INSN reads a register that INSN writes some
17921 cycles later. */
17922 return 0;
17923
17924 default:
17925 gcc_unreachable ();
a251ffd0
TG
17926 }
17927
17928 return cost;
17929}
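/* Illustrative example, not from the original source: on POWER6, if a
   fixed-point insn such as `addi r9,r9,16' (assuming it is classified as
   one of the fixed-point types handled above) computes the register that
   a following `stw r0,0(r9)' uses for address generation, then
   store_data_bypass_p is false (the value is not the store data) and the
   switch above raises the dependence cost to 3 cycles.  Had the addi
   produced r0, the store data itself, the bypass would apply and the
   default cost would be kept.  The register numbers are hypothetical.  */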
b6c9286a 17930
cbe26ab8 17931/* The function returns true if INSN is microcoded.
839a4992 17932 Return false otherwise. */
cbe26ab8
DN
17933
17934static bool
17935is_microcoded_insn (rtx insn)
17936{
17937 if (!insn || !INSN_P (insn)
17938 || GET_CODE (PATTERN (insn)) == USE
17939 || GET_CODE (PATTERN (insn)) == CLOBBER)
17940 return false;
17941
d296e02e
AP
17942 if (rs6000_cpu_attr == CPU_CELL)
17943 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
17944
ec507f2d 17945 if (rs6000_sched_groups)
cbe26ab8
DN
17946 {
17947 enum attr_type type = get_attr_type (insn);
17948 if (type == TYPE_LOAD_EXT_U
17949 || type == TYPE_LOAD_EXT_UX
17950 || type == TYPE_LOAD_UX
17951 || type == TYPE_STORE_UX
17952 || type == TYPE_MFCR)
c4ad648e 17953 return true;
cbe26ab8
DN
17954 }
17955
17956 return false;
17957}
17958
cbe26ab8
DN
17959/* The function returns true if INSN is cracked into 2 instructions
17960 by the processor (and therefore occupies 2 issue slots). */
17961
17962static bool
17963is_cracked_insn (rtx insn)
17964{
17965 if (!insn || !INSN_P (insn)
17966 || GET_CODE (PATTERN (insn)) == USE
17967 || GET_CODE (PATTERN (insn)) == CLOBBER)
17968 return false;
17969
ec507f2d 17970 if (rs6000_sched_groups)
cbe26ab8
DN
17971 {
17972 enum attr_type type = get_attr_type (insn);
17973 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
17974 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
17975 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
17976 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
17977 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
17978 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
17979 || type == TYPE_IDIV || type == TYPE_LDIV
17980 || type == TYPE_INSERT_WORD)
17981 return true;
cbe26ab8
DN
17982 }
17983
17984 return false;
17985}
17986
17987/* The function returns true if INSN can be issued only from
a3c9585f 17988 the branch slot. */
cbe26ab8
DN
17989
17990static bool
17991is_branch_slot_insn (rtx insn)
17992{
17993 if (!insn || !INSN_P (insn)
17994 || GET_CODE (PATTERN (insn)) == USE
17995 || GET_CODE (PATTERN (insn)) == CLOBBER)
17996 return false;
17997
ec507f2d 17998 if (rs6000_sched_groups)
cbe26ab8
DN
17999 {
18000 enum attr_type type = get_attr_type (insn);
18001 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18002 return true;
cbe26ab8
DN
18003 return false;
18004 }
18005
18006 return false;
18007}
79ae11c4 18008
44cd321e
PS
18009/* The function returns true if out_insn sets a value that is
18010 used in the address generation computation of in_insn. */
18011static bool
18012set_to_load_agen (rtx out_insn, rtx in_insn)
18013{
18014 rtx out_set, in_set;
18015
18016 /* For performance reasons, only handle the simple case where
18017 both loads are a single_set. */
18018 out_set = single_set (out_insn);
18019 if (out_set)
18020 {
18021 in_set = single_set (in_insn);
18022 if (in_set)
18023 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18024 }
18025
18026 return false;
18027}
18028
18029/* The function returns true if the target storage location of
18030 out_insn is adjacent to the target storage location of in_insn,
18031 i.e. the two memory locations are adjacent. */
18032
18033static bool
18034adjacent_mem_locations (rtx insn1, rtx insn2)
18035{
18036
e3a0e200
PB
18037 rtx a = get_store_dest (PATTERN (insn1));
18038 rtx b = get_store_dest (PATTERN (insn2));
18039
44cd321e
PS
18040 if ((GET_CODE (XEXP (a, 0)) == REG
18041 || (GET_CODE (XEXP (a, 0)) == PLUS
18042 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18043 && (GET_CODE (XEXP (b, 0)) == REG
18044 || (GET_CODE (XEXP (b, 0)) == PLUS
18045 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18046 {
18047 HOST_WIDE_INT val0 = 0, val1 = 0;
18048 rtx reg0, reg1;
18049 int val_diff;
18050
18051 if (GET_CODE (XEXP (a, 0)) == PLUS)
18052 {
18053 reg0 = XEXP (XEXP (a, 0), 0);
18054 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18055 }
18056 else
18057 reg0 = XEXP (a, 0);
18058
18059 if (GET_CODE (XEXP (b, 0)) == PLUS)
18060 {
18061 reg1 = XEXP (XEXP (b, 0), 0);
18062 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18063 }
18064 else
18065 reg1 = XEXP (b, 0);
18066
18067 val_diff = val1 - val0;
18068
18069 return ((REGNO (reg0) == REGNO (reg1))
18070 && (val_diff == INTVAL (MEM_SIZE (a))
18071 || val_diff == -INTVAL (MEM_SIZE (b))));
18072 }
18073
18074 return false;
18075}
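/* Illustrative example, not from the original source: two stores whose
   destinations are 4(r9) and 8(r9), each with a MEM_SIZE of 4 bytes, are
   reported as adjacent by the function above -- both addresses use r9,
   val_diff is 4, and that equals the size of the first store.  */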
18076
a4f6c312 18077/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18078 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18079 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18080 define this macro if you do not need to adjust the scheduling
18081 priorities of insns. */
bef84347 18082
c237e94a 18083static int
a2369ed3 18084rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18085{
a4f6c312
SS
18086 /* On machines (like the 750) which have asymmetric integer units,
18087 where one integer unit can do multiply and divides and the other
18088 can't, reduce the priority of multiply/divide so it is scheduled
18089 before other integer operations. */
bef84347
VM
18090
18091#if 0
2c3c49de 18092 if (! INSN_P (insn))
bef84347
VM
18093 return priority;
18094
18095 if (GET_CODE (PATTERN (insn)) == USE)
18096 return priority;
18097
18098 switch (rs6000_cpu_attr) {
18099 case CPU_PPC750:
18100 switch (get_attr_type (insn))
18101 {
18102 default:
18103 break;
18104
18105 case TYPE_IMUL:
18106 case TYPE_IDIV:
3cb999d8
DE
18107 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18108 priority, priority);
bef84347
VM
18109 if (priority >= 0 && priority < 0x01000000)
18110 priority >>= 3;
18111 break;
18112 }
18113 }
18114#endif
18115
44cd321e 18116 if (insn_must_be_first_in_group (insn)
79ae11c4 18117 && reload_completed
f676971a 18118 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18119 && rs6000_sched_restricted_insns_priority)
18120 {
18121
c4ad648e
AM
18122 /* Prioritize insns that can be dispatched only in the first
18123 dispatch slot. */
79ae11c4 18124 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18125 /* Attach highest priority to insn. This means that in
18126 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18127 precede 'priority' (critical path) considerations. */
f676971a 18128 return current_sched_info->sched_max_insns_priority;
79ae11c4 18129 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18130 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18131 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18132 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18133 return (priority + 1);
18134 }
79ae11c4 18135
44cd321e
PS
18136 if (rs6000_cpu == PROCESSOR_POWER6
18137 && ((load_store_pendulum == -2 && is_load_insn (insn))
18138 || (load_store_pendulum == 2 && is_store_insn (insn))))
18139 /* Attach highest priority to insn if the scheduler has just issued two
18140 stores and this instruction is a load, or two loads and this instruction
18141 is a store. Power6 wants loads and stores scheduled alternately
18142 when possible. */
18143 return current_sched_info->sched_max_insns_priority;
18144
bef84347
VM
18145 return priority;
18146}
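/* The block below is an illustrative sketch only (kept under #if 0, not
   built): it restates, in isolation, how the two settings of
   rs6000_sched_restricted_insns_priority handled above map an insn's
   priority, assuming a hypothetical sched_max_insns_priority value.  */
#if 0
static int
example_restricted_priority (int scheme, int priority, int max_priority)
{
  if (scheme == 1)
    /* Dispatch-slot restrictions dominate critical-path priority.  */
    return max_priority;
  else if (scheme == 2)
    /* Critical path dominates; the bump only breaks ties.  */
    return priority + 1;
  return priority;
}
#endif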
18147
d296e02e
AP
18148/* Return true if the instruction is nonpipelined on the Cell. */
18149static bool
18150is_nonpipeline_insn (rtx insn)
18151{
18152 enum attr_type type;
18153 if (!insn || !INSN_P (insn)
18154 || GET_CODE (PATTERN (insn)) == USE
18155 || GET_CODE (PATTERN (insn)) == CLOBBER)
18156 return false;
18157
18158 type = get_attr_type (insn);
18159 if (type == TYPE_IMUL
18160 || type == TYPE_IMUL2
18161 || type == TYPE_IMUL3
18162 || type == TYPE_LMUL
18163 || type == TYPE_IDIV
18164 || type == TYPE_LDIV
18165 || type == TYPE_SDIV
18166 || type == TYPE_DDIV
18167 || type == TYPE_SSQRT
18168 || type == TYPE_DSQRT
18169 || type == TYPE_MFCR
18170 || type == TYPE_MFCRF
18171 || type == TYPE_MFJMPR)
18172 {
18173 return true;
18174 }
18175 return false;
18176}
18177
18178
a4f6c312
SS
18179/* Return how many instructions the machine can issue per cycle. */
18180
c237e94a 18181static int
863d938c 18182rs6000_issue_rate (void)
b6c9286a 18183{
3317bab1
DE
18184 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
18185 if (!reload_completed)
18186 return 1;
18187
b6c9286a 18188 switch (rs6000_cpu_attr) {
3cb999d8
DE
18189 case CPU_RIOS1: /* ? */
18190 case CPU_RS64A:
18191 case CPU_PPC601: /* ? */
ed947a96 18192 case CPU_PPC7450:
3cb999d8 18193 return 3;
b54cf83a 18194 case CPU_PPC440:
b6c9286a 18195 case CPU_PPC603:
bef84347 18196 case CPU_PPC750:
ed947a96 18197 case CPU_PPC7400:
be12c2b0 18198 case CPU_PPC8540:
d296e02e 18199 case CPU_CELL:
f676971a 18200 return 2;
3cb999d8 18201 case CPU_RIOS2:
b6c9286a 18202 case CPU_PPC604:
19684119 18203 case CPU_PPC604E:
b6c9286a 18204 case CPU_PPC620:
3cb999d8 18205 case CPU_PPC630:
b6c9286a 18206 return 4;
cbe26ab8 18207 case CPU_POWER4:
ec507f2d 18208 case CPU_POWER5:
44cd321e 18209 case CPU_POWER6:
cbe26ab8 18210 return 5;
b6c9286a
MM
18211 default:
18212 return 1;
18213 }
18214}
18215
be12c2b0
VM
18216/* Return how many instructions to look ahead for better insn
18217 scheduling. */
18218
18219static int
863d938c 18220rs6000_use_sched_lookahead (void)
be12c2b0
VM
18221{
18222 if (rs6000_cpu_attr == CPU_PPC8540)
18223 return 4;
d296e02e
AP
18224 if (rs6000_cpu_attr == CPU_CELL)
18225 return (reload_completed ? 8 : 0);
be12c2b0
VM
18226 return 0;
18227}
18228
d296e02e
AP
18229/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
18230static int
18231rs6000_use_sched_lookahead_guard (rtx insn)
18232{
18233 if (rs6000_cpu_attr != CPU_CELL)
18234 return 1;
18235
18236 if (insn == NULL_RTX || !INSN_P (insn))
18237 abort ();
982afe02 18238
d296e02e
AP
18239 if (!reload_completed
18240 || is_nonpipeline_insn (insn)
18241 || is_microcoded_insn (insn))
18242 return 0;
18243
18244 return 1;
18245}
18246
569fa502
DN
18247/* Determine if PAT refers to memory. */
18248
18249static bool
18250is_mem_ref (rtx pat)
18251{
18252 const char * fmt;
18253 int i, j;
18254 bool ret = false;
18255
18256 if (GET_CODE (pat) == MEM)
18257 return true;
18258
18259 /* Recursively process the pattern. */
18260 fmt = GET_RTX_FORMAT (GET_CODE (pat));
18261
18262 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
18263 {
18264 if (fmt[i] == 'e')
18265 ret |= is_mem_ref (XEXP (pat, i));
18266 else if (fmt[i] == 'E')
18267 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
18268 ret |= is_mem_ref (XVECEXP (pat, i, j));
18269 }
18270
18271 return ret;
18272}
18273
18274/* Determine if PAT is a PATTERN of a load insn. */
f676971a 18275
569fa502
DN
18276static bool
18277is_load_insn1 (rtx pat)
18278{
18279 if (!pat || pat == NULL_RTX)
18280 return false;
18281
18282 if (GET_CODE (pat) == SET)
18283 return is_mem_ref (SET_SRC (pat));
18284
18285 if (GET_CODE (pat) == PARALLEL)
18286 {
18287 int i;
18288
18289 for (i = 0; i < XVECLEN (pat, 0); i++)
18290 if (is_load_insn1 (XVECEXP (pat, 0, i)))
18291 return true;
18292 }
18293
18294 return false;
18295}
18296
18297/* Determine if INSN loads from memory. */
18298
18299static bool
18300is_load_insn (rtx insn)
18301{
18302 if (!insn || !INSN_P (insn))
18303 return false;
18304
18305 if (GET_CODE (insn) == CALL_INSN)
18306 return false;
18307
18308 return is_load_insn1 (PATTERN (insn));
18309}
18310
18311/* Determine if PAT is a PATTERN of a store insn. */
18312
18313static bool
18314is_store_insn1 (rtx pat)
18315{
18316 if (!pat || pat == NULL_RTX)
18317 return false;
18318
18319 if (GET_CODE (pat) == SET)
18320 return is_mem_ref (SET_DEST (pat));
18321
18322 if (GET_CODE (pat) == PARALLEL)
18323 {
18324 int i;
18325
18326 for (i = 0; i < XVECLEN (pat, 0); i++)
18327 if (is_store_insn1 (XVECEXP (pat, 0, i)))
18328 return true;
18329 }
18330
18331 return false;
18332}
18333
18334/* Determine if INSN stores to memory. */
18335
18336static bool
18337is_store_insn (rtx insn)
18338{
18339 if (!insn || !INSN_P (insn))
18340 return false;
18341
18342 return is_store_insn1 (PATTERN (insn));
18343}
18344
e3a0e200
PB
18345/* Return the dest of a store insn. */
18346
18347static rtx
18348get_store_dest (rtx pat)
18349{
18350 gcc_assert (is_store_insn1 (pat));
18351
18352 if (GET_CODE (pat) == SET)
18353 return SET_DEST (pat);
18354 else if (GET_CODE (pat) == PARALLEL)
18355 {
18356 int i;
18357
18358 for (i = 0; i < XVECLEN (pat, 0); i++)
18359 {
18360 rtx inner_pat = XVECEXP (pat, 0, i);
18361 if (GET_CODE (inner_pat) == SET
18362 && is_mem_ref (SET_DEST (inner_pat)))
18363 return inner_pat;
18364 }
18365 }
18366 /* We shouldn't get here, because we should have either a simple
18367 store insn or a store with update which are covered above. */
18368 gcc_unreachable();
18369}
18370
569fa502
DN
18371/* Returns whether the dependence between INSN and NEXT is considered
18372 costly by the given target. */
18373
18374static bool
b198261f 18375rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 18376{
b198261f
MK
18377 rtx insn;
18378 rtx next;
18379
aabcd309 18380 /* If the flag is not enabled - no dependence is considered costly;
f676971a 18381 allow all dependent insns in the same group.
569fa502
DN
18382 This is the most aggressive option. */
18383 if (rs6000_sched_costly_dep == no_dep_costly)
18384 return false;
18385
f676971a 18386 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
18387 do not allow dependent instructions in the same group.
18388 This is the most conservative option. */
18389 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 18390 return true;
569fa502 18391
b198261f
MK
18392 insn = DEP_PRO (dep);
18393 next = DEP_CON (dep);
18394
f676971a
EC
18395 if (rs6000_sched_costly_dep == store_to_load_dep_costly
18396 && is_load_insn (next)
569fa502
DN
18397 && is_store_insn (insn))
18398 /* Prevent load after store in the same group. */
18399 return true;
18400
18401 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 18402 && is_load_insn (next)
569fa502 18403 && is_store_insn (insn)
e2f6ff94 18404 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
18405 /* Prevent load after store in the same group if it is a true
18406 dependence. */
569fa502 18407 return true;
f676971a
EC
18408
18409 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
18410 and will not be scheduled in the same group. */
18411 if (rs6000_sched_costly_dep <= max_dep_latency
18412 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
18413 return true;
18414
18415 return false;
18416}
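/* Illustrative sketch only (kept under #if 0, not built): a condensed
   restatement of the rs6000_sched_costly_dep policy implemented above.
   SETTING stands for rs6000_sched_costly_dep; the enumerators are the
   ones tested in rs6000_is_costly_dependence.  */
#if 0
static bool
example_costly_dep_policy (int setting, bool store_then_load, bool true_dep,
			   int cost, int distance)
{
  if (setting == no_dep_costly)
    return false;
  if (setting == all_deps_costly)
    return true;
  if (setting == store_to_load_dep_costly && store_then_load)
    return true;
  if (setting == true_store_to_load_dep_costly && store_then_load && true_dep)
    return true;
  /* Any other value is a latency threshold.  */
  return setting <= max_dep_latency && (cost - distance) >= setting;
}
#endif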
18417
f676971a 18418/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
18419 skipping any "non-active" insns - insns that will not actually occupy
18420 an issue slot. Return NULL_RTX if such an insn is not found. */
18421
18422static rtx
18423get_next_active_insn (rtx insn, rtx tail)
18424{
f489aff8 18425 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
18426 return NULL_RTX;
18427
f489aff8 18428 while (1)
cbe26ab8 18429 {
f489aff8
AM
18430 insn = NEXT_INSN (insn);
18431 if (insn == NULL_RTX || insn == tail)
18432 return NULL_RTX;
cbe26ab8 18433
f489aff8
AM
18434 if (CALL_P (insn)
18435 || JUMP_P (insn)
18436 || (NONJUMP_INSN_P (insn)
18437 && GET_CODE (PATTERN (insn)) != USE
18438 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18439 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18440 break;
18441 }
18442 return insn;
cbe26ab8
DN
18443}
18444
44cd321e
PS
18445/* We are about to begin issuing insns for this clock cycle. */
18446
18447static int
18448rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18449 rtx *ready ATTRIBUTE_UNUSED,
18450 int *pn_ready ATTRIBUTE_UNUSED,
18451 int clock_var ATTRIBUTE_UNUSED)
18452{
d296e02e
AP
18453 int n_ready = *pn_ready;
18454
44cd321e
PS
18455 if (sched_verbose)
18456 fprintf (dump, "// rs6000_sched_reorder :\n");
18457
d296e02e
AP
18458 /* Reorder the ready list, if the second to last ready insn
18459 is a nonpipeline insn. */
18460 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18461 {
18462 if (is_nonpipeline_insn (ready[n_ready - 1])
18463 && (recog_memoized (ready[n_ready - 2]) > 0))
18464 /* Simply swap first two insns. */
18465 {
18466 rtx tmp = ready[n_ready - 1];
18467 ready[n_ready - 1] = ready[n_ready - 2];
18468 ready[n_ready - 2] = tmp;
18469 }
18470 }
18471
44cd321e
PS
18472 if (rs6000_cpu == PROCESSOR_POWER6)
18473 load_store_pendulum = 0;
18474
18475 return rs6000_issue_rate ();
18476}
18477
18478/* Like rs6000_sched_reorder, but called after issuing each insn. */
18479
18480static int
18481rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18482 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18483{
18484 if (sched_verbose)
18485 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18486
18487 /* For Power6, we need to handle some special cases to try and keep the
18488 store queue from overflowing and triggering expensive flushes.
18489
18490 This code monitors how load and store instructions are being issued
18491 and skews the ready list one way or the other to increase the likelihood
18492 that a desired instruction is issued at the proper time.
18493
18494 A couple of things are done. First, we maintain a "load_store_pendulum"
18495 to track the current state of load/store issue.
18496
18497 - If the pendulum is at zero, then no loads or stores have been
18498 issued in the current cycle so we do nothing.
18499
18500 - If the pendulum is 1, then a single load has been issued in this
18501 cycle and we attempt to locate another load in the ready list to
18502 issue with it.
18503
2f8e468b 18504 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18505 issued in this cycle, so we increase the priority of the first load
18506 in the ready list to increase its likelihood of being chosen first
18507 in the next cycle.
18508
18509 - If the pendulum is -1, then a single store has been issued in this
18510 cycle and we attempt to locate another store in the ready list to
18511 issue with it, preferring a store to an adjacent memory location to
18512 facilitate store pairing in the store queue.
18513
18514 - If the pendulum is 2, then two loads have already been
18515 issued in this cycle, so we increase the priority of the first store
18516 in the ready list to increase its likelihood of being chosen first
18517 in the next cycle.
18518
18519 - If the pendulum < -2 or > 2, then do nothing.
18520
18521 Note: This code covers the most common scenarios. There exist
18522 non-load/store instructions which make use of the LSU and which
18523 would need to be accounted for to strictly model the behavior
18524 of the machine. Those instructions are currently unaccounted
18525 for to help minimize compile time overhead of this code.
18526 */
18527 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18528 {
18529 int pos;
18530 int i;
18531 rtx tmp;
18532
18533 if (is_store_insn (last_scheduled_insn))
18534 /* Issuing a store, swing the load_store_pendulum to the left */
18535 load_store_pendulum--;
18536 else if (is_load_insn (last_scheduled_insn))
18537 /* Issuing a load, swing the load_store_pendulum to the right */
18538 load_store_pendulum++;
18539 else
18540 return cached_can_issue_more;
18541
18542 /* If the pendulum is balanced, or there is only one instruction on
18543 the ready list, then all is well, so return. */
18544 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18545 return cached_can_issue_more;
18546
18547 if (load_store_pendulum == 1)
18548 {
18549 /* A load has been issued in this cycle. Scan the ready list
18550 for another load to issue with it */
18551 pos = *pn_ready-1;
18552
18553 while (pos >= 0)
18554 {
18555 if (is_load_insn (ready[pos]))
18556 {
18557 /* Found a load. Move it to the head of the ready list,
18558 and adjust its priority so that it is more likely to
18559 stay there */
18560 tmp = ready[pos];
18561 for (i=pos; i<*pn_ready-1; i++)
18562 ready[i] = ready[i + 1];
18563 ready[*pn_ready-1] = tmp;
18564 if (INSN_PRIORITY_KNOWN (tmp))
18565 INSN_PRIORITY (tmp)++;
18566 break;
18567 }
18568 pos--;
18569 }
18570 }
18571 else if (load_store_pendulum == -2)
18572 {
18573 /* Two stores have been issued in this cycle. Increase the
18574 priority of the first load in the ready list to favor it for
18575 issuing in the next cycle. */
18576 pos = *pn_ready-1;
18577
18578 while (pos >= 0)
18579 {
18580 if (is_load_insn (ready[pos])
18581 && INSN_PRIORITY_KNOWN (ready[pos]))
18582 {
18583 INSN_PRIORITY (ready[pos])++;
18584
18585 /* Adjust the pendulum to account for the fact that a load
18586 was found and increased in priority. This is to prevent
18587 increasing the priority of multiple loads */
18588 load_store_pendulum--;
18589
18590 break;
18591 }
18592 pos--;
18593 }
18594 }
18595 else if (load_store_pendulum == -1)
18596 {
18597 /* A store has been issued in this cycle. Scan the ready list for
18598 another store to issue with it, preferring a store to an adjacent
18599 memory location */
18600 int first_store_pos = -1;
18601
18602 pos = *pn_ready-1;
18603
18604 while (pos >= 0)
18605 {
18606 if (is_store_insn (ready[pos]))
18607 {
18608 /* Maintain the index of the first store found on the
18609 list */
18610 if (first_store_pos == -1)
18611 first_store_pos = pos;
18612
18613 if (is_store_insn (last_scheduled_insn)
18614 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18615 {
18616 /* Found an adjacent store. Move it to the head of the
18617 ready list, and adjust its priority so that it is
18618 more likely to stay there */
18619 tmp = ready[pos];
18620 for (i=pos; i<*pn_ready-1; i++)
18621 ready[i] = ready[i + 1];
18622 ready[*pn_ready-1] = tmp;
18623 if (INSN_PRIORITY_KNOWN (tmp))
18624 INSN_PRIORITY (tmp)++;
18625 first_store_pos = -1;
18626
18627 break;
18628 }
18629 }
18630 pos--;
18631 }
18632
18633 if (first_store_pos >= 0)
18634 {
18635 /* An adjacent store wasn't found, but a non-adjacent store was,
18636 so move the non-adjacent store to the front of the ready
18637 list, and adjust its priority so that it is more likely to
18638 stay there. */
18639 tmp = ready[first_store_pos];
18640 for (i=first_store_pos; i<*pn_ready-1; i++)
18641 ready[i] = ready[i + 1];
18642 ready[*pn_ready-1] = tmp;
18643 if (INSN_PRIORITY_KNOWN (tmp))
18644 INSN_PRIORITY (tmp)++;
18645 }
18646 }
18647 else if (load_store_pendulum == 2)
18648 {
18649 /* Two loads have been issued in this cycle. Increase the priority
18650 of the first store in the ready list to favor it for issuing in
18651 the next cycle. */
18652 pos = *pn_ready-1;
18653
18654 while (pos >= 0)
18655 {
18656 if (is_store_insn (ready[pos])
18657 && INSN_PRIORITY_KNOWN (ready[pos]))
18658 {
18659 INSN_PRIORITY (ready[pos])++;
18660
18661 /* Adjust the pendulum to account for the fact that a store
18662 was found and increased in priority. This is to prevent
18663 increasing the priority of multiple stores */
18664 load_store_pendulum++;
18665
18666 break;
18667 }
18668 pos--;
18669 }
18670 }
18671 }
18672
18673 return cached_can_issue_more;
18674}
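/* Illustrative sketch only (kept under #if 0, not built): a minimal model
   of the Power6 load/store pendulum documented in rs6000_sched_reorder2
   above.  'L' stands for a load being issued, 'S' for a store; anything
   else leaves the pendulum untouched.  */
#if 0
static int
example_pendulum_step (int pendulum, char issued)
{
  if (issued == 'S')
    pendulum--;		/* swing left: a store was issued */
  else if (issued == 'L')
    pendulum++;		/* swing right: a load was issued */
  /* -1 or 1: look for a partner of the same kind this cycle;
     -2 or 2: boost the first insn of the opposite kind for the next cycle;
     anything beyond that: do nothing.  */
  return pendulum;
}
#endif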
18675
839a4992 18676/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18677 of group WHICH_GROUP.
18678
18679 If WHICH_GROUP == current_group, this function will return true if INSN
18680 causes the termination of the current group (i.e., the dispatch group to
18681 which INSN belongs). This means that INSN will be the last insn in the
18682 group it belongs to.
18683
18684 If WHICH_GROUP == previous_group, this function will return true if INSN
18685 causes the termination of the previous group (i.e., the dispatch group that
18686 precedes the group to which INSN belongs). This means that INSN will be
18687 the first insn in the group it belongs to. */
18688
18689static bool
18690insn_terminates_group_p (rtx insn, enum group_termination which_group)
18691{
44cd321e 18692 bool first, last;
cbe26ab8
DN
18693
18694 if (! insn)
18695 return false;
569fa502 18696
44cd321e
PS
18697 first = insn_must_be_first_in_group (insn);
18698 last = insn_must_be_last_in_group (insn);
cbe26ab8 18699
44cd321e 18700 if (first && last)
cbe26ab8
DN
18701 return true;
18702
18703 if (which_group == current_group)
44cd321e 18704 return last;
cbe26ab8 18705 else if (which_group == previous_group)
44cd321e
PS
18706 return first;
18707
18708 return false;
18709}
18710
18711
18712static bool
18713insn_must_be_first_in_group (rtx insn)
18714{
18715 enum attr_type type;
18716
18717 if (!insn
18718 || insn == NULL_RTX
18719 || GET_CODE (insn) == NOTE
18720 || GET_CODE (PATTERN (insn)) == USE
18721 || GET_CODE (PATTERN (insn)) == CLOBBER)
18722 return false;
18723
18724 switch (rs6000_cpu)
cbe26ab8 18725 {
44cd321e
PS
18726 case PROCESSOR_POWER5:
18727 if (is_cracked_insn (insn))
18728 return true;
18729 case PROCESSOR_POWER4:
18730 if (is_microcoded_insn (insn))
18731 return true;
18732
18733 if (!rs6000_sched_groups)
18734 return false;
18735
18736 type = get_attr_type (insn);
18737
18738 switch (type)
18739 {
18740 case TYPE_MFCR:
18741 case TYPE_MFCRF:
18742 case TYPE_MTCR:
18743 case TYPE_DELAYED_CR:
18744 case TYPE_CR_LOGICAL:
18745 case TYPE_MTJMPR:
18746 case TYPE_MFJMPR:
18747 case TYPE_IDIV:
18748 case TYPE_LDIV:
18749 case TYPE_LOAD_L:
18750 case TYPE_STORE_C:
18751 case TYPE_ISYNC:
18752 case TYPE_SYNC:
18753 return true;
18754 default:
18755 break;
18756 }
18757 break;
18758 case PROCESSOR_POWER6:
18759 type = get_attr_type (insn);
18760
18761 switch (type)
18762 {
18763 case TYPE_INSERT_DWORD:
18764 case TYPE_EXTS:
18765 case TYPE_CNTLZ:
18766 case TYPE_SHIFT:
18767 case TYPE_VAR_SHIFT_ROTATE:
18768 case TYPE_TRAP:
18769 case TYPE_IMUL:
18770 case TYPE_IMUL2:
18771 case TYPE_IMUL3:
18772 case TYPE_LMUL:
18773 case TYPE_IDIV:
18774 case TYPE_INSERT_WORD:
18775 case TYPE_DELAYED_COMPARE:
18776 case TYPE_IMUL_COMPARE:
18777 case TYPE_LMUL_COMPARE:
18778 case TYPE_FPCOMPARE:
18779 case TYPE_MFCR:
18780 case TYPE_MTCR:
18781 case TYPE_MFJMPR:
18782 case TYPE_MTJMPR:
18783 case TYPE_ISYNC:
18784 case TYPE_SYNC:
18785 case TYPE_LOAD_L:
18786 case TYPE_STORE_C:
18787 case TYPE_LOAD_U:
18788 case TYPE_LOAD_UX:
18789 case TYPE_LOAD_EXT_UX:
18790 case TYPE_STORE_U:
18791 case TYPE_STORE_UX:
18792 case TYPE_FPLOAD_U:
18793 case TYPE_FPLOAD_UX:
18794 case TYPE_FPSTORE_U:
18795 case TYPE_FPSTORE_UX:
18796 return true;
18797 default:
18798 break;
18799 }
18800 break;
18801 default:
18802 break;
18803 }
18804
18805 return false;
18806}
18807
18808static bool
18809insn_must_be_last_in_group (rtx insn)
18810{
18811 enum attr_type type;
18812
18813 if (!insn
18814 || insn == NULL_RTX
18815 || GET_CODE (insn) == NOTE
18816 || GET_CODE (PATTERN (insn)) == USE
18817 || GET_CODE (PATTERN (insn)) == CLOBBER)
18818 return false;
18819
18820 switch (rs6000_cpu) {
18821 case PROCESSOR_POWER4:
18822 case PROCESSOR_POWER5:
18823 if (is_microcoded_insn (insn))
18824 return true;
18825
18826 if (is_branch_slot_insn (insn))
18827 return true;
18828
18829 break;
18830 case PROCESSOR_POWER6:
18831 type = get_attr_type (insn);
18832
18833 switch (type)
18834 {
18835 case TYPE_EXTS:
18836 case TYPE_CNTLZ:
18837 case TYPE_SHIFT:
18838 case TYPE_VAR_SHIFT_ROTATE:
18839 case TYPE_TRAP:
18840 case TYPE_IMUL:
18841 case TYPE_IMUL2:
18842 case TYPE_IMUL3:
18843 case TYPE_LMUL:
18844 case TYPE_IDIV:
18845 case TYPE_DELAYED_COMPARE:
18846 case TYPE_IMUL_COMPARE:
18847 case TYPE_LMUL_COMPARE:
18848 case TYPE_FPCOMPARE:
18849 case TYPE_MFCR:
18850 case TYPE_MTCR:
18851 case TYPE_MFJMPR:
18852 case TYPE_MTJMPR:
18853 case TYPE_ISYNC:
18854 case TYPE_SYNC:
18855 case TYPE_LOAD_L:
18856 case TYPE_STORE_C:
18857 return true;
18858 default:
18859 break;
cbe26ab8 18860 }
44cd321e
PS
18861 break;
18862 default:
18863 break;
18864 }
cbe26ab8
DN
18865
18866 return false;
18867}
18868
839a4992 18869/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18870 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18871
18872static bool
18873is_costly_group (rtx *group_insns, rtx next_insn)
18874{
18875 int i;
cbe26ab8
DN
18876 int issue_rate = rs6000_issue_rate ();
18877
18878 for (i = 0; i < issue_rate; i++)
18879 {
e2f6ff94
MK
18880 sd_iterator_def sd_it;
18881 dep_t dep;
cbe26ab8 18882 rtx insn = group_insns[i];
b198261f 18883
cbe26ab8 18884 if (!insn)
c4ad648e 18885 continue;
b198261f 18886
e2f6ff94 18887 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 18888 {
b198261f
MK
18889 rtx next = DEP_CON (dep);
18890
18891 if (next == next_insn
18892 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
18893 return true;
c4ad648e 18894 }
cbe26ab8
DN
18895 }
18896
18897 return false;
18898}
18899
f676971a 18900/* Utility function used by redefine_groups.
cbe26ab8
DN
18901 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
18902 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
18903 to keep it "far" (in a separate group) from GROUP_INSNS, following
18904 one of the following schemes, depending on the value of the flag
18905 -minsert-sched-nops = X:
18906 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 18907 in order to force NEXT_INSN into a separate group.
f676971a
EC
18908 (2) X < sched_finish_regroup_exact: insert exactly X nops.
18909 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
18910 insertion (has a group just ended, how many vacant issue slots remain in the
18911 last group, and how many dispatch groups were encountered so far). */
18912
f676971a 18913static int
c4ad648e
AM
18914force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
18915 rtx next_insn, bool *group_end, int can_issue_more,
18916 int *group_count)
cbe26ab8
DN
18917{
18918 rtx nop;
18919 bool force;
18920 int issue_rate = rs6000_issue_rate ();
18921 bool end = *group_end;
18922 int i;
18923
18924 if (next_insn == NULL_RTX)
18925 return can_issue_more;
18926
18927 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
18928 return can_issue_more;
18929
18930 force = is_costly_group (group_insns, next_insn);
18931 if (!force)
18932 return can_issue_more;
18933
18934 if (sched_verbose > 6)
18935 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 18936 *group_count ,can_issue_more);
cbe26ab8
DN
18937
18938 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
18939 {
18940 if (*group_end)
c4ad648e 18941 can_issue_more = 0;
cbe26ab8
DN
18942
18943 /* Since only a branch can be issued in the last issue_slot, it is
18944 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
18945 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
18946 in this case the last nop will start a new group and the branch
18947 will be forced to the new group. */
cbe26ab8 18948 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 18949 can_issue_more--;
cbe26ab8
DN
18950
18951 while (can_issue_more > 0)
c4ad648e 18952 {
9390387d 18953 nop = gen_nop ();
c4ad648e
AM
18954 emit_insn_before (nop, next_insn);
18955 can_issue_more--;
18956 }
cbe26ab8
DN
18957
18958 *group_end = true;
18959 return 0;
f676971a 18960 }
cbe26ab8
DN
18961
18962 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
18963 {
18964 int n_nops = rs6000_sched_insert_nops;
18965
f676971a 18966 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 18967 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 18968 if (can_issue_more == 0)
c4ad648e 18969 can_issue_more = issue_rate;
cbe26ab8
DN
18970 can_issue_more--;
18971 if (can_issue_more == 0)
c4ad648e
AM
18972 {
18973 can_issue_more = issue_rate - 1;
18974 (*group_count)++;
18975 end = true;
18976 for (i = 0; i < issue_rate; i++)
18977 {
18978 group_insns[i] = 0;
18979 }
18980 }
cbe26ab8
DN
18981
18982 while (n_nops > 0)
c4ad648e
AM
18983 {
18984 nop = gen_nop ();
18985 emit_insn_before (nop, next_insn);
18986 if (can_issue_more == issue_rate - 1) /* new group begins */
18987 end = false;
18988 can_issue_more--;
18989 if (can_issue_more == 0)
18990 {
18991 can_issue_more = issue_rate - 1;
18992 (*group_count)++;
18993 end = true;
18994 for (i = 0; i < issue_rate; i++)
18995 {
18996 group_insns[i] = 0;
18997 }
18998 }
18999 n_nops--;
19000 }
cbe26ab8
DN
19001
19002 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19003 can_issue_more++;
cbe26ab8 19004
c4ad648e
AM
19005 /* Is next_insn going to start a new group? */
19006 *group_end
19007 = (end
cbe26ab8
DN
19008 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19009 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19010 || (can_issue_more < issue_rate &&
c4ad648e 19011 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19012 if (*group_end && end)
c4ad648e 19013 (*group_count)--;
cbe26ab8
DN
19014
19015 if (sched_verbose > 6)
c4ad648e
AM
19016 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19017 *group_count, can_issue_more);
f676971a
EC
19018 return can_issue_more;
19019 }
cbe26ab8
DN
19020
19021 return can_issue_more;
19022}
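/* Illustrative sketch only (kept under #if 0, not built): how many nops
   the sched_finish_regroup_exact scheme in force_new_group emits in front
   of NEXT_INSN, given the vacant slots left in the current group.  Only a
   branch can occupy the last slot, so a non-branch needs one nop fewer.  */
#if 0
static int
example_exact_scheme_nop_count (int can_issue_more, bool next_is_branch)
{
  if (can_issue_more == 0)
    return 0;
  return next_is_branch ? can_issue_more : can_issue_more - 1;
}
#endif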
19023
19024/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19025 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19026 form in practice. It tries to achieve this synchronization by forcing the
19027 estimated processor grouping on the compiler (as opposed to the function
19028 'pad_groups' which tries to force the scheduler's grouping on the processor).
19029
19030 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19031 examines the (estimated) dispatch groups that will be formed by the processor
19032 dispatcher. It marks these group boundaries to reflect the estimated
19033 processor grouping, overriding the grouping that the scheduler had marked.
19034 Depending on the value of the flag '-minsert-sched-nops' this function can
19035 force certain insns into separate groups or force a certain distance between
19036 them by inserting nops, for example, if there exists a "costly dependence"
19037 between the insns.
19038
19039 The function estimates the group boundaries that the processor will form as
0fa2e4df 19040 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19041 each insn. A subsequent insn will start a new group if one of the following
19042 4 cases applies:
19043 - no more vacant issue slots remain in the current dispatch group.
19044 - only the last issue slot, which is the branch slot, is vacant, but the next
19045 insn is not a branch.
19046 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19047 which means that a cracked insn (which occupies two issue slots) can't be
19048 issued in this group.
f676971a 19049 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19050 start a new group. */
19051
19052static int
19053redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19054{
19055 rtx insn, next_insn;
19056 int issue_rate;
19057 int can_issue_more;
19058 int slot, i;
19059 bool group_end;
19060 int group_count = 0;
19061 rtx *group_insns;
19062
19063 /* Initialize. */
19064 issue_rate = rs6000_issue_rate ();
19065 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 19066 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19067 {
19068 group_insns[i] = 0;
19069 }
19070 can_issue_more = issue_rate;
19071 slot = 0;
19072 insn = get_next_active_insn (prev_head_insn, tail);
19073 group_end = false;
19074
19075 while (insn != NULL_RTX)
19076 {
19077 slot = (issue_rate - can_issue_more);
19078 group_insns[slot] = insn;
19079 can_issue_more =
c4ad648e 19080 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19081 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19082 can_issue_more = 0;
cbe26ab8
DN
19083
19084 next_insn = get_next_active_insn (insn, tail);
19085 if (next_insn == NULL_RTX)
c4ad648e 19086 return group_count + 1;
cbe26ab8 19087
c4ad648e
AM
19088 /* Is next_insn going to start a new group? */
19089 group_end
19090 = (can_issue_more == 0
19091 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19092 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19093 || (can_issue_more < issue_rate &&
19094 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19095
f676971a 19096 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19097 next_insn, &group_end, can_issue_more,
19098 &group_count);
cbe26ab8
DN
19099
19100 if (group_end)
c4ad648e
AM
19101 {
19102 group_count++;
19103 can_issue_more = 0;
19104 for (i = 0; i < issue_rate; i++)
19105 {
19106 group_insns[i] = 0;
19107 }
19108 }
cbe26ab8
DN
19109
19110 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19111 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19112 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19113 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19114
19115 insn = next_insn;
19116 if (can_issue_more == 0)
c4ad648e
AM
19117 can_issue_more = issue_rate;
19118 } /* while */
cbe26ab8
DN
19119
19120 return group_count;
19121}
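/* Illustrative sketch only (kept under #if 0, not built): redefine_groups
   above and pad_groups below communicate group boundaries through the mode
   of the insn: an insn whose mode is TImode starts a new dispatch group,
   VOIDmode means it does not.  A walker over the final schedule could
   count groups like this.  */
#if 0
static int
example_count_groups (rtx head, rtx tail)
{
  int groups = 0;
  rtx insn;

  for (insn = head; insn && insn != tail; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && GET_MODE (insn) == TImode)
      groups++;
  return groups;
}
#endif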
19122
19123/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19124 dispatch group boundaries that the scheduler had marked. Pad with nops
19125 any dispatch groups which have vacant issue slots, in order to force the
19126 scheduler's grouping on the processor dispatcher. The function
19127 returns the number of dispatch groups found. */
19128
19129static int
19130pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19131{
19132 rtx insn, next_insn;
19133 rtx nop;
19134 int issue_rate;
19135 int can_issue_more;
19136 int group_end;
19137 int group_count = 0;
19138
19139 /* Initialize issue_rate. */
19140 issue_rate = rs6000_issue_rate ();
19141 can_issue_more = issue_rate;
19142
19143 insn = get_next_active_insn (prev_head_insn, tail);
19144 next_insn = get_next_active_insn (insn, tail);
19145
19146 while (insn != NULL_RTX)
19147 {
19148 can_issue_more =
19149 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19150
19151 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
19152
19153 if (next_insn == NULL_RTX)
c4ad648e 19154 break;
cbe26ab8
DN
19155
19156 if (group_end)
c4ad648e
AM
19157 {
19158 /* If the scheduler had marked group termination at this location
19159 (between insn and next_insn), and neither insn nor next_insn will
19160 force group termination, pad the group with nops to force group
19161 termination. */
19162 if (can_issue_more
19163 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
19164 && !insn_terminates_group_p (insn, current_group)
19165 && !insn_terminates_group_p (next_insn, previous_group))
19166 {
9390387d 19167 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
19168 can_issue_more--;
19169
19170 while (can_issue_more)
19171 {
19172 nop = gen_nop ();
19173 emit_insn_before (nop, next_insn);
19174 can_issue_more--;
19175 }
19176 }
19177
19178 can_issue_more = issue_rate;
19179 group_count++;
19180 }
cbe26ab8
DN
19181
19182 insn = next_insn;
19183 next_insn = get_next_active_insn (insn, tail);
19184 }
19185
19186 return group_count;
19187}
19188
44cd321e
PS
19189/* We're beginning a new block. Initialize data structures as necessary. */
19190
19191static void
19192rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
19193 int sched_verbose ATTRIBUTE_UNUSED,
19194 int max_ready ATTRIBUTE_UNUSED)
982afe02 19195{
44cd321e
PS
19196 last_scheduled_insn = NULL_RTX;
19197 load_store_pendulum = 0;
19198}
19199
cbe26ab8
DN
19200/* The following function is called at the end of scheduling BB.
19201 After reload, it inserts nops to enforce insn group bundling. */
19202
19203static void
38f391a5 19204rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
19205{
19206 int n_groups;
19207
19208 if (sched_verbose)
19209 fprintf (dump, "=== Finishing schedule.\n");
19210
ec507f2d 19211 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
19212 {
19213 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 19214 return;
cbe26ab8
DN
19215
19216 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
19217 n_groups = pad_groups (dump, sched_verbose,
19218 current_sched_info->prev_head,
19219 current_sched_info->next_tail);
cbe26ab8 19220 else
c4ad648e
AM
19221 n_groups = redefine_groups (dump, sched_verbose,
19222 current_sched_info->prev_head,
19223 current_sched_info->next_tail);
cbe26ab8
DN
19224
19225 if (sched_verbose >= 6)
19226 {
19227 fprintf (dump, "ngroups = %d\n", n_groups);
19228 print_rtl (dump, current_sched_info->prev_head);
19229 fprintf (dump, "Done finish_sched\n");
19230 }
19231 }
19232}
b6c9286a 19233\f
b6c9286a
MM
19234/* Length in units of the trampoline for entering a nested function. */
19235
19236int
863d938c 19237rs6000_trampoline_size (void)
b6c9286a
MM
19238{
19239 int ret = 0;
19240
19241 switch (DEFAULT_ABI)
19242 {
19243 default:
37409796 19244 gcc_unreachable ();
b6c9286a
MM
19245
19246 case ABI_AIX:
8f802bfb 19247 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
19248 break;
19249
4dabc42d 19250 case ABI_DARWIN:
b6c9286a 19251 case ABI_V4:
03a7e1a5 19252 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 19253 break;
b6c9286a
MM
19254 }
19255
19256 return ret;
19257}
19258
19259/* Emit RTL insns to initialize the variable parts of a trampoline.
19260 FNADDR is an RTX for the address of the function's pure code.
19261 CXT is an RTX for the static chain value for the function. */
19262
19263void
a2369ed3 19264rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 19265{
8bd04c56 19266 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 19267 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
19268
19269 switch (DEFAULT_ABI)
19270 {
19271 default:
37409796 19272 gcc_unreachable ();
b6c9286a 19273
8bd04c56 19274/* Macros to shorten the code expansions below. */
9613eaff 19275#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 19276#define MEM_PLUS(addr,offset) \
9613eaff 19277 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 19278
b6c9286a
MM
19279 /* Under AIX, just build the 3-word function descriptor. */
19280 case ABI_AIX:
8bd04c56 19281 {
9613eaff
SH
19282 rtx fn_reg = gen_reg_rtx (Pmode);
19283 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 19284 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 19285 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
19286 emit_move_insn (MEM_DEREF (addr), fn_reg);
19287 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
19288 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
19289 }
b6c9286a
MM
19290 break;
19291
4dabc42d
TC
19292 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
19293 case ABI_DARWIN:
b6c9286a 19294 case ABI_V4:
9613eaff 19295 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 19296 FALSE, VOIDmode, 4,
9613eaff 19297 addr, Pmode,
eaf1bcf1 19298 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
19299 fnaddr, Pmode,
19300 ctx_reg, Pmode);
b6c9286a 19301 break;
b6c9286a
MM
19302 }
19303
19304 return;
19305}
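/* Illustrative sketch only (kept under #if 0, not built): the 3-word AIX
   function descriptor filled in by the ABI_AIX case above.  Each field is
   'regsize' bytes wide (4 bytes for -m32, 8 bytes for -m64); the field
   names here are descriptive, not part of any header.  */
#if 0
struct example_aix_function_descriptor
{
  void *code_address;	/* copied from the callee's descriptor (MEM_DEREF)  */
  void *toc_value;	/* copied from MEM_PLUS (fnaddr, regsize)  */
  void *static_chain;	/* CXT, installed by rs6000_initialize_trampoline  */
};
#endif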
7509c759
MM
19306
19307\f
91d231cb 19308/* Table of valid machine attributes. */
a4f6c312 19309
91d231cb 19310const struct attribute_spec rs6000_attribute_table[] =
7509c759 19311{
91d231cb 19312 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 19313 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
19314 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
19315 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
19316 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
19317 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
19318#ifdef SUBTARGET_ATTRIBUTE_TABLE
19319 SUBTARGET_ATTRIBUTE_TABLE,
19320#endif
a5c76ee6 19321 { NULL, 0, 0, false, false, false, NULL }
91d231cb 19322};
7509c759 19323
8bb418a3
ZL
19324/* Handle the "altivec" attribute. The attribute may have
19325 arguments as follows:
f676971a 19326
8bb418a3
ZL
19327 __attribute__((altivec(vector__)))
19328 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
19329 __attribute__((altivec(bool__))) (always followed by 'unsigned')
19330
19331 and may appear more than once (e.g., 'vector bool char') in a
19332 given declaration. */
19333
19334static tree
f90ac3f0
UP
19335rs6000_handle_altivec_attribute (tree *node,
19336 tree name ATTRIBUTE_UNUSED,
19337 tree args,
8bb418a3
ZL
19338 int flags ATTRIBUTE_UNUSED,
19339 bool *no_add_attrs)
19340{
19341 tree type = *node, result = NULL_TREE;
19342 enum machine_mode mode;
19343 int unsigned_p;
19344 char altivec_type
19345 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
19346 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
19347 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 19348 : '?');
8bb418a3
ZL
19349
19350 while (POINTER_TYPE_P (type)
19351 || TREE_CODE (type) == FUNCTION_TYPE
19352 || TREE_CODE (type) == METHOD_TYPE
19353 || TREE_CODE (type) == ARRAY_TYPE)
19354 type = TREE_TYPE (type);
19355
19356 mode = TYPE_MODE (type);
19357
f90ac3f0
UP
19358 /* Check for invalid AltiVec type qualifiers. */
19359 if (type == long_unsigned_type_node || type == long_integer_type_node)
19360 {
19361 if (TARGET_64BIT)
19362 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
19363 else if (rs6000_warn_altivec_long)
d4ee4d25 19364 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
19365 }
19366 else if (type == long_long_unsigned_type_node
19367 || type == long_long_integer_type_node)
19368 error ("use of %<long long%> in AltiVec types is invalid");
19369 else if (type == double_type_node)
19370 error ("use of %<double%> in AltiVec types is invalid");
19371 else if (type == long_double_type_node)
19372 error ("use of %<long double%> in AltiVec types is invalid");
19373 else if (type == boolean_type_node)
19374 error ("use of boolean types in AltiVec types is invalid");
19375 else if (TREE_CODE (type) == COMPLEX_TYPE)
19376 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
19377 else if (DECIMAL_FLOAT_MODE_P (mode))
19378 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
19379
19380 switch (altivec_type)
19381 {
19382 case 'v':
8df83eae 19383 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
19384 switch (mode)
19385 {
c4ad648e
AM
19386 case SImode:
19387 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
19388 break;
19389 case HImode:
19390 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
19391 break;
19392 case QImode:
19393 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
19394 break;
19395 case SFmode: result = V4SF_type_node; break;
19396 /* If the user says 'vector int bool', we may be handed the 'bool'
19397 attribute _before_ the 'vector' attribute, and so select the
19398 proper type in the 'b' case below. */
19399 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
19400 result = type;
19401 default: break;
8bb418a3
ZL
19402 }
19403 break;
19404 case 'b':
19405 switch (mode)
19406 {
c4ad648e
AM
19407 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
19408 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
19409 case QImode: case V16QImode: result = bool_V16QI_type_node;
19410 default: break;
8bb418a3
ZL
19411 }
19412 break;
19413 case 'p':
19414 switch (mode)
19415 {
c4ad648e
AM
19416 case V8HImode: result = pixel_V8HI_type_node;
19417 default: break;
8bb418a3
ZL
19418 }
19419 default: break;
19420 }
19421
7958a2a6
FJ
19422 if (result && result != type && TYPE_READONLY (type))
19423 result = build_qualified_type (result, TYPE_QUAL_CONST);
19424
8bb418a3
ZL
19425 *no_add_attrs = true; /* No need to hang on to the attribute. */
19426
f90ac3f0 19427 if (result)
8bb418a3
ZL
19428 *node = reconstruct_complex_type (*node, result);
19429
19430 return NULL_TREE;
19431}
19432
f18eca82
ZL
19433/* AltiVec defines four built-in scalar types that serve as vector
19434 elements; we must teach the compiler how to mangle them. */
19435
19436static const char *
3101faab 19437rs6000_mangle_type (const_tree type)
f18eca82 19438{
608063c3
JB
19439 type = TYPE_MAIN_VARIANT (type);
19440
19441 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19442 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19443 return NULL;
19444
f18eca82
ZL
19445 if (type == bool_char_type_node) return "U6__boolc";
19446 if (type == bool_short_type_node) return "U6__bools";
19447 if (type == pixel_type_node) return "u7__pixel";
19448 if (type == bool_int_type_node) return "U6__booli";
19449
337bde91
DE
19450 /* Mangle IBM extended float long double as `g' (__float128) on
19451 powerpc*-linux where long-double-64 previously was the default. */
19452 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19453 && TARGET_ELF
19454 && TARGET_LONG_DOUBLE_128
19455 && !TARGET_IEEEQUAD)
19456 return "g";
19457
f18eca82
ZL
19458 /* For all other types, use normal C++ mangling. */
19459 return NULL;
19460}
19461
a5c76ee6
ZW
19462/* Handle a "longcall" or "shortcall" attribute; arguments as in
19463 struct attribute_spec.handler. */
a4f6c312 19464
91d231cb 19465static tree
f676971a
EC
19466rs6000_handle_longcall_attribute (tree *node, tree name,
19467 tree args ATTRIBUTE_UNUSED,
19468 int flags ATTRIBUTE_UNUSED,
a2369ed3 19469 bool *no_add_attrs)
91d231cb
JM
19470{
19471 if (TREE_CODE (*node) != FUNCTION_TYPE
19472 && TREE_CODE (*node) != FIELD_DECL
19473 && TREE_CODE (*node) != TYPE_DECL)
19474 {
5c498b10 19475 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19476 IDENTIFIER_POINTER (name));
19477 *no_add_attrs = true;
19478 }
6a4cee5f 19479
91d231cb 19480 return NULL_TREE;
7509c759
MM
19481}
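/* Illustrative sketch only (kept under #if 0, not built): the "longcall"
   and "shortcall" attributes handled above, as a user would write them.
   With -mlongcall every function type gets "longcall" by default (see
   rs6000_set_default_type_attributes below); "shortcall" overrides that
   for an individual declaration.  */
#if 0
extern void example_far_function (void) __attribute__ ((longcall));
extern void example_near_function (void) __attribute__ ((shortcall));
#endif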
19482
a5c76ee6
ZW
19483/* Set longcall attributes on all functions declared when
19484 rs6000_default_long_calls is true. */
19485static void
a2369ed3 19486rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19487{
19488 if (rs6000_default_long_calls
19489 && (TREE_CODE (type) == FUNCTION_TYPE
19490 || TREE_CODE (type) == METHOD_TYPE))
19491 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19492 NULL_TREE,
19493 TYPE_ATTRIBUTES (type));
16d6f994
EC
19494
19495#if TARGET_MACHO
19496 darwin_set_default_type_attributes (type);
19497#endif
a5c76ee6
ZW
19498}
19499
3cb999d8
DE
19500/* Return a reference suitable for calling a function with the
19501 longcall attribute. */
a4f6c312 19502
9390387d 19503rtx
a2369ed3 19504rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19505{
d330fd93 19506 const char *call_name;
6a4cee5f
MM
19507 tree node;
19508
19509 if (GET_CODE (call_ref) != SYMBOL_REF)
19510 return call_ref;
19511
19512 /* System V adds '.' to the internal name, so skip them. */
19513 call_name = XSTR (call_ref, 0);
19514 if (*call_name == '.')
19515 {
19516 while (*call_name == '.')
19517 call_name++;
19518
19519 node = get_identifier (call_name);
39403d82 19520 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19521 }
19522
19523 return force_reg (Pmode, call_ref);
19524}
7509c759 19525\f
77ccdfed
EC
19526#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19527#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19528#endif
19529
19530/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19531 struct attribute_spec.handler. */
19532static tree
19533rs6000_handle_struct_attribute (tree *node, tree name,
19534 tree args ATTRIBUTE_UNUSED,
19535 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19536{
19537 tree *type = NULL;
19538 if (DECL_P (*node))
19539 {
19540 if (TREE_CODE (*node) == TYPE_DECL)
19541 type = &TREE_TYPE (*node);
19542 }
19543 else
19544 type = node;
19545
19546 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19547 || TREE_CODE (*type) == UNION_TYPE)))
19548 {
19549 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19550 *no_add_attrs = true;
19551 }
19552
19553 else if ((is_attribute_p ("ms_struct", name)
19554 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19555 || ((is_attribute_p ("gcc_struct", name)
19556 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19557 {
19558 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19559 IDENTIFIER_POINTER (name));
19560 *no_add_attrs = true;
19561 }
19562
19563 return NULL_TREE;
19564}
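/* Illustrative sketch only (kept under #if 0, not built): the "ms_struct"
   and "gcc_struct" attributes handled above, as a user would write them.
   Combining both on one type draws the "incompatible attribute" warning
   emitted above.  */
#if 0
struct __attribute__ ((ms_struct)) example_ms_layout
{
  char c;
  long long ll;		/* laid out with the MS struct/bitfield rules */
};

struct __attribute__ ((gcc_struct)) example_gcc_layout
{
  char c;
  long long ll;		/* laid out with the default GCC rules */
};
#endif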
19565
19566static bool
3101faab 19567rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
19568{
19569 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19570 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19571 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19572}
19573\f
b64a1b53
RH
19574#ifdef USING_ELFOS_H
19575
d6b5193b 19576/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19577
d6b5193b
RS
19578static void
19579rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19580{
19581 if (DEFAULT_ABI == ABI_AIX
19582 && TARGET_MINIMAL_TOC
19583 && !TARGET_RELOCATABLE)
19584 {
19585 if (!toc_initialized)
19586 {
19587 toc_initialized = 1;
19588 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19589 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19590 fprintf (asm_out_file, "\t.tc ");
19591 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19592 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19593 fprintf (asm_out_file, "\n");
19594
19595 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19597 fprintf (asm_out_file, " = .+32768\n");
19598 }
19599 else
19600 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19601 }
19602 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19603 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19604 else
19605 {
19606 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19607 if (!toc_initialized)
19608 {
19609 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19610 fprintf (asm_out_file, " = .+32768\n");
19611 toc_initialized = 1;
19612 }
19613 }
19614}
19615
19616/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19617
b64a1b53 19618static void
d6b5193b
RS
19619rs6000_elf_asm_init_sections (void)
19620{
19621 toc_section
19622 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19623
19624 sdata2_section
19625 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19626 SDATA2_SECTION_ASM_OP);
19627}
19628
19629/* Implement TARGET_SELECT_RTX_SECTION. */
19630
19631static section *
f676971a 19632rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19633 unsigned HOST_WIDE_INT align)
7509c759 19634{
a9098fd0 19635 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19636 return toc_section;
7509c759 19637 else
d6b5193b 19638 return default_elf_select_rtx_section (mode, x, align);
7509c759 19639}
d9407988 19640\f
d1908feb
JJ
19641/* For a SYMBOL_REF, set generic flags and then perform some
19642 target-specific processing.
19643
d1908feb
JJ
19644 When the AIX ABI is requested on a non-AIX system, replace the
19645 function name with the real name (with a leading .) rather than the
19646 function descriptor name. This saves a lot of overriding code to
19647 read the prefixes. */
d9407988 19648
fb49053f 19649static void
a2369ed3 19650rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19651{
d1908feb 19652 default_encode_section_info (decl, rtl, first);
b2003250 19653
d1908feb
JJ
19654 if (first
19655 && TREE_CODE (decl) == FUNCTION_DECL
19656 && !TARGET_AIX
19657 && DEFAULT_ABI == ABI_AIX)
d9407988 19658 {
c6a2438a 19659 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19660 size_t len = strlen (XSTR (sym_ref, 0));
19661 char *str = alloca (len + 2);
19662 str[0] = '.';
19663 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19664 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19665 }
d9407988
MM
19666}
19667
21d9bb3f
PB
19668static inline bool
19669compare_section_name (const char *section, const char *template)
19670{
19671 int len;
19672
19673 len = strlen (template);
19674 return (strncmp (section, template, len) == 0
19675 && (section[len] == 0 || section[len] == '.'));
19676}
19677
c1b7d95a 19678bool
3101faab 19679rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
19680{
19681 if (rs6000_sdata == SDATA_NONE)
19682 return false;
19683
7482ad25
AF
19684 /* We want to merge strings, so we never consider them small data. */
19685 if (TREE_CODE (decl) == STRING_CST)
19686 return false;
19687
19688 /* Functions are never in the small data area. */
19689 if (TREE_CODE (decl) == FUNCTION_DECL)
19690 return false;
19691
0e5dbd9b
DE
19692 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19693 {
19694 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
19695 if (compare_section_name (section, ".sdata")
19696 || compare_section_name (section, ".sdata2")
19697 || compare_section_name (section, ".gnu.linkonce.s")
19698 || compare_section_name (section, ".sbss")
19699 || compare_section_name (section, ".sbss2")
19700 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
19701 || strcmp (section, ".PPC.EMB.sdata0") == 0
19702 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19703 return true;
19704 }
19705 else
19706 {
19707 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19708
19709 if (size > 0
307b599c 19710 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19711 /* If it's not public, and we're not going to reference it there,
19712 there's no need to put it in the small data section. */
0e5dbd9b
DE
19713 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19714 return true;
19715 }
19716
19717 return false;
19718}
19719
b91da81f 19720#endif /* USING_ELFOS_H */
aacd3885
RS
19721\f
19722/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19723
aacd3885 19724static bool
3101faab 19725rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
19726{
19727 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19728}
a6c2a102 19729\f
000034eb 19730/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19731 ADDR can be effectively incremented by incrementing REG.
19732
19733 r0 is special and we must not select it as an address
19734 register by this routine since our caller will try to
19735 increment the returned register via an "la" instruction. */
000034eb 19736
9390387d 19737rtx
a2369ed3 19738find_addr_reg (rtx addr)
000034eb
DE
19739{
19740 while (GET_CODE (addr) == PLUS)
19741 {
02441cd6
JL
19742 if (GET_CODE (XEXP (addr, 0)) == REG
19743 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19744 addr = XEXP (addr, 0);
02441cd6
JL
19745 else if (GET_CODE (XEXP (addr, 1)) == REG
19746 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19747 addr = XEXP (addr, 1);
19748 else if (CONSTANT_P (XEXP (addr, 0)))
19749 addr = XEXP (addr, 1);
19750 else if (CONSTANT_P (XEXP (addr, 1)))
19751 addr = XEXP (addr, 0);
19752 else
37409796 19753 gcc_unreachable ();
000034eb 19754 }
37409796
NS
19755 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19756 return addr;
000034eb
DE
19757}
19758
a6c2a102 19759void
a2369ed3 19760rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19761{
19762 fatal_insn ("bad address", op);
19763}
c8023011 19764
ee890fe2
SS
19765#if TARGET_MACHO
19766
efdba735 19767static tree branch_island_list = 0;
ee890fe2 19768
efdba735
SH
19769/* Remember to generate a branch island for far calls to the given
19770 function. */
ee890fe2 19771
f676971a 19772static void
c4ad648e
AM
19773add_compiler_branch_island (tree label_name, tree function_name,
19774 int line_number)
ee890fe2 19775{
efdba735 19776 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19777 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19778 TREE_CHAIN (branch_island) = branch_island_list;
19779 branch_island_list = branch_island;
ee890fe2
SS
19780}
19781
efdba735
SH
19782#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19783#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19784#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19785 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19786
efdba735
SH
19787/* Generate far-jump branch islands for everything on the
19788 branch_island_list. Invoked immediately after the last instruction
19789 of the epilogue has been emitted; the branch-islands must be
19790 appended to, and contiguous with, the function body. Mach-O stubs
19791 are generated in machopic_output_stub(). */
ee890fe2 19792
efdba735
SH
19793static void
19794macho_branch_islands (void)
19795{
19796 char tmp_buf[512];
19797 tree branch_island;
19798
19799 for (branch_island = branch_island_list;
19800 branch_island;
19801 branch_island = TREE_CHAIN (branch_island))
19802 {
19803 const char *label =
19804 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19805 const char *name =
11abc112 19806 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19807 char name_buf[512];
19808 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19809 if (name[0] == '*' || name[0] == '&')
19810 strcpy (name_buf, name+1);
19811 else
19812 {
19813 name_buf[0] = '_';
19814 strcpy (name_buf+1, name);
19815 }
19816 strcpy (tmp_buf, "\n");
19817 strcat (tmp_buf, label);
ee890fe2 19818#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19819 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19820 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19821#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19822 if (flag_pic)
19823 {
19824 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19825 strcat (tmp_buf, label);
19826 strcat (tmp_buf, "_pic\n");
19827 strcat (tmp_buf, label);
19828 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19829
efdba735
SH
19830 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19831 strcat (tmp_buf, name_buf);
19832 strcat (tmp_buf, " - ");
19833 strcat (tmp_buf, label);
19834 strcat (tmp_buf, "_pic)\n");
f676971a 19835
efdba735 19836 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19837
efdba735
SH
19838 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19839 strcat (tmp_buf, name_buf);
19840 strcat (tmp_buf, " - ");
19841 strcat (tmp_buf, label);
19842 strcat (tmp_buf, "_pic)\n");
f676971a 19843
efdba735
SH
19844 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19845 }
19846 else
19847 {
19848 strcat (tmp_buf, ":\nlis r12,hi16(");
19849 strcat (tmp_buf, name_buf);
19850 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19851 strcat (tmp_buf, name_buf);
19852 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19853 }
19854 output_asm_insn (tmp_buf, 0);
ee890fe2 19855#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19856 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19857 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19858#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19859 }
ee890fe2 19860
efdba735 19861 branch_island_list = 0;
ee890fe2
SS
19862}
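/* Illustrative sketch only (not assembled): the shape of a non-PIC branch
   island that macho_branch_islands() pieces together above for a far call
   to _foo through a label L42 (the PIC variant additionally materializes
   the island's own address with mflr/bcl before the mtctr/bctr sequence).

	L42:	lis r12,hi16(_foo)
		ori r12,r12,lo16(_foo)
		mtctr r12
		bctr
*/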
19863
19864/* NO_PREVIOUS_DEF checks the branch-island list to see whether the
19865 function name is already there or not. */
19866
efdba735 19867static int
a2369ed3 19868no_previous_def (tree function_name)
ee890fe2 19869{
efdba735
SH
19870 tree branch_island;
19871 for (branch_island = branch_island_list;
19872 branch_island;
19873 branch_island = TREE_CHAIN (branch_island))
19874 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19875 return 0;
19876 return 1;
19877}
19878
19879/* GET_PREV_LABEL gets the label name from the previous definition of
19880 the function. */
19881
efdba735 19882static tree
a2369ed3 19883get_prev_label (tree function_name)
ee890fe2 19884{
efdba735
SH
19885 tree branch_island;
19886 for (branch_island = branch_island_list;
19887 branch_island;
19888 branch_island = TREE_CHAIN (branch_island))
19889 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19890 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19891 return 0;
19892}
19893
75b1b789
MS
19894#ifndef DARWIN_LINKER_GENERATES_ISLANDS
19895#define DARWIN_LINKER_GENERATES_ISLANDS 0
19896#endif
19897
19898/* KEXTs still need branch islands. */
19899#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
19900 || flag_mkernel || flag_apple_kext)
19901
ee890fe2 19902/* INSN is either a function call or a millicode call. It may have an
f676971a 19903 unconditional jump in its delay slot.
ee890fe2
SS
19904
19905 CALL_DEST is the routine we are calling. */
19906
19907char *
c4ad648e
AM
19908output_call (rtx insn, rtx *operands, int dest_operand_number,
19909 int cookie_operand_number)
ee890fe2
SS
19910{
19911 static char buf[256];
75b1b789
MS
19912 if (DARWIN_GENERATE_ISLANDS
19913 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 19914 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
19915 {
19916 tree labelname;
efdba735 19917 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 19918
ee890fe2
SS
19919 if (no_previous_def (funname))
19920 {
ee890fe2
SS
19921 rtx label_rtx = gen_label_rtx ();
19922 char *label_buf, temp_buf[256];
19923 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
19924 CODE_LABEL_NUMBER (label_rtx));
19925 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
19926 labelname = get_identifier (label_buf);
a38e7aa5 19927 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
19928 }
19929 else
19930 labelname = get_prev_label (funname);
19931
efdba735
SH
19932 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
19933 instruction will reach 'foo', otherwise link as 'bl L42'".
19934 "L42" should be a 'branch island', that will do a far jump to
19935 'foo'. Branch islands are generated in
19936 macho_branch_islands(). */
ee890fe2 19937 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 19938 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
19939 }
19940 else
efdba735
SH
19941 sprintf (buf, "bl %%z%d", dest_operand_number);
19942 return buf;
ee890fe2
SS
19943}
19944
ee890fe2
SS
19945/* Generate PIC and indirect symbol stubs. */
19946
19947void
a2369ed3 19948machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
19949{
19950 unsigned int length;
a4f6c312
SS
19951 char *symbol_name, *lazy_ptr_name;
19952 char *local_label_0;
ee890fe2
SS
19953 static int label = 0;
19954
df56a27f 19955 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 19956 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 19957
ee890fe2 19958
ee890fe2
SS
19959 length = strlen (symb);
19960 symbol_name = alloca (length + 32);
19961 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
19962
19963 lazy_ptr_name = alloca (length + 32);
19964 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
19965
ee890fe2 19966 if (flag_pic == 2)
56c779bc 19967 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 19968 else
56c779bc 19969 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
19970
19971 if (flag_pic == 2)
19972 {
d974312d
DJ
19973 fprintf (file, "\t.align 5\n");
19974
19975 fprintf (file, "%s:\n", stub);
19976 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19977
876455fa 19978 label++;
89da1f32 19979 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 19980 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 19981
ee890fe2
SS
19982 fprintf (file, "\tmflr r0\n");
19983 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
19984 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
19985 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
19986 lazy_ptr_name, local_label_0);
19987 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
19988 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
19989 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
19990 lazy_ptr_name, local_label_0);
19991 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
19992 fprintf (file, "\tbctr\n");
19993 }
19994 else
d974312d
DJ
19995 {
19996 fprintf (file, "\t.align 4\n");
19997
19998 fprintf (file, "%s:\n", stub);
19999 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20000
20001 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20002 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20003 (TARGET_64BIT ? "ldu" : "lwzu"),
20004 lazy_ptr_name);
d974312d
DJ
20005 fprintf (file, "\tmtctr r12\n");
20006 fprintf (file, "\tbctr\n");
20007 }
f676971a 20008
56c779bc 20009 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20010 fprintf (file, "%s:\n", lazy_ptr_name);
20011 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20012 fprintf (file, "%sdyld_stub_binding_helper\n",
20013 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20014}
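/* A sketch of the 32-bit non-PIC stub written by the code above for a
   symbol "foo" (the "L_foo$stub" / "L_foo$lazy_ptr" names are the usual
   Darwin convention and are illustrative here; the real names come from
   the caller and from GEN_SYMBOL_NAME_FOR_SYMBOL /
   GEN_LAZY_PTR_NAME_FOR_SYMBOL):

	.align 4
   L_foo$stub:
	.indirect_symbol _foo
	lis r11,ha16(L_foo$lazy_ptr)
	lwzu r12,lo16(L_foo$lazy_ptr)(r11)
	mtctr r12
	bctr

   L_foo$lazy_ptr:
	.indirect_symbol _foo
	.long dyld_stub_binding_helper

   The -fPIC (flag_pic == 2) form is the same idea but locates the lazy
   pointer PC-relatively via the bcl 20,31 sequence, and 64-bit code uses
   ldu and DOUBLE_INT_ASM_OP instead of lwzu and .long.  */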
20015
20016/* Legitimize PIC addresses. If the address is already
20017 position-independent, we return ORIG. Newly generated
20018 position-independent addresses go into a reg. This is REG if non
20019 zero, otherwise we allocate register(s) as necessary. */
20020
4fbbe694 20021#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20022
20023rtx
f676971a 20024rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20025 rtx reg)
ee890fe2
SS
20026{
20027 rtx base, offset;
20028
20029 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20030 reg = gen_reg_rtx (Pmode);
20031
20032 if (GET_CODE (orig) == CONST)
20033 {
37409796
NS
20034 rtx reg_temp;
20035
ee890fe2
SS
20036 if (GET_CODE (XEXP (orig, 0)) == PLUS
20037 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20038 return orig;
20039
37409796 20040 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20041
37409796
NS
20042 /* Use a different reg for the intermediate value, as
20043 it will be marked UNCHANGING. */
b3a13419 20044 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20045 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20046 Pmode, reg_temp);
20047 offset =
20048 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20049 Pmode, reg);
bb8df8a6 20050
ee890fe2
SS
20051 if (GET_CODE (offset) == CONST_INT)
20052 {
20053 if (SMALL_INT (offset))
ed8908e7 20054 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20055 else if (! reload_in_progress && ! reload_completed)
20056 offset = force_reg (Pmode, offset);
20057 else
c859cda6
DJ
20058 {
20059 rtx mem = force_const_mem (Pmode, orig);
20060 return machopic_legitimize_pic_address (mem, Pmode, reg);
20061 }
ee890fe2 20062 }
f1c25d3b 20063 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20064 }
20065
20066 /* Fall back on generic machopic code. */
20067 return machopic_legitimize_pic_address (orig, mode, reg);
20068}
20069
c4e18b1c
GK
20070/* Output a .machine directive for the Darwin assembler, and call
20071 the generic start_file routine. */
20072
20073static void
20074rs6000_darwin_file_start (void)
20075{
94ff898d 20076 static const struct
c4e18b1c
GK
20077 {
20078 const char *arg;
20079 const char *name;
20080 int if_set;
20081 } mapping[] = {
55dbfb48 20082 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20083 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20084 { "power4", "ppc970", 0 },
20085 { "G5", "ppc970", 0 },
20086 { "7450", "ppc7450", 0 },
20087 { "7400", "ppc7400", MASK_ALTIVEC },
20088 { "G4", "ppc7400", 0 },
20089 { "750", "ppc750", 0 },
20090 { "740", "ppc750", 0 },
20091 { "G3", "ppc750", 0 },
20092 { "604e", "ppc604e", 0 },
20093 { "604", "ppc604", 0 },
20094 { "603e", "ppc603", 0 },
20095 { "603", "ppc603", 0 },
20096 { "601", "ppc601", 0 },
20097 { NULL, "ppc", 0 } };
20098 const char *cpu_id = "";
20099 size_t i;
94ff898d 20100
9390387d 20101 rs6000_file_start ();
192d0f89 20102 darwin_file_start ();
c4e18b1c
GK
20103
20104 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20105 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20106 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20107 && rs6000_select[i].string[0] != '\0')
20108 cpu_id = rs6000_select[i].string;
20109
 20110 /* Look through the mapping array.  Pick the first entry whose ARG
 20111    matches the -mcpu argument, whose IF_SET bit is also set in the
 20112    target flags, or whose ARG is NULL (the fallback).  */
20113
20114 i = 0;
20115 while (mapping[i].arg != NULL
20116 && strcmp (mapping[i].arg, cpu_id) != 0
20117 && (mapping[i].if_set & target_flags) == 0)
20118 i++;
20119
20120 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20121}
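/* Example of the table above (illustrative): "-m32 -mcpu=G5" reaches the
   "970"/"G5" entries, either through the MASK_POWERPC64 bit or through
   the literal argument, and emits "\t.machine ppc970"; if no argument
   matches and none of the listed mask bits is set, the terminating entry
   falls back to "\t.machine ppc".  */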
20122
ee890fe2 20123#endif /* TARGET_MACHO */
7c262518
RH
20124
20125#if TARGET_ELF
9b580a0b
RH
20126static int
20127rs6000_elf_reloc_rw_mask (void)
7c262518 20128{
9b580a0b
RH
20129 if (flag_pic)
20130 return 3;
20131 else if (DEFAULT_ABI == ABI_AIX)
20132 return 2;
20133 else
20134 return 0;
7c262518 20135}
d9f6800d
RH
20136
20137/* Record an element in the table of global constructors. SYMBOL is
20138 a SYMBOL_REF of the function to be called; PRIORITY is a number
20139 between 0 and MAX_INIT_PRIORITY.
20140
20141 This differs from default_named_section_asm_out_constructor in
20142 that we have special handling for -mrelocatable. */
20143
20144static void
a2369ed3 20145rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20146{
20147 const char *section = ".ctors";
20148 char buf[16];
20149
20150 if (priority != DEFAULT_INIT_PRIORITY)
20151 {
20152 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
20153 /* Invert the numbering so the linker puts us in the proper
20154 order; constructors are run from right to left, and the
20155 linker sorts in increasing order. */
20156 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20157 section = buf;
20158 }
20159
d6b5193b 20160 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20161 assemble_align (POINTER_SIZE);
d9f6800d
RH
20162
20163 if (TARGET_RELOCATABLE)
20164 {
20165 fputs ("\t.long (", asm_out_file);
20166 output_addr_const (asm_out_file, symbol);
20167 fputs (")@fixup\n", asm_out_file);
20168 }
20169 else
c8af3574 20170 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
20171}
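/* Worked example of the inverted numbering: assuming MAX_INIT_PRIORITY
   is 65535, a constructor with priority 65000 lands in section
   ".ctors.00535", and one with priority 100 in ".ctors.65435", so the
   linker's increasing-name sort combined with the right-to-left
   execution of .ctors yields the intended priority order.  */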
20172
20173static void
a2369ed3 20174rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
20175{
20176 const char *section = ".dtors";
20177 char buf[16];
20178
20179 if (priority != DEFAULT_INIT_PRIORITY)
20180 {
20181 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
20182 /* Invert the numbering so the linker puts us in the proper
20183 order; constructors are run from right to left, and the
20184 linker sorts in increasing order. */
20185 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
20186 section = buf;
20187 }
20188
d6b5193b 20189 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 20190 assemble_align (POINTER_SIZE);
d9f6800d
RH
20191
20192 if (TARGET_RELOCATABLE)
20193 {
20194 fputs ("\t.long (", asm_out_file);
20195 output_addr_const (asm_out_file, symbol);
20196 fputs (")@fixup\n", asm_out_file);
20197 }
20198 else
c8af3574 20199 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 20200}
9739c90c
JJ
20201
20202void
a2369ed3 20203rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
20204{
20205 if (TARGET_64BIT)
20206 {
20207 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
20208 ASM_OUTPUT_LABEL (file, name);
20209 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
20210 rs6000_output_function_entry (file, name);
20211 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
20212 if (DOT_SYMBOLS)
9739c90c 20213 {
85b776df 20214 fputs ("\t.size\t", file);
9739c90c 20215 assemble_name (file, name);
85b776df
AM
20216 fputs (",24\n\t.type\t.", file);
20217 assemble_name (file, name);
20218 fputs (",@function\n", file);
20219 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
20220 {
20221 fputs ("\t.globl\t.", file);
20222 assemble_name (file, name);
20223 putc ('\n', file);
20224 }
9739c90c 20225 }
85b776df
AM
20226 else
20227 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 20228 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
20229 rs6000_output_function_entry (file, name);
20230 fputs (":\n", file);
9739c90c
JJ
20231 return;
20232 }
20233
20234 if (TARGET_RELOCATABLE
7f970b70 20235 && !TARGET_SECURE_PLT
9739c90c 20236 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 20237 && uses_TOC ())
9739c90c
JJ
20238 {
20239 char buf[256];
20240
20241 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
20242
20243 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
20244 fprintf (file, "\t.long ");
20245 assemble_name (file, buf);
20246 putc ('-', file);
20247 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
20248 assemble_name (file, buf);
20249 putc ('\n', file);
20250 }
20251
20252 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
20253 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
20254
20255 if (DEFAULT_ABI == ABI_AIX)
20256 {
20257 const char *desc_name, *orig_name;
20258
20259 orig_name = (*targetm.strip_name_encoding) (name);
20260 desc_name = orig_name;
20261 while (*desc_name == '.')
20262 desc_name++;
20263
20264 if (TREE_PUBLIC (decl))
20265 fprintf (file, "\t.globl %s\n", desc_name);
20266
20267 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20268 fprintf (file, "%s:\n", desc_name);
20269 fprintf (file, "\t.long %s\n", orig_name);
20270 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
20271 if (DEFAULT_ABI == ABI_AIX)
20272 fputs ("\t.long 0\n", file);
20273 fprintf (file, "\t.previous\n");
20274 }
20275 ASM_OUTPUT_LABEL (file, name);
20276}
1334b570
AM
20277
20278static void
20279rs6000_elf_end_indicate_exec_stack (void)
20280{
20281 if (TARGET_32BIT)
20282 file_end_indicate_exec_stack ();
20283}
7c262518
RH
20284#endif
20285
cbaaba19 20286#if TARGET_XCOFF
0d5817b2
DE
20287static void
20288rs6000_xcoff_asm_output_anchor (rtx symbol)
20289{
20290 char buffer[100];
20291
20292 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
20293 SYMBOL_REF_BLOCK_OFFSET (symbol));
20294 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
20295}
20296
7c262518 20297static void
a2369ed3 20298rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
20299{
20300 fputs (GLOBAL_ASM_OP, stream);
20301 RS6000_OUTPUT_BASENAME (stream, name);
20302 putc ('\n', stream);
20303}
20304
d6b5193b
RS
20305/* A get_unnamed_section callback, used for read-only sections.  DIRECTIVE
 20306    points to the section string variable.  */
20307
20308static void
20309rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
20310{
890f9edf
OH
20311 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
20312 *(const char *const *) directive,
20313 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20314}
20315
20316/* Likewise for read-write sections. */
20317
20318static void
20319rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
20320{
890f9edf
OH
20321 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
20322 *(const char *const *) directive,
20323 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
20324}
20325
20326/* A get_unnamed_section callback, used for switching to toc_section. */
20327
20328static void
20329rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20330{
20331 if (TARGET_MINIMAL_TOC)
20332 {
20333 /* toc_section is always selected at least once from
20334 rs6000_xcoff_file_start, so this is guaranteed to
20335 always be defined once and only once in each file. */
20336 if (!toc_initialized)
20337 {
20338 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
20339 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
20340 toc_initialized = 1;
20341 }
20342 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
20343 (TARGET_32BIT ? "" : ",3"));
20344 }
20345 else
20346 fputs ("\t.toc\n", asm_out_file);
20347}
20348
20349/* Implement TARGET_ASM_INIT_SECTIONS. */
20350
20351static void
20352rs6000_xcoff_asm_init_sections (void)
20353{
20354 read_only_data_section
20355 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20356 &xcoff_read_only_section_name);
20357
20358 private_data_section
20359 = get_unnamed_section (SECTION_WRITE,
20360 rs6000_xcoff_output_readwrite_section_asm_op,
20361 &xcoff_private_data_section_name);
20362
20363 read_only_private_data_section
20364 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
20365 &xcoff_private_data_section_name);
20366
20367 toc_section
20368 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
20369
20370 readonly_data_section = read_only_data_section;
20371 exception_section = data_section;
20372}
20373
9b580a0b
RH
20374static int
20375rs6000_xcoff_reloc_rw_mask (void)
20376{
20377 return 3;
20378}
20379
b275d088 20380static void
c18a5b6c
MM
20381rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
20382 tree decl ATTRIBUTE_UNUSED)
7c262518 20383{
0e5dbd9b
DE
20384 int smclass;
20385 static const char * const suffix[3] = { "PR", "RO", "RW" };
20386
20387 if (flags & SECTION_CODE)
20388 smclass = 0;
20389 else if (flags & SECTION_WRITE)
20390 smclass = 2;
20391 else
20392 smclass = 1;
20393
5b5198f7 20394 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 20395 (flags & SECTION_CODE) ? "." : "",
5b5198f7 20396 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 20397}
ae46c4e0 20398
d6b5193b 20399static section *
f676971a 20400rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 20401 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 20402{
9b580a0b 20403 if (decl_readonly_section (decl, reloc))
ae46c4e0 20404 {
0e5dbd9b 20405 if (TREE_PUBLIC (decl))
d6b5193b 20406 return read_only_data_section;
ae46c4e0 20407 else
d6b5193b 20408 return read_only_private_data_section;
ae46c4e0
RH
20409 }
20410 else
20411 {
0e5dbd9b 20412 if (TREE_PUBLIC (decl))
d6b5193b 20413 return data_section;
ae46c4e0 20414 else
d6b5193b 20415 return private_data_section;
ae46c4e0
RH
20416 }
20417}
20418
20419static void
a2369ed3 20420rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
20421{
20422 const char *name;
ae46c4e0 20423
5b5198f7
DE
20424 /* Use select_section for private and uninitialized data. */
20425 if (!TREE_PUBLIC (decl)
20426 || DECL_COMMON (decl)
0e5dbd9b
DE
20427 || DECL_INITIAL (decl) == NULL_TREE
20428 || DECL_INITIAL (decl) == error_mark_node
20429 || (flag_zero_initialized_in_bss
20430 && initializer_zerop (DECL_INITIAL (decl))))
20431 return;
20432
20433 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
20434 name = (*targetm.strip_name_encoding) (name);
20435 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 20436}
b64a1b53 20437
fb49053f
RH
20438/* Select section for constant in constant pool.
20439
20440 On RS/6000, all constants are in the private read-only data area.
20441 However, if this is being placed in the TOC it must be output as a
20442 toc entry. */
20443
d6b5193b 20444static section *
f676971a 20445rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20446 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20447{
20448 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20449 return toc_section;
b64a1b53 20450 else
d6b5193b 20451 return read_only_private_data_section;
b64a1b53 20452}
772c5265
RH
20453
20454/* Remove any trailing [DS] or the like from the symbol name. */
20455
20456static const char *
a2369ed3 20457rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20458{
20459 size_t len;
20460 if (*name == '*')
20461 name++;
20462 len = strlen (name);
20463 if (name[len - 1] == ']')
20464 return ggc_alloc_string (name, len - 4);
20465 else
20466 return name;
20467}
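/* For example, "*foo[DS]" comes back as "foo": the leading '*' is
   skipped and the trailing four characters "[DS]" (likewise "[RW]",
   "[RO]", ...) are dropped.  */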
20468
5add3202
DE
20469/* Section attributes. AIX is always PIC. */
20470
20471static unsigned int
a2369ed3 20472rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20473{
5b5198f7 20474 unsigned int align;
9b580a0b 20475 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20476
20477 /* Align to at least UNIT size. */
20478 if (flags & SECTION_CODE)
20479 align = MIN_UNITS_PER_WORD;
20480 else
20481 /* Increase alignment of large objects if not already stricter. */
20482 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20483 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20484 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20485
20486 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20487}
a5fe455b 20488
1bc7c5b6
ZW
20489/* Output at beginning of assembler file.
20490
20491 Initialize the section names for the RS/6000 at this point.
20492
20493 Specify filename, including full path, to assembler.
20494
20495 We want to go into the TOC section so at least one .toc will be emitted.
20496 Also, in order to output proper .bs/.es pairs, we need at least one static
20497 [RW] section emitted.
20498
20499 Finally, declare mcount when profiling to make the assembler happy. */
20500
20501static void
863d938c 20502rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20503{
20504 rs6000_gen_section_name (&xcoff_bss_section_name,
20505 main_input_filename, ".bss_");
20506 rs6000_gen_section_name (&xcoff_private_data_section_name,
20507 main_input_filename, ".rw_");
20508 rs6000_gen_section_name (&xcoff_read_only_section_name,
20509 main_input_filename, ".ro_");
20510
20511 fputs ("\t.file\t", asm_out_file);
20512 output_quoted_string (asm_out_file, main_input_filename);
20513 fputc ('\n', asm_out_file);
1bc7c5b6 20514 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20515 switch_to_section (private_data_section);
20516 switch_to_section (text_section);
1bc7c5b6
ZW
20517 if (profile_flag)
20518 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20519 rs6000_file_start ();
20520}
20521
a5fe455b
ZW
20522/* Output at end of assembler file.
20523 On the RS/6000, referencing data should automatically pull in text. */
20524
20525static void
863d938c 20526rs6000_xcoff_file_end (void)
a5fe455b 20527{
d6b5193b 20528 switch_to_section (text_section);
a5fe455b 20529 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20530 switch_to_section (data_section);
a5fe455b
ZW
20531 fputs (TARGET_32BIT
20532 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20533 asm_out_file);
20534}
f1384257 20535#endif /* TARGET_XCOFF */
0e5dbd9b 20536
3c50106f
RH
20537/* Compute a (partial) cost for rtx X. Return true if the complete
20538 cost has been computed, and false if subexpressions should be
20539 scanned. In either case, *TOTAL contains the cost result. */
20540
20541static bool
1494c534 20542rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20543{
f0517163
RS
20544 enum machine_mode mode = GET_MODE (x);
20545
3c50106f
RH
20546 switch (code)
20547 {
30a555d9 20548 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20549 case CONST_INT:
066cd967
DE
20550 if (((outer_code == SET
20551 || outer_code == PLUS
20552 || outer_code == MINUS)
279bb624
DE
20553 && (satisfies_constraint_I (x)
20554 || satisfies_constraint_L (x)))
066cd967 20555 || (outer_code == AND
279bb624
DE
20556 && (satisfies_constraint_K (x)
20557 || (mode == SImode
20558 ? satisfies_constraint_L (x)
20559 : satisfies_constraint_J (x))
1990cd79
AM
20560 || mask_operand (x, mode)
20561 || (mode == DImode
20562 && mask64_operand (x, DImode))))
22e54023 20563 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20564 && (satisfies_constraint_K (x)
20565 || (mode == SImode
20566 ? satisfies_constraint_L (x)
20567 : satisfies_constraint_J (x))))
066cd967
DE
20568 || outer_code == ASHIFT
20569 || outer_code == ASHIFTRT
20570 || outer_code == LSHIFTRT
20571 || outer_code == ROTATE
20572 || outer_code == ROTATERT
d5861a7a 20573 || outer_code == ZERO_EXTRACT
066cd967 20574 || (outer_code == MULT
279bb624 20575 && satisfies_constraint_I (x))
22e54023
DE
20576 || ((outer_code == DIV || outer_code == UDIV
20577 || outer_code == MOD || outer_code == UMOD)
20578 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20579 || (outer_code == COMPARE
279bb624
DE
20580 && (satisfies_constraint_I (x)
20581 || satisfies_constraint_K (x)))
22e54023 20582 || (outer_code == EQ
279bb624
DE
20583 && (satisfies_constraint_I (x)
20584 || satisfies_constraint_K (x)
20585 || (mode == SImode
20586 ? satisfies_constraint_L (x)
20587 : satisfies_constraint_J (x))))
22e54023 20588 || (outer_code == GTU
279bb624 20589 && satisfies_constraint_I (x))
22e54023 20590 || (outer_code == LTU
279bb624 20591 && satisfies_constraint_P (x)))
066cd967
DE
20592 {
20593 *total = 0;
20594 return true;
20595 }
20596 else if ((outer_code == PLUS
4ae234b0 20597 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20598 || (outer_code == MINUS
4ae234b0 20599 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20600 || ((outer_code == SET
20601 || outer_code == IOR
20602 || outer_code == XOR)
20603 && (INTVAL (x)
20604 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20605 {
20606 *total = COSTS_N_INSNS (1);
20607 return true;
20608 }
20609 /* FALLTHRU */
20610
20611 case CONST_DOUBLE:
f6fe3a22 20612 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20613 {
f6fe3a22
DE
20614 if ((outer_code == IOR || outer_code == XOR)
20615 && CONST_DOUBLE_HIGH (x) == 0
20616 && (CONST_DOUBLE_LOW (x)
20617 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20618 {
20619 *total = 0;
20620 return true;
20621 }
20622 else if ((outer_code == AND && and64_2_operand (x, DImode))
20623 || ((outer_code == SET
20624 || outer_code == IOR
20625 || outer_code == XOR)
20626 && CONST_DOUBLE_HIGH (x) == 0))
20627 {
20628 *total = COSTS_N_INSNS (1);
20629 return true;
20630 }
066cd967
DE
20631 }
20632 /* FALLTHRU */
20633
3c50106f 20634 case CONST:
066cd967 20635 case HIGH:
3c50106f 20636 case SYMBOL_REF:
066cd967
DE
20637 case MEM:
20638 /* When optimizing for size, MEM should be slightly more expensive
20639 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 20640 L1 cache latency is about two instructions. */
066cd967 20641 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20642 return true;
20643
30a555d9
DE
20644 case LABEL_REF:
20645 *total = 0;
20646 return true;
20647
3c50106f 20648 case PLUS:
f0517163 20649 if (mode == DFmode)
066cd967
DE
20650 {
20651 if (GET_CODE (XEXP (x, 0)) == MULT)
20652 {
20653 /* FNMA accounted in outer NEG. */
20654 if (outer_code == NEG)
20655 *total = rs6000_cost->dmul - rs6000_cost->fp;
20656 else
20657 *total = rs6000_cost->dmul;
20658 }
20659 else
20660 *total = rs6000_cost->fp;
20661 }
f0517163 20662 else if (mode == SFmode)
066cd967
DE
20663 {
20664 /* FNMA accounted in outer NEG. */
20665 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20666 *total = 0;
20667 else
20668 *total = rs6000_cost->fp;
20669 }
f0517163 20670 else
066cd967
DE
20671 *total = COSTS_N_INSNS (1);
20672 return false;
3c50106f 20673
52190329 20674 case MINUS:
f0517163 20675 if (mode == DFmode)
066cd967 20676 {
762c919f
JM
20677 if (GET_CODE (XEXP (x, 0)) == MULT
20678 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20679 {
20680 /* FNMA accounted in outer NEG. */
20681 if (outer_code == NEG)
762c919f 20682 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20683 else
20684 *total = rs6000_cost->dmul;
20685 }
20686 else
20687 *total = rs6000_cost->fp;
20688 }
f0517163 20689 else if (mode == SFmode)
066cd967
DE
20690 {
20691 /* FNMA accounted in outer NEG. */
20692 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20693 *total = 0;
20694 else
20695 *total = rs6000_cost->fp;
20696 }
f0517163 20697 else
c4ad648e 20698 *total = COSTS_N_INSNS (1);
066cd967 20699 return false;
3c50106f
RH
20700
20701 case MULT:
c9dbf840 20702 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20703 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20704 {
8b897cfa
RS
20705 if (INTVAL (XEXP (x, 1)) >= -256
20706 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20707 *total = rs6000_cost->mulsi_const9;
8b897cfa 20708 else
06a67bdd 20709 *total = rs6000_cost->mulsi_const;
3c50106f 20710 }
066cd967
DE
20711 /* FMA accounted in outer PLUS/MINUS. */
20712 else if ((mode == DFmode || mode == SFmode)
20713 && (outer_code == PLUS || outer_code == MINUS))
20714 *total = 0;
f0517163 20715 else if (mode == DFmode)
06a67bdd 20716 *total = rs6000_cost->dmul;
f0517163 20717 else if (mode == SFmode)
06a67bdd 20718 *total = rs6000_cost->fp;
f0517163 20719 else if (mode == DImode)
06a67bdd 20720 *total = rs6000_cost->muldi;
8b897cfa 20721 else
06a67bdd 20722 *total = rs6000_cost->mulsi;
066cd967 20723 return false;
3c50106f
RH
20724
20725 case DIV:
20726 case MOD:
f0517163
RS
20727 if (FLOAT_MODE_P (mode))
20728 {
06a67bdd
RS
20729 *total = mode == DFmode ? rs6000_cost->ddiv
20730 : rs6000_cost->sdiv;
066cd967 20731 return false;
f0517163 20732 }
5efb1046 20733 /* FALLTHRU */
3c50106f
RH
20734
20735 case UDIV:
20736 case UMOD:
627b6fe2
DJ
20737 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20738 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20739 {
20740 if (code == DIV || code == MOD)
20741 /* Shift, addze */
20742 *total = COSTS_N_INSNS (2);
20743 else
20744 /* Shift */
20745 *total = COSTS_N_INSNS (1);
20746 }
c4ad648e 20747 else
627b6fe2
DJ
20748 {
20749 if (GET_MODE (XEXP (x, 1)) == DImode)
20750 *total = rs6000_cost->divdi;
20751 else
20752 *total = rs6000_cost->divsi;
20753 }
20754 /* Add in shift and subtract for MOD. */
20755 if (code == MOD || code == UMOD)
20756 *total += COSTS_N_INSNS (2);
066cd967 20757 return false;
3c50106f 20758
32f56aad 20759 case CTZ:
3c50106f
RH
20760 case FFS:
20761 *total = COSTS_N_INSNS (4);
066cd967 20762 return false;
3c50106f 20763
32f56aad
DE
20764 case POPCOUNT:
20765 *total = COSTS_N_INSNS (6);
20766 return false;
20767
06a67bdd 20768 case NOT:
066cd967
DE
20769 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20770 {
20771 *total = 0;
20772 return false;
20773 }
20774 /* FALLTHRU */
20775
20776 case AND:
32f56aad 20777 case CLZ:
066cd967
DE
20778 case IOR:
20779 case XOR:
d5861a7a
DE
20780 case ZERO_EXTRACT:
20781 *total = COSTS_N_INSNS (1);
20782 return false;
20783
066cd967
DE
20784 case ASHIFT:
20785 case ASHIFTRT:
20786 case LSHIFTRT:
20787 case ROTATE:
20788 case ROTATERT:
d5861a7a 20789 /* Handle mul_highpart. */
066cd967
DE
20790 if (outer_code == TRUNCATE
20791 && GET_CODE (XEXP (x, 0)) == MULT)
20792 {
20793 if (mode == DImode)
20794 *total = rs6000_cost->muldi;
20795 else
20796 *total = rs6000_cost->mulsi;
20797 return true;
20798 }
d5861a7a
DE
20799 else if (outer_code == AND)
20800 *total = 0;
20801 else
20802 *total = COSTS_N_INSNS (1);
20803 return false;
20804
20805 case SIGN_EXTEND:
20806 case ZERO_EXTEND:
20807 if (GET_CODE (XEXP (x, 0)) == MEM)
20808 *total = 0;
20809 else
20810 *total = COSTS_N_INSNS (1);
066cd967 20811 return false;
06a67bdd 20812
066cd967
DE
20813 case COMPARE:
20814 case NEG:
20815 case ABS:
20816 if (!FLOAT_MODE_P (mode))
20817 {
20818 *total = COSTS_N_INSNS (1);
20819 return false;
20820 }
20821 /* FALLTHRU */
20822
20823 case FLOAT:
20824 case UNSIGNED_FLOAT:
20825 case FIX:
20826 case UNSIGNED_FIX:
06a67bdd
RS
20827 case FLOAT_TRUNCATE:
20828 *total = rs6000_cost->fp;
066cd967 20829 return false;
06a67bdd 20830
a2af5043
DJ
20831 case FLOAT_EXTEND:
20832 if (mode == DFmode)
20833 *total = 0;
20834 else
20835 *total = rs6000_cost->fp;
20836 return false;
20837
06a67bdd
RS
20838 case UNSPEC:
20839 switch (XINT (x, 1))
20840 {
20841 case UNSPEC_FRSP:
20842 *total = rs6000_cost->fp;
20843 return true;
20844
20845 default:
20846 break;
20847 }
20848 break;
20849
20850 case CALL:
20851 case IF_THEN_ELSE:
20852 if (optimize_size)
20853 {
20854 *total = COSTS_N_INSNS (1);
20855 return true;
20856 }
066cd967
DE
20857 else if (FLOAT_MODE_P (mode)
20858 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20859 {
20860 *total = rs6000_cost->fp;
20861 return false;
20862 }
06a67bdd
RS
20863 break;
20864
c0600ecd
DE
20865 case EQ:
20866 case GTU:
20867 case LTU:
22e54023
DE
20868 /* Carry bit requires mode == Pmode.
20869 NEG or PLUS already counted so only add one. */
20870 if (mode == Pmode
20871 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20872 {
22e54023
DE
20873 *total = COSTS_N_INSNS (1);
20874 return true;
20875 }
20876 if (outer_code == SET)
20877 {
20878 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20879 {
22e54023 20880 *total = COSTS_N_INSNS (2);
c0600ecd 20881 return true;
c0600ecd 20882 }
22e54023
DE
20883 else if (mode == Pmode)
20884 {
20885 *total = COSTS_N_INSNS (3);
20886 return false;
20887 }
20888 }
20889 /* FALLTHRU */
20890
20891 case GT:
20892 case LT:
20893 case UNORDERED:
20894 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
20895 {
20896 *total = COSTS_N_INSNS (2);
20897 return true;
c0600ecd 20898 }
22e54023
DE
20899 /* CC COMPARE. */
20900 if (outer_code == COMPARE)
20901 {
20902 *total = 0;
20903 return true;
20904 }
20905 break;
c0600ecd 20906
3c50106f 20907 default:
06a67bdd 20908 break;
3c50106f 20909 }
06a67bdd
RS
20910
20911 return false;
3c50106f
RH
20912}
20913
34bb030a
DE
20914/* A C expression returning the cost of moving data from a register of class
20915 CLASS1 to one of CLASS2. */
20916
20917int
f676971a 20918rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 20919 enum reg_class from, enum reg_class to)
34bb030a
DE
20920{
20921 /* Moves from/to GENERAL_REGS. */
20922 if (reg_classes_intersect_p (to, GENERAL_REGS)
20923 || reg_classes_intersect_p (from, GENERAL_REGS))
20924 {
20925 if (! reg_classes_intersect_p (to, GENERAL_REGS))
20926 from = to;
20927
20928 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
20929 return (rs6000_memory_move_cost (mode, from, 0)
20930 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
20931
c4ad648e
AM
20932 /* It's more expensive to move CR_REGS than CR0_REGS because of the
20933 shift. */
34bb030a
DE
20934 else if (from == CR_REGS)
20935 return 4;
20936
20937 else
c4ad648e 20938 /* A move will cost one instruction per GPR moved. */
c8b622ff 20939 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
20940 }
20941
c4ad648e 20942 /* Moving between two similar registers is just one instruction. */
34bb030a 20943 else if (reg_classes_intersect_p (to, from))
7393f7f8 20944 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 20945
c4ad648e 20946 /* Everything else has to go through GENERAL_REGS. */
34bb030a 20947 else
f676971a 20948 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
20949 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
20950}
20951
20952/* A C expression returning the cost of moving data of MODE from a register to
20953 or from memory. */
20954
20955int
f676971a 20956rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 20957 int in ATTRIBUTE_UNUSED)
34bb030a
DE
20958{
20959 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 20960 return 4 * hard_regno_nregs[0][mode];
34bb030a 20961 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 20962 return 4 * hard_regno_nregs[32][mode];
34bb030a 20963 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 20964 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
20965 else
20966 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
20967}
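/* Worked example of the two cost routines above, assuming a 32-bit
   hard-float target where a DFmode value needs two GPRs but only one FPR:

     GPR -> GPR copy:   2 * hard_regno_nregs[0][DFmode]   = 2 * 2 = 4
     GPR <-> FPR move:  treated as a store plus a load,
			4 * 1 (FPR side) + 4 * 2 (GPR side) = 12

   so the allocator strongly prefers keeping FP values in FPRs rather
   than bouncing them through GPRs.  */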
20968
9c78b944
DE
20969/* Returns a code for a target-specific builtin that implements
20970 reciprocal of the function, or NULL_TREE if not available. */
20971
20972static tree
20973rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
20974 bool sqrt ATTRIBUTE_UNUSED)
20975{
20976 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
20977 && flag_finite_math_only && !flag_trapping_math
20978 && flag_unsafe_math_optimizations))
20979 return NULL_TREE;
20980
20981 if (md_fn)
20982 return NULL_TREE;
20983 else
20984 switch (fn)
20985 {
20986 case BUILT_IN_SQRTF:
20987 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
20988
20989 default:
20990 return NULL_TREE;
20991 }
20992}
20993
ef765ea9
DE
20994/* Newton-Raphson approximation of single-precision floating point divide n/d.
20995 Assumes no trapping math and finite arguments. */
20996
20997void
9c78b944 20998rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
20999{
21000 rtx x0, e0, e1, y1, u0, v0, one;
21001
21002 x0 = gen_reg_rtx (SFmode);
21003 e0 = gen_reg_rtx (SFmode);
21004 e1 = gen_reg_rtx (SFmode);
21005 y1 = gen_reg_rtx (SFmode);
21006 u0 = gen_reg_rtx (SFmode);
21007 v0 = gen_reg_rtx (SFmode);
21008 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21009
21010 /* x0 = 1./d estimate */
21011 emit_insn (gen_rtx_SET (VOIDmode, x0,
21012 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21013 UNSPEC_FRES)));
21014 /* e0 = 1. - d * x0 */
21015 emit_insn (gen_rtx_SET (VOIDmode, e0,
21016 gen_rtx_MINUS (SFmode, one,
21017 gen_rtx_MULT (SFmode, d, x0))));
21018 /* e1 = e0 + e0 * e0 */
21019 emit_insn (gen_rtx_SET (VOIDmode, e1,
21020 gen_rtx_PLUS (SFmode,
21021 gen_rtx_MULT (SFmode, e0, e0), e0)));
21022 /* y1 = x0 + e1 * x0 */
21023 emit_insn (gen_rtx_SET (VOIDmode, y1,
21024 gen_rtx_PLUS (SFmode,
21025 gen_rtx_MULT (SFmode, e1, x0), x0)));
21026 /* u0 = n * y1 */
21027 emit_insn (gen_rtx_SET (VOIDmode, u0,
21028 gen_rtx_MULT (SFmode, n, y1)));
21029 /* v0 = n - d * u0 */
21030 emit_insn (gen_rtx_SET (VOIDmode, v0,
21031 gen_rtx_MINUS (SFmode, n,
21032 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21033 /* dst = u0 + v0 * y1 */
21034 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21035 gen_rtx_PLUS (SFmode,
21036 gen_rtx_MULT (SFmode, v0, y1), u0)));
21037}
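/* Accuracy sketch for the sequence above (exact arithmetic, ignoring
   rounding): if the fres estimate is x0 = (1 - e)/d for a small relative
   error e, then

     e0  = 1 - d*x0           = e
     e1  = e0 + e0*e0         = e + e^2
     y1  = x0 + e1*x0         = (1 - e^3)/d
     u0  = n*y1
     v0  = n - d*u0           = n*e^3
     dst = u0 + v0*y1         = n*(1 - e^6)/d

   i.e. the reciprocal refinement cubes the estimate's error and the
   final correction step squares that again.  */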
21038
21039/* Newton-Raphson approximation of double-precision floating point divide n/d.
21040 Assumes no trapping math and finite arguments. */
21041
21042void
9c78b944 21043rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21044{
21045 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21046
21047 x0 = gen_reg_rtx (DFmode);
21048 e0 = gen_reg_rtx (DFmode);
21049 e1 = gen_reg_rtx (DFmode);
21050 e2 = gen_reg_rtx (DFmode);
21051 y1 = gen_reg_rtx (DFmode);
21052 y2 = gen_reg_rtx (DFmode);
21053 y3 = gen_reg_rtx (DFmode);
21054 u0 = gen_reg_rtx (DFmode);
21055 v0 = gen_reg_rtx (DFmode);
21056 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21057
21058 /* x0 = 1./d estimate */
21059 emit_insn (gen_rtx_SET (VOIDmode, x0,
21060 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21061 UNSPEC_FRES)));
21062 /* e0 = 1. - d * x0 */
21063 emit_insn (gen_rtx_SET (VOIDmode, e0,
21064 gen_rtx_MINUS (DFmode, one,
21065 gen_rtx_MULT (DFmode, d, x0))));
21066 /* y1 = x0 + e0 * x0 */
21067 emit_insn (gen_rtx_SET (VOIDmode, y1,
21068 gen_rtx_PLUS (DFmode,
21069 gen_rtx_MULT (DFmode, e0, x0), x0)));
21070 /* e1 = e0 * e0 */
21071 emit_insn (gen_rtx_SET (VOIDmode, e1,
21072 gen_rtx_MULT (DFmode, e0, e0)));
21073 /* y2 = y1 + e1 * y1 */
21074 emit_insn (gen_rtx_SET (VOIDmode, y2,
21075 gen_rtx_PLUS (DFmode,
21076 gen_rtx_MULT (DFmode, e1, y1), y1)));
21077 /* e2 = e1 * e1 */
21078 emit_insn (gen_rtx_SET (VOIDmode, e2,
21079 gen_rtx_MULT (DFmode, e1, e1)));
21080 /* y3 = y2 + e2 * y2 */
21081 emit_insn (gen_rtx_SET (VOIDmode, y3,
21082 gen_rtx_PLUS (DFmode,
21083 gen_rtx_MULT (DFmode, e2, y2), y2)));
21084 /* u0 = n * y3 */
21085 emit_insn (gen_rtx_SET (VOIDmode, u0,
21086 gen_rtx_MULT (DFmode, n, y3)));
21087 /* v0 = n - d * u0 */
21088 emit_insn (gen_rtx_SET (VOIDmode, v0,
21089 gen_rtx_MINUS (DFmode, n,
21090 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21091 /* dst = u0 + v0 * y3 */
21092 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21093 gen_rtx_PLUS (DFmode,
21094 gen_rtx_MULT (DFmode, v0, y3), u0)));
21095}
21096
565ef4ba 21097
9c78b944
DE
21098/* Newton-Raphson approximation of single-precision floating point rsqrt.
21099 Assumes no trapping math and finite arguments. */
21100
21101void
21102rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21103{
21104 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21105 half, one, halfthree, c1, cond, label;
21106
21107 x0 = gen_reg_rtx (SFmode);
21108 x1 = gen_reg_rtx (SFmode);
21109 x2 = gen_reg_rtx (SFmode);
21110 y1 = gen_reg_rtx (SFmode);
21111 u0 = gen_reg_rtx (SFmode);
21112 u1 = gen_reg_rtx (SFmode);
21113 u2 = gen_reg_rtx (SFmode);
21114 v0 = gen_reg_rtx (SFmode);
21115 v1 = gen_reg_rtx (SFmode);
21116 v2 = gen_reg_rtx (SFmode);
21117 t0 = gen_reg_rtx (SFmode);
21118 halfthree = gen_reg_rtx (SFmode);
21119 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21120 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21121
21122 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21123 emit_insn (gen_rtx_SET (VOIDmode, t0,
21124 gen_rtx_MULT (SFmode, src, src)));
21125
21126 emit_insn (gen_rtx_SET (VOIDmode, cond,
21127 gen_rtx_COMPARE (CCFPmode, t0, src)));
21128 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21129 emit_unlikely_jump (c1, label);
21130
21131 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21132 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21133
21134 /* halfthree = 1.5 = 1.0 + 0.5 */
21135 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21136 gen_rtx_PLUS (SFmode, one, half)));
21137
21138 /* x0 = rsqrt estimate */
21139 emit_insn (gen_rtx_SET (VOIDmode, x0,
21140 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21141 UNSPEC_RSQRT)));
21142
21143 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21144 emit_insn (gen_rtx_SET (VOIDmode, y1,
21145 gen_rtx_MINUS (SFmode,
21146 gen_rtx_MULT (SFmode, src, halfthree),
21147 src)));
21148
21149 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
21150 emit_insn (gen_rtx_SET (VOIDmode, u0,
21151 gen_rtx_MULT (SFmode, x0, x0)));
21152 emit_insn (gen_rtx_SET (VOIDmode, v0,
21153 gen_rtx_MINUS (SFmode,
21154 halfthree,
21155 gen_rtx_MULT (SFmode, y1, u0))));
21156 emit_insn (gen_rtx_SET (VOIDmode, x1,
21157 gen_rtx_MULT (SFmode, x0, v0)));
21158
21159 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
21160 emit_insn (gen_rtx_SET (VOIDmode, u1,
21161 gen_rtx_MULT (SFmode, x1, x1)));
21162 emit_insn (gen_rtx_SET (VOIDmode, v1,
21163 gen_rtx_MINUS (SFmode,
21164 halfthree,
21165 gen_rtx_MULT (SFmode, y1, u1))));
21166 emit_insn (gen_rtx_SET (VOIDmode, x2,
21167 gen_rtx_MULT (SFmode, x1, v1)));
21168
21169 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
21170 emit_insn (gen_rtx_SET (VOIDmode, u2,
21171 gen_rtx_MULT (SFmode, x2, x2)));
21172 emit_insn (gen_rtx_SET (VOIDmode, v2,
21173 gen_rtx_MINUS (SFmode,
21174 halfthree,
21175 gen_rtx_MULT (SFmode, y1, u2))));
21176 emit_insn (gen_rtx_SET (VOIDmode, dst,
21177 gen_rtx_MULT (SFmode, x2, v2)));
21178
21179 emit_label (XEXP (label, 0));
21180}
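/* The three steps above are the standard Newton-Raphson iteration for
   1/sqrt(src): with y1 = 0.5*src, each step computes
   x' = x * (1.5 - (0.5*src) * x * x), which (ignoring rounding) roughly
   doubles the number of correct bits, so three steps refine the
   low-precision hardware estimate (UNSPEC_RSQRT) to full SFmode
   precision.  The compare-and-branch at the top skips the refinement
   entirely when src * src compares equal to src (e.g. 0.0 and 1.0).  */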
21181
565ef4ba
RS
21182/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
21183 target, and SRC is the argument operand. */
21184
21185void
21186rs6000_emit_popcount (rtx dst, rtx src)
21187{
21188 enum machine_mode mode = GET_MODE (dst);
21189 rtx tmp1, tmp2;
21190
21191 tmp1 = gen_reg_rtx (mode);
21192
21193 if (mode == SImode)
21194 {
21195 emit_insn (gen_popcntbsi2 (tmp1, src));
21196 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
21197 NULL_RTX, 0);
21198 tmp2 = force_reg (SImode, tmp2);
21199 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
21200 }
21201 else
21202 {
21203 emit_insn (gen_popcntbdi2 (tmp1, src));
21204 tmp2 = expand_mult (DImode, tmp1,
21205 GEN_INT ((HOST_WIDE_INT)
21206 0x01010101 << 32 | 0x01010101),
21207 NULL_RTX, 0);
21208 tmp2 = force_reg (DImode, tmp2);
21209 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
21210 }
21211}
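/* Host-side sketch of the same byte-summing trick, with
   __builtin_popcount standing in for the popcntb instruction.  It is
   illustrative only and kept under #if 0; nothing in the compiler calls
   it.  */
#if 0
static unsigned int
popcount32_sketch (unsigned int x)
{
  unsigned int per_byte = 0;
  int i;

  /* popcntb: population count of each byte, kept in its own byte.  */
  for (i = 0; i < 32; i += 8)
    per_byte |= (unsigned int) __builtin_popcount ((x >> i) & 0xff) << i;

  /* Multiplying by 0x01010101 sums the four byte counts into the top
     byte; shifting right by 24 extracts the total, exactly as the RTL
     emitted above does for SImode.  */
  return (per_byte * 0x01010101u) >> 24;
}
#endif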
21212
21213
21214/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
21215 target, and SRC is the argument operand. */
21216
21217void
21218rs6000_emit_parity (rtx dst, rtx src)
21219{
21220 enum machine_mode mode = GET_MODE (dst);
21221 rtx tmp;
21222
21223 tmp = gen_reg_rtx (mode);
21224 if (mode == SImode)
21225 {
21226 /* Is mult+shift >= shift+xor+shift+xor? */
21227 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
21228 {
21229 rtx tmp1, tmp2, tmp3, tmp4;
21230
21231 tmp1 = gen_reg_rtx (SImode);
21232 emit_insn (gen_popcntbsi2 (tmp1, src));
21233
21234 tmp2 = gen_reg_rtx (SImode);
21235 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
21236 tmp3 = gen_reg_rtx (SImode);
21237 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
21238
21239 tmp4 = gen_reg_rtx (SImode);
21240 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
21241 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
21242 }
21243 else
21244 rs6000_emit_popcount (tmp, src);
21245 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
21246 }
21247 else
21248 {
21249 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
21250 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
21251 {
21252 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
21253
21254 tmp1 = gen_reg_rtx (DImode);
21255 emit_insn (gen_popcntbdi2 (tmp1, src));
21256
21257 tmp2 = gen_reg_rtx (DImode);
21258 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
21259 tmp3 = gen_reg_rtx (DImode);
21260 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
21261
21262 tmp4 = gen_reg_rtx (DImode);
21263 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
21264 tmp5 = gen_reg_rtx (DImode);
21265 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
21266
21267 tmp6 = gen_reg_rtx (DImode);
21268 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
21269 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
21270 }
21271 else
21272 rs6000_emit_popcount (tmp, src);
21273 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
21274 }
21275}
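/* In the SImode path above, popcntbsi2 leaves per-byte population counts
   b3:b2:b1:b0 in tmp1; xoring with the value shifted right by 16 and
   then by 8 folds them so the low byte holds b0^b1^b2^b3, and its low
   bit (the final AND with const1_rtx) is the parity of the whole word.
   The DImode path does one extra fold, starting at 32 bits.  */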
21276
ded9bf77
AH
21277/* Return an RTX representing where to find the function value of a
21278 function returning MODE. */
21279static rtx
21280rs6000_complex_function_value (enum machine_mode mode)
21281{
21282 unsigned int regno;
21283 rtx r1, r2;
21284 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 21285 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 21286
18f63bfa
AH
21287 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
21288 regno = FP_ARG_RETURN;
354ed18f
AH
21289 else
21290 {
18f63bfa 21291 regno = GP_ARG_RETURN;
ded9bf77 21292
18f63bfa
AH
21293 /* 32-bit is OK since it'll go in r3/r4. */
21294 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
21295 return gen_rtx_REG (mode, regno);
21296 }
21297
18f63bfa
AH
21298 if (inner_bytes >= 8)
21299 return gen_rtx_REG (mode, regno);
21300
ded9bf77
AH
21301 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
21302 const0_rtx);
21303 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 21304 GEN_INT (inner_bytes));
ded9bf77
AH
21305 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
21306}
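/* Example: a "float _Complex" value (SCmode) returned with hard float
   takes this path; its SFmode halves are only 4 bytes, so neither early
   return triggers and the result is a PARALLEL pairing FP_ARG_RETURN
   (real part at offset 0) with the next FP register (imaginary part at
   offset 4).  */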
21307
a6ebc39a
AH
21308/* Define how to find the value returned by a function.
21309 VALTYPE is the data type of the value (as a tree).
21310 If the precise function being called is known, FUNC is its FUNCTION_DECL;
21311 otherwise, FUNC is 0.
21312
21313 On the SPE, both FPs and vectors are returned in r3.
21314
21315 On RS/6000 an integer value is in r3 and a floating-point value is in
21316 fp1, unless -msoft-float. */
21317
21318rtx
586de218 21319rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
21320{
21321 enum machine_mode mode;
2a8fa26c 21322 unsigned int regno;
a6ebc39a 21323
594a51fe
SS
21324 /* Special handling for structs in darwin64. */
21325 if (rs6000_darwin64_abi
21326 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
21327 && TREE_CODE (valtype) == RECORD_TYPE
21328 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
21329 {
21330 CUMULATIVE_ARGS valcum;
21331 rtx valret;
21332
0b5383eb 21333 valcum.words = 0;
594a51fe
SS
21334 valcum.fregno = FP_ARG_MIN_REG;
21335 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
21336 /* Do a trial code generation as if this were going to be passed as
21337 an argument; if any part goes in memory, we return NULL. */
21338 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
21339 if (valret)
21340 return valret;
21341 /* Otherwise fall through to standard ABI rules. */
21342 }
21343
0e67400a
FJ
21344 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
21345 {
21346 /* A long long return value needs to be split for -mpowerpc64 with the 32-bit ABI. */
21347 return gen_rtx_PARALLEL (DImode,
21348 gen_rtvec (2,
21349 gen_rtx_EXPR_LIST (VOIDmode,
21350 gen_rtx_REG (SImode, GP_ARG_RETURN),
21351 const0_rtx),
21352 gen_rtx_EXPR_LIST (VOIDmode,
21353 gen_rtx_REG (SImode,
21354 GP_ARG_RETURN + 1),
21355 GEN_INT (4))));
21356 }
0f086e42
FJ
21357 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
21358 {
21359 return gen_rtx_PARALLEL (DCmode,
21360 gen_rtvec (4,
21361 gen_rtx_EXPR_LIST (VOIDmode,
21362 gen_rtx_REG (SImode, GP_ARG_RETURN),
21363 const0_rtx),
21364 gen_rtx_EXPR_LIST (VOIDmode,
21365 gen_rtx_REG (SImode,
21366 GP_ARG_RETURN + 1),
21367 GEN_INT (4)),
21368 gen_rtx_EXPR_LIST (VOIDmode,
21369 gen_rtx_REG (SImode,
21370 GP_ARG_RETURN + 2),
21371 GEN_INT (8)),
21372 gen_rtx_EXPR_LIST (VOIDmode,
21373 gen_rtx_REG (SImode,
21374 GP_ARG_RETURN + 3),
21375 GEN_INT (12))));
21376 }
602ea4d3 21377
7348aa7f
FXC
21378 mode = TYPE_MODE (valtype);
21379 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 21380 || POINTER_TYPE_P (valtype))
b78d48dd 21381 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 21382
00b79d54 21383 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21384 {
21385 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21386 {
21387 switch (mode)
21388 {
21389 default:
21390 gcc_unreachable ();
21391 case SDmode:
21392 regno = GP_ARG_RETURN;
21393 break;
21394 case DDmode:
21395 regno = FP_ARG_RETURN;
21396 break;
21397 case TDmode:
21398 /* Use f2:f3 specified by the ABI. */
21399 regno = FP_ARG_RETURN + 1;
21400 break;
21401 }
21402 }
21403 else
21404 regno = GP_ARG_RETURN;
21405 }
00b79d54 21406 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 21407 regno = FP_ARG_RETURN;
ded9bf77 21408 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 21409 && targetm.calls.split_complex_arg)
ded9bf77 21410 return rs6000_complex_function_value (mode);
44688022 21411 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 21412 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 21413 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 21414 regno = ALTIVEC_ARG_RETURN;
18f63bfa 21415 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21416 && (mode == DFmode || mode == DCmode
21417 || mode == TFmode || mode == TCmode))
18f63bfa 21418 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
21419 else
21420 regno = GP_ARG_RETURN;
21421
21422 return gen_rtx_REG (mode, regno);
21423}
21424
ded9bf77
AH
21425/* Define how to find the value returned by a library function
21426 assuming the value has mode MODE. */
21427rtx
21428rs6000_libcall_value (enum machine_mode mode)
21429{
21430 unsigned int regno;
21431
2e6c9641
FJ
21432 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
21433 {
21434 /* A long long return value needs to be split for -mpowerpc64 with the 32-bit ABI. */
21435 return gen_rtx_PARALLEL (DImode,
21436 gen_rtvec (2,
21437 gen_rtx_EXPR_LIST (VOIDmode,
21438 gen_rtx_REG (SImode, GP_ARG_RETURN),
21439 const0_rtx),
21440 gen_rtx_EXPR_LIST (VOIDmode,
21441 gen_rtx_REG (SImode,
21442 GP_ARG_RETURN + 1),
21443 GEN_INT (4))));
21444 }
21445
00b79d54 21446 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
21447 {
21448 if (TARGET_HARD_FLOAT && TARGET_FPRS)
21449 {
21450 switch (mode)
21451 {
21452 default:
21453 gcc_unreachable ();
21454 case SDmode:
21455 regno = GP_ARG_RETURN;
21456 break;
21457 case DDmode:
21458 regno = FP_ARG_RETURN;
21459 break;
21460 case TDmode:
21461 /* Use f2:f3 specified by the ABI. */
21462 regno = FP_ARG_RETURN + 1;
21463 break;
21464 }
21465 }
21466 else
21467 regno = GP_ARG_RETURN;
21468 }
00b79d54 21469 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
21470 && TARGET_HARD_FLOAT && TARGET_FPRS)
21471 regno = FP_ARG_RETURN;
44688022
AM
21472 else if (ALTIVEC_VECTOR_MODE (mode)
21473 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 21474 regno = ALTIVEC_ARG_RETURN;
42ba5130 21475 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 21476 return rs6000_complex_function_value (mode);
18f63bfa 21477 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
21478 && (mode == DFmode || mode == DCmode
21479 || mode == TFmode || mode == TCmode))
18f63bfa 21480 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
21481 else
21482 regno = GP_ARG_RETURN;
21483
21484 return gen_rtx_REG (mode, regno);
21485}
21486
d1d0c603
JJ
21487/* Define the offset between two registers, FROM to be eliminated and its
21488 replacement TO, at the start of a routine. */
21489HOST_WIDE_INT
21490rs6000_initial_elimination_offset (int from, int to)
21491{
21492 rs6000_stack_t *info = rs6000_stack_info ();
21493 HOST_WIDE_INT offset;
21494
7d5175e1 21495 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 21496 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
21497 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21498 {
21499 offset = info->push_p ? 0 : -info->total_size;
21500 if (FRAME_GROWS_DOWNWARD)
5b667039 21501 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
21502 }
21503 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
21504 offset = FRAME_GROWS_DOWNWARD
5b667039 21505 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
21506 : 0;
21507 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
21508 offset = info->total_size;
21509 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
21510 offset = info->push_p ? info->total_size : 0;
21511 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
21512 offset = 0;
21513 else
37409796 21514 gcc_unreachable ();
d1d0c603
JJ
21515
21516 return offset;
21517}
21518
58646b77 21519/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 21520
c8e4f0e9 21521static bool
3101faab 21522rs6000_is_opaque_type (const_tree type)
62e1dfcf 21523{
58646b77 21524 return (type == opaque_V2SI_type_node
2abe3e28 21525 || type == opaque_V2SF_type_node
58646b77
PB
21526 || type == opaque_p_V2SI_type_node
21527 || type == opaque_V4SI_type_node);
62e1dfcf
NC
21528}
21529
96714395 21530static rtx
a2369ed3 21531rs6000_dwarf_register_span (rtx reg)
96714395
AH
21532{
21533 unsigned regno;
21534
4d4cbc0e
AH
21535 if (TARGET_SPE
21536 && (SPE_VECTOR_MODE (GET_MODE (reg))
21537 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
21538 ;
21539 else
96714395
AH
21540 return NULL_RTX;
21541
21542 regno = REGNO (reg);
21543
21544 /* The duality of the SPE register size wreaks all kinds of havoc.
21545 This is a way of distinguishing r0 in 32-bits from r0 in
21546 64-bits. */
21547 return
21548 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
21549 BYTES_BIG_ENDIAN
21550 ? gen_rtvec (2,
21551 gen_rtx_REG (SImode, regno + 1200),
21552 gen_rtx_REG (SImode, regno))
21553 : gen_rtvec (2,
21554 gen_rtx_REG (SImode, regno),
21555 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21556}
21557
37ea0b7e
JM
21558/* Fill in sizes for SPE register high parts in table used by unwinder. */
21559
21560static void
21561rs6000_init_dwarf_reg_sizes_extra (tree address)
21562{
21563 if (TARGET_SPE)
21564 {
21565 int i;
21566 enum machine_mode mode = TYPE_MODE (char_type_node);
21567 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21568 rtx mem = gen_rtx_MEM (BLKmode, addr);
21569 rtx value = gen_int_mode (4, mode);
21570
21571 for (i = 1201; i < 1232; i++)
21572 {
21573 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21574 HOST_WIDE_INT offset
21575 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21576
21577 emit_move_insn (adjust_address (mem, mode, offset), value);
21578 }
21579 }
21580}
21581
93c9d1ba
AM
21582/* Map internal gcc register numbers to DWARF2 register numbers. */
21583
21584unsigned int
21585rs6000_dbx_register_number (unsigned int regno)
21586{
21587 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21588 return regno;
21589 if (regno == MQ_REGNO)
21590 return 100;
1de43f85 21591 if (regno == LR_REGNO)
93c9d1ba 21592 return 108;
1de43f85 21593 if (regno == CTR_REGNO)
93c9d1ba
AM
21594 return 109;
21595 if (CR_REGNO_P (regno))
21596 return regno - CR0_REGNO + 86;
21597 if (regno == XER_REGNO)
21598 return 101;
21599 if (ALTIVEC_REGNO_P (regno))
21600 return regno - FIRST_ALTIVEC_REGNO + 1124;
21601 if (regno == VRSAVE_REGNO)
21602 return 356;
21603 if (regno == VSCR_REGNO)
21604 return 67;
21605 if (regno == SPE_ACC_REGNO)
21606 return 99;
21607 if (regno == SPEFSCR_REGNO)
21608 return 612;
21609 /* SPE high reg number. We get these values of regno from
21610 rs6000_dwarf_register_span. */
37409796
NS
21611 gcc_assert (regno >= 1200 && regno < 1232);
21612 return regno;
93c9d1ba
AM
21613}
21614
93f90be6 21615/* target hook eh_return_filter_mode */
f676971a 21616static enum machine_mode
93f90be6
FJ
21617rs6000_eh_return_filter_mode (void)
21618{
21619 return TARGET_32BIT ? SImode : word_mode;
21620}
21621
00b79d54
BE
21622/* Target hook for scalar_mode_supported_p. */
21623static bool
21624rs6000_scalar_mode_supported_p (enum machine_mode mode)
21625{
21626 if (DECIMAL_FLOAT_MODE_P (mode))
21627 return true;
21628 else
21629 return default_scalar_mode_supported_p (mode);
21630}
21631
f676971a
EC
21632/* Target hook for vector_mode_supported_p. */
21633static bool
21634rs6000_vector_mode_supported_p (enum machine_mode mode)
21635{
21636
96038623
DE
21637 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
21638 return true;
21639
f676971a
EC
21640 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21641 return true;
21642
21643 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21644 return true;
21645
21646 else
21647 return false;
21648}
21649
bb8df8a6
EC
21650/* Target hook for invalid_arg_for_unprototyped_fn. */
21651static const char *
3101faab 21652invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
21653{
21654 return (!rs6000_darwin64_abi
21655 && typelist == 0
21656 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21657 && (funcdecl == NULL_TREE
21658 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21659 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21660 ? N_("AltiVec argument passed to unprototyped function")
21661 : NULL;
21662}
21663
3aebbe5f
JJ
21664/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
21665 setup by using __stack_chk_fail_local hidden function instead of
21666 calling __stack_chk_fail directly. Otherwise it is better to call
21667 __stack_chk_fail directly. */
21668
21669static tree
21670rs6000_stack_protect_fail (void)
21671{
21672 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21673 ? default_hidden_stack_protect_fail ()
21674 : default_external_stack_protect_fail ();
21675}
21676
17211ab5 21677#include "gt-rs6000.h"