]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rs6000/rs6000.c
acinclude.m4: Fix duplicate variable names.
[thirdparty/gcc.git] / gcc / config / rs6000 / rs6000.c
CommitLineData
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
4d3e6fae 56#include "intl.h"
59d6560b 57#include "params.h"
279bb624 58#include "tm-constrs.h"
1bc7c5b6
ZW
59#if TARGET_XCOFF
60#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61#endif
93a27b7b
ZW
62#if TARGET_MACHO
63#include "gstab.h" /* for N_SLINE */
64#endif
9b30bae2 65
7509c759
MM
66#ifndef TARGET_NO_PROTOTYPE
67#define TARGET_NO_PROTOTYPE 0
68#endif
69
9878760c
RK
70#define min(A,B) ((A) < (B) ? (A) : (B))
71#define max(A,B) ((A) > (B) ? (A) : (B))
72
d1d0c603
JJ
73/* Structure used to define the rs6000 stack */
74typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
81 int push_p; /* true if we need to allocate stack space */
82 int calls_p; /* true if the function makes any calls */
c4ad648e 83 int world_save_p; /* true if we're saving *everything*:
d62294f5 84 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
85 enum rs6000_abi abi; /* which ABI to use */
86 int gp_save_offset; /* offset to save GP regs from initial SP */
87 int fp_save_offset; /* offset to save FP regs from initial SP */
88 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
89 int lr_save_offset; /* offset to save LR from initial SP */
90 int cr_save_offset; /* offset to save CR from initial SP */
91 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
92 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
96 HOST_WIDE_INT vars_size; /* variable save area size */
97 int parm_size; /* outgoing parameter size */
98 int save_size; /* save area size */
99 int fixed_size; /* fixed size of stack frame */
100 int gp_size; /* size of saved GP registers */
101 int fp_size; /* size of saved FP registers */
102 int altivec_size; /* size of saved AltiVec registers */
103 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
106 not in save_size */
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
d1d0c603
JJ
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
111} rs6000_stack_t;
112
5b667039
JJ
113/* A C structure for machine-specific, per-function data.
114 This is added to the cfun structure. */
115typedef struct machine_function GTY(())
116{
117 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
118 int ra_needs_full_frame;
119 /* Some local-dynamic symbol. */
120 const char *some_ld_name;
121 /* Whether the instruction chain has been scanned already. */
122 int insn_chain_scanned_p;
123 /* Flags if __builtin_return_address (0) was used. */
124 int ra_need_lr;
125 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
126 varargs save area. */
127 HOST_WIDE_INT varargs_save_offset;
128} machine_function;
129
5248c961
RK
130/* Target cpu type */
131
132enum processor_type rs6000_cpu;
8e3f41e7
MM
133struct rs6000_cpu_select rs6000_select[3] =
134{
815cdc52
MM
135 /* switch name, tune arch */
136 { (const char *)0, "--with-cpu=", 1, 1 },
137 { (const char *)0, "-mcpu=", 1, 1 },
138 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 139};
5248c961 140
d296e02e
AP
141static GTY(()) bool rs6000_cell_dont_microcode;
142
ec507f2d
DE
143/* Always emit branch hint bits. */
144static GTY(()) bool rs6000_always_hint;
145
146/* Schedule instructions for group formation. */
147static GTY(()) bool rs6000_sched_groups;
148
44cd321e
PS
149/* Align branch targets. */
150static GTY(()) bool rs6000_align_branch_targets;
151
569fa502
DN
152/* Support for -msched-costly-dep option. */
153const char *rs6000_sched_costly_dep_str;
154enum rs6000_dependence_cost rs6000_sched_costly_dep;
155
cbe26ab8
DN
156/* Support for -minsert-sched-nops option. */
157const char *rs6000_sched_insert_nops_str;
158enum rs6000_nop_insertion rs6000_sched_insert_nops;
159
7ccf35ed 160/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 161static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 162
602ea4d3 163/* Size of long double. */
6fa3f289
ZW
164int rs6000_long_double_type_size;
165
602ea4d3
JJ
166/* IEEE quad extended precision long double. */
167int rs6000_ieeequad;
168
169/* Whether -mabi=altivec has appeared. */
6fa3f289
ZW
170int rs6000_altivec_abi;
171
a3170dc6
AH
172/* Nonzero if we want SPE ABI extensions. */
173int rs6000_spe_abi;
174
5da702b1
AH
175/* Nonzero if floating point operations are done in the GPRs. */
176int rs6000_float_gprs = 0;
177
594a51fe
SS
178/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
179int rs6000_darwin64_abi;
180
a0ab749a 181/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 182static GTY(()) int common_mode_defined;
c81bebd7 183
9878760c
RK
184/* Save information from a "cmpxx" operation until the branch or scc is
185 emitted. */
9878760c
RK
186rtx rs6000_compare_op0, rs6000_compare_op1;
187int rs6000_compare_fp_p;
874a0744 188
874a0744
MM
189/* Label number of label created for -mrelocatable, to call to so we can
190 get the address of the GOT section */
191int rs6000_pic_labelno;
c81bebd7 192
b91da81f 193#ifdef USING_ELFOS_H
c81bebd7 194/* Which abi to adhere to */
9739c90c 195const char *rs6000_abi_name;
d9407988
MM
196
197/* Semantics of the small data area */
198enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
199
200/* Which small data model to use */
815cdc52 201const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
202
203/* Counter for labels which are to be placed in .fixup. */
204int fixuplabelno = 0;
874a0744 205#endif
4697a36c 206
c4501e62
JJ
207/* Bit size of immediate TLS offsets and string from which it is decoded. */
208int rs6000_tls_size = 32;
209const char *rs6000_tls_size_string;
210
b6c9286a
MM
211/* ABI enumeration available for subtarget to use. */
212enum rs6000_abi rs6000_current_abi;
213
85b776df
AM
214/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
215int dot_symbols;
216
38c1f2d7 217/* Debug flags */
815cdc52 218const char *rs6000_debug_name;
38c1f2d7
MM
219int rs6000_debug_stack; /* debug stack applications */
220int rs6000_debug_arg; /* debug argument handling */
221
aabcd309 222/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
223bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
224
58646b77
PB
225/* Built in types. */
226
227tree rs6000_builtin_types[RS6000_BTI_MAX];
228tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 229
57ac7be9
AM
230const char *rs6000_traceback_name;
231static enum {
232 traceback_default = 0,
233 traceback_none,
234 traceback_part,
235 traceback_full
236} rs6000_traceback;
237
38c1f2d7
MM
238/* Flag to say the TOC is initialized */
239int toc_initialized;
9ebbca7d 240char toc_label_name[10];
38c1f2d7 241
44cd321e
PS
242/* Cached value of rs6000_variable_issue. This is cached in
243 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
244static short cached_can_issue_more;
245
d6b5193b
RS
246static GTY(()) section *read_only_data_section;
247static GTY(()) section *private_data_section;
248static GTY(()) section *read_only_private_data_section;
249static GTY(()) section *sdata2_section;
250static GTY(()) section *toc_section;
251
a3c9585f
KH
252/* Control alignment for fields within structures. */
253/* String from -malign-XXXXX. */
025d9908
KH
254int rs6000_alignment_flags;
255
78f5898b
AH
256/* True for any options that were explicitly set. */
257struct {
df01da37 258 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 259 bool alignment; /* True if -malign- was used. */
d3603e8c 260 bool abi; /* True if -mabi=spe/nospe was used. */
78f5898b
AH
261 bool spe; /* True if -mspe= was used. */
262 bool float_gprs; /* True if -mfloat-gprs= was used. */
263 bool isel; /* True if -misel was used. */
264 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 265 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
78f5898b
AH
266} rs6000_explicit_options;
267
a3170dc6
AH
268struct builtin_description
269{
270 /* mask is not const because we're going to alter it below. This
271 nonsense will go away when we rewrite the -march infrastructure
272 to give us more target flag bits. */
273 unsigned int mask;
274 const enum insn_code icode;
275 const char *const name;
276 const enum rs6000_builtins code;
277};
8b897cfa
RS
278\f
279/* Target cpu costs. */
280
281struct processor_costs {
c4ad648e 282 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
283 const int mulsi_const; /* cost of SImode multiplication by constant. */
284 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
285 const int muldi; /* cost of DImode multiplication. */
286 const int divsi; /* cost of SImode division. */
287 const int divdi; /* cost of DImode division. */
288 const int fp; /* cost of simple SFmode and DFmode insns. */
289 const int dmul; /* cost of DFmode multiplication (and fmadd). */
290 const int sdiv; /* cost of SFmode division (fdivs). */
291 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
292 const int cache_line_size; /* cache line size in bytes. */
293 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
294 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
295 const int simultaneous_prefetches; /* number of parallel prefetch
296 operations. */
8b897cfa
RS
297};
298
299const struct processor_costs *rs6000_cost;
300
301/* Processor costs (relative to an add) */
302
303/* Instruction size costs on 32bit processors. */
304static const
305struct processor_costs size32_cost = {
06a67bdd
RS
306 COSTS_N_INSNS (1), /* mulsi */
307 COSTS_N_INSNS (1), /* mulsi_const */
308 COSTS_N_INSNS (1), /* mulsi_const9 */
309 COSTS_N_INSNS (1), /* muldi */
310 COSTS_N_INSNS (1), /* divsi */
311 COSTS_N_INSNS (1), /* divdi */
312 COSTS_N_INSNS (1), /* fp */
313 COSTS_N_INSNS (1), /* dmul */
314 COSTS_N_INSNS (1), /* sdiv */
315 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
316 32,
317 0,
318 0,
5f732aba 319 0,
8b897cfa
RS
320};
321
322/* Instruction size costs on 64bit processors. */
323static const
324struct processor_costs size64_cost = {
06a67bdd
RS
325 COSTS_N_INSNS (1), /* mulsi */
326 COSTS_N_INSNS (1), /* mulsi_const */
327 COSTS_N_INSNS (1), /* mulsi_const9 */
328 COSTS_N_INSNS (1), /* muldi */
329 COSTS_N_INSNS (1), /* divsi */
330 COSTS_N_INSNS (1), /* divdi */
331 COSTS_N_INSNS (1), /* fp */
332 COSTS_N_INSNS (1), /* dmul */
333 COSTS_N_INSNS (1), /* sdiv */
334 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
335 128,
336 0,
337 0,
5f732aba 338 0,
8b897cfa
RS
339};
340
341/* Instruction costs on RIOS1 processors. */
342static const
343struct processor_costs rios1_cost = {
06a67bdd
RS
344 COSTS_N_INSNS (5), /* mulsi */
345 COSTS_N_INSNS (4), /* mulsi_const */
346 COSTS_N_INSNS (3), /* mulsi_const9 */
347 COSTS_N_INSNS (5), /* muldi */
348 COSTS_N_INSNS (19), /* divsi */
349 COSTS_N_INSNS (19), /* divdi */
350 COSTS_N_INSNS (2), /* fp */
351 COSTS_N_INSNS (2), /* dmul */
352 COSTS_N_INSNS (19), /* sdiv */
353 COSTS_N_INSNS (19), /* ddiv */
5f732aba
DE
354 128,
355 64, /* l1 cache */
356 512, /* l2 cache */
0b11da67 357 0, /* streams */
8b897cfa
RS
358};
359
360/* Instruction costs on RIOS2 processors. */
361static const
362struct processor_costs rios2_cost = {
06a67bdd
RS
363 COSTS_N_INSNS (2), /* mulsi */
364 COSTS_N_INSNS (2), /* mulsi_const */
365 COSTS_N_INSNS (2), /* mulsi_const9 */
366 COSTS_N_INSNS (2), /* muldi */
367 COSTS_N_INSNS (13), /* divsi */
368 COSTS_N_INSNS (13), /* divdi */
369 COSTS_N_INSNS (2), /* fp */
370 COSTS_N_INSNS (2), /* dmul */
371 COSTS_N_INSNS (17), /* sdiv */
372 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
373 256,
374 256, /* l1 cache */
375 1024, /* l2 cache */
0b11da67 376 0, /* streams */
8b897cfa
RS
377};
378
379/* Instruction costs on RS64A processors. */
380static const
381struct processor_costs rs64a_cost = {
06a67bdd
RS
382 COSTS_N_INSNS (20), /* mulsi */
383 COSTS_N_INSNS (12), /* mulsi_const */
384 COSTS_N_INSNS (8), /* mulsi_const9 */
385 COSTS_N_INSNS (34), /* muldi */
386 COSTS_N_INSNS (65), /* divsi */
387 COSTS_N_INSNS (67), /* divdi */
388 COSTS_N_INSNS (4), /* fp */
389 COSTS_N_INSNS (4), /* dmul */
390 COSTS_N_INSNS (31), /* sdiv */
391 COSTS_N_INSNS (31), /* ddiv */
0b11da67 392 128,
5f732aba
DE
393 128, /* l1 cache */
394 2048, /* l2 cache */
0b11da67 395 1, /* streams */
8b897cfa
RS
396};
397
398/* Instruction costs on MPCCORE processors. */
399static const
400struct processor_costs mpccore_cost = {
06a67bdd
RS
401 COSTS_N_INSNS (2), /* mulsi */
402 COSTS_N_INSNS (2), /* mulsi_const */
403 COSTS_N_INSNS (2), /* mulsi_const9 */
404 COSTS_N_INSNS (2), /* muldi */
405 COSTS_N_INSNS (6), /* divsi */
406 COSTS_N_INSNS (6), /* divdi */
407 COSTS_N_INSNS (4), /* fp */
408 COSTS_N_INSNS (5), /* dmul */
409 COSTS_N_INSNS (10), /* sdiv */
410 COSTS_N_INSNS (17), /* ddiv */
5f732aba
DE
411 32,
412 4, /* l1 cache */
413 16, /* l2 cache */
0b11da67 414 1, /* streams */
8b897cfa
RS
415};
416
417/* Instruction costs on PPC403 processors. */
418static const
419struct processor_costs ppc403_cost = {
06a67bdd
RS
420 COSTS_N_INSNS (4), /* mulsi */
421 COSTS_N_INSNS (4), /* mulsi_const */
422 COSTS_N_INSNS (4), /* mulsi_const9 */
423 COSTS_N_INSNS (4), /* muldi */
424 COSTS_N_INSNS (33), /* divsi */
425 COSTS_N_INSNS (33), /* divdi */
426 COSTS_N_INSNS (11), /* fp */
427 COSTS_N_INSNS (11), /* dmul */
428 COSTS_N_INSNS (11), /* sdiv */
429 COSTS_N_INSNS (11), /* ddiv */
0b11da67 430 32,
5f732aba
DE
431 4, /* l1 cache */
432 16, /* l2 cache */
0b11da67 433 1, /* streams */
8b897cfa
RS
434};
435
436/* Instruction costs on PPC405 processors. */
437static const
438struct processor_costs ppc405_cost = {
06a67bdd
RS
439 COSTS_N_INSNS (5), /* mulsi */
440 COSTS_N_INSNS (4), /* mulsi_const */
441 COSTS_N_INSNS (3), /* mulsi_const9 */
442 COSTS_N_INSNS (5), /* muldi */
443 COSTS_N_INSNS (35), /* divsi */
444 COSTS_N_INSNS (35), /* divdi */
445 COSTS_N_INSNS (11), /* fp */
446 COSTS_N_INSNS (11), /* dmul */
447 COSTS_N_INSNS (11), /* sdiv */
448 COSTS_N_INSNS (11), /* ddiv */
0b11da67 449 32,
5f732aba
DE
450 16, /* l1 cache */
451 128, /* l2 cache */
0b11da67 452 1, /* streams */
8b897cfa
RS
453};
454
455/* Instruction costs on PPC440 processors. */
456static const
457struct processor_costs ppc440_cost = {
06a67bdd
RS
458 COSTS_N_INSNS (3), /* mulsi */
459 COSTS_N_INSNS (2), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (3), /* muldi */
462 COSTS_N_INSNS (34), /* divsi */
463 COSTS_N_INSNS (34), /* divdi */
464 COSTS_N_INSNS (5), /* fp */
465 COSTS_N_INSNS (5), /* dmul */
466 COSTS_N_INSNS (19), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
0b11da67 468 32,
5f732aba
DE
469 32, /* l1 cache */
470 256, /* l2 cache */
0b11da67 471 1, /* streams */
8b897cfa
RS
472};
473
474/* Instruction costs on PPC601 processors. */
475static const
476struct processor_costs ppc601_cost = {
06a67bdd
RS
477 COSTS_N_INSNS (5), /* mulsi */
478 COSTS_N_INSNS (5), /* mulsi_const */
479 COSTS_N_INSNS (5), /* mulsi_const9 */
480 COSTS_N_INSNS (5), /* muldi */
481 COSTS_N_INSNS (36), /* divsi */
482 COSTS_N_INSNS (36), /* divdi */
483 COSTS_N_INSNS (4), /* fp */
484 COSTS_N_INSNS (5), /* dmul */
485 COSTS_N_INSNS (17), /* sdiv */
486 COSTS_N_INSNS (31), /* ddiv */
0b11da67 487 32,
5f732aba
DE
488 32, /* l1 cache */
489 256, /* l2 cache */
0b11da67 490 1, /* streams */
8b897cfa
RS
491};
492
493/* Instruction costs on PPC603 processors. */
494static const
495struct processor_costs ppc603_cost = {
06a67bdd
RS
496 COSTS_N_INSNS (5), /* mulsi */
497 COSTS_N_INSNS (3), /* mulsi_const */
498 COSTS_N_INSNS (2), /* mulsi_const9 */
499 COSTS_N_INSNS (5), /* muldi */
500 COSTS_N_INSNS (37), /* divsi */
501 COSTS_N_INSNS (37), /* divdi */
502 COSTS_N_INSNS (3), /* fp */
503 COSTS_N_INSNS (4), /* dmul */
504 COSTS_N_INSNS (18), /* sdiv */
505 COSTS_N_INSNS (33), /* ddiv */
0b11da67 506 32,
5f732aba
DE
507 8, /* l1 cache */
508 64, /* l2 cache */
0b11da67 509 1, /* streams */
8b897cfa
RS
510};
511
512/* Instruction costs on PPC604 processors. */
513static const
514struct processor_costs ppc604_cost = {
06a67bdd
RS
515 COSTS_N_INSNS (4), /* mulsi */
516 COSTS_N_INSNS (4), /* mulsi_const */
517 COSTS_N_INSNS (4), /* mulsi_const9 */
518 COSTS_N_INSNS (4), /* muldi */
519 COSTS_N_INSNS (20), /* divsi */
520 COSTS_N_INSNS (20), /* divdi */
521 COSTS_N_INSNS (3), /* fp */
522 COSTS_N_INSNS (3), /* dmul */
523 COSTS_N_INSNS (18), /* sdiv */
524 COSTS_N_INSNS (32), /* ddiv */
0b11da67 525 32,
5f732aba
DE
526 16, /* l1 cache */
527 512, /* l2 cache */
0b11da67 528 1, /* streams */
8b897cfa
RS
529};
530
531/* Instruction costs on PPC604e processors. */
532static const
533struct processor_costs ppc604e_cost = {
06a67bdd
RS
534 COSTS_N_INSNS (2), /* mulsi */
535 COSTS_N_INSNS (2), /* mulsi_const */
536 COSTS_N_INSNS (2), /* mulsi_const9 */
537 COSTS_N_INSNS (2), /* muldi */
538 COSTS_N_INSNS (20), /* divsi */
539 COSTS_N_INSNS (20), /* divdi */
540 COSTS_N_INSNS (3), /* fp */
541 COSTS_N_INSNS (3), /* dmul */
542 COSTS_N_INSNS (18), /* sdiv */
543 COSTS_N_INSNS (32), /* ddiv */
0b11da67 544 32,
5f732aba
DE
545 32, /* l1 cache */
546 1024, /* l2 cache */
0b11da67 547 1, /* streams */
8b897cfa
RS
548};
549
f0517163 550/* Instruction costs on PPC620 processors. */
8b897cfa
RS
551static const
552struct processor_costs ppc620_cost = {
06a67bdd
RS
553 COSTS_N_INSNS (5), /* mulsi */
554 COSTS_N_INSNS (4), /* mulsi_const */
555 COSTS_N_INSNS (3), /* mulsi_const9 */
556 COSTS_N_INSNS (7), /* muldi */
557 COSTS_N_INSNS (21), /* divsi */
558 COSTS_N_INSNS (37), /* divdi */
559 COSTS_N_INSNS (3), /* fp */
560 COSTS_N_INSNS (3), /* dmul */
561 COSTS_N_INSNS (18), /* sdiv */
562 COSTS_N_INSNS (32), /* ddiv */
0b11da67 563 128,
5f732aba
DE
564 32, /* l1 cache */
565 1024, /* l2 cache */
0b11da67 566 1, /* streams */
f0517163
RS
567};
568
569/* Instruction costs on PPC630 processors. */
570static const
571struct processor_costs ppc630_cost = {
06a67bdd
RS
572 COSTS_N_INSNS (5), /* mulsi */
573 COSTS_N_INSNS (4), /* mulsi_const */
574 COSTS_N_INSNS (3), /* mulsi_const9 */
575 COSTS_N_INSNS (7), /* muldi */
576 COSTS_N_INSNS (21), /* divsi */
577 COSTS_N_INSNS (37), /* divdi */
578 COSTS_N_INSNS (3), /* fp */
579 COSTS_N_INSNS (3), /* dmul */
580 COSTS_N_INSNS (17), /* sdiv */
581 COSTS_N_INSNS (21), /* ddiv */
0b11da67 582 128,
5f732aba
DE
583 64, /* l1 cache */
584 1024, /* l2 cache */
0b11da67 585 1, /* streams */
8b897cfa
RS
586};
587
d296e02e
AP
588/* Instruction costs on Cell processor. */
589/* COSTS_N_INSNS (1) ~ one add. */
590static const
591struct processor_costs ppccell_cost = {
592 COSTS_N_INSNS (9/2)+2, /* mulsi */
593 COSTS_N_INSNS (6/2), /* mulsi_const */
594 COSTS_N_INSNS (6/2), /* mulsi_const9 */
595 COSTS_N_INSNS (15/2)+2, /* muldi */
596 COSTS_N_INSNS (38/2), /* divsi */
597 COSTS_N_INSNS (70/2), /* divdi */
598 COSTS_N_INSNS (10/2), /* fp */
599 COSTS_N_INSNS (10/2), /* dmul */
600 COSTS_N_INSNS (74/2), /* sdiv */
601 COSTS_N_INSNS (74/2), /* ddiv */
0b11da67 602 128,
5f732aba
DE
603 32, /* l1 cache */
604 512, /* l2 cache */
605 6, /* streams */
d296e02e
AP
606};
607
8b897cfa
RS
608/* Instruction costs on PPC750 and PPC7400 processors. */
609static const
610struct processor_costs ppc750_cost = {
06a67bdd
RS
611 COSTS_N_INSNS (5), /* mulsi */
612 COSTS_N_INSNS (3), /* mulsi_const */
613 COSTS_N_INSNS (2), /* mulsi_const9 */
614 COSTS_N_INSNS (5), /* muldi */
615 COSTS_N_INSNS (17), /* divsi */
616 COSTS_N_INSNS (17), /* divdi */
617 COSTS_N_INSNS (3), /* fp */
618 COSTS_N_INSNS (3), /* dmul */
619 COSTS_N_INSNS (17), /* sdiv */
620 COSTS_N_INSNS (31), /* ddiv */
0b11da67 621 32,
5f732aba
DE
622 32, /* l1 cache */
623 512, /* l2 cache */
0b11da67 624 1, /* streams */
8b897cfa
RS
625};
626
627/* Instruction costs on PPC7450 processors. */
628static const
629struct processor_costs ppc7450_cost = {
06a67bdd
RS
630 COSTS_N_INSNS (4), /* mulsi */
631 COSTS_N_INSNS (3), /* mulsi_const */
632 COSTS_N_INSNS (3), /* mulsi_const9 */
633 COSTS_N_INSNS (4), /* muldi */
634 COSTS_N_INSNS (23), /* divsi */
635 COSTS_N_INSNS (23), /* divdi */
636 COSTS_N_INSNS (5), /* fp */
637 COSTS_N_INSNS (5), /* dmul */
638 COSTS_N_INSNS (21), /* sdiv */
639 COSTS_N_INSNS (35), /* ddiv */
0b11da67 640 32,
5f732aba
DE
641 32, /* l1 cache */
642 1024, /* l2 cache */
0b11da67 643 1, /* streams */
8b897cfa 644};
a3170dc6 645
8b897cfa
RS
646/* Instruction costs on PPC8540 processors. */
647static const
648struct processor_costs ppc8540_cost = {
06a67bdd
RS
649 COSTS_N_INSNS (4), /* mulsi */
650 COSTS_N_INSNS (4), /* mulsi_const */
651 COSTS_N_INSNS (4), /* mulsi_const9 */
652 COSTS_N_INSNS (4), /* muldi */
653 COSTS_N_INSNS (19), /* divsi */
654 COSTS_N_INSNS (19), /* divdi */
655 COSTS_N_INSNS (4), /* fp */
656 COSTS_N_INSNS (4), /* dmul */
657 COSTS_N_INSNS (29), /* sdiv */
658 COSTS_N_INSNS (29), /* ddiv */
0b11da67 659 32,
5f732aba
DE
660 32, /* l1 cache */
661 256, /* l2 cache */
0b11da67 662 1, /* prefetch streams /*/
8b897cfa
RS
663};
664
665/* Instruction costs on POWER4 and POWER5 processors. */
666static const
667struct processor_costs power4_cost = {
06a67bdd
RS
668 COSTS_N_INSNS (3), /* mulsi */
669 COSTS_N_INSNS (2), /* mulsi_const */
670 COSTS_N_INSNS (2), /* mulsi_const9 */
671 COSTS_N_INSNS (4), /* muldi */
672 COSTS_N_INSNS (18), /* divsi */
673 COSTS_N_INSNS (34), /* divdi */
674 COSTS_N_INSNS (3), /* fp */
675 COSTS_N_INSNS (3), /* dmul */
676 COSTS_N_INSNS (17), /* sdiv */
677 COSTS_N_INSNS (17), /* ddiv */
0b11da67 678 128,
5f732aba
DE
679 32, /* l1 cache */
680 1024, /* l2 cache */
0b11da67 681 8, /* prefetch streams /*/
8b897cfa
RS
682};
683
44cd321e
PS
684/* Instruction costs on POWER6 processors. */
685static const
686struct processor_costs power6_cost = {
687 COSTS_N_INSNS (8), /* mulsi */
688 COSTS_N_INSNS (8), /* mulsi_const */
689 COSTS_N_INSNS (8), /* mulsi_const9 */
690 COSTS_N_INSNS (8), /* muldi */
691 COSTS_N_INSNS (22), /* divsi */
692 COSTS_N_INSNS (28), /* divdi */
693 COSTS_N_INSNS (3), /* fp */
694 COSTS_N_INSNS (3), /* dmul */
695 COSTS_N_INSNS (13), /* sdiv */
696 COSTS_N_INSNS (16), /* ddiv */
0b11da67 697 128,
5f732aba
DE
698 64, /* l1 cache */
699 2048, /* l2 cache */
0b11da67 700 16, /* prefetch streams */
44cd321e
PS
701};
702
8b897cfa 703\f
a2369ed3 704static bool rs6000_function_ok_for_sibcall (tree, tree);
2ffa9a0c 705static const char *rs6000_invalid_within_doloop (rtx);
a2369ed3 706static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
707static void rs6000_emit_stack_tie (void);
708static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
709static rtx spe_synthesize_frame_save (rtx);
710static bool spe_func_has_64bit_regs_p (void);
b20a9cca 711static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 712 int, HOST_WIDE_INT);
a2369ed3
DJ
713static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
714static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
715static unsigned rs6000_hash_constant (rtx);
716static unsigned toc_hash_function (const void *);
717static int toc_hash_eq (const void *, const void *);
718static int constant_pool_expr_1 (rtx, int *, int *);
719static bool constant_pool_expr_p (rtx);
d04b6e6e 720static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
721static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
722static struct machine_function * rs6000_init_machine_status (void);
723static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 724static bool no_global_regs_above (int);
5add3202 725#ifdef HAVE_GAS_HIDDEN
a2369ed3 726static void rs6000_assemble_visibility (tree, int);
5add3202 727#endif
a2369ed3
DJ
728static int rs6000_ra_ever_killed (void);
729static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 730static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
77ccdfed
EC
731static bool rs6000_ms_bitfield_layout_p (tree);
732static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 733static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
608063c3 734static const char *rs6000_mangle_type (tree);
b86fe7b4 735extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 736static void rs6000_set_default_type_attributes (tree);
52ff33d0 737static bool rs6000_reg_live_or_pic_offset_p (int);
a2369ed3
DJ
738static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
739static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
740static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
741 tree);
a2369ed3 742static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 743static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 744static void rs6000_file_start (void);
7c262518 745#if TARGET_ELF
9b580a0b 746static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
747static void rs6000_elf_asm_out_constructor (rtx, int);
748static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 749static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 750static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
751static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
752 unsigned HOST_WIDE_INT);
a56d7372 753static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 754 ATTRIBUTE_UNUSED;
7c262518 755#endif
aacd3885 756static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
cbaaba19 757#if TARGET_XCOFF
0d5817b2 758static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 759static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 760static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 761static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 762static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 763static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 764 unsigned HOST_WIDE_INT);
d6b5193b
RS
765static void rs6000_xcoff_unique_section (tree, int);
766static section *rs6000_xcoff_select_rtx_section
767 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
768static const char * rs6000_xcoff_strip_name_encoding (const char *);
769static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
770static void rs6000_xcoff_file_start (void);
771static void rs6000_xcoff_file_end (void);
f1384257 772#endif
a2369ed3
DJ
773static int rs6000_variable_issue (FILE *, int, rtx, int);
774static bool rs6000_rtx_costs (rtx, int, int, int *);
775static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 776static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 777static bool is_microcoded_insn (rtx);
d296e02e 778static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
779static bool is_cracked_insn (rtx);
780static bool is_branch_slot_insn (rtx);
44cd321e 781static bool is_load_insn (rtx);
e3a0e200 782static rtx get_store_dest (rtx pat);
44cd321e
PS
783static bool is_store_insn (rtx);
784static bool set_to_load_agen (rtx,rtx);
982afe02 785static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
786static int rs6000_adjust_priority (rtx, int);
787static int rs6000_issue_rate (void);
b198261f 788static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
789static rtx get_next_active_insn (rtx, rtx);
790static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
791static bool insn_must_be_first_in_group (rtx);
792static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
793static bool is_costly_group (rtx *, rtx);
794static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
795static int redefine_groups (FILE *, int, rtx, rtx);
796static int pad_groups (FILE *, int, rtx, rtx);
797static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
798static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
799static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 800static int rs6000_use_sched_lookahead (void);
d296e02e 801static int rs6000_use_sched_lookahead_guard (rtx);
7ccf35ed 802static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
803static tree rs6000_builtin_mul_widen_even (tree);
804static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 805static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 806
58646b77 807static void def_builtin (int, const char *, tree, int);
5b900a4c 808static bool rs6000_vector_alignment_reachable (tree, bool);
a2369ed3
DJ
809static void rs6000_init_builtins (void);
810static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
811static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
812static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
813static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
814static void altivec_init_builtins (void);
815static void rs6000_common_init_builtins (void);
c15c90bb 816static void rs6000_init_libfuncs (void);
a2369ed3 817
b20a9cca
AM
818static void enable_mask_for_builtins (struct builtin_description *, int,
819 enum rs6000_builtins,
820 enum rs6000_builtins);
7c62e993 821static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
822static void spe_init_builtins (void);
823static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 824static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
825static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
826static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
827static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
828static rs6000_stack_t *rs6000_stack_info (void);
829static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
830
831static rtx altivec_expand_builtin (tree, rtx, bool *);
832static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
833static rtx altivec_expand_st_builtin (tree, rtx, bool *);
834static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
835static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 836static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 837 const char *, tree, rtx);
b4a62fa0 838static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 839static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
840static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
841static rtx altivec_expand_vec_set_builtin (tree);
842static rtx altivec_expand_vec_ext_builtin (tree, rtx);
843static int get_element_number (tree, tree);
78f5898b 844static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 845static void rs6000_parse_tls_size_option (void);
5da702b1 846static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
847static int first_altivec_reg_to_save (void);
848static unsigned int compute_vrsave_mask (void);
9390387d 849static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
850static void is_altivec_return_reg (rtx, void *);
851static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
852int easy_vector_constant (rtx, enum machine_mode);
58646b77 853static bool rs6000_is_opaque_type (tree);
a2369ed3 854static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 855static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 856static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 857static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
858static rtx rs6000_tls_get_addr (void);
859static rtx rs6000_got_sym (void);
9390387d 860static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
861static const char *rs6000_get_some_local_dynamic_name (void);
862static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 863static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 864static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 865 enum machine_mode, tree);
0b5383eb
DJ
866static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
867 HOST_WIDE_INT);
868static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
869 tree, HOST_WIDE_INT);
870static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
871 HOST_WIDE_INT,
872 rtx[], int *);
873static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
874 const_tree, HOST_WIDE_INT,
875 rtx[], int *);
876static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 877static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 878static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
879static void setup_incoming_varargs (CUMULATIVE_ARGS *,
880 enum machine_mode, tree,
881 int *, int);
8cd5a4e0 882static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 883 const_tree, bool);
78a52f11
RH
884static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
885 tree, bool);
4d3e6fae 886static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
efdba735
SH
887#if TARGET_MACHO
888static void macho_branch_islands (void);
efdba735
SH
889static int no_previous_def (tree function_name);
890static tree get_prev_label (tree function_name);
c4e18b1c 891static void rs6000_darwin_file_start (void);
efdba735
SH
892#endif
893
c35d187f 894static tree rs6000_build_builtin_va_list (void);
23a60a04 895static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 896static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 897static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 898static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 899static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 900 enum machine_mode);
94ff898d 901static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
902 enum machine_mode);
903static int get_vsel_insn (enum machine_mode);
904static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 905static tree rs6000_stack_protect_fail (void);
21213b4c
DP
906
907const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
908static enum machine_mode rs6000_eh_return_filter_mode (void);
909
17211ab5
GK
910/* Hash table stuff for keeping track of TOC entries. */
911
912struct toc_hash_struct GTY(())
913{
914 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
915 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
916 rtx key;
917 enum machine_mode key_mode;
918 int labelno;
919};
920
921static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
922\f
923/* Default register names. */
924char rs6000_reg_names[][8] =
925{
802a0058
MM
926 "0", "1", "2", "3", "4", "5", "6", "7",
927 "8", "9", "10", "11", "12", "13", "14", "15",
928 "16", "17", "18", "19", "20", "21", "22", "23",
929 "24", "25", "26", "27", "28", "29", "30", "31",
930 "0", "1", "2", "3", "4", "5", "6", "7",
931 "8", "9", "10", "11", "12", "13", "14", "15",
932 "16", "17", "18", "19", "20", "21", "22", "23",
933 "24", "25", "26", "27", "28", "29", "30", "31",
934 "mq", "lr", "ctr","ap",
935 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
936 "xer",
937 /* AltiVec registers. */
0cd5e3a1
AH
938 "0", "1", "2", "3", "4", "5", "6", "7",
939 "8", "9", "10", "11", "12", "13", "14", "15",
940 "16", "17", "18", "19", "20", "21", "22", "23",
941 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
942 "vrsave", "vscr",
943 /* SPE registers. */
7d5175e1
JJ
944 "spe_acc", "spefscr",
945 /* Soft frame pointer. */
946 "sfp"
c81bebd7
MM
947};
948
949#ifdef TARGET_REGNAMES
8b60264b 950static const char alt_reg_names[][8] =
c81bebd7 951{
802a0058
MM
952 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
953 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
954 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
955 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
956 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
957 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
958 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
959 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
960 "mq", "lr", "ctr", "ap",
961 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 962 "xer",
59a4c851 963 /* AltiVec registers. */
0ac081f6 964 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
965 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
966 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
967 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
968 "vrsave", "vscr",
969 /* SPE registers. */
7d5175e1
JJ
970 "spe_acc", "spefscr",
971 /* Soft frame pointer. */
972 "sfp"
c81bebd7
MM
973};
974#endif
9878760c 975\f
daf11973
MM
976#ifndef MASK_STRICT_ALIGN
977#define MASK_STRICT_ALIGN 0
978#endif
ffcfcb5f
AM
979#ifndef TARGET_PROFILE_KERNEL
980#define TARGET_PROFILE_KERNEL 0
981#endif
3961e8fe
RH
982
983/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
984#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
985\f
986/* Initialize the GCC target structure. */
91d231cb
JM
987#undef TARGET_ATTRIBUTE_TABLE
988#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
989#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
990#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 991
301d03af
RS
992#undef TARGET_ASM_ALIGNED_DI_OP
993#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
994
995/* Default unaligned ops are only provided for ELF. Find the ops needed
996 for non-ELF systems. */
997#ifndef OBJECT_FORMAT_ELF
cbaaba19 998#if TARGET_XCOFF
ae6c1efd 999/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1000 64-bit targets. */
1001#undef TARGET_ASM_UNALIGNED_HI_OP
1002#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1003#undef TARGET_ASM_UNALIGNED_SI_OP
1004#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1005#undef TARGET_ASM_UNALIGNED_DI_OP
1006#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1007#else
1008/* For Darwin. */
1009#undef TARGET_ASM_UNALIGNED_HI_OP
1010#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1011#undef TARGET_ASM_UNALIGNED_SI_OP
1012#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1013#undef TARGET_ASM_UNALIGNED_DI_OP
1014#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1015#undef TARGET_ASM_ALIGNED_DI_OP
1016#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1017#endif
1018#endif
1019
1020/* This hook deals with fixups for relocatable code and DI-mode objects
1021 in 64-bit code. */
1022#undef TARGET_ASM_INTEGER
1023#define TARGET_ASM_INTEGER rs6000_assemble_integer
1024
93638d7a
AM
1025#ifdef HAVE_GAS_HIDDEN
1026#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1027#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1028#endif
1029
c4501e62
JJ
1030#undef TARGET_HAVE_TLS
1031#define TARGET_HAVE_TLS HAVE_AS_TLS
1032
1033#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1034#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1035
08c148a8
NB
1036#undef TARGET_ASM_FUNCTION_PROLOGUE
1037#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1038#undef TARGET_ASM_FUNCTION_EPILOGUE
1039#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1040
b54cf83a
DE
1041#undef TARGET_SCHED_VARIABLE_ISSUE
1042#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1043
c237e94a
ZW
1044#undef TARGET_SCHED_ISSUE_RATE
1045#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1046#undef TARGET_SCHED_ADJUST_COST
1047#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1048#undef TARGET_SCHED_ADJUST_PRIORITY
1049#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1050#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1051#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1052#undef TARGET_SCHED_INIT
1053#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1054#undef TARGET_SCHED_FINISH
1055#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1056#undef TARGET_SCHED_REORDER
1057#define TARGET_SCHED_REORDER rs6000_sched_reorder
1058#undef TARGET_SCHED_REORDER2
1059#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1060
be12c2b0
VM
1061#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1062#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1063
d296e02e
AP
1064#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1065#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1066
7ccf35ed
DN
1067#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1068#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1069#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1070#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1071#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1072#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1073#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1074#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1075
5b900a4c
DN
1076#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1077#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1078
0ac081f6
AH
1079#undef TARGET_INIT_BUILTINS
1080#define TARGET_INIT_BUILTINS rs6000_init_builtins
1081
1082#undef TARGET_EXPAND_BUILTIN
1083#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1084
608063c3
JB
1085#undef TARGET_MANGLE_TYPE
1086#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1087
c15c90bb
ZW
1088#undef TARGET_INIT_LIBFUNCS
1089#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1090
f1384257 1091#if TARGET_MACHO
0e5dbd9b 1092#undef TARGET_BINDS_LOCAL_P
31920d83 1093#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1094#endif
0e5dbd9b 1095
77ccdfed
EC
1096#undef TARGET_MS_BITFIELD_LAYOUT_P
1097#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1098
3961e8fe
RH
1099#undef TARGET_ASM_OUTPUT_MI_THUNK
1100#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1101
3961e8fe 1102#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5b71a4e7 1103#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
00b960c7 1104
4977bab6
ZW
1105#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1106#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1107
2e3f0db6
DJ
1108#undef TARGET_INVALID_WITHIN_DOLOOP
1109#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1110
3c50106f
RH
1111#undef TARGET_RTX_COSTS
1112#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1113#undef TARGET_ADDRESS_COST
1114#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1115
c8e4f0e9 1116#undef TARGET_VECTOR_OPAQUE_P
58646b77 1117#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1118
96714395
AH
1119#undef TARGET_DWARF_REGISTER_SPAN
1120#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1121
37ea0b7e
JM
1122#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1123#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1124
c6e8c921
GK
1125/* On rs6000, function arguments are promoted, as are function return
1126 values. */
1127#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1128#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1129#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1130#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1131
c6e8c921
GK
1132#undef TARGET_RETURN_IN_MEMORY
1133#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1134
1135#undef TARGET_SETUP_INCOMING_VARARGS
1136#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1137
1138/* Always strict argument naming on rs6000. */
1139#undef TARGET_STRICT_ARGUMENT_NAMING
1140#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1141#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1142#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130
RH
1143#undef TARGET_SPLIT_COMPLEX_ARG
1144#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
fe984136
RH
1145#undef TARGET_MUST_PASS_IN_STACK
1146#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1147#undef TARGET_PASS_BY_REFERENCE
1148#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1149#undef TARGET_ARG_PARTIAL_BYTES
1150#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1151
c35d187f
RH
1152#undef TARGET_BUILD_BUILTIN_VA_LIST
1153#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1154
cd3ce9b4
JM
1155#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1156#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1157
93f90be6
FJ
1158#undef TARGET_EH_RETURN_FILTER_MODE
1159#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1160
00b79d54
BE
1161#undef TARGET_SCALAR_MODE_SUPPORTED_P
1162#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1163
f676971a
EC
1164#undef TARGET_VECTOR_MODE_SUPPORTED_P
1165#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1166
4d3e6fae
FJ
1167#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1168#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1169
78f5898b
AH
1170#undef TARGET_HANDLE_OPTION
1171#define TARGET_HANDLE_OPTION rs6000_handle_option
1172
1173#undef TARGET_DEFAULT_TARGET_FLAGS
1174#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1175 (TARGET_DEFAULT)
78f5898b 1176
3aebbe5f
JJ
1177#undef TARGET_STACK_PROTECT_FAIL
1178#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1179
445cf5eb
JM
1180/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1181 The PowerPC architecture requires only weak consistency among
1182 processors--that is, memory accesses between processors need not be
1183 sequentially consistent and memory accesses among processors can occur
1184 in any order. The ability to order memory accesses weakly provides
1185 opportunities for more efficient use of the system bus. Unless a
1186 dependency exists, the 604e allows read operations to precede store
1187 operations. */
1188#undef TARGET_RELAXED_ORDERING
1189#define TARGET_RELAXED_ORDERING true
1190
fdbe66f2
EB
1191#ifdef HAVE_AS_TLS
1192#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1193#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1194#endif
1195
aacd3885
RS
1196/* Use a 32-bit anchor range. This leads to sequences like:
1197
1198 addis tmp,anchor,high
1199 add dest,tmp,low
1200
1201 where tmp itself acts as an anchor, and can be shared between
1202 accesses to the same 64k page. */
1203#undef TARGET_MIN_ANCHOR_OFFSET
1204#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1205#undef TARGET_MAX_ANCHOR_OFFSET
1206#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1207#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1208#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1209
f6897b10 1210struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1211\f
0d1fbc8c
AH
1212
1213/* Value is 1 if hard register REGNO can hold a value of machine-mode
1214 MODE. */
1215static int
1216rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1217{
1218 /* The GPRs can hold any mode, but values bigger than one register
1219 cannot go past R31. */
1220 if (INT_REGNO_P (regno))
1221 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1222
a5a97921 1223 /* The float registers can only hold floating modes and DImode.
7393f7f8 1224 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1225 if (FP_REGNO_P (regno))
1226 return
ebb109ad 1227 (SCALAR_FLOAT_MODE_P (mode)
c092b045 1228 && (mode != TDmode || (regno % 2) == 0)
7393f7f8 1229 && mode != SDmode
0d1fbc8c
AH
1230 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1231 || (GET_MODE_CLASS (mode) == MODE_INT
1232 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1233
1234 /* The CR register can only hold CC modes. */
1235 if (CR_REGNO_P (regno))
1236 return GET_MODE_CLASS (mode) == MODE_CC;
1237
1238 if (XER_REGNO_P (regno))
1239 return mode == PSImode;
1240
1241 /* AltiVec only in AldyVec registers. */
1242 if (ALTIVEC_REGNO_P (regno))
1243 return ALTIVEC_VECTOR_MODE (mode);
1244
1245 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1246 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1247 return 1;
1248
1249 /* We cannot put TImode anywhere except general register and it must be
1250 able to fit within the register set. */
1251
1252 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1253}
1254
1255/* Initialize rs6000_hard_regno_mode_ok_p table. */
1256static void
1257rs6000_init_hard_regno_mode_ok (void)
1258{
1259 int r, m;
1260
1261 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1262 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1263 if (rs6000_hard_regno_mode_ok (r, m))
1264 rs6000_hard_regno_mode_ok_p[m][r] = true;
1265}
1266
e4cad568
GK
1267#if TARGET_MACHO
1268/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1269
1270static void
1271darwin_rs6000_override_options (void)
1272{
1273 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1274 off. */
1275 rs6000_altivec_abi = 1;
1276 TARGET_ALTIVEC_VRSAVE = 1;
1277 if (DEFAULT_ABI == ABI_DARWIN)
1278 {
1279 if (MACHO_DYNAMIC_NO_PIC_P)
1280 {
1281 if (flag_pic)
1282 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1283 flag_pic = 0;
1284 }
1285 else if (flag_pic == 1)
1286 {
1287 flag_pic = 2;
1288 }
1289 }
1290 if (TARGET_64BIT && ! TARGET_POWERPC64)
1291 {
1292 target_flags |= MASK_POWERPC64;
1293 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1294 }
1295 if (flag_mkernel)
1296 {
1297 rs6000_default_long_calls = 1;
1298 target_flags |= MASK_SOFT_FLOAT;
1299 }
1300
1301 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1302 Altivec. */
1303 if (!flag_mkernel && !flag_apple_kext
1304 && TARGET_64BIT
1305 && ! (target_flags_explicit & MASK_ALTIVEC))
1306 target_flags |= MASK_ALTIVEC;
1307
1308 /* Unless the user (not the configurer) has explicitly overridden
1309 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1310 G4 unless targetting the kernel. */
1311 if (!flag_mkernel
1312 && !flag_apple_kext
1313 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1314 && ! (target_flags_explicit & MASK_ALTIVEC)
1315 && ! rs6000_select[1].string)
1316 {
1317 target_flags |= MASK_ALTIVEC;
1318 }
1319}
1320#endif
1321
c1e55850
GK
1322/* If not otherwise specified by a target, make 'long double' equivalent to
1323 'double'. */
1324
1325#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1326#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1327#endif
1328
5248c961
RK
1329/* Override command line options. Mostly we process the processor
1330 type and sometimes adjust other TARGET_ options. */
1331
1332void
d779d0dc 1333rs6000_override_options (const char *default_cpu)
5248c961 1334{
c4d38ccb 1335 size_t i, j;
8e3f41e7 1336 struct rs6000_cpu_select *ptr;
66188a7e 1337 int set_masks;
5248c961 1338
66188a7e 1339 /* Simplifications for entries below. */
85638c0d 1340
66188a7e
GK
1341 enum {
1342 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1343 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1344 };
85638c0d 1345
66188a7e
GK
1346 /* This table occasionally claims that a processor does not support
1347 a particular feature even though it does, but the feature is slower
1348 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1349 complete description of the processor's support.
66188a7e
GK
1350
1351 Please keep this list in order, and don't forget to update the
1352 documentation in invoke.texi when adding a new processor or
1353 flag. */
5248c961
RK
1354 static struct ptt
1355 {
8b60264b
KG
1356 const char *const name; /* Canonical processor name. */
1357 const enum processor_type processor; /* Processor type enum value. */
1358 const int target_enable; /* Target flags to enable. */
8b60264b 1359 } const processor_target_table[]
66188a7e 1360 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1361 {"403", PROCESSOR_PPC403,
66188a7e 1362 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1363 {"405", PROCESSOR_PPC405,
716019c0
JM
1364 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1365 {"405fp", PROCESSOR_PPC405,
1366 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1367 {"440", PROCESSOR_PPC440,
716019c0
JM
1368 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1369 {"440fp", PROCESSOR_PPC440,
1370 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1371 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1372 {"601", PROCESSOR_PPC601,
66188a7e
GK
1373 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1374 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1375 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1376 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1377 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1378 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1379 {"620", PROCESSOR_PPC620,
1380 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1381 {"630", PROCESSOR_PPC630,
1382 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1383 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1384 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1385 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1386 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1387 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1388 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1389 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1390 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1391 /* 8548 has a dummy entry for now. */
a45bce6e 1392 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1393 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1394 {"970", PROCESSOR_POWER4,
66188a7e 1395 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1396 {"cell", PROCESSOR_CELL,
1397 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1398 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1399 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1400 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1401 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1402 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1403 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1404 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1405 {"power2", PROCESSOR_POWER,
1406 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1407 {"power3", PROCESSOR_PPC630,
1408 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1409 {"power4", PROCESSOR_POWER4,
fc091c8e 1410 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1411 {"power5", PROCESSOR_POWER5,
432218ba
DE
1412 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1413 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1414 {"power5+", PROCESSOR_POWER5,
1415 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1416 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1417 {"power6", PROCESSOR_POWER6,
e118597e 1418 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1419 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1420 {"power6x", PROCESSOR_POWER6,
1421 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1422 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1423 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1424 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1425 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1426 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1427 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1428 {"rios2", PROCESSOR_RIOS2,
1429 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1430 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1431 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1432 {"rs64", PROCESSOR_RS64A,
1433 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1434 };
5248c961 1435
ca7558fc 1436 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1437
66188a7e
GK
1438 /* Some OSs don't support saving the high part of 64-bit registers on
1439 context switch. Other OSs don't support saving Altivec registers.
1440 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1441 settings; if the user wants either, the user must explicitly specify
1442 them and we won't interfere with the user's specification. */
1443
1444 enum {
1445 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1446 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1447 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1448 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1449 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1450 };
0d1fbc8c
AH
1451
1452 rs6000_init_hard_regno_mode_ok ();
1453
c4ad648e 1454 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1455#ifdef OS_MISSING_POWERPC64
1456 if (OS_MISSING_POWERPC64)
1457 set_masks &= ~MASK_POWERPC64;
1458#endif
1459#ifdef OS_MISSING_ALTIVEC
1460 if (OS_MISSING_ALTIVEC)
1461 set_masks &= ~MASK_ALTIVEC;
1462#endif
1463
768875a8
AM
1464 /* Don't override by the processor default if given explicitly. */
1465 set_masks &= ~target_flags_explicit;
957211c3 1466
a4f6c312 1467 /* Identify the processor type. */
8e3f41e7 1468 rs6000_select[0].string = default_cpu;
3cb999d8 1469 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1470
b6a1cbae 1471 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1472 {
8e3f41e7
MM
1473 ptr = &rs6000_select[i];
1474 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1475 {
8e3f41e7
MM
1476 for (j = 0; j < ptt_size; j++)
1477 if (! strcmp (ptr->string, processor_target_table[j].name))
1478 {
1479 if (ptr->set_tune_p)
1480 rs6000_cpu = processor_target_table[j].processor;
1481
1482 if (ptr->set_arch_p)
1483 {
66188a7e
GK
1484 target_flags &= ~set_masks;
1485 target_flags |= (processor_target_table[j].target_enable
1486 & set_masks);
8e3f41e7
MM
1487 }
1488 break;
1489 }
1490
4406229e 1491 if (j == ptt_size)
8e3f41e7 1492 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1493 }
1494 }
8a61d227 1495
993f19a8 1496 if (TARGET_E500)
a3170dc6
AH
1497 rs6000_isel = 1;
1498
dff9f1b6
DE
1499 /* If we are optimizing big endian systems for space, use the load/store
1500 multiple and string instructions. */
ef792183 1501 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1502 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1503
a4f6c312
SS
1504 /* Don't allow -mmultiple or -mstring on little endian systems
1505 unless the cpu is a 750, because the hardware doesn't support the
1506 instructions used in little endian mode, and causes an alignment
1507 trap. The 750 does not cause an alignment trap (except when the
1508 target is unaligned). */
bef84347 1509
b21fb038 1510 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1511 {
1512 if (TARGET_MULTIPLE)
1513 {
1514 target_flags &= ~MASK_MULTIPLE;
b21fb038 1515 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1516 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1517 }
1518
1519 if (TARGET_STRING)
1520 {
1521 target_flags &= ~MASK_STRING;
b21fb038 1522 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1523 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1524 }
1525 }
3933e0e1 1526
38c1f2d7
MM
1527 /* Set debug flags */
1528 if (rs6000_debug_name)
1529 {
bfc79d3b 1530 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1531 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1532 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1533 rs6000_debug_stack = 1;
bfc79d3b 1534 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1535 rs6000_debug_arg = 1;
1536 else
c725bd79 1537 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1538 }
1539
57ac7be9
AM
1540 if (rs6000_traceback_name)
1541 {
1542 if (! strncmp (rs6000_traceback_name, "full", 4))
1543 rs6000_traceback = traceback_full;
1544 else if (! strncmp (rs6000_traceback_name, "part", 4))
1545 rs6000_traceback = traceback_part;
1546 else if (! strncmp (rs6000_traceback_name, "no", 2))
1547 rs6000_traceback = traceback_none;
1548 else
9e637a26 1549 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1550 rs6000_traceback_name);
1551 }
1552
78f5898b
AH
1553 if (!rs6000_explicit_options.long_double)
1554 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1555
602ea4d3 1556#ifndef POWERPC_LINUX
d3603e8c 1557 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1558 rs6000_ieeequad = 1;
1559#endif
1560
6d0ef01e
HP
1561 /* Set Altivec ABI as default for powerpc64 linux. */
1562 if (TARGET_ELF && TARGET_64BIT)
1563 {
1564 rs6000_altivec_abi = 1;
78f5898b 1565 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1566 }
1567
594a51fe
SS
1568 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1569 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1570 {
1571 rs6000_darwin64_abi = 1;
9c7956fd 1572#if TARGET_MACHO
6ac49599 1573 darwin_one_byte_bool = 1;
9c7956fd 1574#endif
d9168963
SS
1575 /* Default to natural alignment, for better performance. */
1576 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1577 }
1578
194c524a
DE
1579 /* Place FP constants in the constant pool instead of TOC
1580 if section anchors enabled. */
1581 if (flag_section_anchors)
1582 TARGET_NO_FP_IN_TOC = 1;
1583
c4501e62
JJ
1584 /* Handle -mtls-size option. */
1585 rs6000_parse_tls_size_option ();
1586
a7ae18e2
AH
1587#ifdef SUBTARGET_OVERRIDE_OPTIONS
1588 SUBTARGET_OVERRIDE_OPTIONS;
1589#endif
1590#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1591 SUBSUBTARGET_OVERRIDE_OPTIONS;
1592#endif
4d4cbc0e
AH
1593#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1594 SUB3TARGET_OVERRIDE_OPTIONS;
1595#endif
a7ae18e2 1596
5da702b1
AH
1597 if (TARGET_E500)
1598 {
1599 /* The e500 does not have string instructions, and we set
1600 MASK_STRING above when optimizing for size. */
1601 if ((target_flags & MASK_STRING) != 0)
1602 target_flags = target_flags & ~MASK_STRING;
1603 }
1604 else if (rs6000_select[1].string != NULL)
1605 {
1606 /* For the powerpc-eabispe configuration, we set all these by
1607 default, so let's unset them if we manually set another
1608 CPU that is not the E500. */
78f5898b 1609 if (!rs6000_explicit_options.abi)
5da702b1 1610 rs6000_spe_abi = 0;
78f5898b 1611 if (!rs6000_explicit_options.spe)
5da702b1 1612 rs6000_spe = 0;
78f5898b 1613 if (!rs6000_explicit_options.float_gprs)
5da702b1 1614 rs6000_float_gprs = 0;
78f5898b 1615 if (!rs6000_explicit_options.isel)
5da702b1
AH
1616 rs6000_isel = 0;
1617 }
b5044283 1618
eca0d5e8
JM
1619 /* Detect invalid option combinations with E500. */
1620 CHECK_E500_OPTIONS;
1621
ec507f2d 1622 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1623 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1624 && rs6000_cpu != PROCESSOR_POWER6
1625 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1626 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1627 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1628 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1629 || rs6000_cpu == PROCESSOR_POWER5
1630 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1631
ec507f2d
DE
1632 rs6000_sched_restricted_insns_priority
1633 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1634
569fa502 1635 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1636 rs6000_sched_costly_dep
1637 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1638
569fa502
DN
1639 if (rs6000_sched_costly_dep_str)
1640 {
f676971a 1641 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1642 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1643 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1644 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1645 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1646 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1647 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1648 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1649 else
c4ad648e 1650 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1651 }
1652
1653 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1654 rs6000_sched_insert_nops
1655 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1656
cbe26ab8
DN
1657 if (rs6000_sched_insert_nops_str)
1658 {
1659 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1660 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1661 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1662 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1663 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1664 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1665 else
c4ad648e 1666 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1667 }
1668
c81bebd7 1669#ifdef TARGET_REGNAMES
a4f6c312
SS
1670 /* If the user desires alternate register names, copy in the
1671 alternate names now. */
c81bebd7 1672 if (TARGET_REGNAMES)
4e135bdd 1673 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1674#endif
1675
df01da37 1676 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1677 If -maix-struct-return or -msvr4-struct-return was explicitly
1678 used, don't override with the ABI default. */
df01da37
DE
1679 if (!rs6000_explicit_options.aix_struct_ret)
1680 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1681
602ea4d3 1682 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1683 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1684
f676971a 1685 if (TARGET_TOC)
9ebbca7d 1686 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1687
301d03af
RS
1688 /* We can only guarantee the availability of DI pseudo-ops when
1689 assembling for 64-bit targets. */
ae6c1efd 1690 if (!TARGET_64BIT)
301d03af
RS
1691 {
1692 targetm.asm_out.aligned_op.di = NULL;
1693 targetm.asm_out.unaligned_op.di = NULL;
1694 }
1695
1494c534
DE
1696 /* Set branch target alignment, if not optimizing for size. */
1697 if (!optimize_size)
1698 {
d296e02e
AP
1699 /* Cell wants to be aligned 8byte for dual issue. */
1700 if (rs6000_cpu == PROCESSOR_CELL)
1701 {
1702 if (align_functions <= 0)
1703 align_functions = 8;
1704 if (align_jumps <= 0)
1705 align_jumps = 8;
1706 if (align_loops <= 0)
1707 align_loops = 8;
1708 }
44cd321e 1709 if (rs6000_align_branch_targets)
1494c534
DE
1710 {
1711 if (align_functions <= 0)
1712 align_functions = 16;
1713 if (align_jumps <= 0)
1714 align_jumps = 16;
1715 if (align_loops <= 0)
1716 align_loops = 16;
1717 }
1718 if (align_jumps_max_skip <= 0)
1719 align_jumps_max_skip = 15;
1720 if (align_loops_max_skip <= 0)
1721 align_loops_max_skip = 15;
1722 }
2792d578 1723
71f123ca
FS
1724 /* Arrange to save and restore machine status around nested functions. */
1725 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1726
1727 /* We should always be splitting complex arguments, but we can't break
1728 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1729 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1730 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1731
1732 /* Initialize rs6000_cost with the appropriate target costs. */
1733 if (optimize_size)
1734 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1735 else
1736 switch (rs6000_cpu)
1737 {
1738 case PROCESSOR_RIOS1:
1739 rs6000_cost = &rios1_cost;
1740 break;
1741
1742 case PROCESSOR_RIOS2:
1743 rs6000_cost = &rios2_cost;
1744 break;
1745
1746 case PROCESSOR_RS64A:
1747 rs6000_cost = &rs64a_cost;
1748 break;
1749
1750 case PROCESSOR_MPCCORE:
1751 rs6000_cost = &mpccore_cost;
1752 break;
1753
1754 case PROCESSOR_PPC403:
1755 rs6000_cost = &ppc403_cost;
1756 break;
1757
1758 case PROCESSOR_PPC405:
1759 rs6000_cost = &ppc405_cost;
1760 break;
1761
1762 case PROCESSOR_PPC440:
1763 rs6000_cost = &ppc440_cost;
1764 break;
1765
1766 case PROCESSOR_PPC601:
1767 rs6000_cost = &ppc601_cost;
1768 break;
1769
1770 case PROCESSOR_PPC603:
1771 rs6000_cost = &ppc603_cost;
1772 break;
1773
1774 case PROCESSOR_PPC604:
1775 rs6000_cost = &ppc604_cost;
1776 break;
1777
1778 case PROCESSOR_PPC604e:
1779 rs6000_cost = &ppc604e_cost;
1780 break;
1781
1782 case PROCESSOR_PPC620:
8b897cfa
RS
1783 rs6000_cost = &ppc620_cost;
1784 break;
1785
f0517163
RS
1786 case PROCESSOR_PPC630:
1787 rs6000_cost = &ppc630_cost;
1788 break;
1789
982afe02 1790 case PROCESSOR_CELL:
d296e02e
AP
1791 rs6000_cost = &ppccell_cost;
1792 break;
1793
8b897cfa
RS
1794 case PROCESSOR_PPC750:
1795 case PROCESSOR_PPC7400:
1796 rs6000_cost = &ppc750_cost;
1797 break;
1798
1799 case PROCESSOR_PPC7450:
1800 rs6000_cost = &ppc7450_cost;
1801 break;
1802
1803 case PROCESSOR_PPC8540:
1804 rs6000_cost = &ppc8540_cost;
1805 break;
1806
1807 case PROCESSOR_POWER4:
1808 case PROCESSOR_POWER5:
1809 rs6000_cost = &power4_cost;
1810 break;
1811
44cd321e
PS
1812 case PROCESSOR_POWER6:
1813 rs6000_cost = &power6_cost;
1814 break;
1815
8b897cfa 1816 default:
37409796 1817 gcc_unreachable ();
8b897cfa 1818 }
0b11da67
DE
1819
1820 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1821 set_param_value ("simultaneous-prefetches",
1822 rs6000_cost->simultaneous_prefetches);
1823 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1824 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1825 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1826 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1827 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1828 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
5248c961 1829}
5accd822 1830
7ccf35ed
DN
1831/* Implement targetm.vectorize.builtin_mask_for_load. */
1832static tree
1833rs6000_builtin_mask_for_load (void)
1834{
1835 if (TARGET_ALTIVEC)
1836 return altivec_builtin_mask_for_load;
1837 else
1838 return 0;
1839}
1840
f57d17f1
TM
1841/* Implement targetm.vectorize.builtin_conversion. */
1842static tree
1843rs6000_builtin_conversion (enum tree_code code, tree type)
1844{
1845 if (!TARGET_ALTIVEC)
1846 return NULL_TREE;
982afe02 1847
f57d17f1
TM
1848 switch (code)
1849 {
1850 case FLOAT_EXPR:
1851 switch (TYPE_MODE (type))
1852 {
1853 case V4SImode:
982afe02 1854 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1855 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1856 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1857 default:
1858 return NULL_TREE;
1859 }
1860 default:
1861 return NULL_TREE;
1862 }
1863}
1864
89d67cca
DN
1865/* Implement targetm.vectorize.builtin_mul_widen_even. */
1866static tree
1867rs6000_builtin_mul_widen_even (tree type)
1868{
1869 if (!TARGET_ALTIVEC)
1870 return NULL_TREE;
1871
1872 switch (TYPE_MODE (type))
1873 {
1874 case V8HImode:
982afe02 1875 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1876 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1877 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1878
1879 case V16QImode:
1880 return TYPE_UNSIGNED (type) ?
1881 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1882 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1883 default:
1884 return NULL_TREE;
1885 }
1886}
1887
1888/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1889static tree
1890rs6000_builtin_mul_widen_odd (tree type)
1891{
1892 if (!TARGET_ALTIVEC)
1893 return NULL_TREE;
1894
1895 switch (TYPE_MODE (type))
1896 {
1897 case V8HImode:
1898 return TYPE_UNSIGNED (type) ?
1899 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1900 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1901
1902 case V16QImode:
1903 return TYPE_UNSIGNED (type) ?
1904 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1905 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1906 default:
1907 return NULL_TREE;
1908 }
1909}
1910
5b900a4c
DN
1911
1912/* Return true iff, data reference of TYPE can reach vector alignment (16)
1913 after applying N number of iterations. This routine does not determine
1914 how may iterations are required to reach desired alignment. */
1915
1916static bool
1917rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
1918{
1919 if (is_packed)
1920 return false;
1921
1922 if (TARGET_32BIT)
1923 {
1924 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1925 return true;
1926
1927 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1928 return true;
1929
1930 return false;
1931 }
1932 else
1933 {
1934 if (TARGET_MACHO)
1935 return false;
1936
1937 /* Assuming that all other types are naturally aligned. CHECKME! */
1938 return true;
1939 }
1940}
1941
5da702b1
AH
1942/* Handle generic options of the form -mfoo=yes/no.
1943 NAME is the option name.
1944 VALUE is the option value.
1945 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1946 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1947static void
5da702b1 1948rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1949{
5da702b1 1950 if (value == 0)
993f19a8 1951 return;
5da702b1
AH
1952 else if (!strcmp (value, "yes"))
1953 *flag = 1;
1954 else if (!strcmp (value, "no"))
1955 *flag = 0;
08b57fb3 1956 else
5da702b1 1957 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1958}
1959
c4501e62
JJ
1960/* Validate and record the size specified with the -mtls-size option. */
1961
1962static void
863d938c 1963rs6000_parse_tls_size_option (void)
c4501e62
JJ
1964{
1965 if (rs6000_tls_size_string == 0)
1966 return;
1967 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1968 rs6000_tls_size = 16;
1969 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1970 rs6000_tls_size = 32;
1971 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1972 rs6000_tls_size = 64;
1973 else
9e637a26 1974 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1975}
1976
5accd822 1977void
a2369ed3 1978optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1979{
2e3f0db6
DJ
1980 if (DEFAULT_ABI == ABI_DARWIN)
1981 /* The Darwin libraries never set errno, so we might as well
1982 avoid calling them when that's the only reason we would. */
1983 flag_errno_math = 0;
59d6560b
DE
1984
1985 /* Double growth factor to counter reduced min jump length. */
1986 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1987
1988 /* Enable section anchors by default.
1989 Skip section anchors for Objective C and Objective C++
1990 until front-ends fixed. */
23f99493 1991 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 1992 flag_section_anchors = 1;
5accd822 1993}
78f5898b
AH
1994
1995/* Implement TARGET_HANDLE_OPTION. */
1996
1997static bool
1998rs6000_handle_option (size_t code, const char *arg, int value)
1999{
2000 switch (code)
2001 {
2002 case OPT_mno_power:
2003 target_flags &= ~(MASK_POWER | MASK_POWER2
2004 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2005 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2006 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2007 break;
2008 case OPT_mno_powerpc:
2009 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2010 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2011 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2012 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2013 break;
2014 case OPT_mfull_toc:
d2894ab5
DE
2015 target_flags &= ~MASK_MINIMAL_TOC;
2016 TARGET_NO_FP_IN_TOC = 0;
2017 TARGET_NO_SUM_IN_TOC = 0;
2018 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2019#ifdef TARGET_USES_SYSV4_OPT
2020 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2021 just the same as -mminimal-toc. */
2022 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2023 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2024#endif
2025 break;
2026
2027#ifdef TARGET_USES_SYSV4_OPT
2028 case OPT_mtoc:
2029 /* Make -mtoc behave like -mminimal-toc. */
2030 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2031 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2032 break;
2033#endif
2034
2035#ifdef TARGET_USES_AIX64_OPT
2036 case OPT_maix64:
2037#else
2038 case OPT_m64:
2039#endif
2c9c9afd
AM
2040 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2041 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2042 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2043 break;
2044
2045#ifdef TARGET_USES_AIX64_OPT
2046 case OPT_maix32:
2047#else
2048 case OPT_m32:
2049#endif
2050 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2051 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2052 break;
2053
2054 case OPT_minsert_sched_nops_:
2055 rs6000_sched_insert_nops_str = arg;
2056 break;
2057
2058 case OPT_mminimal_toc:
2059 if (value == 1)
2060 {
d2894ab5
DE
2061 TARGET_NO_FP_IN_TOC = 0;
2062 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2063 }
2064 break;
2065
2066 case OPT_mpower:
2067 if (value == 1)
c2dba4ab
AH
2068 {
2069 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2070 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2071 }
78f5898b
AH
2072 break;
2073
2074 case OPT_mpower2:
2075 if (value == 1)
c2dba4ab
AH
2076 {
2077 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2078 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2079 }
78f5898b
AH
2080 break;
2081
2082 case OPT_mpowerpc_gpopt:
2083 case OPT_mpowerpc_gfxopt:
2084 if (value == 1)
c2dba4ab
AH
2085 {
2086 target_flags |= MASK_POWERPC;
2087 target_flags_explicit |= MASK_POWERPC;
2088 }
78f5898b
AH
2089 break;
2090
df01da37
DE
2091 case OPT_maix_struct_return:
2092 case OPT_msvr4_struct_return:
2093 rs6000_explicit_options.aix_struct_ret = true;
2094 break;
2095
78f5898b
AH
2096 case OPT_mvrsave_:
2097 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2098 break;
78f5898b
AH
2099
2100 case OPT_misel_:
2101 rs6000_explicit_options.isel = true;
2102 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2103 break;
2104
2105 case OPT_mspe_:
2106 rs6000_explicit_options.spe = true;
2107 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2108 break;
2109
2110 case OPT_mdebug_:
2111 rs6000_debug_name = arg;
2112 break;
2113
2114#ifdef TARGET_USES_SYSV4_OPT
2115 case OPT_mcall_:
2116 rs6000_abi_name = arg;
2117 break;
2118
2119 case OPT_msdata_:
2120 rs6000_sdata_name = arg;
2121 break;
2122
2123 case OPT_mtls_size_:
2124 rs6000_tls_size_string = arg;
2125 break;
2126
2127 case OPT_mrelocatable:
2128 if (value == 1)
c2dba4ab 2129 {
e0bf274f
AM
2130 target_flags |= MASK_MINIMAL_TOC;
2131 target_flags_explicit |= MASK_MINIMAL_TOC;
2132 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2133 }
78f5898b
AH
2134 break;
2135
2136 case OPT_mrelocatable_lib:
2137 if (value == 1)
c2dba4ab 2138 {
e0bf274f
AM
2139 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2140 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2141 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2142 }
78f5898b 2143 else
c2dba4ab
AH
2144 {
2145 target_flags &= ~MASK_RELOCATABLE;
2146 target_flags_explicit |= MASK_RELOCATABLE;
2147 }
78f5898b
AH
2148 break;
2149#endif
2150
2151 case OPT_mabi_:
78f5898b
AH
2152 if (!strcmp (arg, "altivec"))
2153 {
d3603e8c 2154 rs6000_explicit_options.abi = true;
78f5898b
AH
2155 rs6000_altivec_abi = 1;
2156 rs6000_spe_abi = 0;
2157 }
2158 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2159 {
2160 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2161 the default for rs6000_spe_abi to be chosen later. */
2162 rs6000_altivec_abi = 0;
2163 }
78f5898b
AH
2164 else if (! strcmp (arg, "spe"))
2165 {
d3603e8c 2166 rs6000_explicit_options.abi = true;
78f5898b
AH
2167 rs6000_spe_abi = 1;
2168 rs6000_altivec_abi = 0;
2169 if (!TARGET_SPE_ABI)
2170 error ("not configured for ABI: '%s'", arg);
2171 }
2172 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2173 {
2174 rs6000_explicit_options.abi = true;
2175 rs6000_spe_abi = 0;
2176 }
78f5898b
AH
2177
2178 /* These are here for testing during development only, do not
2179 document in the manual please. */
2180 else if (! strcmp (arg, "d64"))
2181 {
2182 rs6000_darwin64_abi = 1;
2183 warning (0, "Using darwin64 ABI");
2184 }
2185 else if (! strcmp (arg, "d32"))
2186 {
2187 rs6000_darwin64_abi = 0;
2188 warning (0, "Using old darwin ABI");
2189 }
2190
602ea4d3
JJ
2191 else if (! strcmp (arg, "ibmlongdouble"))
2192 {
d3603e8c 2193 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2194 rs6000_ieeequad = 0;
2195 warning (0, "Using IBM extended precision long double");
2196 }
2197 else if (! strcmp (arg, "ieeelongdouble"))
2198 {
d3603e8c 2199 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2200 rs6000_ieeequad = 1;
2201 warning (0, "Using IEEE extended precision long double");
2202 }
2203
78f5898b
AH
2204 else
2205 {
2206 error ("unknown ABI specified: '%s'", arg);
2207 return false;
2208 }
2209 break;
2210
2211 case OPT_mcpu_:
2212 rs6000_select[1].string = arg;
2213 break;
2214
2215 case OPT_mtune_:
2216 rs6000_select[2].string = arg;
2217 break;
2218
2219 case OPT_mtraceback_:
2220 rs6000_traceback_name = arg;
2221 break;
2222
2223 case OPT_mfloat_gprs_:
2224 rs6000_explicit_options.float_gprs = true;
2225 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2226 rs6000_float_gprs = 1;
2227 else if (! strcmp (arg, "double"))
2228 rs6000_float_gprs = 2;
2229 else if (! strcmp (arg, "no"))
2230 rs6000_float_gprs = 0;
2231 else
2232 {
2233 error ("invalid option for -mfloat-gprs: '%s'", arg);
2234 return false;
2235 }
2236 break;
2237
2238 case OPT_mlong_double_:
2239 rs6000_explicit_options.long_double = true;
2240 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2241 if (value != 64 && value != 128)
2242 {
2243 error ("Unknown switch -mlong-double-%s", arg);
2244 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2245 return false;
2246 }
2247 else
2248 rs6000_long_double_type_size = value;
2249 break;
2250
2251 case OPT_msched_costly_dep_:
2252 rs6000_sched_costly_dep_str = arg;
2253 break;
2254
2255 case OPT_malign_:
2256 rs6000_explicit_options.alignment = true;
2257 if (! strcmp (arg, "power"))
2258 {
2259 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2260 some C library functions, so warn about it. The flag may be
2261 useful for performance studies from time to time though, so
2262 don't disable it entirely. */
2263 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2264 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2265 " it is incompatible with the installed C and C++ libraries");
2266 rs6000_alignment_flags = MASK_ALIGN_POWER;
2267 }
2268 else if (! strcmp (arg, "natural"))
2269 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2270 else
2271 {
2272 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2273 return false;
2274 }
2275 break;
2276 }
2277 return true;
2278}
3cfa4909
MM
2279\f
2280/* Do anything needed at the start of the asm file. */
2281
1bc7c5b6 2282static void
863d938c 2283rs6000_file_start (void)
3cfa4909 2284{
c4d38ccb 2285 size_t i;
3cfa4909 2286 char buffer[80];
d330fd93 2287 const char *start = buffer;
3cfa4909 2288 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2289 const char *default_cpu = TARGET_CPU_DEFAULT;
2290 FILE *file = asm_out_file;
2291
2292 default_file_start ();
2293
2294#ifdef TARGET_BI_ARCH
2295 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2296 default_cpu = 0;
2297#endif
3cfa4909
MM
2298
2299 if (flag_verbose_asm)
2300 {
2301 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2302 rs6000_select[0].string = default_cpu;
2303
b6a1cbae 2304 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2305 {
2306 ptr = &rs6000_select[i];
2307 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2308 {
2309 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2310 start = "";
2311 }
2312 }
2313
9c6b4ed9 2314 if (PPC405_ERRATUM77)
b0bfee6e 2315 {
9c6b4ed9 2316 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2317 start = "";
2318 }
b0bfee6e 2319
b91da81f 2320#ifdef USING_ELFOS_H
3cfa4909
MM
2321 switch (rs6000_sdata)
2322 {
2323 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2324 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2325 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2326 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2327 }
2328
2329 if (rs6000_sdata && g_switch_value)
2330 {
307b599c
MK
2331 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2332 g_switch_value);
3cfa4909
MM
2333 start = "";
2334 }
2335#endif
2336
2337 if (*start == '\0')
949ea356 2338 putc ('\n', file);
3cfa4909 2339 }
b723e82f 2340
e51917ae
JM
2341#ifdef HAVE_AS_GNU_ATTRIBUTE
2342 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2343 {
2344 fprintf (file, "\t.gnu_attribute 4, %d\n",
2345 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2346 fprintf (file, "\t.gnu_attribute 8, %d\n",
2347 (TARGET_ALTIVEC_ABI ? 2
2348 : TARGET_SPE_ABI ? 3
2349 : 1));
2350 }
e51917ae
JM
2351#endif
2352
b723e82f
JJ
2353 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2354 {
d6b5193b
RS
2355 switch_to_section (toc_section);
2356 switch_to_section (text_section);
b723e82f 2357 }
3cfa4909 2358}
c4e18b1c 2359
5248c961 2360\f
a0ab749a 2361/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2362
2363int
863d938c 2364direct_return (void)
9878760c 2365{
4697a36c
MM
2366 if (reload_completed)
2367 {
2368 rs6000_stack_t *info = rs6000_stack_info ();
2369
2370 if (info->first_gp_reg_save == 32
2371 && info->first_fp_reg_save == 64
00b960c7 2372 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2373 && ! info->lr_save_p
2374 && ! info->cr_save_p
00b960c7 2375 && info->vrsave_mask == 0
c81fc13e 2376 && ! info->push_p)
4697a36c
MM
2377 return 1;
2378 }
2379
2380 return 0;
9878760c
RK
2381}
2382
4e74d8ec
MM
2383/* Return the number of instructions it takes to form a constant in an
2384 integer register. */
2385
48d72335 2386int
a2369ed3 2387num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2388{
2389 /* signed constant loadable with {cal|addi} */
547b216d 2390 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2391 return 1;
2392
4e74d8ec 2393 /* constant loadable with {cau|addis} */
547b216d
DE
2394 else if ((value & 0xffff) == 0
2395 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2396 return 1;
2397
5f59ecb7 2398#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2399 else if (TARGET_POWERPC64)
4e74d8ec 2400 {
a65c591c
DE
2401 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2402 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2403
a65c591c 2404 if (high == 0 || high == -1)
4e74d8ec
MM
2405 return 2;
2406
a65c591c 2407 high >>= 1;
4e74d8ec 2408
a65c591c 2409 if (low == 0)
4e74d8ec 2410 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2411 else
2412 return (num_insns_constant_wide (high)
e396202a 2413 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2414 }
2415#endif
2416
2417 else
2418 return 2;
2419}
2420
2421int
a2369ed3 2422num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2423{
37409796 2424 HOST_WIDE_INT low, high;
bb8df8a6 2425
37409796 2426 switch (GET_CODE (op))
0d30d435 2427 {
37409796 2428 case CONST_INT:
0d30d435 2429#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2430 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2431 && mask64_operand (op, mode))
c4ad648e 2432 return 2;
0d30d435
DE
2433 else
2434#endif
2435 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2436
37409796
NS
2437 case CONST_DOUBLE:
2438 if (mode == SFmode)
2439 {
2440 long l;
2441 REAL_VALUE_TYPE rv;
bb8df8a6 2442
37409796
NS
2443 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2444 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2445 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2446 }
a260abc9 2447
37409796
NS
2448 if (mode == VOIDmode || mode == DImode)
2449 {
2450 high = CONST_DOUBLE_HIGH (op);
2451 low = CONST_DOUBLE_LOW (op);
2452 }
2453 else
2454 {
2455 long l[2];
2456 REAL_VALUE_TYPE rv;
bb8df8a6 2457
37409796 2458 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2459 if (DECIMAL_FLOAT_MODE_P (mode))
2460 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2461 else
2462 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2463 high = l[WORDS_BIG_ENDIAN == 0];
2464 low = l[WORDS_BIG_ENDIAN != 0];
2465 }
47ad8c61 2466
37409796
NS
2467 if (TARGET_32BIT)
2468 return (num_insns_constant_wide (low)
2469 + num_insns_constant_wide (high));
2470 else
2471 {
2472 if ((high == 0 && low >= 0)
2473 || (high == -1 && low < 0))
2474 return num_insns_constant_wide (low);
bb8df8a6 2475
1990cd79 2476 else if (mask64_operand (op, mode))
37409796 2477 return 2;
bb8df8a6 2478
37409796
NS
2479 else if (low == 0)
2480 return num_insns_constant_wide (high) + 1;
bb8df8a6 2481
37409796
NS
2482 else
2483 return (num_insns_constant_wide (high)
2484 + num_insns_constant_wide (low) + 1);
2485 }
bb8df8a6 2486
37409796
NS
2487 default:
2488 gcc_unreachable ();
4e74d8ec 2489 }
4e74d8ec
MM
2490}
2491
0972012c
RS
2492/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2493 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2494 corresponding element of the vector, but for V4SFmode and V2SFmode,
2495 the corresponding "float" is interpreted as an SImode integer. */
2496
2497static HOST_WIDE_INT
2498const_vector_elt_as_int (rtx op, unsigned int elt)
2499{
2500 rtx tmp = CONST_VECTOR_ELT (op, elt);
2501 if (GET_MODE (op) == V4SFmode
2502 || GET_MODE (op) == V2SFmode)
2503 tmp = gen_lowpart (SImode, tmp);
2504 return INTVAL (tmp);
2505}
452a7d36 2506
77ccdfed 2507/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2508 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2509 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2510 all items are set to the same value and contain COPIES replicas of the
2511 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2512 operand and the others are set to the value of the operand's msb. */
2513
2514static bool
2515vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2516{
66180ff3
PB
2517 enum machine_mode mode = GET_MODE (op);
2518 enum machine_mode inner = GET_MODE_INNER (mode);
2519
2520 unsigned i;
2521 unsigned nunits = GET_MODE_NUNITS (mode);
2522 unsigned bitsize = GET_MODE_BITSIZE (inner);
2523 unsigned mask = GET_MODE_MASK (inner);
2524
0972012c 2525 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2526 HOST_WIDE_INT splat_val = val;
2527 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2528
2529 /* Construct the value to be splatted, if possible. If not, return 0. */
2530 for (i = 2; i <= copies; i *= 2)
452a7d36 2531 {
66180ff3
PB
2532 HOST_WIDE_INT small_val;
2533 bitsize /= 2;
2534 small_val = splat_val >> bitsize;
2535 mask >>= bitsize;
2536 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2537 return false;
2538 splat_val = small_val;
2539 }
c4ad648e 2540
66180ff3
PB
2541 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2542 if (EASY_VECTOR_15 (splat_val))
2543 ;
2544
2545 /* Also check if we can splat, and then add the result to itself. Do so if
2546 the value is positive, of if the splat instruction is using OP's mode;
2547 for splat_val < 0, the splat and the add should use the same mode. */
2548 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2549 && (splat_val >= 0 || (step == 1 && copies == 1)))
2550 ;
2551
2552 else
2553 return false;
2554
2555 /* Check if VAL is present in every STEP-th element, and the
2556 other elements are filled with its most significant bit. */
2557 for (i = 0; i < nunits - 1; ++i)
2558 {
2559 HOST_WIDE_INT desired_val;
2560 if (((i + 1) & (step - 1)) == 0)
2561 desired_val = val;
2562 else
2563 desired_val = msb_val;
2564
0972012c 2565 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2566 return false;
452a7d36 2567 }
66180ff3
PB
2568
2569 return true;
452a7d36
HP
2570}
2571
69ef87e2 2572
77ccdfed 2573/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2574 with a vspltisb, vspltish or vspltisw. */
2575
2576bool
2577easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2578{
66180ff3 2579 unsigned step, copies;
d744e06e 2580
66180ff3
PB
2581 if (mode == VOIDmode)
2582 mode = GET_MODE (op);
2583 else if (mode != GET_MODE (op))
2584 return false;
d744e06e 2585
66180ff3
PB
2586 /* Start with a vspltisw. */
2587 step = GET_MODE_NUNITS (mode) / 4;
2588 copies = 1;
2589
2590 if (vspltis_constant (op, step, copies))
2591 return true;
2592
2593 /* Then try with a vspltish. */
2594 if (step == 1)
2595 copies <<= 1;
2596 else
2597 step >>= 1;
2598
2599 if (vspltis_constant (op, step, copies))
2600 return true;
2601
2602 /* And finally a vspltisb. */
2603 if (step == 1)
2604 copies <<= 1;
2605 else
2606 step >>= 1;
2607
2608 if (vspltis_constant (op, step, copies))
2609 return true;
2610
2611 return false;
d744e06e
AH
2612}
2613
66180ff3
PB
2614/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2615 result is OP. Abort if it is not possible. */
d744e06e 2616
f676971a 2617rtx
66180ff3 2618gen_easy_altivec_constant (rtx op)
452a7d36 2619{
66180ff3
PB
2620 enum machine_mode mode = GET_MODE (op);
2621 int nunits = GET_MODE_NUNITS (mode);
2622 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2623 unsigned step = nunits / 4;
2624 unsigned copies = 1;
2625
2626 /* Start with a vspltisw. */
2627 if (vspltis_constant (op, step, copies))
2628 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2629
2630 /* Then try with a vspltish. */
2631 if (step == 1)
2632 copies <<= 1;
2633 else
2634 step >>= 1;
2635
2636 if (vspltis_constant (op, step, copies))
2637 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2638
2639 /* And finally a vspltisb. */
2640 if (step == 1)
2641 copies <<= 1;
2642 else
2643 step >>= 1;
2644
2645 if (vspltis_constant (op, step, copies))
2646 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2647
2648 gcc_unreachable ();
d744e06e
AH
2649}
2650
2651const char *
a2369ed3 2652output_vec_const_move (rtx *operands)
d744e06e
AH
2653{
2654 int cst, cst2;
2655 enum machine_mode mode;
2656 rtx dest, vec;
2657
2658 dest = operands[0];
2659 vec = operands[1];
d744e06e 2660 mode = GET_MODE (dest);
69ef87e2 2661
d744e06e
AH
2662 if (TARGET_ALTIVEC)
2663 {
66180ff3 2664 rtx splat_vec;
d744e06e
AH
2665 if (zero_constant (vec, mode))
2666 return "vxor %0,%0,%0";
37409796 2667
66180ff3
PB
2668 splat_vec = gen_easy_altivec_constant (vec);
2669 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2670 operands[1] = XEXP (splat_vec, 0);
2671 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2672 return "#";
bb8df8a6 2673
66180ff3 2674 switch (GET_MODE (splat_vec))
98ef3137 2675 {
37409796 2676 case V4SImode:
66180ff3 2677 return "vspltisw %0,%1";
c4ad648e 2678
37409796 2679 case V8HImode:
66180ff3 2680 return "vspltish %0,%1";
c4ad648e 2681
37409796 2682 case V16QImode:
66180ff3 2683 return "vspltisb %0,%1";
bb8df8a6 2684
37409796
NS
2685 default:
2686 gcc_unreachable ();
98ef3137 2687 }
69ef87e2
AH
2688 }
2689
37409796 2690 gcc_assert (TARGET_SPE);
bb8df8a6 2691
37409796
NS
2692 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2693 pattern of V1DI, V4HI, and V2SF.
2694
2695 FIXME: We should probably return # and add post reload
2696 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2697 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2698 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2699 operands[1] = CONST_VECTOR_ELT (vec, 0);
2700 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2701 if (cst == cst2)
2702 return "li %0,%1\n\tevmergelo %0,%0,%0";
2703 else
2704 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2705}
2706
7a4eca66
DE
2707/* Initialize vector TARGET to VALS. */
2708
2709void
2710rs6000_expand_vector_init (rtx target, rtx vals)
2711{
2712 enum machine_mode mode = GET_MODE (target);
2713 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2714 int n_elts = GET_MODE_NUNITS (mode);
2715 int n_var = 0, one_var = -1;
2716 bool all_same = true, all_const_zero = true;
2717 rtx x, mem;
2718 int i;
2719
2720 for (i = 0; i < n_elts; ++i)
2721 {
2722 x = XVECEXP (vals, 0, i);
2723 if (!CONSTANT_P (x))
2724 ++n_var, one_var = i;
2725 else if (x != CONST0_RTX (inner_mode))
2726 all_const_zero = false;
2727
2728 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2729 all_same = false;
2730 }
2731
2732 if (n_var == 0)
2733 {
2734 if (mode != V4SFmode && all_const_zero)
2735 {
2736 /* Zero register. */
2737 emit_insn (gen_rtx_SET (VOIDmode, target,
2738 gen_rtx_XOR (mode, target, target)));
2739 return;
2740 }
66180ff3 2741 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2742 {
2743 /* Splat immediate. */
66180ff3 2744 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2745 return;
2746 }
2747 else if (all_same)
2748 ; /* Splat vector element. */
2749 else
2750 {
2751 /* Load from constant pool. */
2752 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2753 return;
2754 }
2755 }
2756
2757 /* Store value to stack temp. Load vector element. Splat. */
2758 if (all_same)
2759 {
2760 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2761 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2762 XVECEXP (vals, 0, 0));
2763 x = gen_rtx_UNSPEC (VOIDmode,
2764 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2765 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2766 gen_rtvec (2,
2767 gen_rtx_SET (VOIDmode,
2768 target, mem),
2769 x)));
2770 x = gen_rtx_VEC_SELECT (inner_mode, target,
2771 gen_rtx_PARALLEL (VOIDmode,
2772 gen_rtvec (1, const0_rtx)));
2773 emit_insn (gen_rtx_SET (VOIDmode, target,
2774 gen_rtx_VEC_DUPLICATE (mode, x)));
2775 return;
2776 }
2777
2778 /* One field is non-constant. Load constant then overwrite
2779 varying field. */
2780 if (n_var == 1)
2781 {
2782 rtx copy = copy_rtx (vals);
2783
57b51d4d 2784 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2785 varying element. */
2786 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2787 rs6000_expand_vector_init (target, copy);
2788
2789 /* Insert variable. */
2790 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2791 return;
2792 }
2793
2794 /* Construct the vector in memory one field at a time
2795 and load the whole vector. */
2796 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2797 for (i = 0; i < n_elts; i++)
2798 emit_move_insn (adjust_address_nv (mem, inner_mode,
2799 i * GET_MODE_SIZE (inner_mode)),
2800 XVECEXP (vals, 0, i));
2801 emit_move_insn (target, mem);
2802}
2803
2804/* Set field ELT of TARGET to VAL. */
2805
2806void
2807rs6000_expand_vector_set (rtx target, rtx val, int elt)
2808{
2809 enum machine_mode mode = GET_MODE (target);
2810 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2811 rtx reg = gen_reg_rtx (mode);
2812 rtx mask, mem, x;
2813 int width = GET_MODE_SIZE (inner_mode);
2814 int i;
2815
2816 /* Load single variable value. */
2817 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2818 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2819 x = gen_rtx_UNSPEC (VOIDmode,
2820 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2821 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2822 gen_rtvec (2,
2823 gen_rtx_SET (VOIDmode,
2824 reg, mem),
2825 x)));
2826
2827 /* Linear sequence. */
2828 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2829 for (i = 0; i < 16; ++i)
2830 XVECEXP (mask, 0, i) = GEN_INT (i);
2831
2832 /* Set permute mask to insert element into target. */
2833 for (i = 0; i < width; ++i)
2834 XVECEXP (mask, 0, elt*width + i)
2835 = GEN_INT (i + 0x10);
2836 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2837 x = gen_rtx_UNSPEC (mode,
2838 gen_rtvec (3, target, reg,
2839 force_reg (V16QImode, x)),
2840 UNSPEC_VPERM);
2841 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2842}
2843
2844/* Extract field ELT from VEC into TARGET. */
2845
2846void
2847rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2848{
2849 enum machine_mode mode = GET_MODE (vec);
2850 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2851 rtx mem, x;
2852
2853 /* Allocate mode-sized buffer. */
2854 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2855
2856 /* Add offset to field within buffer matching vector element. */
2857 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2858
2859 /* Store single field into mode-sized buffer. */
2860 x = gen_rtx_UNSPEC (VOIDmode,
2861 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2862 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2863 gen_rtvec (2,
2864 gen_rtx_SET (VOIDmode,
2865 mem, vec),
2866 x)));
2867 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2868}
2869
0ba1b2ff
AM
2870/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2871 implement ANDing by the mask IN. */
2872void
a2369ed3 2873build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2874{
2875#if HOST_BITS_PER_WIDE_INT >= 64
2876 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2877 int shift;
2878
37409796 2879 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2880
2881 c = INTVAL (in);
2882 if (c & 1)
2883 {
2884 /* Assume c initially something like 0x00fff000000fffff. The idea
2885 is to rotate the word so that the middle ^^^^^^ group of zeros
2886 is at the MS end and can be cleared with an rldicl mask. We then
2887 rotate back and clear off the MS ^^ group of zeros with a
2888 second rldicl. */
2889 c = ~c; /* c == 0xff000ffffff00000 */
2890 lsb = c & -c; /* lsb == 0x0000000000100000 */
2891 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2892 c = ~c; /* c == 0x00fff000000fffff */
2893 c &= -lsb; /* c == 0x00fff00000000000 */
2894 lsb = c & -c; /* lsb == 0x0000100000000000 */
2895 c = ~c; /* c == 0xff000fffffffffff */
2896 c &= -lsb; /* c == 0xff00000000000000 */
2897 shift = 0;
2898 while ((lsb >>= 1) != 0)
2899 shift++; /* shift == 44 on exit from loop */
2900 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2901 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2902 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2903 }
2904 else
0ba1b2ff
AM
2905 {
2906 /* Assume c initially something like 0xff000f0000000000. The idea
2907 is to rotate the word so that the ^^^ middle group of zeros
2908 is at the LS end and can be cleared with an rldicr mask. We then
2909 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2910 a second rldicr. */
2911 lsb = c & -c; /* lsb == 0x0000010000000000 */
2912 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2913 c = ~c; /* c == 0x00fff0ffffffffff */
2914 c &= -lsb; /* c == 0x00fff00000000000 */
2915 lsb = c & -c; /* lsb == 0x0000100000000000 */
2916 c = ~c; /* c == 0xff000fffffffffff */
2917 c &= -lsb; /* c == 0xff00000000000000 */
2918 shift = 0;
2919 while ((lsb >>= 1) != 0)
2920 shift++; /* shift == 44 on exit from loop */
2921 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2922 m1 >>= shift; /* m1 == 0x0000000000000fff */
2923 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2924 }
2925
2926 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2927 masks will be all 1's. We are guaranteed more than one transition. */
2928 out[0] = GEN_INT (64 - shift);
2929 out[1] = GEN_INT (m1);
2930 out[2] = GEN_INT (shift);
2931 out[3] = GEN_INT (m2);
2932#else
045572c7
GK
2933 (void)in;
2934 (void)out;
37409796 2935 gcc_unreachable ();
0ba1b2ff 2936#endif
a260abc9
DE
2937}
2938
54b695e7 2939/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2940
2941bool
54b695e7
AH
2942invalid_e500_subreg (rtx op, enum machine_mode mode)
2943{
61c76239
JM
2944 if (TARGET_E500_DOUBLE)
2945 {
17caeff2
JM
2946 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
2947 subreg:TI and reg:TF. */
61c76239 2948 if (GET_CODE (op) == SUBREG
17caeff2 2949 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 2950 && REG_P (SUBREG_REG (op))
17caeff2
JM
2951 && (GET_MODE (SUBREG_REG (op)) == DFmode
2952 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
2953 return true;
2954
17caeff2
JM
2955 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
2956 reg:TI. */
61c76239 2957 if (GET_CODE (op) == SUBREG
17caeff2 2958 && (mode == DFmode || mode == TFmode)
61c76239 2959 && REG_P (SUBREG_REG (op))
17caeff2
JM
2960 && (GET_MODE (SUBREG_REG (op)) == DImode
2961 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
2962 return true;
2963 }
54b695e7 2964
61c76239
JM
2965 if (TARGET_SPE
2966 && GET_CODE (op) == SUBREG
2967 && mode == SImode
54b695e7 2968 && REG_P (SUBREG_REG (op))
14502dad 2969 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
2970 return true;
2971
2972 return false;
2973}
2974
58182de3 2975/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
2976 field is an FP double while the FP fields remain word aligned. */
2977
19d66194 2978unsigned int
fa5b0972
AM
2979rs6000_special_round_type_align (tree type, unsigned int computed,
2980 unsigned int specified)
95727fb8 2981{
fa5b0972 2982 unsigned int align = MAX (computed, specified);
95727fb8 2983 tree field = TYPE_FIELDS (type);
95727fb8 2984
bb8df8a6 2985 /* Skip all non field decls */
85962ac8 2986 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2987 field = TREE_CHAIN (field);
2988
fa5b0972
AM
2989 if (field != NULL && field != type)
2990 {
2991 type = TREE_TYPE (field);
2992 while (TREE_CODE (type) == ARRAY_TYPE)
2993 type = TREE_TYPE (type);
2994
2995 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2996 align = MAX (align, 64);
2997 }
95727fb8 2998
fa5b0972 2999 return align;
95727fb8
AP
3000}
3001
58182de3
GK
3002/* Darwin increases record alignment to the natural alignment of
3003 the first field. */
3004
3005unsigned int
3006darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3007 unsigned int specified)
3008{
3009 unsigned int align = MAX (computed, specified);
3010
3011 if (TYPE_PACKED (type))
3012 return align;
3013
3014 /* Find the first field, looking down into aggregates. */
3015 do {
3016 tree field = TYPE_FIELDS (type);
3017 /* Skip all non field decls */
3018 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3019 field = TREE_CHAIN (field);
3020 if (! field)
3021 break;
3022 type = TREE_TYPE (field);
3023 while (TREE_CODE (type) == ARRAY_TYPE)
3024 type = TREE_TYPE (type);
3025 } while (AGGREGATE_TYPE_P (type));
3026
3027 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3028 align = MAX (align, TYPE_ALIGN (type));
3029
3030 return align;
3031}
3032
a4f6c312 3033/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3034
3035int
f676971a 3036small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3037 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3038{
38c1f2d7 3039#if TARGET_ELF
5f59ecb7 3040 rtx sym_ref;
7509c759 3041
d9407988 3042 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3043 return 0;
a54d04b7 3044
f607bc57 3045 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3046 return 0;
3047
88228c4b
MM
3048 if (GET_CODE (op) == SYMBOL_REF)
3049 sym_ref = op;
3050
3051 else if (GET_CODE (op) != CONST
3052 || GET_CODE (XEXP (op, 0)) != PLUS
3053 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3054 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3055 return 0;
3056
88228c4b 3057 else
dbf55e53
MM
3058 {
3059 rtx sum = XEXP (op, 0);
3060 HOST_WIDE_INT summand;
3061
3062 /* We have to be careful here, because it is the referenced address
c4ad648e 3063 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3064 summand = INTVAL (XEXP (sum, 1));
307b599c 3065 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3066 return 0;
dbf55e53
MM
3067
3068 sym_ref = XEXP (sum, 0);
3069 }
88228c4b 3070
20bfcd69 3071 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3072#else
3073 return 0;
3074#endif
7509c759 3075}
46c07df8 3076
3a1f863f 3077/* Return true if either operand is a general purpose register. */
46c07df8 3078
3a1f863f
DE
3079bool
3080gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3081{
3a1f863f
DE
3082 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3083 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3084}
3085
9ebbca7d 3086\f
4d588c14
RH
3087/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3088
f676971a
EC
3089static int
3090constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3091{
9390387d 3092 switch (GET_CODE (op))
9ebbca7d
GK
3093 {
3094 case SYMBOL_REF:
c4501e62
JJ
3095 if (RS6000_SYMBOL_REF_TLS_P (op))
3096 return 0;
3097 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3098 {
3099 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3100 {
3101 *have_sym = 1;
3102 return 1;
3103 }
3104 else
3105 return 0;
3106 }
3107 else if (! strcmp (XSTR (op, 0), toc_label_name))
3108 {
3109 *have_toc = 1;
3110 return 1;
3111 }
3112 else
3113 return 0;
9ebbca7d
GK
3114 case PLUS:
3115 case MINUS:
c1f11548
DE
3116 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3117 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3118 case CONST:
a4f6c312 3119 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3120 case CONST_INT:
a4f6c312 3121 return 1;
9ebbca7d 3122 default:
a4f6c312 3123 return 0;
9ebbca7d
GK
3124 }
3125}
3126
4d588c14 3127static bool
a2369ed3 3128constant_pool_expr_p (rtx op)
9ebbca7d
GK
3129{
3130 int have_sym = 0;
3131 int have_toc = 0;
3132 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3133}
3134
48d72335 3135bool
a2369ed3 3136toc_relative_expr_p (rtx op)
9ebbca7d 3137{
4d588c14
RH
3138 int have_sym = 0;
3139 int have_toc = 0;
3140 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3141}
3142
4d588c14 3143bool
a2369ed3 3144legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3145{
3146 return (TARGET_TOC
3147 && GET_CODE (x) == PLUS
3148 && GET_CODE (XEXP (x, 0)) == REG
3149 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3150 && constant_pool_expr_p (XEXP (x, 1)));
3151}
3152
d04b6e6e
EB
3153static bool
3154legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3155{
3156 return (DEFAULT_ABI == ABI_V4
3157 && !flag_pic && !TARGET_TOC
3158 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3159 && small_data_operand (x, mode));
3160}
3161
60cdabab
DE
3162/* SPE offset addressing is limited to 5-bits worth of double words. */
3163#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3164
76d2b81d
DJ
3165bool
3166rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3167{
3168 unsigned HOST_WIDE_INT offset, extra;
3169
3170 if (GET_CODE (x) != PLUS)
3171 return false;
3172 if (GET_CODE (XEXP (x, 0)) != REG)
3173 return false;
3174 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3175 return false;
60cdabab
DE
3176 if (legitimate_constant_pool_address_p (x))
3177 return true;
4d588c14
RH
3178 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3179 return false;
3180
3181 offset = INTVAL (XEXP (x, 1));
3182 extra = 0;
3183 switch (mode)
3184 {
3185 case V16QImode:
3186 case V8HImode:
3187 case V4SFmode:
3188 case V4SImode:
7a4eca66
DE
3189 /* AltiVec vector modes. Only reg+reg addressing is valid and
3190 constant offset zero should not occur due to canonicalization.
3191 Allow any offset when not strict before reload. */
3192 return !strict;
4d588c14
RH
3193
3194 case V4HImode:
3195 case V2SImode:
3196 case V1DImode:
3197 case V2SFmode:
3198 /* SPE vector modes. */
3199 return SPE_CONST_OFFSET_OK (offset);
3200
3201 case DFmode:
7393f7f8 3202 case DDmode:
4d4cbc0e
AH
3203 if (TARGET_E500_DOUBLE)
3204 return SPE_CONST_OFFSET_OK (offset);
3205
4d588c14 3206 case DImode:
54b695e7
AH
3207 /* On e500v2, we may have:
3208
3209 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3210
3211 Which gets addressed with evldd instructions. */
3212 if (TARGET_E500_DOUBLE)
3213 return SPE_CONST_OFFSET_OK (offset);
3214
7393f7f8 3215 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3216 extra = 4;
3217 else if (offset & 3)
3218 return false;
3219 break;
3220
3221 case TFmode:
17caeff2
JM
3222 if (TARGET_E500_DOUBLE)
3223 return (SPE_CONST_OFFSET_OK (offset)
3224 && SPE_CONST_OFFSET_OK (offset + 8));
3225
4d588c14 3226 case TImode:
7393f7f8
BE
3227 case TDmode:
3228 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3229 extra = 12;
3230 else if (offset & 3)
3231 return false;
3232 else
3233 extra = 8;
3234 break;
3235
3236 default:
3237 break;
3238 }
3239
b1917422
AM
3240 offset += 0x8000;
3241 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3242}
3243
6fb5fa3c 3244bool
a2369ed3 3245legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3246{
3247 rtx op0, op1;
3248
3249 if (GET_CODE (x) != PLUS)
3250 return false;
850e8d3d 3251
4d588c14
RH
3252 op0 = XEXP (x, 0);
3253 op1 = XEXP (x, 1);
3254
bf00cc0f 3255 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3256 replaced with proper base and index regs. */
3257 if (!strict
3258 && reload_in_progress
3259 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3260 && REG_P (op1))
3261 return true;
3262
3263 return (REG_P (op0) && REG_P (op1)
3264 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3265 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3266 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3267 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3268}
3269
48d72335 3270inline bool
a2369ed3 3271legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3272{
3273 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3274}
3275
48d72335 3276bool
4c81e946
FJ
3277macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3278{
c4ad648e 3279 if (!TARGET_MACHO || !flag_pic
9390387d 3280 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3281 return false;
3282 x = XEXP (x, 0);
4c81e946
FJ
3283
3284 if (GET_CODE (x) != LO_SUM)
3285 return false;
3286 if (GET_CODE (XEXP (x, 0)) != REG)
3287 return false;
3288 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3289 return false;
3290 x = XEXP (x, 1);
3291
3292 return CONSTANT_P (x);
3293}
3294
4d588c14 3295static bool
a2369ed3 3296legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3297{
3298 if (GET_CODE (x) != LO_SUM)
3299 return false;
3300 if (GET_CODE (XEXP (x, 0)) != REG)
3301 return false;
3302 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3303 return false;
54b695e7 3304 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3305 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3306 || mode == DImode))
f82f556d 3307 return false;
4d588c14
RH
3308 x = XEXP (x, 1);
3309
8622e235 3310 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3311 {
a29077da 3312 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3313 return false;
3314 if (TARGET_TOC)
3315 return false;
3316 if (GET_MODE_NUNITS (mode) != 1)
3317 return false;
5e5f01b9 3318 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3319 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3320 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3321 return false;
3322
3323 return CONSTANT_P (x);
3324 }
3325
3326 return false;
3327}
3328
3329
9ebbca7d
GK
3330/* Try machine-dependent ways of modifying an illegitimate address
3331 to be legitimate. If we find one, return the new, valid address.
3332 This is used from only one place: `memory_address' in explow.c.
3333
a4f6c312
SS
3334 OLDX is the address as it was before break_out_memory_refs was
3335 called. In some cases it is useful to look at this to decide what
3336 needs to be done.
9ebbca7d 3337
a4f6c312 3338 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3339
a4f6c312
SS
3340 It is always safe for this function to do nothing. It exists to
3341 recognize opportunities to optimize the output.
9ebbca7d
GK
3342
3343 On RS/6000, first check for the sum of a register with a constant
3344 integer that is out of range. If so, generate code to add the
3345 constant with the low-order 16 bits masked to the register and force
3346 this result into another register (this can be done with `cau').
3347 Then generate an address of REG+(CONST&0xffff), allowing for the
3348 possibility of bit 16 being a one.
3349
3350 Then check for the sum of a register and something not constant, try to
3351 load the other things into a register and return the sum. */
4d588c14 3352
9ebbca7d 3353rtx
a2369ed3
DJ
3354rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3355 enum machine_mode mode)
0ac081f6 3356{
c4501e62
JJ
3357 if (GET_CODE (x) == SYMBOL_REF)
3358 {
3359 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3360 if (model != 0)
3361 return rs6000_legitimize_tls_address (x, model);
3362 }
3363
f676971a 3364 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3365 && GET_CODE (XEXP (x, 0)) == REG
3366 && GET_CODE (XEXP (x, 1)) == CONST_INT
3367 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 3368 {
9ebbca7d
GK
3369 HOST_WIDE_INT high_int, low_int;
3370 rtx sum;
a65c591c
DE
3371 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3372 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3373 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3374 GEN_INT (high_int)), 0);
3375 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3376 }
f676971a 3377 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3378 && GET_CODE (XEXP (x, 0)) == REG
3379 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3380 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3381 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3382 || TARGET_POWERPC64
7393f7f8
BE
3383 || (((mode != DImode && mode != DFmode && mode != DDmode)
3384 || TARGET_E500_DOUBLE)
3385 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3386 && (TARGET_POWERPC64 || mode != DImode)
3387 && mode != TImode)
3388 {
3389 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3390 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3391 }
0ac081f6
AH
3392 else if (ALTIVEC_VECTOR_MODE (mode))
3393 {
3394 rtx reg;
3395
3396 /* Make sure both operands are registers. */
3397 if (GET_CODE (x) == PLUS)
9f85ed45 3398 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3399 force_reg (Pmode, XEXP (x, 1)));
3400
3401 reg = force_reg (Pmode, x);
3402 return reg;
3403 }
4d4cbc0e 3404 else if (SPE_VECTOR_MODE (mode)
17caeff2 3405 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3406 || mode == DDmode || mode == TDmode
54b695e7 3407 || mode == DImode)))
a3170dc6 3408 {
54b695e7
AH
3409 if (mode == DImode)
3410 return NULL_RTX;
a3170dc6
AH
3411 /* We accept [reg + reg] and [reg + OFFSET]. */
3412
3413 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3414 {
3415 rtx op1 = XEXP (x, 0);
3416 rtx op2 = XEXP (x, 1);
a3170dc6 3417
c4ad648e 3418 op1 = force_reg (Pmode, op1);
a3170dc6 3419
c4ad648e
AM
3420 if (GET_CODE (op2) != REG
3421 && (GET_CODE (op2) != CONST_INT
3422 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3423 op2 = force_reg (Pmode, op2);
a3170dc6 3424
c4ad648e
AM
3425 return gen_rtx_PLUS (Pmode, op1, op2);
3426 }
a3170dc6
AH
3427
3428 return force_reg (Pmode, x);
3429 }
f1384257
AM
3430 else if (TARGET_ELF
3431 && TARGET_32BIT
3432 && TARGET_NO_TOC
3433 && ! flag_pic
9ebbca7d 3434 && GET_CODE (x) != CONST_INT
f676971a 3435 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3436 && CONSTANT_P (x)
6ac7bf2c
GK
3437 && GET_MODE_NUNITS (mode) == 1
3438 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3439 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3440 {
3441 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3442 emit_insn (gen_elf_high (reg, x));
3443 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3444 }
ee890fe2
SS
3445 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3446 && ! flag_pic
ab82a49f
AP
3447#if TARGET_MACHO
3448 && ! MACHO_DYNAMIC_NO_PIC_P
3449#endif
ee890fe2 3450 && GET_CODE (x) != CONST_INT
f676971a 3451 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3452 && CONSTANT_P (x)
f82f556d 3453 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3454 && mode != DImode
ee890fe2
SS
3455 && mode != TImode)
3456 {
3457 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3458 emit_insn (gen_macho_high (reg, x));
3459 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3460 }
f676971a 3461 else if (TARGET_TOC
4d588c14 3462 && constant_pool_expr_p (x)
a9098fd0 3463 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3464 {
3465 return create_TOC_reference (x);
3466 }
3467 else
3468 return NULL_RTX;
3469}
258bfae2 3470
fdbe66f2 3471/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3472 We need to emit DTP-relative relocations. */
3473
fdbe66f2 3474static void
c973d557
JJ
3475rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3476{
3477 switch (size)
3478 {
3479 case 4:
3480 fputs ("\t.long\t", file);
3481 break;
3482 case 8:
3483 fputs (DOUBLE_INT_ASM_OP, file);
3484 break;
3485 default:
37409796 3486 gcc_unreachable ();
c973d557
JJ
3487 }
3488 output_addr_const (file, x);
3489 fputs ("@dtprel+0x8000", file);
3490}
3491
c4501e62
JJ
3492/* Construct the SYMBOL_REF for the tls_get_addr function. */
3493
3494static GTY(()) rtx rs6000_tls_symbol;
3495static rtx
863d938c 3496rs6000_tls_get_addr (void)
c4501e62
JJ
3497{
3498 if (!rs6000_tls_symbol)
3499 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3500
3501 return rs6000_tls_symbol;
3502}
3503
3504/* Construct the SYMBOL_REF for TLS GOT references. */
3505
3506static GTY(()) rtx rs6000_got_symbol;
3507static rtx
863d938c 3508rs6000_got_sym (void)
c4501e62
JJ
3509{
3510 if (!rs6000_got_symbol)
3511 {
3512 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3513 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3514 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3515 }
c4501e62
JJ
3516
3517 return rs6000_got_symbol;
3518}
3519
3520/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3521 this (thread-local) address. */
3522
3523static rtx
a2369ed3 3524rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3525{
3526 rtx dest, insn;
3527
3528 dest = gen_reg_rtx (Pmode);
3529 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3530 {
3531 rtx tlsreg;
3532
3533 if (TARGET_64BIT)
3534 {
3535 tlsreg = gen_rtx_REG (Pmode, 13);
3536 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3537 }
3538 else
3539 {
3540 tlsreg = gen_rtx_REG (Pmode, 2);
3541 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3542 }
3543 emit_insn (insn);
3544 }
3545 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3546 {
3547 rtx tlsreg, tmp;
3548
3549 tmp = gen_reg_rtx (Pmode);
3550 if (TARGET_64BIT)
3551 {
3552 tlsreg = gen_rtx_REG (Pmode, 13);
3553 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3554 }
3555 else
3556 {
3557 tlsreg = gen_rtx_REG (Pmode, 2);
3558 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3559 }
3560 emit_insn (insn);
3561 if (TARGET_64BIT)
3562 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3563 else
3564 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3565 emit_insn (insn);
3566 }
3567 else
3568 {
3569 rtx r3, got, tga, tmp1, tmp2, eqv;
3570
4fed8f8f
AM
3571 /* We currently use relocations like @got@tlsgd for tls, which
3572 means the linker will handle allocation of tls entries, placing
3573 them in the .got section. So use a pointer to the .got section,
3574 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3575 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3576 if (TARGET_64BIT)
972f427b 3577 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3578 else
3579 {
3580 if (flag_pic == 1)
3581 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3582 else
3583 {
3584 rtx gsym = rs6000_got_sym ();
3585 got = gen_reg_rtx (Pmode);
3586 if (flag_pic == 0)
3587 rs6000_emit_move (got, gsym, Pmode);
3588 else
3589 {
e65a3857 3590 rtx tmp3, mem;
c4501e62
JJ
3591 rtx first, last;
3592
c4501e62
JJ
3593 tmp1 = gen_reg_rtx (Pmode);
3594 tmp2 = gen_reg_rtx (Pmode);
3595 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3596 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3597
e65a3857
DE
3598 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3599 emit_move_insn (tmp1,
1de43f85 3600 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3601 emit_move_insn (tmp2, mem);
3602 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3603 last = emit_move_insn (got, tmp3);
bd94cb6e 3604 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3605 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3606 }
3607 }
3608 }
3609
3610 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3611 {
3612 r3 = gen_rtx_REG (Pmode, 3);
3613 if (TARGET_64BIT)
3614 insn = gen_tls_gd_64 (r3, got, addr);
3615 else
3616 insn = gen_tls_gd_32 (r3, got, addr);
3617 start_sequence ();
3618 emit_insn (insn);
3619 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3620 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3621 insn = emit_call_insn (insn);
3622 CONST_OR_PURE_CALL_P (insn) = 1;
3623 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3624 insn = get_insns ();
3625 end_sequence ();
3626 emit_libcall_block (insn, dest, r3, addr);
3627 }
3628 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3629 {
3630 r3 = gen_rtx_REG (Pmode, 3);
3631 if (TARGET_64BIT)
3632 insn = gen_tls_ld_64 (r3, got);
3633 else
3634 insn = gen_tls_ld_32 (r3, got);
3635 start_sequence ();
3636 emit_insn (insn);
3637 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3638 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3639 insn = emit_call_insn (insn);
3640 CONST_OR_PURE_CALL_P (insn) = 1;
3641 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3642 insn = get_insns ();
3643 end_sequence ();
3644 tmp1 = gen_reg_rtx (Pmode);
3645 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3646 UNSPEC_TLSLD);
3647 emit_libcall_block (insn, tmp1, r3, eqv);
3648 if (rs6000_tls_size == 16)
3649 {
3650 if (TARGET_64BIT)
3651 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3652 else
3653 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3654 }
3655 else if (rs6000_tls_size == 32)
3656 {
3657 tmp2 = gen_reg_rtx (Pmode);
3658 if (TARGET_64BIT)
3659 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3660 else
3661 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3662 emit_insn (insn);
3663 if (TARGET_64BIT)
3664 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3665 else
3666 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3667 }
3668 else
3669 {
3670 tmp2 = gen_reg_rtx (Pmode);
3671 if (TARGET_64BIT)
3672 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3673 else
3674 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3675 emit_insn (insn);
3676 insn = gen_rtx_SET (Pmode, dest,
3677 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3678 }
3679 emit_insn (insn);
3680 }
3681 else
3682 {
a7b376ee 3683 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3684 tmp2 = gen_reg_rtx (Pmode);
3685 if (TARGET_64BIT)
3686 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3687 else
3688 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3689 emit_insn (insn);
3690 if (TARGET_64BIT)
3691 insn = gen_tls_tls_64 (dest, tmp2, addr);
3692 else
3693 insn = gen_tls_tls_32 (dest, tmp2, addr);
3694 emit_insn (insn);
3695 }
3696 }
3697
3698 return dest;
3699}
3700
c4501e62
JJ
3701/* Return 1 if X contains a thread-local symbol. */
3702
3703bool
a2369ed3 3704rs6000_tls_referenced_p (rtx x)
c4501e62 3705{
cd413cab
AP
3706 if (! TARGET_HAVE_TLS)
3707 return false;
3708
c4501e62
JJ
3709 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3710}
3711
3712/* Return 1 if *X is a thread-local symbol. This is the same as
3713 rs6000_tls_symbol_ref except for the type of the unused argument. */
3714
9390387d 3715static int
a2369ed3 3716rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3717{
3718 return RS6000_SYMBOL_REF_TLS_P (*x);
3719}
3720
24ea750e
DJ
3721/* The convention appears to be to define this wherever it is used.
3722 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3723 is now used here. */
3724#ifndef REG_MODE_OK_FOR_BASE_P
3725#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3726#endif
3727
3728/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3729 replace the input X, or the original X if no replacement is called for.
3730 The output parameter *WIN is 1 if the calling macro should goto WIN,
3731 0 if it should not.
3732
3733 For RS/6000, we wish to handle large displacements off a base
3734 register by splitting the addend across an addiu/addis and the mem insn.
3735 This cuts number of extra insns needed from 3 to 1.
3736
3737 On Darwin, we use this to generate code for floating point constants.
3738 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3739 The Darwin code is inside #if TARGET_MACHO because only then is
3740 machopic_function_base_name() defined. */
3741rtx
f676971a 3742rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3743 int opnum, int type,
3744 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3745{
f676971a 3746 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3747 if (GET_CODE (x) == PLUS
3748 && GET_CODE (XEXP (x, 0)) == PLUS
3749 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3750 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3751 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3752 {
3753 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3754 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3755 opnum, (enum reload_type)type);
24ea750e
DJ
3756 *win = 1;
3757 return x;
3758 }
3deb2758 3759
24ea750e
DJ
3760#if TARGET_MACHO
3761 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3762 && GET_CODE (x) == LO_SUM
3763 && GET_CODE (XEXP (x, 0)) == PLUS
3764 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3765 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3766 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3767 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3768 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3769 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3770 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3771 {
3772 /* Result of previous invocation of this function on Darwin
6f317ef3 3773 floating point constant. */
24ea750e 3774 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3775 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3776 opnum, (enum reload_type)type);
24ea750e
DJ
3777 *win = 1;
3778 return x;
3779 }
3780#endif
4937d02d
DE
3781
3782 /* Force ld/std non-word aligned offset into base register by wrapping
3783 in offset 0. */
3784 if (GET_CODE (x) == PLUS
3785 && GET_CODE (XEXP (x, 0)) == REG
3786 && REGNO (XEXP (x, 0)) < 32
3787 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3788 && GET_CODE (XEXP (x, 1)) == CONST_INT
3789 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3790 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3791 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3792 && TARGET_POWERPC64)
3793 {
3794 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3795 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3796 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3797 opnum, (enum reload_type) type);
3798 *win = 1;
3799 return x;
3800 }
3801
24ea750e
DJ
3802 if (GET_CODE (x) == PLUS
3803 && GET_CODE (XEXP (x, 0)) == REG
3804 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3805 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3806 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3807 && !SPE_VECTOR_MODE (mode)
17caeff2 3808 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3809 || mode == DImode))
78c875e8 3810 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3811 {
3812 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3813 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3814 HOST_WIDE_INT high
c4ad648e 3815 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3816
3817 /* Check for 32-bit overflow. */
3818 if (high + low != val)
c4ad648e 3819 {
24ea750e
DJ
3820 *win = 0;
3821 return x;
3822 }
3823
3824 /* Reload the high part into a base reg; leave the low part
c4ad648e 3825 in the mem directly. */
24ea750e
DJ
3826
3827 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3828 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3829 GEN_INT (high)),
3830 GEN_INT (low));
24ea750e
DJ
3831
3832 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3833 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3834 opnum, (enum reload_type)type);
24ea750e
DJ
3835 *win = 1;
3836 return x;
3837 }
4937d02d 3838
24ea750e 3839 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3840 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3841 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3842#if TARGET_MACHO
3843 && DEFAULT_ABI == ABI_DARWIN
a29077da 3844 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3845#else
3846 && DEFAULT_ABI == ABI_V4
3847 && !flag_pic
3848#endif
7393f7f8 3849 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
3850 The same goes for DImode without 64-bit gprs and DFmode
3851 without fprs. */
0d8c1c97 3852 && mode != TFmode
7393f7f8 3853 && mode != TDmode
7b5d92b2
AM
3854 && (mode != DImode || TARGET_POWERPC64)
3855 && (mode != DFmode || TARGET_POWERPC64
3856 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3857 {
8308679f 3858#if TARGET_MACHO
a29077da
GK
3859 if (flag_pic)
3860 {
3861 rtx offset = gen_rtx_CONST (Pmode,
3862 gen_rtx_MINUS (Pmode, x,
11abc112 3863 machopic_function_base_sym ()));
a29077da
GK
3864 x = gen_rtx_LO_SUM (GET_MODE (x),
3865 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3866 gen_rtx_HIGH (Pmode, offset)), offset);
3867 }
3868 else
8308679f 3869#endif
a29077da 3870 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3871 gen_rtx_HIGH (Pmode, x), x);
a29077da 3872
24ea750e 3873 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3874 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3875 opnum, (enum reload_type)type);
24ea750e
DJ
3876 *win = 1;
3877 return x;
3878 }
4937d02d 3879
dec1f3aa
DE
3880 /* Reload an offset address wrapped by an AND that represents the
3881 masking of the lower bits. Strip the outer AND and let reload
3882 convert the offset address into an indirect address. */
3883 if (TARGET_ALTIVEC
3884 && ALTIVEC_VECTOR_MODE (mode)
3885 && GET_CODE (x) == AND
3886 && GET_CODE (XEXP (x, 0)) == PLUS
3887 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3888 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3889 && GET_CODE (XEXP (x, 1)) == CONST_INT
3890 && INTVAL (XEXP (x, 1)) == -16)
3891 {
3892 x = XEXP (x, 0);
3893 *win = 1;
3894 return x;
3895 }
3896
24ea750e 3897 if (TARGET_TOC
4d588c14 3898 && constant_pool_expr_p (x)
c1f11548 3899 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3900 {
194c524a 3901 x = create_TOC_reference (x);
24ea750e
DJ
3902 *win = 1;
3903 return x;
3904 }
3905 *win = 0;
3906 return x;
f676971a 3907}
24ea750e 3908
258bfae2
FS
3909/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3910 that is a valid memory address for an instruction.
3911 The MODE argument is the machine mode for the MEM expression
3912 that wants to use this address.
3913
3914 On the RS/6000, there are four valid address: a SYMBOL_REF that
3915 refers to a constant pool entry of an address (or the sum of it
3916 plus a constant), a short (16-bit signed) constant plus a register,
3917 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3918 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3919 we must ensure that both words are addressable or PowerPC64 with offset
3920 word aligned.
3921
3922 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
3923 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
3924 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
3925 during assembly output. */
3926int
a2369ed3 3927rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3928{
850e8d3d
DN
3929 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3930 if (TARGET_ALTIVEC
3931 && ALTIVEC_VECTOR_MODE (mode)
3932 && GET_CODE (x) == AND
3933 && GET_CODE (XEXP (x, 1)) == CONST_INT
3934 && INTVAL (XEXP (x, 1)) == -16)
3935 x = XEXP (x, 0);
3936
c4501e62
JJ
3937 if (RS6000_SYMBOL_REF_TLS_P (x))
3938 return 0;
4d588c14 3939 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3940 return 1;
3941 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3942 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3943 && !SPE_VECTOR_MODE (mode)
429ec7dc 3944 && mode != TFmode
7393f7f8 3945 && mode != TDmode
54b695e7 3946 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3947 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3948 || mode == DImode))
258bfae2 3949 && TARGET_UPDATE
4d588c14 3950 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3951 return 1;
d04b6e6e 3952 if (legitimate_small_data_p (mode, x))
258bfae2 3953 return 1;
4d588c14 3954 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3955 return 1;
3956 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3957 if (! reg_ok_strict
3958 && GET_CODE (x) == PLUS
3959 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3960 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3961 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3962 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3963 return 1;
76d2b81d 3964 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3965 return 1;
3966 if (mode != TImode
76d2b81d 3967 && mode != TFmode
7393f7f8 3968 && mode != TDmode
a3170dc6
AH
3969 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3970 || TARGET_POWERPC64
4d4cbc0e 3971 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3972 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3973 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3974 return 1;
6fb5fa3c
DB
3975 if (GET_CODE (x) == PRE_MODIFY
3976 && mode != TImode
3977 && mode != TFmode
3978 && mode != TDmode
3979 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3980 || TARGET_POWERPC64
3981 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3982 && (TARGET_POWERPC64 || mode != DImode)
3983 && !ALTIVEC_VECTOR_MODE (mode)
3984 && !SPE_VECTOR_MODE (mode)
3985 /* Restrict addressing for DI because of our SUBREG hackery. */
3986 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3987 && TARGET_UPDATE
3988 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
3989 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
3990 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
3991 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3992 return 1;
4d588c14 3993 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3994 return 1;
3995 return 0;
3996}
4d588c14
RH
3997
3998/* Go to LABEL if ADDR (a legitimate address expression)
3999 has an effect that depends on the machine mode it is used for.
4000
4001 On the RS/6000 this is true of all integral offsets (since AltiVec
4002 modes don't allow them) or is a pre-increment or decrement.
4003
4004 ??? Except that due to conceptual problems in offsettable_address_p
4005 we can't really report the problems of integral offsets. So leave
f676971a 4006 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4007 sub-words of a TFmode operand, which is what we had before. */
4008
4009bool
a2369ed3 4010rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4011{
4012 switch (GET_CODE (addr))
4013 {
4014 case PLUS:
4015 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4016 {
4017 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4018 return val + 12 + 0x8000 >= 0x10000;
4019 }
4020 break;
4021
4022 case LO_SUM:
4023 return true;
4024
6fb5fa3c
DB
4025 case PRE_INC:
4026 case PRE_DEC:
4027 case PRE_MODIFY:
4028 return TARGET_UPDATE;
4d588c14
RH
4029
4030 default:
4031 break;
4032 }
4033
4034 return false;
4035}
d8ecbcdb 4036
d04b6e6e
EB
4037/* More elaborate version of recog's offsettable_memref_p predicate
4038 that works around the ??? note of rs6000_mode_dependent_address.
4039 In particular it accepts
4040
4041 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4042
4043 in 32-bit mode, that the recog predicate rejects. */
4044
4045bool
4046rs6000_offsettable_memref_p (rtx op)
4047{
4048 if (!MEM_P (op))
4049 return false;
4050
4051 /* First mimic offsettable_memref_p. */
4052 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4053 return true;
4054
4055 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4056 the latter predicate knows nothing about the mode of the memory
4057 reference and, therefore, assumes that it is the largest supported
4058 mode (TFmode). As a consequence, legitimate offsettable memory
4059 references are rejected. rs6000_legitimate_offset_address_p contains
4060 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4061 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4062}
4063
d8ecbcdb
AH
4064/* Return number of consecutive hard regs needed starting at reg REGNO
4065 to hold something of mode MODE.
4066 This is ordinarily the length in words of a value of mode MODE
4067 but can be less for certain modes in special long registers.
4068
4069 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4070 scalar instructions. The upper 32 bits are only available to the
4071 SIMD instructions.
4072
4073 POWER and PowerPC GPRs hold 32 bits worth;
4074 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
4075
4076int
4077rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4078{
4079 if (FP_REGNO_P (regno))
4080 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4081
4082 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4083 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4084
4085 if (ALTIVEC_REGNO_P (regno))
4086 return
4087 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4088
8521c414
JM
4089 /* The value returned for SCmode in the E500 double case is 2 for
4090 ABI compatibility; storing an SCmode value in a single register
4091 would require function_arg and rs6000_spe_function_arg to handle
4092 SCmode so as to pass the value correctly in a pair of
4093 registers. */
4094 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4095 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4096
d8ecbcdb
AH
4097 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4098}
2aa4498c
AH
4099
4100/* Change register usage conditional on target flags. */
4101void
4102rs6000_conditional_register_usage (void)
4103{
4104 int i;
4105
4106 /* Set MQ register fixed (already call_used) if not POWER
4107 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4108 be allocated. */
4109 if (! TARGET_POWER)
4110 fixed_regs[64] = 1;
4111
7c9ac5c0 4112 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4113 if (TARGET_64BIT)
4114 fixed_regs[13] = call_used_regs[13]
4115 = call_really_used_regs[13] = 1;
4116
4117 /* Conditionally disable FPRs. */
4118 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4119 for (i = 32; i < 64; i++)
4120 fixed_regs[i] = call_used_regs[i]
c4ad648e 4121 = call_really_used_regs[i] = 1;
2aa4498c 4122
7c9ac5c0
PH
4123 /* The TOC register is not killed across calls in a way that is
4124 visible to the compiler. */
4125 if (DEFAULT_ABI == ABI_AIX)
4126 call_really_used_regs[2] = 0;
4127
2aa4498c
AH
4128 if (DEFAULT_ABI == ABI_V4
4129 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4130 && flag_pic == 2)
4131 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4132
4133 if (DEFAULT_ABI == ABI_V4
4134 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4135 && flag_pic == 1)
4136 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4137 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4138 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4139
4140 if (DEFAULT_ABI == ABI_DARWIN
4141 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4142 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4143 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4144 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4145
b4db40bf
JJ
4146 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4147 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4148 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4149
2aa4498c
AH
4150 if (TARGET_ALTIVEC)
4151 global_regs[VSCR_REGNO] = 1;
4152
4153 if (TARGET_SPE)
4154 {
4155 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4156 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4157 registers in prologues and epilogues. We no longer use r14
4158 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4159 pool for link-compatibility with older versions of GCC. Once
4160 "old" code has died out, we can return r14 to the allocation
4161 pool. */
4162 fixed_regs[14]
4163 = call_used_regs[14]
4164 = call_really_used_regs[14] = 1;
2aa4498c
AH
4165 }
4166
4167 if (! TARGET_ALTIVEC)
4168 {
4169 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4170 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4171 call_really_used_regs[VRSAVE_REGNO] = 1;
4172 }
4173
4174 if (TARGET_ALTIVEC_ABI)
4175 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4176 call_used_regs[i] = call_really_used_regs[i] = 1;
4177}
fb4d4348 4178\f
a4f6c312
SS
4179/* Try to output insns to set TARGET equal to the constant C if it can
4180 be done in less than N insns. Do all computations in MODE.
4181 Returns the place where the output has been placed if it can be
4182 done and the insns have been emitted. If it would take more than N
4183 insns, zero is returned and no insns and emitted. */
2bfcf297
DB
4184
4185rtx
f676971a 4186rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4187 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4188{
af8cb5c5 4189 rtx result, insn, set;
2bfcf297
DB
4190 HOST_WIDE_INT c0, c1;
4191
37409796 4192 switch (mode)
2bfcf297 4193 {
37409796
NS
4194 case QImode:
4195 case HImode:
2bfcf297 4196 if (dest == NULL)
c4ad648e 4197 dest = gen_reg_rtx (mode);
2bfcf297
DB
4198 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4199 return dest;
bb8df8a6 4200
37409796 4201 case SImode:
b3a13419 4202 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4203
d448860e 4204 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4205 GEN_INT (INTVAL (source)
4206 & (~ (HOST_WIDE_INT) 0xffff))));
4207 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4208 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4209 GEN_INT (INTVAL (source) & 0xffff))));
4210 result = dest;
37409796
NS
4211 break;
4212
4213 case DImode:
4214 switch (GET_CODE (source))
af8cb5c5 4215 {
37409796 4216 case CONST_INT:
af8cb5c5
DE
4217 c0 = INTVAL (source);
4218 c1 = -(c0 < 0);
37409796 4219 break;
bb8df8a6 4220
37409796 4221 case CONST_DOUBLE:
2bfcf297 4222#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4223 c0 = CONST_DOUBLE_LOW (source);
4224 c1 = -(c0 < 0);
2bfcf297 4225#else
af8cb5c5
DE
4226 c0 = CONST_DOUBLE_LOW (source);
4227 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4228#endif
37409796
NS
4229 break;
4230
4231 default:
4232 gcc_unreachable ();
af8cb5c5 4233 }
af8cb5c5
DE
4234
4235 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4236 break;
4237
4238 default:
4239 gcc_unreachable ();
2bfcf297 4240 }
2bfcf297 4241
af8cb5c5
DE
4242 insn = get_last_insn ();
4243 set = single_set (insn);
4244 if (! CONSTANT_P (SET_SRC (set)))
4245 set_unique_reg_note (insn, REG_EQUAL, source);
4246
4247 return result;
2bfcf297
DB
4248}
4249
4250/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4251 fall back to a straight forward decomposition. We do this to avoid
4252 exponential run times encountered when looking for longer sequences
4253 with rs6000_emit_set_const. */
4254static rtx
a2369ed3 4255rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4256{
4257 if (!TARGET_POWERPC64)
4258 {
4259 rtx operand1, operand2;
4260
4261 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4262 DImode);
d448860e 4263 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4264 DImode);
4265 emit_move_insn (operand1, GEN_INT (c1));
4266 emit_move_insn (operand2, GEN_INT (c2));
4267 }
4268 else
4269 {
bc06712d 4270 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4271
bc06712d 4272 ud1 = c1 & 0xffff;
f921c9c9 4273 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4274#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4275 c2 = c1 >> 32;
2bfcf297 4276#endif
bc06712d 4277 ud3 = c2 & 0xffff;
f921c9c9 4278 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4279
f676971a 4280 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4281 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4282 {
bc06712d 4283 if (ud1 & 0x8000)
b78d48dd 4284 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4285 else
4286 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4287 }
2bfcf297 4288
f676971a 4289 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4290 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4291 {
bc06712d 4292 if (ud2 & 0x8000)
f676971a 4293 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4294 - 0x80000000));
252b88f7 4295 else
bc06712d
TR
4296 emit_move_insn (dest, GEN_INT (ud2 << 16));
4297 if (ud1 != 0)
d448860e
JH
4298 emit_move_insn (copy_rtx (dest),
4299 gen_rtx_IOR (DImode, copy_rtx (dest),
4300 GEN_INT (ud1)));
252b88f7 4301 }
f676971a 4302 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4303 || (ud4 == 0 && ! (ud3 & 0x8000)))
4304 {
4305 if (ud3 & 0x8000)
f676971a 4306 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4307 - 0x80000000));
4308 else
4309 emit_move_insn (dest, GEN_INT (ud3 << 16));
4310
4311 if (ud2 != 0)
d448860e
JH
4312 emit_move_insn (copy_rtx (dest),
4313 gen_rtx_IOR (DImode, copy_rtx (dest),
4314 GEN_INT (ud2)));
4315 emit_move_insn (copy_rtx (dest),
4316 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4317 GEN_INT (16)));
bc06712d 4318 if (ud1 != 0)
d448860e
JH
4319 emit_move_insn (copy_rtx (dest),
4320 gen_rtx_IOR (DImode, copy_rtx (dest),
4321 GEN_INT (ud1)));
bc06712d 4322 }
f676971a 4323 else
bc06712d
TR
4324 {
4325 if (ud4 & 0x8000)
f676971a 4326 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4327 - 0x80000000));
4328 else
4329 emit_move_insn (dest, GEN_INT (ud4 << 16));
4330
4331 if (ud3 != 0)
d448860e
JH
4332 emit_move_insn (copy_rtx (dest),
4333 gen_rtx_IOR (DImode, copy_rtx (dest),
4334 GEN_INT (ud3)));
2bfcf297 4335
d448860e
JH
4336 emit_move_insn (copy_rtx (dest),
4337 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4338 GEN_INT (32)));
bc06712d 4339 if (ud2 != 0)
d448860e
JH
4340 emit_move_insn (copy_rtx (dest),
4341 gen_rtx_IOR (DImode, copy_rtx (dest),
4342 GEN_INT (ud2 << 16)));
bc06712d 4343 if (ud1 != 0)
d448860e
JH
4344 emit_move_insn (copy_rtx (dest),
4345 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4346 }
4347 }
2bfcf297
DB
4348 return dest;
4349}
4350
76d2b81d 4351/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4352 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4353
4354static void
4355rs6000_eliminate_indexed_memrefs (rtx operands[2])
4356{
4357 if (GET_CODE (operands[0]) == MEM
4358 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4359 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4360 && ! reload_in_progress)
4361 operands[0]
4362 = replace_equiv_address (operands[0],
4363 copy_addr_to_reg (XEXP (operands[0], 0)));
4364
4365 if (GET_CODE (operands[1]) == MEM
4366 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4367 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4368 && ! reload_in_progress)
4369 operands[1]
4370 = replace_equiv_address (operands[1],
4371 copy_addr_to_reg (XEXP (operands[1], 0)));
4372}
4373
fb4d4348
GK
4374/* Emit a move from SOURCE to DEST in mode MODE. */
4375void
a2369ed3 4376rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4377{
4378 rtx operands[2];
4379 operands[0] = dest;
4380 operands[1] = source;
f676971a 4381
fb4d4348
GK
4382 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4383 if (GET_CODE (operands[1]) == CONST_DOUBLE
4384 && ! FLOAT_MODE_P (mode)
4385 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4386 {
4387 /* FIXME. This should never happen. */
4388 /* Since it seems that it does, do the safe thing and convert
4389 to a CONST_INT. */
2496c7bd 4390 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4391 }
37409796
NS
4392 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4393 || FLOAT_MODE_P (mode)
4394 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4395 || CONST_DOUBLE_LOW (operands[1]) < 0)
4396 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4397 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4398
c9e8cb32
DD
4399 /* Check if GCC is setting up a block move that will end up using FP
4400 registers as temporaries. We must make sure this is acceptable. */
4401 if (GET_CODE (operands[0]) == MEM
4402 && GET_CODE (operands[1]) == MEM
4403 && mode == DImode
41543739
GK
4404 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4405 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4406 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4407 ? 32 : MEM_ALIGN (operands[0])))
4408 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4409 ? 32
41543739
GK
4410 : MEM_ALIGN (operands[1]))))
4411 && ! MEM_VOLATILE_P (operands [0])
4412 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4413 {
41543739
GK
4414 emit_move_insn (adjust_address (operands[0], SImode, 0),
4415 adjust_address (operands[1], SImode, 0));
d448860e
JH
4416 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4417 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4418 return;
4419 }
630d42a0 4420
b3a13419 4421 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4422 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4423 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4424
a3170dc6
AH
4425 if (mode == SFmode && ! TARGET_POWERPC
4426 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4427 && GET_CODE (operands[0]) == MEM)
fb4d4348 4428 {
ffc14f31
GK
4429 int regnum;
4430
4431 if (reload_in_progress || reload_completed)
4432 regnum = true_regnum (operands[1]);
4433 else if (GET_CODE (operands[1]) == REG)
4434 regnum = REGNO (operands[1]);
4435 else
4436 regnum = -1;
f676971a 4437
fb4d4348
GK
4438 /* If operands[1] is a register, on POWER it may have
4439 double-precision data in it, so truncate it to single
4440 precision. */
4441 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4442 {
4443 rtx newreg;
b3a13419 4444 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4445 : gen_reg_rtx (mode));
fb4d4348
GK
4446 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4447 operands[1] = newreg;
4448 }
4449 }
4450
c4501e62
JJ
4451 /* Recognize the case where operand[1] is a reference to thread-local
4452 data and load its address to a register. */
84f52ebd 4453 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4454 {
84f52ebd
RH
4455 enum tls_model model;
4456 rtx tmp = operands[1];
4457 rtx addend = NULL;
4458
4459 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4460 {
4461 addend = XEXP (XEXP (tmp, 0), 1);
4462 tmp = XEXP (XEXP (tmp, 0), 0);
4463 }
4464
4465 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4466 model = SYMBOL_REF_TLS_MODEL (tmp);
4467 gcc_assert (model != 0);
4468
4469 tmp = rs6000_legitimize_tls_address (tmp, model);
4470 if (addend)
4471 {
4472 tmp = gen_rtx_PLUS (mode, tmp, addend);
4473 tmp = force_operand (tmp, operands[0]);
4474 }
4475 operands[1] = tmp;
c4501e62
JJ
4476 }
4477
8f4e6caf
RH
4478 /* Handle the case where reload calls us with an invalid address. */
4479 if (reload_in_progress && mode == Pmode
69ef87e2 4480 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4481 || ! nonimmediate_operand (operands[0], mode)))
4482 goto emit_set;
4483
a9baceb1
GK
4484 /* 128-bit constant floating-point values on Darwin should really be
4485 loaded as two parts. */
8521c414 4486 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4487 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4488 {
4489 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4490 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4491 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4492 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4493 simplify_gen_subreg (imode, operands[1], mode, 0),
4494 imode);
4495 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4496 GET_MODE_SIZE (imode)),
4497 simplify_gen_subreg (imode, operands[1], mode,
4498 GET_MODE_SIZE (imode)),
4499 imode);
a9baceb1
GK
4500 return;
4501 }
4502
fb4d4348
GK
4503 /* FIXME: In the long term, this switch statement should go away
4504 and be replaced by a sequence of tests based on things like
4505 mode == Pmode. */
4506 switch (mode)
4507 {
4508 case HImode:
4509 case QImode:
4510 if (CONSTANT_P (operands[1])
4511 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4512 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4513 break;
4514
06f4e019 4515 case TFmode:
7393f7f8 4516 case TDmode:
76d2b81d
DJ
4517 rs6000_eliminate_indexed_memrefs (operands);
4518 /* fall through */
4519
fb4d4348 4520 case DFmode:
7393f7f8 4521 case DDmode:
fb4d4348 4522 case SFmode:
f676971a 4523 if (CONSTANT_P (operands[1])
fb4d4348 4524 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4525 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4526 break;
f676971a 4527
0ac081f6
AH
4528 case V16QImode:
4529 case V8HImode:
4530 case V4SFmode:
4531 case V4SImode:
a3170dc6
AH
4532 case V4HImode:
4533 case V2SFmode:
4534 case V2SImode:
00a892b8 4535 case V1DImode:
69ef87e2 4536 if (CONSTANT_P (operands[1])
d744e06e 4537 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4538 operands[1] = force_const_mem (mode, operands[1]);
4539 break;
f676971a 4540
fb4d4348 4541 case SImode:
a9098fd0 4542 case DImode:
fb4d4348
GK
4543 /* Use default pattern for address of ELF small data */
4544 if (TARGET_ELF
a9098fd0 4545 && mode == Pmode
f607bc57 4546 && DEFAULT_ABI == ABI_V4
f676971a 4547 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4548 || GET_CODE (operands[1]) == CONST)
4549 && small_data_operand (operands[1], mode))
fb4d4348
GK
4550 {
4551 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4552 return;
4553 }
4554
f607bc57 4555 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4556 && mode == Pmode && mode == SImode
4557 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4558 {
4559 emit_insn (gen_movsi_got (operands[0], operands[1]));
4560 return;
4561 }
4562
ee890fe2 4563 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4564 && TARGET_NO_TOC
4565 && ! flag_pic
a9098fd0 4566 && mode == Pmode
fb4d4348
GK
4567 && CONSTANT_P (operands[1])
4568 && GET_CODE (operands[1]) != HIGH
4569 && GET_CODE (operands[1]) != CONST_INT)
4570 {
b3a13419
ILT
4571 rtx target = (!can_create_pseudo_p ()
4572 ? operands[0]
4573 : gen_reg_rtx (mode));
fb4d4348
GK
4574
4575 /* If this is a function address on -mcall-aixdesc,
4576 convert it to the address of the descriptor. */
4577 if (DEFAULT_ABI == ABI_AIX
4578 && GET_CODE (operands[1]) == SYMBOL_REF
4579 && XSTR (operands[1], 0)[0] == '.')
4580 {
4581 const char *name = XSTR (operands[1], 0);
4582 rtx new_ref;
4583 while (*name == '.')
4584 name++;
4585 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4586 CONSTANT_POOL_ADDRESS_P (new_ref)
4587 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4588 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4589 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4590 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4591 operands[1] = new_ref;
4592 }
7509c759 4593
ee890fe2
SS
4594 if (DEFAULT_ABI == ABI_DARWIN)
4595 {
ab82a49f
AP
4596#if TARGET_MACHO
4597 if (MACHO_DYNAMIC_NO_PIC_P)
4598 {
4599 /* Take care of any required data indirection. */
4600 operands[1] = rs6000_machopic_legitimize_pic_address (
4601 operands[1], mode, operands[0]);
4602 if (operands[0] != operands[1])
4603 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4604 operands[0], operands[1]));
ab82a49f
AP
4605 return;
4606 }
4607#endif
b8a55285
AP
4608 emit_insn (gen_macho_high (target, operands[1]));
4609 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4610 return;
4611 }
4612
fb4d4348
GK
4613 emit_insn (gen_elf_high (target, operands[1]));
4614 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4615 return;
4616 }
4617
a9098fd0
GK
4618 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4619 and we have put it in the TOC, we just need to make a TOC-relative
4620 reference to it. */
4621 if (TARGET_TOC
4622 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4623 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4624 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4625 get_pool_mode (operands[1])))
fb4d4348 4626 {
a9098fd0 4627 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4628 }
a9098fd0
GK
4629 else if (mode == Pmode
4630 && CONSTANT_P (operands[1])
38886f37
AO
4631 && ((GET_CODE (operands[1]) != CONST_INT
4632 && ! easy_fp_constant (operands[1], mode))
4633 || (GET_CODE (operands[1]) == CONST_INT
4634 && num_insns_constant (operands[1], mode) > 2)
4635 || (GET_CODE (operands[0]) == REG
4636 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4637 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4638 && ! legitimate_constant_pool_address_p (operands[1])
4639 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4640 {
4641 /* Emit a USE operation so that the constant isn't deleted if
4642 expensive optimizations are turned on because nobody
4643 references it. This should only be done for operands that
4644 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4645 This should not be done for operands that contain LABEL_REFs.
4646 For now, we just handle the obvious case. */
4647 if (GET_CODE (operands[1]) != LABEL_REF)
4648 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4649
c859cda6 4650#if TARGET_MACHO
ee890fe2 4651 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4652 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4653 {
ee890fe2
SS
4654 operands[1] =
4655 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4656 operands[0]);
4657 if (operands[0] != operands[1])
4658 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4659 return;
4660 }
c859cda6 4661#endif
ee890fe2 4662
fb4d4348
GK
4663 /* If we are to limit the number of things we put in the TOC and
4664 this is a symbol plus a constant we can add in one insn,
4665 just put the symbol in the TOC and add the constant. Don't do
4666 this if reload is in progress. */
4667 if (GET_CODE (operands[1]) == CONST
4668 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4669 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4670 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4671 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4672 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4673 && ! side_effects_p (operands[0]))
4674 {
a4f6c312
SS
4675 rtx sym =
4676 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4677 rtx other = XEXP (XEXP (operands[1], 0), 1);
4678
a9098fd0
GK
4679 sym = force_reg (mode, sym);
4680 if (mode == SImode)
4681 emit_insn (gen_addsi3 (operands[0], sym, other));
4682 else
4683 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4684 return;
4685 }
4686
a9098fd0 4687 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4688
f676971a 4689 if (TARGET_TOC
4d588c14 4690 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4691 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4692 get_pool_constant (XEXP (operands[1], 0)),
4693 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4694 {
ba4828e0 4695 operands[1]
542a8afa 4696 = gen_const_mem (mode,
c4ad648e 4697 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4698 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4699 }
fb4d4348
GK
4700 }
4701 break;
a9098fd0 4702
fb4d4348 4703 case TImode:
76d2b81d
DJ
4704 rs6000_eliminate_indexed_memrefs (operands);
4705
27dc0551
DE
4706 if (TARGET_POWER)
4707 {
4708 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4709 gen_rtvec (2,
4710 gen_rtx_SET (VOIDmode,
4711 operands[0], operands[1]),
4712 gen_rtx_CLOBBER (VOIDmode,
4713 gen_rtx_SCRATCH (SImode)))));
4714 return;
4715 }
fb4d4348
GK
4716 break;
4717
4718 default:
37409796 4719 gcc_unreachable ();
fb4d4348
GK
4720 }
4721
a9098fd0
GK
4722 /* Above, we may have called force_const_mem which may have returned
4723 an invalid address. If we can, fix this up; otherwise, reload will
4724 have to deal with it. */
8f4e6caf
RH
4725 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4726 operands[1] = validize_mem (operands[1]);
a9098fd0 4727
8f4e6caf 4728 emit_set:
fb4d4348
GK
4729 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4730}
4697a36c 4731\f
2858f73a
GK
4732/* Nonzero if we can use a floating-point register to pass this arg. */
4733#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4734 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4735 && (MODE) != SDmode \
2858f73a
GK
4736 && (CUM)->fregno <= FP_ARG_MAX_REG \
4737 && TARGET_HARD_FLOAT && TARGET_FPRS)
4738
4739/* Nonzero if we can use an AltiVec register to pass this arg. */
4740#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4741 (ALTIVEC_VECTOR_MODE (MODE) \
4742 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4743 && TARGET_ALTIVEC_ABI \
83953138 4744 && (NAMED))
2858f73a 4745
c6e8c921
GK
4746/* Return a nonzero value to say to return the function value in
4747 memory, just as large structures are always returned. TYPE will be
4748 the data type of the value, and FNTYPE will be the type of the
4749 function doing the returning, or @code{NULL} for libcalls.
4750
4751 The AIX ABI for the RS/6000 specifies that all structures are
4752 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4753 specifies that structures <= 8 bytes are returned in r3/r4, but a
4754 draft put them in memory, and GCC used to implement the draft
df01da37 4755 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4756 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4757 compatibility can change DRAFT_V4_STRUCT_RET to override the
4758 default, and -m switches get the final word. See
4759 rs6000_override_options for more details.
4760
4761 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4762 long double support is enabled. These values are returned in memory.
4763
4764 int_size_in_bytes returns -1 for variable size objects, which go in
4765 memory always. The cast to unsigned makes -1 > 8. */
4766
4767static bool
586de218 4768rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 4769{
594a51fe
SS
4770 /* In the darwin64 abi, try to use registers for larger structs
4771 if possible. */
0b5383eb 4772 if (rs6000_darwin64_abi
594a51fe 4773 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4774 && int_size_in_bytes (type) > 0)
4775 {
4776 CUMULATIVE_ARGS valcum;
4777 rtx valret;
4778
4779 valcum.words = 0;
4780 valcum.fregno = FP_ARG_MIN_REG;
4781 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4782 /* Do a trial code generation as if this were going to be passed
4783 as an argument; if any part goes in memory, we return NULL. */
4784 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4785 if (valret)
4786 return false;
4787 /* Otherwise fall through to more conventional ABI rules. */
4788 }
594a51fe 4789
c6e8c921 4790 if (AGGREGATE_TYPE_P (type)
df01da37 4791 && (aix_struct_return
c6e8c921
GK
4792 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4793 return true;
b693336b 4794
bada2eb8
DE
4795 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4796 modes only exist for GCC vector types if -maltivec. */
4797 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4798 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4799 return false;
4800
b693336b
PB
4801 /* Return synthetic vectors in memory. */
4802 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4803 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4804 {
4805 static bool warned_for_return_big_vectors = false;
4806 if (!warned_for_return_big_vectors)
4807 {
d4ee4d25 4808 warning (0, "GCC vector returned by reference: "
b693336b
PB
4809 "non-standard ABI extension with no compatibility guarantee");
4810 warned_for_return_big_vectors = true;
4811 }
4812 return true;
4813 }
4814
602ea4d3 4815 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4816 return true;
ad630bef 4817
c6e8c921
GK
4818 return false;
4819}
4820
4697a36c
MM
4821/* Initialize a variable CUM of type CUMULATIVE_ARGS
4822 for a call to a function whose data type is FNTYPE.
4823 For a library call, FNTYPE is 0.
4824
4825 For incoming args we set the number of arguments in the prototype large
1c20ae99 4826 so we never return a PARALLEL. */
4697a36c
MM
4827
4828void
f676971a 4829init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4830 rtx libname ATTRIBUTE_UNUSED, int incoming,
4831 int libcall, int n_named_args)
4697a36c
MM
4832{
4833 static CUMULATIVE_ARGS zero_cumulative;
4834
4835 *cum = zero_cumulative;
4836 cum->words = 0;
4837 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4838 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4839 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4840 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4841 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4842 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4843 cum->stdarg = fntype
4844 && (TYPE_ARG_TYPES (fntype) != 0
4845 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4846 != void_type_node));
4697a36c 4847
0f6937fe
AM
4848 cum->nargs_prototype = 0;
4849 if (incoming || cum->prototype)
4850 cum->nargs_prototype = n_named_args;
4697a36c 4851
a5c76ee6 4852 /* Check for a longcall attribute. */
3eb4e360
AM
4853 if ((!fntype && rs6000_default_long_calls)
4854 || (fntype
4855 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4856 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4857 cum->call_cookie |= CALL_LONG;
6a4cee5f 4858
4697a36c
MM
4859 if (TARGET_DEBUG_ARG)
4860 {
4861 fprintf (stderr, "\ninit_cumulative_args:");
4862 if (fntype)
4863 {
4864 tree ret_type = TREE_TYPE (fntype);
4865 fprintf (stderr, " ret code = %s,",
4866 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4867 }
4868
6a4cee5f
MM
4869 if (cum->call_cookie & CALL_LONG)
4870 fprintf (stderr, " longcall,");
4871
4697a36c
MM
4872 fprintf (stderr, " proto = %d, nargs = %d\n",
4873 cum->prototype, cum->nargs_prototype);
4874 }
f676971a 4875
c4ad648e
AM
4876 if (fntype
4877 && !TARGET_ALTIVEC
4878 && TARGET_ALTIVEC_ABI
4879 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4880 {
c85ce869 4881 error ("cannot return value in vector register because"
c4ad648e 4882 " altivec instructions are disabled, use -maltivec"
c85ce869 4883 " to enable them");
c4ad648e 4884 }
4697a36c
MM
4885}
4886\f
fe984136
RH
4887/* Return true if TYPE must be passed on the stack and not in registers. */
4888
4889static bool
586de218 4890rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
4891{
4892 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4893 return must_pass_in_stack_var_size (mode, type);
4894 else
4895 return must_pass_in_stack_var_size_or_pad (mode, type);
4896}
4897
c229cba9
DE
4898/* If defined, a C expression which determines whether, and in which
4899 direction, to pad out an argument with extra space. The value
4900 should be of type `enum direction': either `upward' to pad above
4901 the argument, `downward' to pad below, or `none' to inhibit
4902 padding.
4903
4904 For the AIX ABI structs are always stored left shifted in their
4905 argument slot. */
4906
9ebbca7d 4907enum direction
586de218 4908function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 4909{
6e985040
AM
4910#ifndef AGGREGATE_PADDING_FIXED
4911#define AGGREGATE_PADDING_FIXED 0
4912#endif
4913#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4914#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4915#endif
4916
4917 if (!AGGREGATE_PADDING_FIXED)
4918 {
4919 /* GCC used to pass structures of the same size as integer types as
4920 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4921 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4922 passed padded downward, except that -mstrict-align further
4923 muddied the water in that multi-component structures of 2 and 4
4924 bytes in size were passed padded upward.
4925
4926 The following arranges for best compatibility with previous
4927 versions of gcc, but removes the -mstrict-align dependency. */
4928 if (BYTES_BIG_ENDIAN)
4929 {
4930 HOST_WIDE_INT size = 0;
4931
4932 if (mode == BLKmode)
4933 {
4934 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4935 size = int_size_in_bytes (type);
4936 }
4937 else
4938 size = GET_MODE_SIZE (mode);
4939
4940 if (size == 1 || size == 2 || size == 4)
4941 return downward;
4942 }
4943 return upward;
4944 }
4945
4946 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4947 {
4948 if (type != 0 && AGGREGATE_TYPE_P (type))
4949 return upward;
4950 }
c229cba9 4951
d3704c46
KH
4952 /* Fall back to the default. */
4953 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4954}
4955
b6c9286a 4956/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4957 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4958 PARM_BOUNDARY is used for all arguments.
f676971a 4959
84e9ad15
AM
4960 V.4 wants long longs and doubles to be double word aligned. Just
4961 testing the mode size is a boneheaded way to do this as it means
4962 that other types such as complex int are also double word aligned.
4963 However, we're stuck with this because changing the ABI might break
4964 existing library interfaces.
4965
b693336b
PB
4966 Doubleword align SPE vectors.
4967 Quadword align Altivec vectors.
4968 Quadword align large synthetic vector types. */
b6c9286a
MM
4969
4970int
b693336b 4971function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4972{
84e9ad15
AM
4973 if (DEFAULT_ABI == ABI_V4
4974 && (GET_MODE_SIZE (mode) == 8
4975 || (TARGET_HARD_FLOAT
4976 && TARGET_FPRS
7393f7f8 4977 && (mode == TFmode || mode == TDmode))))
4ed78545 4978 return 64;
ad630bef
DE
4979 else if (SPE_VECTOR_MODE (mode)
4980 || (type && TREE_CODE (type) == VECTOR_TYPE
4981 && int_size_in_bytes (type) >= 8
4982 && int_size_in_bytes (type) < 16))
e1f83b4d 4983 return 64;
ad630bef
DE
4984 else if (ALTIVEC_VECTOR_MODE (mode)
4985 || (type && TREE_CODE (type) == VECTOR_TYPE
4986 && int_size_in_bytes (type) >= 16))
0ac081f6 4987 return 128;
0b5383eb
DJ
4988 else if (rs6000_darwin64_abi && mode == BLKmode
4989 && type && TYPE_ALIGN (type) > 64)
4990 return 128;
9ebbca7d 4991 else
b6c9286a 4992 return PARM_BOUNDARY;
b6c9286a 4993}
c53bdcf5 4994
294bd182
AM
4995/* For a function parm of MODE and TYPE, return the starting word in
4996 the parameter area. NWORDS of the parameter area are already used. */
4997
4998static unsigned int
4999rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5000{
5001 unsigned int align;
5002 unsigned int parm_offset;
5003
5004 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5005 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5006 return nwords + (-(parm_offset + nwords) & align);
5007}
5008
c53bdcf5
AM
5009/* Compute the size (in words) of a function argument. */
5010
5011static unsigned long
5012rs6000_arg_size (enum machine_mode mode, tree type)
5013{
5014 unsigned long size;
5015
5016 if (mode != BLKmode)
5017 size = GET_MODE_SIZE (mode);
5018 else
5019 size = int_size_in_bytes (type);
5020
5021 if (TARGET_32BIT)
5022 return (size + 3) >> 2;
5023 else
5024 return (size + 7) >> 3;
5025}
b6c9286a 5026\f
0b5383eb 5027/* Use this to flush pending int fields. */
594a51fe
SS
5028
5029static void
0b5383eb
DJ
5030rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5031 HOST_WIDE_INT bitpos)
594a51fe 5032{
0b5383eb
DJ
5033 unsigned int startbit, endbit;
5034 int intregs, intoffset;
5035 enum machine_mode mode;
594a51fe 5036
0b5383eb
DJ
5037 if (cum->intoffset == -1)
5038 return;
594a51fe 5039
0b5383eb
DJ
5040 intoffset = cum->intoffset;
5041 cum->intoffset = -1;
5042
5043 if (intoffset % BITS_PER_WORD != 0)
5044 {
5045 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5046 MODE_INT, 0);
5047 if (mode == BLKmode)
594a51fe 5048 {
0b5383eb
DJ
5049 /* We couldn't find an appropriate mode, which happens,
5050 e.g., in packed structs when there are 3 bytes to load.
5051 Back intoffset back to the beginning of the word in this
5052 case. */
5053 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5054 }
594a51fe 5055 }
0b5383eb
DJ
5056
5057 startbit = intoffset & -BITS_PER_WORD;
5058 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5059 intregs = (endbit - startbit) / BITS_PER_WORD;
5060 cum->words += intregs;
5061}
5062
5063/* The darwin64 ABI calls for us to recurse down through structs,
5064 looking for elements passed in registers. Unfortunately, we have
5065 to track int register count here also because of misalignments
5066 in powerpc alignment mode. */
5067
5068static void
5069rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5070 tree type,
5071 HOST_WIDE_INT startbitpos)
5072{
5073 tree f;
5074
5075 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5076 if (TREE_CODE (f) == FIELD_DECL)
5077 {
5078 HOST_WIDE_INT bitpos = startbitpos;
5079 tree ftype = TREE_TYPE (f);
70fb00df
AP
5080 enum machine_mode mode;
5081 if (ftype == error_mark_node)
5082 continue;
5083 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5084
5085 if (DECL_SIZE (f) != 0
5086 && host_integerp (bit_position (f), 1))
5087 bitpos += int_bit_position (f);
5088
5089 /* ??? FIXME: else assume zero offset. */
5090
5091 if (TREE_CODE (ftype) == RECORD_TYPE)
5092 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5093 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5094 {
5095 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5096 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5097 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5098 }
5099 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5100 {
5101 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5102 cum->vregno++;
5103 cum->words += 2;
5104 }
5105 else if (cum->intoffset == -1)
5106 cum->intoffset = bitpos;
5107 }
594a51fe
SS
5108}
5109
4697a36c
MM
5110/* Update the data in CUM to advance over an argument
5111 of mode MODE and data type TYPE.
b2d04ecf
AM
5112 (TYPE is null for libcalls where that information may not be available.)
5113
5114 Note that for args passed by reference, function_arg will be called
5115 with MODE and TYPE set to that of the pointer to the arg, not the arg
5116 itself. */
4697a36c
MM
5117
5118void
f676971a 5119function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5120 tree type, int named, int depth)
4697a36c 5121{
0b5383eb
DJ
5122 int size;
5123
594a51fe
SS
5124 /* Only tick off an argument if we're not recursing. */
5125 if (depth == 0)
5126 cum->nargs_prototype--;
4697a36c 5127
ad630bef
DE
5128 if (TARGET_ALTIVEC_ABI
5129 && (ALTIVEC_VECTOR_MODE (mode)
5130 || (type && TREE_CODE (type) == VECTOR_TYPE
5131 && int_size_in_bytes (type) == 16)))
0ac081f6 5132 {
4ed78545
AM
5133 bool stack = false;
5134
2858f73a 5135 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5136 {
6d0ef01e
HP
5137 cum->vregno++;
5138 if (!TARGET_ALTIVEC)
c85ce869 5139 error ("cannot pass argument in vector register because"
6d0ef01e 5140 " altivec instructions are disabled, use -maltivec"
c85ce869 5141 " to enable them");
4ed78545
AM
5142
5143 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5144 even if it is going to be passed in a vector register.
4ed78545
AM
5145 Darwin does the same for variable-argument functions. */
5146 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5147 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5148 stack = true;
6d0ef01e 5149 }
4ed78545
AM
5150 else
5151 stack = true;
5152
5153 if (stack)
c4ad648e 5154 {
a594a19c 5155 int align;
f676971a 5156
2858f73a
GK
5157 /* Vector parameters must be 16-byte aligned. This places
5158 them at 2 mod 4 in terms of words in 32-bit mode, since
5159 the parameter save area starts at offset 24 from the
5160 stack. In 64-bit mode, they just have to start on an
5161 even word, since the parameter save area is 16-byte
5162 aligned. Space for GPRs is reserved even if the argument
5163 will be passed in memory. */
5164 if (TARGET_32BIT)
4ed78545 5165 align = (2 - cum->words) & 3;
2858f73a
GK
5166 else
5167 align = cum->words & 1;
c53bdcf5 5168 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5169
a594a19c
GK
5170 if (TARGET_DEBUG_ARG)
5171 {
f676971a 5172 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5173 cum->words, align);
5174 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5175 cum->nargs_prototype, cum->prototype,
2858f73a 5176 GET_MODE_NAME (mode));
a594a19c
GK
5177 }
5178 }
0ac081f6 5179 }
a4b0320c 5180 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5181 && !cum->stdarg
5182 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5183 cum->sysv_gregno++;
594a51fe
SS
5184
5185 else if (rs6000_darwin64_abi
5186 && mode == BLKmode
0b5383eb
DJ
5187 && TREE_CODE (type) == RECORD_TYPE
5188 && (size = int_size_in_bytes (type)) > 0)
5189 {
5190 /* Variable sized types have size == -1 and are
5191 treated as if consisting entirely of ints.
5192 Pad to 16 byte boundary if needed. */
5193 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5194 && (cum->words % 2) != 0)
5195 cum->words++;
5196 /* For varargs, we can just go up by the size of the struct. */
5197 if (!named)
5198 cum->words += (size + 7) / 8;
5199 else
5200 {
5201 /* It is tempting to say int register count just goes up by
5202 sizeof(type)/8, but this is wrong in a case such as
5203 { int; double; int; } [powerpc alignment]. We have to
5204 grovel through the fields for these too. */
5205 cum->intoffset = 0;
5206 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5207 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5208 size * BITS_PER_UNIT);
5209 }
5210 }
f607bc57 5211 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5212 {
a3170dc6 5213 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5214 && (mode == SFmode || mode == DFmode
7393f7f8 5215 || mode == DDmode || mode == TDmode
602ea4d3 5216 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5217 {
2d83f070
JJ
5218 /* _Decimal128 must use an even/odd register pair. This assumes
5219 that the register number is odd when fregno is odd. */
5220 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5221 cum->fregno++;
5222
5223 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5224 <= FP_ARG_V4_MAX_REG)
602ea4d3 5225 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5226 else
5227 {
602ea4d3 5228 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5229 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5230 cum->words += cum->words & 1;
c53bdcf5 5231 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5232 }
4697a36c 5233 }
4cc833b7
RH
5234 else
5235 {
b2d04ecf 5236 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5237 int gregno = cum->sysv_gregno;
5238
4ed78545
AM
5239 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5240 (r7,r8) or (r9,r10). As does any other 2 word item such
5241 as complex int due to a historical mistake. */
5242 if (n_words == 2)
5243 gregno += (1 - gregno) & 1;
4cc833b7 5244
4ed78545 5245 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5246 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5247 {
4ed78545
AM
5248 /* Long long and SPE vectors are aligned on the stack.
5249 So are other 2 word items such as complex int due to
5250 a historical mistake. */
4cc833b7
RH
5251 if (n_words == 2)
5252 cum->words += cum->words & 1;
5253 cum->words += n_words;
5254 }
4697a36c 5255
4cc833b7
RH
5256 /* Note: continuing to accumulate gregno past when we've started
5257 spilling to the stack indicates the fact that we've started
5258 spilling to the stack to expand_builtin_saveregs. */
5259 cum->sysv_gregno = gregno + n_words;
5260 }
4697a36c 5261
4cc833b7
RH
5262 if (TARGET_DEBUG_ARG)
5263 {
5264 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5265 cum->words, cum->fregno);
5266 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5267 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5268 fprintf (stderr, "mode = %4s, named = %d\n",
5269 GET_MODE_NAME (mode), named);
5270 }
4697a36c
MM
5271 }
5272 else
4cc833b7 5273 {
b2d04ecf 5274 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5275 int start_words = cum->words;
5276 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5277
294bd182 5278 cum->words = align_words + n_words;
4697a36c 5279
ebb109ad 5280 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5281 && mode != SDmode
a3170dc6 5282 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5283 {
5284 /* _Decimal128 must be passed in an even/odd float register pair.
5285 This assumes that the register number is odd when fregno is
5286 odd. */
5287 if (mode == TDmode && (cum->fregno % 2) == 1)
5288 cum->fregno++;
5289 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5290 }
4cc833b7
RH
5291
5292 if (TARGET_DEBUG_ARG)
5293 {
5294 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5295 cum->words, cum->fregno);
5296 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5297 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5298 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5299 named, align_words - start_words, depth);
4cc833b7
RH
5300 }
5301 }
4697a36c 5302}
a6c9bed4 5303
f82f556d
AH
5304static rtx
5305spe_build_register_parallel (enum machine_mode mode, int gregno)
5306{
17caeff2 5307 rtx r1, r3, r5, r7;
f82f556d 5308
37409796 5309 switch (mode)
f82f556d 5310 {
37409796 5311 case DFmode:
54b695e7
AH
5312 r1 = gen_rtx_REG (DImode, gregno);
5313 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5314 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5315
5316 case DCmode:
17caeff2 5317 case TFmode:
54b695e7
AH
5318 r1 = gen_rtx_REG (DImode, gregno);
5319 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5320 r3 = gen_rtx_REG (DImode, gregno + 2);
5321 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5322 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5323
17caeff2
JM
5324 case TCmode:
5325 r1 = gen_rtx_REG (DImode, gregno);
5326 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5327 r3 = gen_rtx_REG (DImode, gregno + 2);
5328 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5329 r5 = gen_rtx_REG (DImode, gregno + 4);
5330 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5331 r7 = gen_rtx_REG (DImode, gregno + 6);
5332 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5333 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5334
37409796
NS
5335 default:
5336 gcc_unreachable ();
f82f556d 5337 }
f82f556d 5338}
b78d48dd 5339
f82f556d 5340/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5341static rtx
f676971a 5342rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5343 tree type)
a6c9bed4 5344{
f82f556d
AH
5345 int gregno = cum->sysv_gregno;
5346
5347 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5348 are passed and returned in a pair of GPRs for ABI compatibility. */
17caeff2
JM
5349 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5350 || mode == TFmode || mode == TCmode))
f82f556d 5351 {
b5870bee
AH
5352 int n_words = rs6000_arg_size (mode, type);
5353
f82f556d 5354 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
5355 if (mode == DFmode)
5356 gregno += (1 - gregno) & 1;
f82f556d 5357
b5870bee
AH
5358 /* Multi-reg args are not split between registers and stack. */
5359 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5360 return NULL_RTX;
5361
5362 return spe_build_register_parallel (mode, gregno);
5363 }
a6c9bed4
AH
5364 if (cum->stdarg)
5365 {
c53bdcf5 5366 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5367
5368 /* SPE vectors are put in odd registers. */
5369 if (n_words == 2 && (gregno & 1) == 0)
5370 gregno += 1;
5371
5372 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5373 {
5374 rtx r1, r2;
5375 enum machine_mode m = SImode;
5376
5377 r1 = gen_rtx_REG (m, gregno);
5378 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5379 r2 = gen_rtx_REG (m, gregno + 1);
5380 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5381 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5382 }
5383 else
b78d48dd 5384 return NULL_RTX;
a6c9bed4
AH
5385 }
5386 else
5387 {
f82f556d
AH
5388 if (gregno <= GP_ARG_MAX_REG)
5389 return gen_rtx_REG (mode, gregno);
a6c9bed4 5390 else
b78d48dd 5391 return NULL_RTX;
a6c9bed4
AH
5392 }
5393}
5394
0b5383eb
DJ
5395/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5396 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5397
0b5383eb 5398static void
bb8df8a6 5399rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5400 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5401{
0b5383eb
DJ
5402 enum machine_mode mode;
5403 unsigned int regno;
5404 unsigned int startbit, endbit;
5405 int this_regno, intregs, intoffset;
5406 rtx reg;
594a51fe 5407
0b5383eb
DJ
5408 if (cum->intoffset == -1)
5409 return;
5410
5411 intoffset = cum->intoffset;
5412 cum->intoffset = -1;
5413
5414 /* If this is the trailing part of a word, try to only load that
5415 much into the register. Otherwise load the whole register. Note
5416 that in the latter case we may pick up unwanted bits. It's not a
5417 problem at the moment but may wish to revisit. */
5418
5419 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5420 {
0b5383eb
DJ
5421 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5422 MODE_INT, 0);
5423 if (mode == BLKmode)
5424 {
5425 /* We couldn't find an appropriate mode, which happens,
5426 e.g., in packed structs when there are 3 bytes to load.
5427 Back intoffset back to the beginning of the word in this
5428 case. */
5429 intoffset = intoffset & -BITS_PER_WORD;
5430 mode = word_mode;
5431 }
5432 }
5433 else
5434 mode = word_mode;
5435
5436 startbit = intoffset & -BITS_PER_WORD;
5437 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5438 intregs = (endbit - startbit) / BITS_PER_WORD;
5439 this_regno = cum->words + intoffset / BITS_PER_WORD;
5440
5441 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5442 cum->use_stack = 1;
bb8df8a6 5443
0b5383eb
DJ
5444 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5445 if (intregs <= 0)
5446 return;
5447
5448 intoffset /= BITS_PER_UNIT;
5449 do
5450 {
5451 regno = GP_ARG_MIN_REG + this_regno;
5452 reg = gen_rtx_REG (mode, regno);
5453 rvec[(*k)++] =
5454 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5455
5456 this_regno += 1;
5457 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5458 mode = word_mode;
5459 intregs -= 1;
5460 }
5461 while (intregs > 0);
5462}
5463
5464/* Recursive workhorse for the following. */
5465
5466static void
586de218 5467rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5468 HOST_WIDE_INT startbitpos, rtx rvec[],
5469 int *k)
5470{
5471 tree f;
5472
5473 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5474 if (TREE_CODE (f) == FIELD_DECL)
5475 {
5476 HOST_WIDE_INT bitpos = startbitpos;
5477 tree ftype = TREE_TYPE (f);
70fb00df
AP
5478 enum machine_mode mode;
5479 if (ftype == error_mark_node)
5480 continue;
5481 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5482
5483 if (DECL_SIZE (f) != 0
5484 && host_integerp (bit_position (f), 1))
5485 bitpos += int_bit_position (f);
5486
5487 /* ??? FIXME: else assume zero offset. */
5488
5489 if (TREE_CODE (ftype) == RECORD_TYPE)
5490 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5491 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5492 {
0b5383eb
DJ
5493#if 0
5494 switch (mode)
594a51fe 5495 {
0b5383eb
DJ
5496 case SCmode: mode = SFmode; break;
5497 case DCmode: mode = DFmode; break;
5498 case TCmode: mode = TFmode; break;
5499 default: break;
594a51fe 5500 }
0b5383eb
DJ
5501#endif
5502 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5503 rvec[(*k)++]
bb8df8a6 5504 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5505 gen_rtx_REG (mode, cum->fregno++),
5506 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5507 if (mode == TFmode || mode == TDmode)
0b5383eb 5508 cum->fregno++;
594a51fe 5509 }
0b5383eb
DJ
5510 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5511 {
5512 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5513 rvec[(*k)++]
bb8df8a6
EC
5514 = gen_rtx_EXPR_LIST (VOIDmode,
5515 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5516 GEN_INT (bitpos / BITS_PER_UNIT));
5517 }
5518 else if (cum->intoffset == -1)
5519 cum->intoffset = bitpos;
5520 }
5521}
594a51fe 5522
0b5383eb
DJ
5523/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5524 the register(s) to be used for each field and subfield of a struct
5525 being passed by value, along with the offset of where the
5526 register's value may be found in the block. FP fields go in FP
5527 register, vector fields go in vector registers, and everything
bb8df8a6 5528 else goes in int registers, packed as in memory.
8ff40a74 5529
0b5383eb
DJ
5530 This code is also used for function return values. RETVAL indicates
5531 whether this is the case.
8ff40a74 5532
a4d05547 5533 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5534 calling convention. */
594a51fe 5535
0b5383eb 5536static rtx
586de218 5537rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5538 int named, bool retval)
5539{
5540 rtx rvec[FIRST_PSEUDO_REGISTER];
5541 int k = 1, kbase = 1;
5542 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5543 /* This is a copy; modifications are not visible to our caller. */
5544 CUMULATIVE_ARGS copy_cum = *orig_cum;
5545 CUMULATIVE_ARGS *cum = &copy_cum;
5546
5547 /* Pad to 16 byte boundary if needed. */
5548 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5549 && (cum->words % 2) != 0)
5550 cum->words++;
5551
5552 cum->intoffset = 0;
5553 cum->use_stack = 0;
5554 cum->named = named;
5555
5556 /* Put entries into rvec[] for individual FP and vector fields, and
5557 for the chunks of memory that go in int regs. Note we start at
5558 element 1; 0 is reserved for an indication of using memory, and
5559 may or may not be filled in below. */
5560 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5561 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5562
5563 /* If any part of the struct went on the stack put all of it there.
5564 This hack is because the generic code for
5565 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5566 parts of the struct are not at the beginning. */
5567 if (cum->use_stack)
5568 {
5569 if (retval)
5570 return NULL_RTX; /* doesn't go in registers at all */
5571 kbase = 0;
5572 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5573 }
5574 if (k > 1 || cum->use_stack)
5575 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5576 else
5577 return NULL_RTX;
5578}
5579
b78d48dd
FJ
5580/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5581
5582static rtx
ec6376ab 5583rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5584{
ec6376ab
AM
5585 int n_units;
5586 int i, k;
5587 rtx rvec[GP_ARG_NUM_REG + 1];
5588
5589 if (align_words >= GP_ARG_NUM_REG)
5590 return NULL_RTX;
5591
5592 n_units = rs6000_arg_size (mode, type);
5593
5594 /* Optimize the simple case where the arg fits in one gpr, except in
5595 the case of BLKmode due to assign_parms assuming that registers are
5596 BITS_PER_WORD wide. */
5597 if (n_units == 0
5598 || (n_units == 1 && mode != BLKmode))
5599 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5600
5601 k = 0;
5602 if (align_words + n_units > GP_ARG_NUM_REG)
5603 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5604 using a magic NULL_RTX component.
79773478
AM
5605 This is not strictly correct. Only some of the arg belongs in
5606 memory, not all of it. However, the normal scheme using
5607 function_arg_partial_nregs can result in unusual subregs, eg.
5608 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5609 store the whole arg to memory is often more efficient than code
5610 to store pieces, and we know that space is available in the right
5611 place for the whole arg. */
ec6376ab
AM
5612 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5613
5614 i = 0;
5615 do
36a454e1 5616 {
ec6376ab
AM
5617 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5618 rtx off = GEN_INT (i++ * 4);
5619 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5620 }
ec6376ab
AM
5621 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5622
5623 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5624}
5625
4697a36c
MM
5626/* Determine where to put an argument to a function.
5627 Value is zero to push the argument on the stack,
5628 or a hard register in which to store the argument.
5629
5630 MODE is the argument's machine mode.
5631 TYPE is the data type of the argument (as a tree).
5632 This is null for libcalls where that information may
5633 not be available.
5634 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5635 the preceding args and about the function being called. It is
5636 not modified in this routine.
4697a36c
MM
5637 NAMED is nonzero if this argument is a named parameter
5638 (otherwise it is an extra parameter matching an ellipsis).
5639
5640 On RS/6000 the first eight words of non-FP are normally in registers
5641 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5642 Under V.4, the first 8 FP args are in registers.
5643
5644 If this is floating-point and no prototype is specified, we use
5645 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5646 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5647 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5648 doesn't support PARALLEL anyway.
5649
5650 Note that for args passed by reference, function_arg will be called
5651 with MODE and TYPE set to that of the pointer to the arg, not the arg
5652 itself. */
4697a36c 5653
9390387d 5654rtx
f676971a 5655function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5656 tree type, int named)
4697a36c 5657{
4cc833b7 5658 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5659
a4f6c312
SS
5660 /* Return a marker to indicate whether CR1 needs to set or clear the
5661 bit that V.4 uses to say fp args were passed in registers.
5662 Assume that we don't need the marker for software floating point,
5663 or compiler generated library calls. */
4697a36c
MM
5664 if (mode == VOIDmode)
5665 {
f607bc57 5666 if (abi == ABI_V4
b9599e46 5667 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5668 && (cum->stdarg
5669 || (cum->nargs_prototype < 0
5670 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5671 {
a3170dc6
AH
5672 /* For the SPE, we need to crxor CR6 always. */
5673 if (TARGET_SPE_ABI)
5674 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5675 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5676 return GEN_INT (cum->call_cookie
5677 | ((cum->fregno == FP_ARG_MIN_REG)
5678 ? CALL_V4_SET_FP_ARGS
5679 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5680 }
4697a36c 5681
7509c759 5682 return GEN_INT (cum->call_cookie);
4697a36c
MM
5683 }
5684
0b5383eb
DJ
5685 if (rs6000_darwin64_abi && mode == BLKmode
5686 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5687 {
0b5383eb 5688 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5689 if (rslt != NULL_RTX)
5690 return rslt;
5691 /* Else fall through to usual handling. */
5692 }
5693
2858f73a 5694 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5695 if (TARGET_64BIT && ! cum->prototype)
5696 {
c4ad648e
AM
5697 /* Vector parameters get passed in vector register
5698 and also in GPRs or memory, in absence of prototype. */
5699 int align_words;
5700 rtx slot;
5701 align_words = (cum->words + 1) & ~1;
5702
5703 if (align_words >= GP_ARG_NUM_REG)
5704 {
5705 slot = NULL_RTX;
5706 }
5707 else
5708 {
5709 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5710 }
5711 return gen_rtx_PARALLEL (mode,
5712 gen_rtvec (2,
5713 gen_rtx_EXPR_LIST (VOIDmode,
5714 slot, const0_rtx),
5715 gen_rtx_EXPR_LIST (VOIDmode,
5716 gen_rtx_REG (mode, cum->vregno),
5717 const0_rtx)));
c72d6c26
HP
5718 }
5719 else
5720 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5721 else if (TARGET_ALTIVEC_ABI
5722 && (ALTIVEC_VECTOR_MODE (mode)
5723 || (type && TREE_CODE (type) == VECTOR_TYPE
5724 && int_size_in_bytes (type) == 16)))
0ac081f6 5725 {
2858f73a 5726 if (named || abi == ABI_V4)
a594a19c 5727 return NULL_RTX;
0ac081f6 5728 else
a594a19c
GK
5729 {
5730 /* Vector parameters to varargs functions under AIX or Darwin
5731 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5732 int align, align_words, n_words;
5733 enum machine_mode part_mode;
a594a19c
GK
5734
5735 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5736 2 mod 4 in terms of words in 32-bit mode, since the parameter
5737 save area starts at offset 24 from the stack. In 64-bit mode,
5738 they just have to start on an even word, since the parameter
5739 save area is 16-byte aligned. */
5740 if (TARGET_32BIT)
4ed78545 5741 align = (2 - cum->words) & 3;
2858f73a
GK
5742 else
5743 align = cum->words & 1;
a594a19c
GK
5744 align_words = cum->words + align;
5745
5746 /* Out of registers? Memory, then. */
5747 if (align_words >= GP_ARG_NUM_REG)
5748 return NULL_RTX;
ec6376ab
AM
5749
5750 if (TARGET_32BIT && TARGET_POWERPC64)
5751 return rs6000_mixed_function_arg (mode, type, align_words);
5752
2858f73a
GK
5753 /* The vector value goes in GPRs. Only the part of the
5754 value in GPRs is reported here. */
ec6376ab
AM
5755 part_mode = mode;
5756 n_words = rs6000_arg_size (mode, type);
5757 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5758 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
5759 is either wholly in GPRs or half in GPRs and half not. */
5760 part_mode = DImode;
ec6376ab
AM
5761
5762 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5763 }
0ac081f6 5764 }
f82f556d
AH
5765 else if (TARGET_SPE_ABI && TARGET_SPE
5766 && (SPE_VECTOR_MODE (mode)
18f63bfa 5767 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5768 || mode == DDmode
17caeff2
JM
5769 || mode == DCmode
5770 || mode == TFmode
7393f7f8 5771 || mode == TDmode
17caeff2 5772 || mode == TCmode))))
a6c9bed4 5773 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5774
f607bc57 5775 else if (abi == ABI_V4)
4697a36c 5776 {
a3170dc6 5777 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5778 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5779 || (mode == TFmode && !TARGET_IEEEQUAD)
5780 || mode == DDmode || mode == TDmode))
4cc833b7 5781 {
2d83f070
JJ
5782 /* _Decimal128 must use an even/odd register pair. This assumes
5783 that the register number is odd when fregno is odd. */
5784 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5785 cum->fregno++;
5786
5787 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5788 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5789 return gen_rtx_REG (mode, cum->fregno);
5790 else
b78d48dd 5791 return NULL_RTX;
4cc833b7
RH
5792 }
5793 else
5794 {
b2d04ecf 5795 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5796 int gregno = cum->sysv_gregno;
5797
4ed78545
AM
5798 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5799 (r7,r8) or (r9,r10). As does any other 2 word item such
5800 as complex int due to a historical mistake. */
5801 if (n_words == 2)
5802 gregno += (1 - gregno) & 1;
4cc833b7 5803
4ed78545 5804 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5805 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5806 return NULL_RTX;
ec6376ab
AM
5807
5808 if (TARGET_32BIT && TARGET_POWERPC64)
5809 return rs6000_mixed_function_arg (mode, type,
5810 gregno - GP_ARG_MIN_REG);
5811 return gen_rtx_REG (mode, gregno);
4cc833b7 5812 }
4697a36c 5813 }
4cc833b7
RH
5814 else
5815 {
294bd182 5816 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5817
2d83f070
JJ
5818 /* _Decimal128 must be passed in an even/odd float register pair.
5819 This assumes that the register number is odd when fregno is odd. */
5820 if (mode == TDmode && (cum->fregno % 2) == 1)
5821 cum->fregno++;
5822
2858f73a 5823 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5824 {
ec6376ab
AM
5825 rtx rvec[GP_ARG_NUM_REG + 1];
5826 rtx r;
5827 int k;
c53bdcf5
AM
5828 bool needs_psave;
5829 enum machine_mode fmode = mode;
c53bdcf5
AM
5830 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5831
5832 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5833 {
c53bdcf5
AM
5834 /* Currently, we only ever need one reg here because complex
5835 doubles are split. */
7393f7f8
BE
5836 gcc_assert (cum->fregno == FP_ARG_MAX_REG
5837 && (fmode == TFmode || fmode == TDmode));
ec6376ab 5838
7393f7f8
BE
5839 /* Long double or _Decimal128 split over regs and memory. */
5840 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 5841 }
c53bdcf5
AM
5842
5843 /* Do we also need to pass this arg in the parameter save
5844 area? */
5845 needs_psave = (type
5846 && (cum->nargs_prototype <= 0
5847 || (DEFAULT_ABI == ABI_AIX
de17c25f 5848 && TARGET_XL_COMPAT
c53bdcf5
AM
5849 && align_words >= GP_ARG_NUM_REG)));
5850
5851 if (!needs_psave && mode == fmode)
ec6376ab 5852 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5853
ec6376ab 5854 k = 0;
c53bdcf5
AM
5855 if (needs_psave)
5856 {
ec6376ab 5857 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5858 This piece must come first, before the fprs. */
c53bdcf5
AM
5859 if (align_words < GP_ARG_NUM_REG)
5860 {
5861 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5862
5863 if (align_words + n_words > GP_ARG_NUM_REG
5864 || (TARGET_32BIT && TARGET_POWERPC64))
5865 {
5866 /* If this is partially on the stack, then we only
5867 include the portion actually in registers here. */
5868 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5869 rtx off;
79773478
AM
5870 int i = 0;
5871 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
5872 /* Not all of the arg fits in gprs. Say that it
5873 goes in memory too, using a magic NULL_RTX
5874 component. Also see comment in
5875 rs6000_mixed_function_arg for why the normal
5876 function_arg_partial_nregs scheme doesn't work
5877 in this case. */
5878 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5879 const0_rtx);
ec6376ab
AM
5880 do
5881 {
5882 r = gen_rtx_REG (rmode,
5883 GP_ARG_MIN_REG + align_words);
2e6c9641 5884 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5885 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5886 }
5887 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5888 }
5889 else
5890 {
5891 /* The whole arg fits in gprs. */
5892 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5893 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5894 }
c53bdcf5 5895 }
ec6376ab
AM
5896 else
5897 /* It's entirely in memory. */
5898 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5899 }
5900
ec6376ab
AM
5901 /* Describe where this piece goes in the fprs. */
5902 r = gen_rtx_REG (fmode, cum->fregno);
5903 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5904
5905 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5906 }
5907 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5908 {
ec6376ab
AM
5909 if (TARGET_32BIT && TARGET_POWERPC64)
5910 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5911
4eeca74f
AM
5912 if (mode == BLKmode)
5913 mode = Pmode;
5914
b2d04ecf
AM
5915 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5916 }
4cc833b7
RH
5917 else
5918 return NULL_RTX;
4697a36c 5919 }
4697a36c
MM
5920}
5921\f
ec6376ab 5922/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5923 the number of bytes passed in registers. For args passed entirely in
5924 registers or entirely in memory, zero. When an arg is described by a
5925 PARALLEL, perhaps using more than one register type, this function
5926 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5927
78a52f11
RH
5928static int
5929rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5930 tree type, bool named)
4697a36c 5931{
c53bdcf5 5932 int ret = 0;
ec6376ab 5933 int align_words;
c53bdcf5 5934
f607bc57 5935 if (DEFAULT_ABI == ABI_V4)
4697a36c 5936 return 0;
4697a36c 5937
c53bdcf5
AM
5938 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5939 && cum->nargs_prototype >= 0)
5940 return 0;
5941
0b5383eb
DJ
5942 /* In this complicated case we just disable the partial_nregs code. */
5943 if (rs6000_darwin64_abi && mode == BLKmode
5944 && TREE_CODE (type) == RECORD_TYPE
5945 && int_size_in_bytes (type) > 0)
5946 return 0;
5947
294bd182 5948 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 5949
79773478
AM
5950 if (USE_FP_FOR_ARG_P (cum, mode, type))
5951 {
fb63c729
AM
5952 /* If we are passing this arg in the fixed parameter save area
5953 (gprs or memory) as well as fprs, then this function should
79773478
AM
5954 return the number of partial bytes passed in the parameter
5955 save area rather than partial bytes passed in fprs. */
5956 if (type
5957 && (cum->nargs_prototype <= 0
5958 || (DEFAULT_ABI == ABI_AIX
5959 && TARGET_XL_COMPAT
5960 && align_words >= GP_ARG_NUM_REG)))
5961 return 0;
5962 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5963 > FP_ARG_MAX_REG + 1)
ac7e839c 5964 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5965 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5966 return 0;
5967 }
5968
ec6376ab
AM
5969 if (align_words < GP_ARG_NUM_REG
5970 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5971 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5972
c53bdcf5 5973 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5974 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5975
c53bdcf5 5976 return ret;
4697a36c
MM
5977}
5978\f
5979/* A C expression that indicates when an argument must be passed by
5980 reference. If nonzero for an argument, a copy of that argument is
5981 made in memory and a pointer to the argument is passed instead of
5982 the argument itself. The pointer is passed in whatever way is
5983 appropriate for passing a pointer to that type.
5984
b2d04ecf
AM
5985 Under V.4, aggregates and long double are passed by reference.
5986
5987 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5988 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5989
5990 As an extension to all ABIs, variable sized types are passed by
5991 reference. */
4697a36c 5992
8cd5a4e0 5993static bool
f676971a 5994rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 5995 enum machine_mode mode, const_tree type,
bada2eb8 5996 bool named ATTRIBUTE_UNUSED)
4697a36c 5997{
602ea4d3 5998 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
5999 {
6000 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6001 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6002 return 1;
6003 }
6004
6005 if (!type)
6006 return 0;
4697a36c 6007
bada2eb8
DE
6008 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6009 {
6010 if (TARGET_DEBUG_ARG)
6011 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6012 return 1;
6013 }
6014
6015 if (int_size_in_bytes (type) < 0)
6016 {
6017 if (TARGET_DEBUG_ARG)
6018 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6019 return 1;
6020 }
6021
6022 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6023 modes only exist for GCC vector types if -maltivec. */
6024 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6025 {
6026 if (TARGET_DEBUG_ARG)
6027 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6028 return 1;
6029 }
b693336b
PB
6030
6031 /* Pass synthetic vectors in memory. */
bada2eb8 6032 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6033 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6034 {
6035 static bool warned_for_pass_big_vectors = false;
6036 if (TARGET_DEBUG_ARG)
6037 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6038 if (!warned_for_pass_big_vectors)
6039 {
d4ee4d25 6040 warning (0, "GCC vector passed by reference: "
b693336b
PB
6041 "non-standard ABI extension with no compatibility guarantee");
6042 warned_for_pass_big_vectors = true;
6043 }
6044 return 1;
6045 }
6046
b2d04ecf 6047 return 0;
4697a36c 6048}
5985c7a6
FJ
6049
6050static void
2d9db8eb 6051rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6052{
6053 int i;
6054 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6055
6056 if (nregs == 0)
6057 return;
6058
c4ad648e 6059 for (i = 0; i < nregs; i++)
5985c7a6 6060 {
9390387d 6061 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6062 if (reload_completed)
c4ad648e
AM
6063 {
6064 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6065 tem = NULL_RTX;
6066 else
6067 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6068 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6069 }
5985c7a6
FJ
6070 else
6071 tem = replace_equiv_address (tem, XEXP (tem, 0));
6072
37409796 6073 gcc_assert (tem);
5985c7a6
FJ
6074
6075 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6076 }
6077}
4697a36c
MM
6078\f
6079/* Perform any needed actions needed for a function that is receiving a
f676971a 6080 variable number of arguments.
4697a36c
MM
6081
6082 CUM is as above.
6083
6084 MODE and TYPE are the mode and type of the current parameter.
6085
6086 PRETEND_SIZE is a variable that should be set to the amount of stack
6087 that must be pushed by the prolog to pretend that our caller pushed
6088 it.
6089
6090 Normally, this macro will push all remaining incoming registers on the
6091 stack and set PRETEND_SIZE to the length of the registers pushed. */
6092
c6e8c921 6093static void
f676971a 6094setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6095 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6096 int no_rtl)
4697a36c 6097{
4cc833b7
RH
6098 CUMULATIVE_ARGS next_cum;
6099 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6100 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6101 int first_reg_offset;
6102 alias_set_type set;
4697a36c 6103
f31bf321 6104 /* Skip the last named argument. */
d34c5b80 6105 next_cum = *cum;
594a51fe 6106 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6107
f607bc57 6108 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6109 {
5b667039
JJ
6110 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6111
60e2d0ca 6112 if (! no_rtl)
5b667039
JJ
6113 {
6114 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6115 HOST_WIDE_INT offset = 0;
6116
6117 /* Try to optimize the size of the varargs save area.
6118 The ABI requires that ap.reg_save_area is doubleword
6119 aligned, but we don't need to allocate space for all
6120 the bytes, only those to which we actually will save
6121 anything. */
6122 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6123 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6124 if (TARGET_HARD_FLOAT && TARGET_FPRS
6125 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6126 && cfun->va_list_fpr_size)
6127 {
6128 if (gpr_reg_num)
6129 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6130 * UNITS_PER_FP_WORD;
6131 if (cfun->va_list_fpr_size
6132 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6133 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6134 else
6135 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6136 * UNITS_PER_FP_WORD;
6137 }
6138 if (gpr_reg_num)
6139 {
6140 offset = -((first_reg_offset * reg_size) & ~7);
6141 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6142 {
6143 gpr_reg_num = cfun->va_list_gpr_size;
6144 if (reg_size == 4 && (first_reg_offset & 1))
6145 gpr_reg_num++;
6146 }
6147 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6148 }
6149 else if (fpr_size)
6150 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6151 * UNITS_PER_FP_WORD
6152 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6153
5b667039
JJ
6154 if (gpr_size + fpr_size)
6155 {
6156 rtx reg_save_area
6157 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6158 gcc_assert (GET_CODE (reg_save_area) == MEM);
6159 reg_save_area = XEXP (reg_save_area, 0);
6160 if (GET_CODE (reg_save_area) == PLUS)
6161 {
6162 gcc_assert (XEXP (reg_save_area, 0)
6163 == virtual_stack_vars_rtx);
6164 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6165 offset += INTVAL (XEXP (reg_save_area, 1));
6166 }
6167 else
6168 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6169 }
6170
6171 cfun->machine->varargs_save_offset = offset;
6172 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6173 }
4697a36c 6174 }
60e2d0ca 6175 else
4697a36c 6176 {
d34c5b80 6177 first_reg_offset = next_cum.words;
4cc833b7 6178 save_area = virtual_incoming_args_rtx;
4697a36c 6179
fe984136 6180 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6181 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6182 }
4697a36c 6183
dfafc897 6184 set = get_varargs_alias_set ();
9d30f3c1
JJ
6185 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6186 && cfun->va_list_gpr_size)
4cc833b7 6187 {
9d30f3c1
JJ
6188 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6189
6190 if (va_list_gpr_counter_field)
6191 {
6192 /* V4 va_list_gpr_size counts number of registers needed. */
6193 if (nregs > cfun->va_list_gpr_size)
6194 nregs = cfun->va_list_gpr_size;
6195 }
6196 else
6197 {
6198 /* char * va_list instead counts number of bytes needed. */
6199 if (nregs > cfun->va_list_gpr_size / reg_size)
6200 nregs = cfun->va_list_gpr_size / reg_size;
6201 }
6202
dfafc897 6203 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6204 plus_constant (save_area,
13e2e16e
DE
6205 first_reg_offset * reg_size));
6206 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6207 set_mem_alias_set (mem, set);
8ac61af7 6208 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6209
f676971a 6210 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6211 nregs);
4697a36c
MM
6212 }
6213
4697a36c 6214 /* Save FP registers if needed. */
f607bc57 6215 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6216 && TARGET_HARD_FLOAT && TARGET_FPRS
6217 && ! no_rtl
9d30f3c1
JJ
6218 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6219 && cfun->va_list_fpr_size)
4697a36c 6220 {
9d30f3c1 6221 int fregno = next_cum.fregno, nregs;
9ebbca7d 6222 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6223 rtx lab = gen_label_rtx ();
5b667039
JJ
6224 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6225 * UNITS_PER_FP_WORD);
4697a36c 6226
c4ad648e
AM
6227 emit_jump_insn
6228 (gen_rtx_SET (VOIDmode,
6229 pc_rtx,
6230 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6231 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6232 const0_rtx),
39403d82 6233 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6234 pc_rtx)));
6235
9d30f3c1
JJ
6236 for (nregs = 0;
6237 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6238 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6239 {
5496b36f 6240 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6241 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6242 set_mem_alias_set (mem, set);
94ff898d 6243 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6244 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6245 }
4cc833b7
RH
6246
6247 emit_label (lab);
4697a36c 6248 }
4697a36c 6249}
4697a36c 6250
dfafc897 6251/* Create the va_list data type. */
2c4974b7 6252
c35d187f
RH
6253static tree
6254rs6000_build_builtin_va_list (void)
dfafc897 6255{
64c2816f 6256 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6257
9ebbca7d
GK
6258 /* For AIX, prefer 'char *' because that's what the system
6259 header files like. */
f607bc57 6260 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6261 return build_pointer_type (char_type_node);
dfafc897 6262
f1e639b1 6263 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6264 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6265
f676971a 6266 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6267 unsigned_char_type_node);
f676971a 6268 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6269 unsigned_char_type_node);
64c2816f
DT
6270 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6271 every user file. */
6272 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6273 short_unsigned_type_node);
dfafc897
FS
6274 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6275 ptr_type_node);
6276 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6277 ptr_type_node);
6278
9d30f3c1
JJ
6279 va_list_gpr_counter_field = f_gpr;
6280 va_list_fpr_counter_field = f_fpr;
6281
dfafc897
FS
6282 DECL_FIELD_CONTEXT (f_gpr) = record;
6283 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6284 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6285 DECL_FIELD_CONTEXT (f_ovf) = record;
6286 DECL_FIELD_CONTEXT (f_sav) = record;
6287
bab45a51
FS
6288 TREE_CHAIN (record) = type_decl;
6289 TYPE_NAME (record) = type_decl;
dfafc897
FS
6290 TYPE_FIELDS (record) = f_gpr;
6291 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6292 TREE_CHAIN (f_fpr) = f_res;
6293 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6294 TREE_CHAIN (f_ovf) = f_sav;
6295
6296 layout_type (record);
6297
6298 /* The correct type is an array type of one element. */
6299 return build_array_type (record, build_index_type (size_zero_node));
6300}
6301
6302/* Implement va_start. */
6303
6304void
a2369ed3 6305rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6306{
dfafc897 6307 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6308 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6309 tree gpr, fpr, ovf, sav, t;
2c4974b7 6310
dfafc897 6311 /* Only SVR4 needs something special. */
f607bc57 6312 if (DEFAULT_ABI != ABI_V4)
dfafc897 6313 {
e5faf155 6314 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6315 return;
6316 }
6317
973a648b 6318 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6319 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6320 f_res = TREE_CHAIN (f_fpr);
6321 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6322 f_sav = TREE_CHAIN (f_ovf);
6323
872a65b5 6324 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6325 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6326 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6327 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6328 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6329
6330 /* Count number of gp and fp argument registers used. */
4cc833b7 6331 words = current_function_args_info.words;
987732e0
DE
6332 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6333 GP_ARG_NUM_REG);
6334 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6335 FP_ARG_NUM_REG);
dfafc897
FS
6336
6337 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6338 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6339 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6340 words, n_gpr, n_fpr);
dfafc897 6341
9d30f3c1
JJ
6342 if (cfun->va_list_gpr_size)
6343 {
07beea0d 6344 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6345 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6346 TREE_SIDE_EFFECTS (t) = 1;
6347 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6348 }
58c8adc1 6349
9d30f3c1
JJ
6350 if (cfun->va_list_fpr_size)
6351 {
07beea0d 6352 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6353 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6354 TREE_SIDE_EFFECTS (t) = 1;
6355 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6356 }
dfafc897
FS
6357
6358 /* Find the overflow area. */
6359 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6360 if (words != 0)
5be014d5
AP
6361 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6362 size_int (words * UNITS_PER_WORD));
07beea0d 6363 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6364 TREE_SIDE_EFFECTS (t) = 1;
6365 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6366
9d30f3c1
JJ
6367 /* If there were no va_arg invocations, don't set up the register
6368 save area. */
6369 if (!cfun->va_list_gpr_size
6370 && !cfun->va_list_fpr_size
6371 && n_gpr < GP_ARG_NUM_REG
6372 && n_fpr < FP_ARG_V4_MAX_REG)
6373 return;
6374
dfafc897
FS
6375 /* Find the register save area. */
6376 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6377 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6378 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6379 size_int (cfun->machine->varargs_save_offset));
07beea0d 6380 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6381 TREE_SIDE_EFFECTS (t) = 1;
6382 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6383}
6384
6385/* Implement va_arg. */
6386
23a60a04
JM
6387tree
6388rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6389{
cd3ce9b4
JM
6390 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6391 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6392 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6393 tree lab_false, lab_over, addr;
6394 int align;
6395 tree ptrtype = build_pointer_type (type);
7393f7f8 6396 int regalign = 0;
cd3ce9b4 6397
08b0dc1b
RH
6398 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6399 {
6400 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6401 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6402 }
6403
cd3ce9b4
JM
6404 if (DEFAULT_ABI != ABI_V4)
6405 {
08b0dc1b 6406 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6407 {
6408 tree elem_type = TREE_TYPE (type);
6409 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6410 int elem_size = GET_MODE_SIZE (elem_mode);
6411
6412 if (elem_size < UNITS_PER_WORD)
6413 {
23a60a04 6414 tree real_part, imag_part;
cd3ce9b4
JM
6415 tree post = NULL_TREE;
6416
23a60a04
JM
6417 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6418 &post);
6419 /* Copy the value into a temporary, lest the formal temporary
6420 be reused out from under us. */
6421 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6422 append_to_statement_list (post, pre_p);
6423
23a60a04
JM
6424 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6425 post_p);
cd3ce9b4 6426
47a25a46 6427 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6428 }
6429 }
6430
23a60a04 6431 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6432 }
6433
6434 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6435 f_fpr = TREE_CHAIN (f_gpr);
6436 f_res = TREE_CHAIN (f_fpr);
6437 f_ovf = TREE_CHAIN (f_res);
6438 f_sav = TREE_CHAIN (f_ovf);
6439
872a65b5 6440 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6441 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6442 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6443 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6444 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6445
6446 size = int_size_in_bytes (type);
6447 rsize = (size + 3) / 4;
6448 align = 1;
6449
08b0dc1b 6450 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6451 && (TYPE_MODE (type) == SFmode
6452 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6453 || TYPE_MODE (type) == TFmode
6454 || TYPE_MODE (type) == DDmode
6455 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6456 {
6457 /* FP args go in FP registers, if present. */
cd3ce9b4 6458 reg = fpr;
602ea4d3 6459 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6460 sav_ofs = 8*4;
6461 sav_scale = 8;
602ea4d3 6462 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6463 align = 8;
6464 }
6465 else
6466 {
6467 /* Otherwise into GP registers. */
cd3ce9b4
JM
6468 reg = gpr;
6469 n_reg = rsize;
6470 sav_ofs = 0;
6471 sav_scale = 4;
6472 if (n_reg == 2)
6473 align = 8;
6474 }
6475
6476 /* Pull the value out of the saved registers.... */
6477
6478 lab_over = NULL;
6479 addr = create_tmp_var (ptr_type_node, "addr");
6480 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6481
6482 /* AltiVec vectors never go in registers when -mabi=altivec. */
6483 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6484 align = 16;
6485 else
6486 {
6487 lab_false = create_artificial_label ();
6488 lab_over = create_artificial_label ();
6489
6490 /* Long long and SPE vectors are aligned in the registers.
6491 As are any other 2 gpr item such as complex int due to a
6492 historical mistake. */
6493 u = reg;
602ea4d3 6494 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6495 {
7393f7f8 6496 regalign = 1;
cd3ce9b4 6497 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6498 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6499 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6500 }
7393f7f8
BE
6501 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6502 reg number is 0 for f1, so we want to make it odd. */
6503 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6504 {
6505 regalign = 1;
6506 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6507 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6508 }
cd3ce9b4 6509
95674810 6510 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6511 t = build2 (GE_EXPR, boolean_type_node, u, t);
6512 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6513 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6514 gimplify_and_add (t, pre_p);
6515
6516 t = sav;
6517 if (sav_ofs)
5be014d5 6518 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6519
8fb632eb
ZD
6520 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6521 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6522 u = fold_convert (sizetype, u);
6523 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6524 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6525
07beea0d 6526 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6527 gimplify_and_add (t, pre_p);
6528
6529 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6530 gimplify_and_add (t, pre_p);
6531
6532 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6533 append_to_statement_list (t, pre_p);
6534
7393f7f8 6535 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6536 {
6537 /* Ensure that we don't find any more args in regs.
7393f7f8 6538 Alignment has taken care of for special cases. */
07beea0d 6539 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6540 gimplify_and_add (t, pre_p);
6541 }
6542 }
6543
6544 /* ... otherwise out of the overflow area. */
6545
6546 /* Care for on-stack alignment if needed. */
6547 t = ovf;
6548 if (align != 1)
6549 {
5be014d5
AP
6550 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6551 t = fold_convert (sizetype, t);
4a90aeeb 6552 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6553 size_int (-align));
6554 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6555 }
6556 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6557
07beea0d 6558 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6559 gimplify_and_add (u, pre_p);
6560
5be014d5 6561 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6562 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6563 gimplify_and_add (t, pre_p);
6564
6565 if (lab_over)
6566 {
6567 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6568 append_to_statement_list (t, pre_p);
6569 }
6570
0cfbc62b
JM
6571 if (STRICT_ALIGNMENT
6572 && (TYPE_ALIGN (type)
6573 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6574 {
6575 /* The value (of type complex double, for example) may not be
6576 aligned in memory in the saved registers, so copy via a
6577 temporary. (This is the same code as used for SPARC.) */
6578 tree tmp = create_tmp_var (type, "va_arg_tmp");
6579 tree dest_addr = build_fold_addr_expr (tmp);
6580
5039610b
SL
6581 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6582 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6583
6584 gimplify_and_add (copy, pre_p);
6585 addr = dest_addr;
6586 }
6587
08b0dc1b 6588 addr = fold_convert (ptrtype, addr);
872a65b5 6589 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6590}
6591
0ac081f6
AH
6592/* Builtins. */
6593
58646b77
PB
6594static void
6595def_builtin (int mask, const char *name, tree type, int code)
6596{
6597 if (mask & target_flags)
6598 {
6599 if (rs6000_builtin_decls[code])
6600 abort ();
6601
6602 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6603 add_builtin_function (name, type, code, BUILT_IN_MD,
6604 NULL, NULL_TREE);
58646b77
PB
6605 }
6606}
0ac081f6 6607
24408032
AH
6608/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6609
2212663f 6610static const struct builtin_description bdesc_3arg[] =
24408032
AH
6611{
6612 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6613 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6614 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6615 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6616 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6617 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6618 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6619 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6620 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6621 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6622 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6623 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6624 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6625 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6626 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6627 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6628 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6629 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6630 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6631 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6632 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6633 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6634 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6635
6636 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6637 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6638 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6639 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6640 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6641 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6642 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6643 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6644 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6645 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6646 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6647 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6648 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6649 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6650 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 6651};
2212663f 6652
95385cbb
AH
6653/* DST operations: void foo (void *, const int, const char). */
6654
6655static const struct builtin_description bdesc_dst[] =
6656{
6657 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6658 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6659 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6660 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6661
6662 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6663 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6664 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6665 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6666};
6667
2212663f 6668/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6669
a3170dc6 6670static struct builtin_description bdesc_2arg[] =
0ac081f6 6671{
f18c054f
DB
6672 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6673 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6674 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6675 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6676 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6677 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6678 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6679 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6680 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6681 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6682 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6683 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6684 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6685 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6686 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6687 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6688 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6689 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6690 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6691 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6692 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6693 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6694 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6695 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6696 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6697 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6698 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6699 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6700 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6701 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6702 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6703 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6704 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6705 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6706 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6707 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6708 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6709 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6710 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6711 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6712 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6713 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6714 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6715 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6716 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6717 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6718 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6719 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6720 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6721 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6722 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6723 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6724 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6725 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6726 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6727 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6728 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6729 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6730 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6731 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6732 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6733 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6734 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6735 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6736 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6737 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6738 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6739 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6740 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6741 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6742 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6743 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6744 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6745 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6746 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6747 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6748 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6749 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6750 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6751 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6752 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6753 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6754 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6755 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6756 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6757 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6758 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6759 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6760 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6761 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6762 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6763 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6764 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6765 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6766 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6767 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6768 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6769 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6770 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6771 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6772 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6773 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6774 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6775 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6776 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6777 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6778 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6779 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6780 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6781 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6782 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6783
58646b77
PB
6784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6880 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6881 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6882 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6883 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6884 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6885 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6886 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6887 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6888 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6889 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6890 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6891 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6892 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6893 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6894 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6895 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6896 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6897 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6898 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6899 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6900 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6901 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6902 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6903 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6904 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6905 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6906 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6907 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6908 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6909 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6910 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6911
a3170dc6
AH
6912 /* Place holder, leave as first spe builtin. */
6913 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6914 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6915 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6916 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6917 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6918 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6919 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6920 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6921 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6922 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6923 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6924 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6925 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6926 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6927 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6928 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6929 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6930 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6931 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6932 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6933 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6934 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6935 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6936 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6937 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6938 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6939 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6940 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6941 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6942 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6943 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6944 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6945 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6946 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6947 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6948 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6949 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6950 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6951 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6952 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6953 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6954 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6955 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6956 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6957 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6958 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6959 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6960 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6961 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6962 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6963 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6964 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6965 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6966 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6967 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6968 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6969 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6970 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6971 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6972 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6973 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6974 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6975 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6976 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6977 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6978 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6979 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6980 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6981 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6982 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6983 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6984 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6985 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6986 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
6987 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6988 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
6989 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6990 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6991 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6992 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6993 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6994 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6995 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6996 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6997 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6998 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6999 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7000 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7001 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7002 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7003 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7004 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7005 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7006 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7007 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7008 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7009 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7010 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7011 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7012 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7013 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7014 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7015 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7016 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7017 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7018 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7019 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7020 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7021 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7022
7023 /* SPE binary operations expecting a 5-bit unsigned literal. */
7024 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7025
7026 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7027 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7028 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7029 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7030 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7031 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7032 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7033 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7034 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7035 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7036 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7037 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7038 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7039 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7040 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7041 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7042 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7043 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7044 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7045 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7046 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7047 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7048 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7049 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7050 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7051 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7052
7053 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7054 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7055};
7056
7057/* AltiVec predicates. */
7058
7059struct builtin_description_predicates
7060{
7061 const unsigned int mask;
7062 const enum insn_code icode;
7063 const char *opcode;
7064 const char *const name;
7065 const enum rs6000_builtins code;
7066};
7067
7068static const struct builtin_description_predicates bdesc_altivec_preds[] =
7069{
7070 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7071 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7072 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7073 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7074 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7075 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7076 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7077 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7078 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7079 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7080 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7081 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7082 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7083
7084 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7085 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7086 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7087};
24408032 7088
a3170dc6
AH
7089/* SPE predicates. */
7090static struct builtin_description bdesc_spe_predicates[] =
7091{
7092 /* Place-holder. Leave as first. */
7093 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7094 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7095 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7096 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7097 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7098 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7099 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7100 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7101 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7102 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7103 /* Place-holder. Leave as last. */
7104 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7105};
7106
7107/* SPE evsel predicates. */
7108static struct builtin_description bdesc_spe_evsel[] =
7109{
7110 /* Place-holder. Leave as first. */
7111 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7112 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7113 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7114 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7115 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7116 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7117 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7118 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7119 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7120 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7121 /* Place-holder. Leave as last. */
7122 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7123};
7124
b6d08ca1 7125/* ABS* operations. */
100c4561
AH
7126
7127static const struct builtin_description bdesc_abs[] =
7128{
7129 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7130 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7131 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7132 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7133 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7134 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7135 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7136};
7137
617e0e1d
DB
7138/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7139 foo (VECa). */
24408032 7140
a3170dc6 7141static struct builtin_description bdesc_1arg[] =
2212663f 7142{
617e0e1d
DB
7143 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7144 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7145 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7146 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7147 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7148 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7149 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7150 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7151 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7152 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7153 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7154 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7155 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7156 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7157 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7158 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7159 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7160
58646b77
PB
7161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7180
a3170dc6
AH
7181 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7182 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7183 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7184 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7185 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7186 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7187 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7188 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7189 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7190 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7191 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7192 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7193 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7194 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7195 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7196 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7197 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7198 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7199 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7200 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7201 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7202 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7203 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7204 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7205 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7206 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7207 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7208 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7209 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7210 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7211
7212 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 7213 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
2212663f
DB
7214};
7215
7216static rtx
5039610b 7217rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7218{
7219 rtx pat;
5039610b 7220 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7221 rtx op0 = expand_normal (arg0);
2212663f
DB
7222 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7223 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7224
0559cc77
DE
7225 if (icode == CODE_FOR_nothing)
7226 /* Builtin not supported on this processor. */
7227 return 0;
7228
20e26713
AH
7229 /* If we got invalid arguments bail out before generating bad rtl. */
7230 if (arg0 == error_mark_node)
9a171fcd 7231 return const0_rtx;
20e26713 7232
0559cc77
DE
7233 if (icode == CODE_FOR_altivec_vspltisb
7234 || icode == CODE_FOR_altivec_vspltish
7235 || icode == CODE_FOR_altivec_vspltisw
7236 || icode == CODE_FOR_spe_evsplatfi
7237 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7238 {
7239 /* Only allow 5-bit *signed* literals. */
b44140e7 7240 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7241 || INTVAL (op0) > 15
7242 || INTVAL (op0) < -16)
b44140e7
AH
7243 {
7244 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7245 return const0_rtx;
b44140e7 7246 }
b44140e7
AH
7247 }
7248
c62f2db5 7249 if (target == 0
2212663f
DB
7250 || GET_MODE (target) != tmode
7251 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7252 target = gen_reg_rtx (tmode);
7253
7254 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7255 op0 = copy_to_mode_reg (mode0, op0);
7256
7257 pat = GEN_FCN (icode) (target, op0);
7258 if (! pat)
7259 return 0;
7260 emit_insn (pat);
0ac081f6 7261
2212663f
DB
7262 return target;
7263}
ae4b4a02 7264
100c4561 7265static rtx
5039610b 7266altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7267{
7268 rtx pat, scratch1, scratch2;
5039610b 7269 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7270 rtx op0 = expand_normal (arg0);
100c4561
AH
7271 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7272 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7273
7274 /* If we have invalid arguments, bail out before generating bad rtl. */
7275 if (arg0 == error_mark_node)
9a171fcd 7276 return const0_rtx;
100c4561
AH
7277
7278 if (target == 0
7279 || GET_MODE (target) != tmode
7280 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7281 target = gen_reg_rtx (tmode);
7282
7283 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7284 op0 = copy_to_mode_reg (mode0, op0);
7285
7286 scratch1 = gen_reg_rtx (mode0);
7287 scratch2 = gen_reg_rtx (mode0);
7288
7289 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7290 if (! pat)
7291 return 0;
7292 emit_insn (pat);
7293
7294 return target;
7295}
7296
0ac081f6 7297static rtx
5039610b 7298rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7299{
7300 rtx pat;
5039610b
SL
7301 tree arg0 = CALL_EXPR_ARG (exp, 0);
7302 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7303 rtx op0 = expand_normal (arg0);
7304 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7305 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7306 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7307 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7308
0559cc77
DE
7309 if (icode == CODE_FOR_nothing)
7310 /* Builtin not supported on this processor. */
7311 return 0;
7312
20e26713
AH
7313 /* If we got invalid arguments bail out before generating bad rtl. */
7314 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7315 return const0_rtx;
20e26713 7316
0559cc77
DE
7317 if (icode == CODE_FOR_altivec_vcfux
7318 || icode == CODE_FOR_altivec_vcfsx
7319 || icode == CODE_FOR_altivec_vctsxs
7320 || icode == CODE_FOR_altivec_vctuxs
7321 || icode == CODE_FOR_altivec_vspltb
7322 || icode == CODE_FOR_altivec_vsplth
7323 || icode == CODE_FOR_altivec_vspltw
7324 || icode == CODE_FOR_spe_evaddiw
7325 || icode == CODE_FOR_spe_evldd
7326 || icode == CODE_FOR_spe_evldh
7327 || icode == CODE_FOR_spe_evldw
7328 || icode == CODE_FOR_spe_evlhhesplat
7329 || icode == CODE_FOR_spe_evlhhossplat
7330 || icode == CODE_FOR_spe_evlhhousplat
7331 || icode == CODE_FOR_spe_evlwhe
7332 || icode == CODE_FOR_spe_evlwhos
7333 || icode == CODE_FOR_spe_evlwhou
7334 || icode == CODE_FOR_spe_evlwhsplat
7335 || icode == CODE_FOR_spe_evlwwsplat
7336 || icode == CODE_FOR_spe_evrlwi
7337 || icode == CODE_FOR_spe_evslwi
7338 || icode == CODE_FOR_spe_evsrwis
f5119d10 7339 || icode == CODE_FOR_spe_evsubifw
0559cc77 7340 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7341 {
7342 /* Only allow 5-bit unsigned literals. */
8bb418a3 7343 STRIP_NOPS (arg1);
b44140e7
AH
7344 if (TREE_CODE (arg1) != INTEGER_CST
7345 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7346 {
7347 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7348 return const0_rtx;
b44140e7 7349 }
b44140e7
AH
7350 }
7351
c62f2db5 7352 if (target == 0
0ac081f6
AH
7353 || GET_MODE (target) != tmode
7354 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7355 target = gen_reg_rtx (tmode);
7356
7357 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7358 op0 = copy_to_mode_reg (mode0, op0);
7359 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7360 op1 = copy_to_mode_reg (mode1, op1);
7361
7362 pat = GEN_FCN (icode) (target, op0, op1);
7363 if (! pat)
7364 return 0;
7365 emit_insn (pat);
7366
7367 return target;
7368}
6525c0e7 7369
ae4b4a02 7370static rtx
f676971a 7371altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7372 tree exp, rtx target)
ae4b4a02
AH
7373{
7374 rtx pat, scratch;
5039610b
SL
7375 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7376 tree arg0 = CALL_EXPR_ARG (exp, 1);
7377 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7378 rtx op0 = expand_normal (arg0);
7379 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7380 enum machine_mode tmode = SImode;
7381 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7382 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7383 int cr6_form_int;
7384
7385 if (TREE_CODE (cr6_form) != INTEGER_CST)
7386 {
7387 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7388 return const0_rtx;
ae4b4a02
AH
7389 }
7390 else
7391 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7392
37409796 7393 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7394
7395 /* If we have invalid arguments, bail out before generating bad rtl. */
7396 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7397 return const0_rtx;
ae4b4a02
AH
7398
7399 if (target == 0
7400 || GET_MODE (target) != tmode
7401 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7402 target = gen_reg_rtx (tmode);
7403
7404 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7405 op0 = copy_to_mode_reg (mode0, op0);
7406 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7407 op1 = copy_to_mode_reg (mode1, op1);
7408
7409 scratch = gen_reg_rtx (mode0);
7410
7411 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7412 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7413 if (! pat)
7414 return 0;
7415 emit_insn (pat);
7416
7417 /* The vec_any* and vec_all* predicates use the same opcodes for two
7418 different operations, but the bits in CR6 will be different
7419 depending on what information we want. So we have to play tricks
7420 with CR6 to get the right bits out.
7421
7422 If you think this is disgusting, look at the specs for the
7423 AltiVec predicates. */
7424
c4ad648e
AM
7425 switch (cr6_form_int)
7426 {
7427 case 0:
7428 emit_insn (gen_cr6_test_for_zero (target));
7429 break;
7430 case 1:
7431 emit_insn (gen_cr6_test_for_zero_reverse (target));
7432 break;
7433 case 2:
7434 emit_insn (gen_cr6_test_for_lt (target));
7435 break;
7436 case 3:
7437 emit_insn (gen_cr6_test_for_lt_reverse (target));
7438 break;
7439 default:
7440 error ("argument 1 of __builtin_altivec_predicate is out of range");
7441 break;
7442 }
ae4b4a02
AH
7443
7444 return target;
7445}
7446
b4a62fa0 7447static rtx
5039610b 7448altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7449{
7450 rtx pat, addr;
5039610b
SL
7451 tree arg0 = CALL_EXPR_ARG (exp, 0);
7452 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7453 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7454 enum machine_mode mode0 = Pmode;
7455 enum machine_mode mode1 = Pmode;
84217346
MD
7456 rtx op0 = expand_normal (arg0);
7457 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7458
7459 if (icode == CODE_FOR_nothing)
7460 /* Builtin not supported on this processor. */
7461 return 0;
7462
7463 /* If we got invalid arguments bail out before generating bad rtl. */
7464 if (arg0 == error_mark_node || arg1 == error_mark_node)
7465 return const0_rtx;
7466
7467 if (target == 0
7468 || GET_MODE (target) != tmode
7469 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7470 target = gen_reg_rtx (tmode);
7471
f676971a 7472 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7473
7474 if (op0 == const0_rtx)
7475 {
7476 addr = gen_rtx_MEM (tmode, op1);
7477 }
7478 else
7479 {
7480 op0 = copy_to_mode_reg (mode0, op0);
7481 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7482 }
7483
7484 pat = GEN_FCN (icode) (target, addr);
7485
7486 if (! pat)
7487 return 0;
7488 emit_insn (pat);
7489
7490 return target;
7491}
7492
61bea3b0 7493static rtx
5039610b 7494spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7495{
5039610b
SL
7496 tree arg0 = CALL_EXPR_ARG (exp, 0);
7497 tree arg1 = CALL_EXPR_ARG (exp, 1);
7498 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7499 rtx op0 = expand_normal (arg0);
7500 rtx op1 = expand_normal (arg1);
7501 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7502 rtx pat;
7503 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7504 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7505 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7506
7507 /* Invalid arguments. Bail before doing anything stoopid! */
7508 if (arg0 == error_mark_node
7509 || arg1 == error_mark_node
7510 || arg2 == error_mark_node)
7511 return const0_rtx;
7512
7513 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7514 op0 = copy_to_mode_reg (mode2, op0);
7515 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7516 op1 = copy_to_mode_reg (mode0, op1);
7517 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7518 op2 = copy_to_mode_reg (mode1, op2);
7519
7520 pat = GEN_FCN (icode) (op1, op2, op0);
7521 if (pat)
7522 emit_insn (pat);
7523 return NULL_RTX;
7524}
7525
6525c0e7 7526static rtx
5039610b 7527altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7528{
5039610b
SL
7529 tree arg0 = CALL_EXPR_ARG (exp, 0);
7530 tree arg1 = CALL_EXPR_ARG (exp, 1);
7531 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7532 rtx op0 = expand_normal (arg0);
7533 rtx op1 = expand_normal (arg1);
7534 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7535 rtx pat, addr;
7536 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7537 enum machine_mode mode1 = Pmode;
7538 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7539
7540 /* Invalid arguments. Bail before doing anything stoopid! */
7541 if (arg0 == error_mark_node
7542 || arg1 == error_mark_node
7543 || arg2 == error_mark_node)
9a171fcd 7544 return const0_rtx;
6525c0e7 7545
b4a62fa0
SB
7546 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7547 op0 = copy_to_mode_reg (tmode, op0);
7548
f676971a 7549 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7550
7551 if (op1 == const0_rtx)
7552 {
7553 addr = gen_rtx_MEM (tmode, op2);
7554 }
7555 else
7556 {
7557 op1 = copy_to_mode_reg (mode1, op1);
7558 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7559 }
6525c0e7 7560
b4a62fa0 7561 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7562 if (pat)
7563 emit_insn (pat);
7564 return NULL_RTX;
7565}
7566
2212663f 7567static rtx
5039610b 7568rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7569{
7570 rtx pat;
5039610b
SL
7571 tree arg0 = CALL_EXPR_ARG (exp, 0);
7572 tree arg1 = CALL_EXPR_ARG (exp, 1);
7573 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7574 rtx op0 = expand_normal (arg0);
7575 rtx op1 = expand_normal (arg1);
7576 rtx op2 = expand_normal (arg2);
2212663f
DB
7577 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7578 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7579 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7580 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7581
774b5662
DE
7582 if (icode == CODE_FOR_nothing)
7583 /* Builtin not supported on this processor. */
7584 return 0;
7585
20e26713
AH
7586 /* If we got invalid arguments bail out before generating bad rtl. */
7587 if (arg0 == error_mark_node
7588 || arg1 == error_mark_node
7589 || arg2 == error_mark_node)
9a171fcd 7590 return const0_rtx;
20e26713 7591
aba5fb01
NS
7592 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7593 || icode == CODE_FOR_altivec_vsldoi_v4si
7594 || icode == CODE_FOR_altivec_vsldoi_v8hi
7595 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7596 {
7597 /* Only allow 4-bit unsigned literals. */
8bb418a3 7598 STRIP_NOPS (arg2);
b44140e7
AH
7599 if (TREE_CODE (arg2) != INTEGER_CST
7600 || TREE_INT_CST_LOW (arg2) & ~0xf)
7601 {
7602 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7603 return const0_rtx;
b44140e7 7604 }
b44140e7
AH
7605 }
7606
c62f2db5 7607 if (target == 0
2212663f
DB
7608 || GET_MODE (target) != tmode
7609 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7610 target = gen_reg_rtx (tmode);
7611
7612 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7613 op0 = copy_to_mode_reg (mode0, op0);
7614 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7615 op1 = copy_to_mode_reg (mode1, op1);
7616 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7617 op2 = copy_to_mode_reg (mode2, op2);
7618
7619 pat = GEN_FCN (icode) (target, op0, op1, op2);
7620 if (! pat)
7621 return 0;
7622 emit_insn (pat);
7623
7624 return target;
7625}
92898235 7626
3a9b8c7e 7627/* Expand the lvx builtins. */
0ac081f6 7628static rtx
a2369ed3 7629altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7630{
5039610b 7631 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7632 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7633 tree arg0;
7634 enum machine_mode tmode, mode0;
7c3abc73 7635 rtx pat, op0;
3a9b8c7e 7636 enum insn_code icode;
92898235 7637
0ac081f6
AH
7638 switch (fcode)
7639 {
f18c054f 7640 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7641 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7642 break;
f18c054f 7643 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7644 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7645 break;
7646 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7647 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7648 break;
7649 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7650 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7651 break;
7652 default:
7653 *expandedp = false;
7654 return NULL_RTX;
7655 }
0ac081f6 7656
3a9b8c7e 7657 *expandedp = true;
f18c054f 7658
5039610b 7659 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7660 op0 = expand_normal (arg0);
3a9b8c7e
AH
7661 tmode = insn_data[icode].operand[0].mode;
7662 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7663
3a9b8c7e
AH
7664 if (target == 0
7665 || GET_MODE (target) != tmode
7666 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7667 target = gen_reg_rtx (tmode);
24408032 7668
3a9b8c7e
AH
7669 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7670 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7671
3a9b8c7e
AH
7672 pat = GEN_FCN (icode) (target, op0);
7673 if (! pat)
7674 return 0;
7675 emit_insn (pat);
7676 return target;
7677}
f18c054f 7678
3a9b8c7e
AH
7679/* Expand the stvx builtins. */
7680static rtx
f676971a 7681altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7682 bool *expandedp)
3a9b8c7e 7683{
5039610b 7684 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7685 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7686 tree arg0, arg1;
7687 enum machine_mode mode0, mode1;
7c3abc73 7688 rtx pat, op0, op1;
3a9b8c7e 7689 enum insn_code icode;
f18c054f 7690
3a9b8c7e
AH
7691 switch (fcode)
7692 {
7693 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7694 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7695 break;
7696 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7697 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7698 break;
7699 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7700 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7701 break;
7702 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7703 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7704 break;
7705 default:
7706 *expandedp = false;
7707 return NULL_RTX;
7708 }
24408032 7709
5039610b
SL
7710 arg0 = CALL_EXPR_ARG (exp, 0);
7711 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7712 op0 = expand_normal (arg0);
7713 op1 = expand_normal (arg1);
3a9b8c7e
AH
7714 mode0 = insn_data[icode].operand[0].mode;
7715 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7716
3a9b8c7e
AH
7717 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7718 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7719 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7720 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7721
3a9b8c7e
AH
7722 pat = GEN_FCN (icode) (op0, op1);
7723 if (pat)
7724 emit_insn (pat);
f18c054f 7725
3a9b8c7e
AH
7726 *expandedp = true;
7727 return NULL_RTX;
7728}
f18c054f 7729
3a9b8c7e
AH
7730/* Expand the dst builtins. */
7731static rtx
f676971a 7732altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7733 bool *expandedp)
3a9b8c7e 7734{
5039610b 7735 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7736 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7737 tree arg0, arg1, arg2;
7738 enum machine_mode mode0, mode1, mode2;
7c3abc73 7739 rtx pat, op0, op1, op2;
586de218 7740 const struct builtin_description *d;
a3170dc6 7741 size_t i;
f18c054f 7742
3a9b8c7e 7743 *expandedp = false;
f18c054f 7744
3a9b8c7e 7745 /* Handle DST variants. */
586de218 7746 d = bdesc_dst;
3a9b8c7e
AH
7747 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7748 if (d->code == fcode)
7749 {
5039610b
SL
7750 arg0 = CALL_EXPR_ARG (exp, 0);
7751 arg1 = CALL_EXPR_ARG (exp, 1);
7752 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7753 op0 = expand_normal (arg0);
7754 op1 = expand_normal (arg1);
7755 op2 = expand_normal (arg2);
3a9b8c7e
AH
7756 mode0 = insn_data[d->icode].operand[0].mode;
7757 mode1 = insn_data[d->icode].operand[1].mode;
7758 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7759
3a9b8c7e
AH
7760 /* Invalid arguments, bail out before generating bad rtl. */
7761 if (arg0 == error_mark_node
7762 || arg1 == error_mark_node
7763 || arg2 == error_mark_node)
7764 return const0_rtx;
f18c054f 7765
86e7df90 7766 *expandedp = true;
8bb418a3 7767 STRIP_NOPS (arg2);
3a9b8c7e
AH
7768 if (TREE_CODE (arg2) != INTEGER_CST
7769 || TREE_INT_CST_LOW (arg2) & ~0x3)
7770 {
9e637a26 7771 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7772 return const0_rtx;
7773 }
f18c054f 7774
3a9b8c7e 7775 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7776 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7777 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7778 op1 = copy_to_mode_reg (mode1, op1);
24408032 7779
3a9b8c7e
AH
7780 pat = GEN_FCN (d->icode) (op0, op1, op2);
7781 if (pat != 0)
7782 emit_insn (pat);
f18c054f 7783
3a9b8c7e
AH
7784 return NULL_RTX;
7785 }
f18c054f 7786
3a9b8c7e
AH
7787 return NULL_RTX;
7788}
24408032 7789
7a4eca66
DE
7790/* Expand vec_init builtin. */
7791static rtx
5039610b 7792altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
7793{
7794 enum machine_mode tmode = TYPE_MODE (type);
7795 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7796 int i, n_elt = GET_MODE_NUNITS (tmode);
7797 rtvec v = rtvec_alloc (n_elt);
7798
7799 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 7800 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 7801
5039610b 7802 for (i = 0; i < n_elt; ++i)
7a4eca66 7803 {
5039610b 7804 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
7805 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7806 }
7807
7a4eca66
DE
7808 if (!target || !register_operand (target, tmode))
7809 target = gen_reg_rtx (tmode);
7810
7811 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7812 return target;
7813}
7814
7815/* Return the integer constant in ARG. Constrain it to be in the range
7816 of the subparts of VEC_TYPE; issue an error if not. */
7817
7818static int
7819get_element_number (tree vec_type, tree arg)
7820{
7821 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7822
7823 if (!host_integerp (arg, 1)
7824 || (elt = tree_low_cst (arg, 1), elt > max))
7825 {
7826 error ("selector must be an integer constant in the range 0..%wi", max);
7827 return 0;
7828 }
7829
7830 return elt;
7831}
7832
7833/* Expand vec_set builtin. */
7834static rtx
5039610b 7835altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
7836{
7837 enum machine_mode tmode, mode1;
7838 tree arg0, arg1, arg2;
7839 int elt;
7840 rtx op0, op1;
7841
5039610b
SL
7842 arg0 = CALL_EXPR_ARG (exp, 0);
7843 arg1 = CALL_EXPR_ARG (exp, 1);
7844 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
7845
7846 tmode = TYPE_MODE (TREE_TYPE (arg0));
7847 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7848 gcc_assert (VECTOR_MODE_P (tmode));
7849
7850 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7851 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7852 elt = get_element_number (TREE_TYPE (arg0), arg2);
7853
7854 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7855 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7856
7857 op0 = force_reg (tmode, op0);
7858 op1 = force_reg (mode1, op1);
7859
7860 rs6000_expand_vector_set (op0, op1, elt);
7861
7862 return op0;
7863}
7864
7865/* Expand vec_ext builtin. */
7866static rtx
5039610b 7867altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
7868{
7869 enum machine_mode tmode, mode0;
7870 tree arg0, arg1;
7871 int elt;
7872 rtx op0;
7873
5039610b
SL
7874 arg0 = CALL_EXPR_ARG (exp, 0);
7875 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 7876
84217346 7877 op0 = expand_normal (arg0);
7a4eca66
DE
7878 elt = get_element_number (TREE_TYPE (arg0), arg1);
7879
7880 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7881 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7882 gcc_assert (VECTOR_MODE_P (mode0));
7883
7884 op0 = force_reg (mode0, op0);
7885
7886 if (optimize || !target || !register_operand (target, tmode))
7887 target = gen_reg_rtx (tmode);
7888
7889 rs6000_expand_vector_extract (target, op0, elt);
7890
7891 return target;
7892}
7893
3a9b8c7e
AH
7894/* Expand the builtin in EXP and store the result in TARGET. Store
7895 true in *EXPANDEDP if we found a builtin to expand. */
7896static rtx
a2369ed3 7897altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 7898{
586de218
KG
7899 const struct builtin_description *d;
7900 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
7901 size_t i;
7902 enum insn_code icode;
5039610b 7903 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
7904 tree arg0;
7905 rtx op0, pat;
7906 enum machine_mode tmode, mode0;
3a9b8c7e 7907 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7908
58646b77
PB
7909 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7910 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7911 {
7912 *expandedp = true;
ea40ba9c 7913 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7914 return const0_rtx;
7915 }
7916
3a9b8c7e
AH
7917 target = altivec_expand_ld_builtin (exp, target, expandedp);
7918 if (*expandedp)
7919 return target;
0ac081f6 7920
3a9b8c7e
AH
7921 target = altivec_expand_st_builtin (exp, target, expandedp);
7922 if (*expandedp)
7923 return target;
7924
7925 target = altivec_expand_dst_builtin (exp, target, expandedp);
7926 if (*expandedp)
7927 return target;
7928
7929 *expandedp = true;
95385cbb 7930
3a9b8c7e
AH
7931 switch (fcode)
7932 {
6525c0e7 7933 case ALTIVEC_BUILTIN_STVX:
5039610b 7934 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 7935 case ALTIVEC_BUILTIN_STVEBX:
5039610b 7936 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 7937 case ALTIVEC_BUILTIN_STVEHX:
5039610b 7938 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 7939 case ALTIVEC_BUILTIN_STVEWX:
5039610b 7940 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 7941 case ALTIVEC_BUILTIN_STVXL:
5039610b 7942 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 7943
95385cbb
AH
7944 case ALTIVEC_BUILTIN_MFVSCR:
7945 icode = CODE_FOR_altivec_mfvscr;
7946 tmode = insn_data[icode].operand[0].mode;
7947
7948 if (target == 0
7949 || GET_MODE (target) != tmode
7950 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7951 target = gen_reg_rtx (tmode);
f676971a 7952
95385cbb 7953 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7954 if (! pat)
7955 return 0;
7956 emit_insn (pat);
95385cbb
AH
7957 return target;
7958
7959 case ALTIVEC_BUILTIN_MTVSCR:
7960 icode = CODE_FOR_altivec_mtvscr;
5039610b 7961 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7962 op0 = expand_normal (arg0);
95385cbb
AH
7963 mode0 = insn_data[icode].operand[0].mode;
7964
7965 /* If we got invalid arguments bail out before generating bad rtl. */
7966 if (arg0 == error_mark_node)
9a171fcd 7967 return const0_rtx;
95385cbb
AH
7968
7969 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7970 op0 = copy_to_mode_reg (mode0, op0);
7971
7972 pat = GEN_FCN (icode) (op0);
7973 if (pat)
7974 emit_insn (pat);
7975 return NULL_RTX;
3a9b8c7e 7976
95385cbb
AH
7977 case ALTIVEC_BUILTIN_DSSALL:
7978 emit_insn (gen_altivec_dssall ());
7979 return NULL_RTX;
7980
7981 case ALTIVEC_BUILTIN_DSS:
7982 icode = CODE_FOR_altivec_dss;
5039610b 7983 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 7984 STRIP_NOPS (arg0);
84217346 7985 op0 = expand_normal (arg0);
95385cbb
AH
7986 mode0 = insn_data[icode].operand[0].mode;
7987
7988 /* If we got invalid arguments bail out before generating bad rtl. */
7989 if (arg0 == error_mark_node)
9a171fcd 7990 return const0_rtx;
95385cbb 7991
b44140e7
AH
7992 if (TREE_CODE (arg0) != INTEGER_CST
7993 || TREE_INT_CST_LOW (arg0) & ~0x3)
7994 {
7995 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7996 return const0_rtx;
b44140e7
AH
7997 }
7998
95385cbb
AH
7999 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8000 op0 = copy_to_mode_reg (mode0, op0);
8001
8002 emit_insn (gen_altivec_dss (op0));
0ac081f6 8003 return NULL_RTX;
7a4eca66
DE
8004
8005 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8006 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8007 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8008 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8009 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8010
8011 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8012 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8013 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8014 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8015 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8016
8017 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8018 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8019 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8020 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8021 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8022
8023 default:
8024 break;
8025 /* Fall through. */
0ac081f6 8026 }
24408032 8027
100c4561 8028 /* Expand abs* operations. */
586de218 8029 d = bdesc_abs;
ca7558fc 8030 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8031 if (d->code == fcode)
5039610b 8032 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8033
ae4b4a02 8034 /* Expand the AltiVec predicates. */
586de218 8035 dp = bdesc_altivec_preds;
ca7558fc 8036 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8037 if (dp->code == fcode)
c4ad648e 8038 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8039 exp, target);
ae4b4a02 8040
6525c0e7
AH
8041 /* LV* are funky. We initialized them differently. */
8042 switch (fcode)
8043 {
8044 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8045 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8046 exp, target);
6525c0e7 8047 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8048 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8049 exp, target);
6525c0e7 8050 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8051 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8052 exp, target);
6525c0e7 8053 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8054 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8055 exp, target);
6525c0e7 8056 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8057 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8058 exp, target);
6525c0e7 8059 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8060 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8061 exp, target);
6525c0e7 8062 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8063 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8064 exp, target);
6525c0e7
AH
8065 default:
8066 break;
8067 /* Fall through. */
8068 }
95385cbb 8069
92898235 8070 *expandedp = false;
0ac081f6
AH
8071 return NULL_RTX;
8072}
8073
a3170dc6
AH
8074/* Binops that need to be initialized manually, but can be expanded
8075 automagically by rs6000_expand_binop_builtin. */
8076static struct builtin_description bdesc_2arg_spe[] =
8077{
8078 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8079 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8080 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8081 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8082 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8083 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8084 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8085 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8086 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8087 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8088 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8089 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8090 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8091 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8092 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8093 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8094 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8095 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8096 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8097 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8098 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8099 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8100};
8101
8102/* Expand the builtin in EXP and store the result in TARGET. Store
8103 true in *EXPANDEDP if we found a builtin to expand.
8104
8105 This expands the SPE builtins that are not simple unary and binary
8106 operations. */
8107static rtx
a2369ed3 8108spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8109{
5039610b 8110 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8111 tree arg1, arg0;
8112 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8113 enum insn_code icode;
8114 enum machine_mode tmode, mode0;
8115 rtx pat, op0;
8116 struct builtin_description *d;
8117 size_t i;
8118
8119 *expandedp = true;
8120
8121 /* Syntax check for a 5-bit unsigned immediate. */
8122 switch (fcode)
8123 {
8124 case SPE_BUILTIN_EVSTDD:
8125 case SPE_BUILTIN_EVSTDH:
8126 case SPE_BUILTIN_EVSTDW:
8127 case SPE_BUILTIN_EVSTWHE:
8128 case SPE_BUILTIN_EVSTWHO:
8129 case SPE_BUILTIN_EVSTWWE:
8130 case SPE_BUILTIN_EVSTWWO:
5039610b 8131 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8132 if (TREE_CODE (arg1) != INTEGER_CST
8133 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8134 {
8135 error ("argument 2 must be a 5-bit unsigned literal");
8136 return const0_rtx;
8137 }
8138 break;
8139 default:
8140 break;
8141 }
8142
00332c9f
AH
8143 /* The evsplat*i instructions are not quite generic. */
8144 switch (fcode)
8145 {
8146 case SPE_BUILTIN_EVSPLATFI:
8147 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8148 exp, target);
00332c9f
AH
8149 case SPE_BUILTIN_EVSPLATI:
8150 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8151 exp, target);
00332c9f
AH
8152 default:
8153 break;
8154 }
8155
a3170dc6
AH
8156 d = (struct builtin_description *) bdesc_2arg_spe;
8157 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8158 if (d->code == fcode)
5039610b 8159 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8160
8161 d = (struct builtin_description *) bdesc_spe_predicates;
8162 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8163 if (d->code == fcode)
5039610b 8164 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8165
8166 d = (struct builtin_description *) bdesc_spe_evsel;
8167 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8168 if (d->code == fcode)
5039610b 8169 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8170
8171 switch (fcode)
8172 {
8173 case SPE_BUILTIN_EVSTDDX:
5039610b 8174 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8175 case SPE_BUILTIN_EVSTDHX:
5039610b 8176 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8177 case SPE_BUILTIN_EVSTDWX:
5039610b 8178 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8179 case SPE_BUILTIN_EVSTWHEX:
5039610b 8180 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8181 case SPE_BUILTIN_EVSTWHOX:
5039610b 8182 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8183 case SPE_BUILTIN_EVSTWWEX:
5039610b 8184 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8185 case SPE_BUILTIN_EVSTWWOX:
5039610b 8186 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8187 case SPE_BUILTIN_EVSTDD:
5039610b 8188 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8189 case SPE_BUILTIN_EVSTDH:
5039610b 8190 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8191 case SPE_BUILTIN_EVSTDW:
5039610b 8192 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8193 case SPE_BUILTIN_EVSTWHE:
5039610b 8194 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8195 case SPE_BUILTIN_EVSTWHO:
5039610b 8196 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8197 case SPE_BUILTIN_EVSTWWE:
5039610b 8198 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8199 case SPE_BUILTIN_EVSTWWO:
5039610b 8200 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8201 case SPE_BUILTIN_MFSPEFSCR:
8202 icode = CODE_FOR_spe_mfspefscr;
8203 tmode = insn_data[icode].operand[0].mode;
8204
8205 if (target == 0
8206 || GET_MODE (target) != tmode
8207 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8208 target = gen_reg_rtx (tmode);
f676971a 8209
a3170dc6
AH
8210 pat = GEN_FCN (icode) (target);
8211 if (! pat)
8212 return 0;
8213 emit_insn (pat);
8214 return target;
8215 case SPE_BUILTIN_MTSPEFSCR:
8216 icode = CODE_FOR_spe_mtspefscr;
5039610b 8217 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8218 op0 = expand_normal (arg0);
a3170dc6
AH
8219 mode0 = insn_data[icode].operand[0].mode;
8220
8221 if (arg0 == error_mark_node)
8222 return const0_rtx;
8223
8224 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8225 op0 = copy_to_mode_reg (mode0, op0);
8226
8227 pat = GEN_FCN (icode) (op0);
8228 if (pat)
8229 emit_insn (pat);
8230 return NULL_RTX;
8231 default:
8232 break;
8233 }
8234
8235 *expandedp = false;
8236 return NULL_RTX;
8237}
8238
8239static rtx
5039610b 8240spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8241{
8242 rtx pat, scratch, tmp;
5039610b
SL
8243 tree form = CALL_EXPR_ARG (exp, 0);
8244 tree arg0 = CALL_EXPR_ARG (exp, 1);
8245 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8246 rtx op0 = expand_normal (arg0);
8247 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8248 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8249 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8250 int form_int;
8251 enum rtx_code code;
8252
8253 if (TREE_CODE (form) != INTEGER_CST)
8254 {
8255 error ("argument 1 of __builtin_spe_predicate must be a constant");
8256 return const0_rtx;
8257 }
8258 else
8259 form_int = TREE_INT_CST_LOW (form);
8260
37409796 8261 gcc_assert (mode0 == mode1);
a3170dc6
AH
8262
8263 if (arg0 == error_mark_node || arg1 == error_mark_node)
8264 return const0_rtx;
8265
8266 if (target == 0
8267 || GET_MODE (target) != SImode
8268 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8269 target = gen_reg_rtx (SImode);
8270
8271 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8272 op0 = copy_to_mode_reg (mode0, op0);
8273 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8274 op1 = copy_to_mode_reg (mode1, op1);
8275
8276 scratch = gen_reg_rtx (CCmode);
8277
8278 pat = GEN_FCN (icode) (scratch, op0, op1);
8279 if (! pat)
8280 return const0_rtx;
8281 emit_insn (pat);
8282
8283 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8284 _lower_. We use one compare, but look in different bits of the
8285 CR for each variant.
8286
8287 There are 2 elements in each SPE simd type (upper/lower). The CR
8288 bits are set as follows:
8289
8290 BIT0 | BIT 1 | BIT 2 | BIT 3
8291 U | L | (U | L) | (U & L)
8292
8293 So, for an "all" relationship, BIT 3 would be set.
8294 For an "any" relationship, BIT 2 would be set. Etc.
8295
8296 Following traditional nomenclature, these bits map to:
8297
8298 BIT0 | BIT 1 | BIT 2 | BIT 3
8299 LT | GT | EQ | OV
8300
8301 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
8302 */
8303
8304 switch (form_int)
8305 {
8306 /* All variant. OV bit. */
8307 case 0:
8308 /* We need to get to the OV bit, which is the ORDERED bit. We
8309 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8310 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8311 So let's just use another pattern. */
8312 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8313 return target;
8314 /* Any variant. EQ bit. */
8315 case 1:
8316 code = EQ;
8317 break;
8318 /* Upper variant. LT bit. */
8319 case 2:
8320 code = LT;
8321 break;
8322 /* Lower variant. GT bit. */
8323 case 3:
8324 code = GT;
8325 break;
8326 default:
8327 error ("argument 1 of __builtin_spe_predicate is out of range");
8328 return const0_rtx;
8329 }
8330
8331 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8332 emit_move_insn (target, tmp);
8333
8334 return target;
8335}
8336
8337/* The evsel builtins look like this:
8338
8339 e = __builtin_spe_evsel_OP (a, b, c, d);
8340
8341 and work like this:
8342
8343 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8344 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8345*/
8346
8347static rtx
5039610b 8348spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8349{
8350 rtx pat, scratch;
5039610b
SL
8351 tree arg0 = CALL_EXPR_ARG (exp, 0);
8352 tree arg1 = CALL_EXPR_ARG (exp, 1);
8353 tree arg2 = CALL_EXPR_ARG (exp, 2);
8354 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8355 rtx op0 = expand_normal (arg0);
8356 rtx op1 = expand_normal (arg1);
8357 rtx op2 = expand_normal (arg2);
8358 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8359 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8360 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8361
37409796 8362 gcc_assert (mode0 == mode1);
a3170dc6
AH
8363
8364 if (arg0 == error_mark_node || arg1 == error_mark_node
8365 || arg2 == error_mark_node || arg3 == error_mark_node)
8366 return const0_rtx;
8367
8368 if (target == 0
8369 || GET_MODE (target) != mode0
8370 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8371 target = gen_reg_rtx (mode0);
8372
8373 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8374 op0 = copy_to_mode_reg (mode0, op0);
8375 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8376 op1 = copy_to_mode_reg (mode0, op1);
8377 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8378 op2 = copy_to_mode_reg (mode0, op2);
8379 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8380 op3 = copy_to_mode_reg (mode0, op3);
8381
8382 /* Generate the compare. */
8383 scratch = gen_reg_rtx (CCmode);
8384 pat = GEN_FCN (icode) (scratch, op0, op1);
8385 if (! pat)
8386 return const0_rtx;
8387 emit_insn (pat);
8388
8389 if (mode0 == V2SImode)
8390 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8391 else
8392 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8393
8394 return target;
8395}
8396
0ac081f6
AH
8397/* Expand an expression EXP that calls a built-in function,
8398 with result going to TARGET if that's convenient
8399 (and in mode MODE if that's convenient).
8400 SUBTARGET may be used as the target for computing one of EXP's operands.
8401 IGNORE is nonzero if the value is to be ignored. */
8402
8403static rtx
a2369ed3 8404rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8405 enum machine_mode mode ATTRIBUTE_UNUSED,
8406 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8407{
5039610b 8408 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 8409 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 8410 const struct builtin_description *d;
92898235
AH
8411 size_t i;
8412 rtx ret;
8413 bool success;
f676971a 8414
7ccf35ed
DN
8415 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8416 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8417 {
8418 int icode = (int) CODE_FOR_altivec_lvsr;
8419 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8420 enum machine_mode mode = insn_data[icode].operand[1].mode;
8421 tree arg;
8422 rtx op, addr, pat;
8423
37409796 8424 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8425
5039610b 8426 arg = CALL_EXPR_ARG (exp, 0);
37409796 8427 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8428 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8429 addr = memory_address (mode, op);
8430 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8431 op = addr;
8432 else
8433 {
8434 /* For the load case need to negate the address. */
8435 op = gen_reg_rtx (GET_MODE (addr));
8436 emit_insn (gen_rtx_SET (VOIDmode, op,
8437 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8438 }
7ccf35ed
DN
8439 op = gen_rtx_MEM (mode, op);
8440
8441 if (target == 0
8442 || GET_MODE (target) != tmode
8443 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8444 target = gen_reg_rtx (tmode);
8445
8446 /*pat = gen_altivec_lvsr (target, op);*/
8447 pat = GEN_FCN (icode) (target, op);
8448 if (!pat)
8449 return 0;
8450 emit_insn (pat);
8451
8452 return target;
8453 }
5039610b
SL
8454
8455 /* FIXME: There's got to be a nicer way to handle this case than
8456 constructing a new CALL_EXPR. */
f57d17f1
TM
8457 if (fcode == ALTIVEC_BUILTIN_VCFUX
8458 || fcode == ALTIVEC_BUILTIN_VCFSX)
8459 {
5039610b
SL
8460 if (call_expr_nargs (exp) == 1)
8461 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8462 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8463 }
7ccf35ed 8464
0ac081f6 8465 if (TARGET_ALTIVEC)
92898235
AH
8466 {
8467 ret = altivec_expand_builtin (exp, target, &success);
8468
a3170dc6
AH
8469 if (success)
8470 return ret;
8471 }
8472 if (TARGET_SPE)
8473 {
8474 ret = spe_expand_builtin (exp, target, &success);
8475
92898235
AH
8476 if (success)
8477 return ret;
8478 }
8479
37409796 8480 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 8481
37409796
NS
8482 /* Handle simple unary operations. */
8483 d = (struct builtin_description *) bdesc_1arg;
8484 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8485 if (d->code == fcode)
5039610b 8486 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8487
37409796
NS
8488 /* Handle simple binary operations. */
8489 d = (struct builtin_description *) bdesc_2arg;
8490 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8491 if (d->code == fcode)
5039610b 8492 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8493
37409796 8494 /* Handle simple ternary operations. */
586de218 8495 d = bdesc_3arg;
37409796
NS
8496 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8497 if (d->code == fcode)
5039610b 8498 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8499
37409796 8500 gcc_unreachable ();
0ac081f6
AH
8501}
8502
7c62e993
PB
8503static tree
8504build_opaque_vector_type (tree node, int nunits)
8505{
8506 node = copy_node (node);
8507 TYPE_MAIN_VARIANT (node) = node;
8508 return build_vector_type (node, nunits);
8509}
8510
0ac081f6 8511static void
863d938c 8512rs6000_init_builtins (void)
0ac081f6 8513{
4a5eab38
PB
8514 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8515 V2SF_type_node = build_vector_type (float_type_node, 2);
8516 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8517 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8518 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8519 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8520 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8521
8522 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8523 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8524 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8525
7c62e993
PB
8526 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8527 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8528 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8529 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8530
8bb418a3
ZL
8531 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8532 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8533 'vector unsigned short'. */
8534
8dd16ecc
NS
8535 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8536 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8537 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8538 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8539
58646b77
PB
8540 long_integer_type_internal_node = long_integer_type_node;
8541 long_unsigned_type_internal_node = long_unsigned_type_node;
8542 intQI_type_internal_node = intQI_type_node;
8543 uintQI_type_internal_node = unsigned_intQI_type_node;
8544 intHI_type_internal_node = intHI_type_node;
8545 uintHI_type_internal_node = unsigned_intHI_type_node;
8546 intSI_type_internal_node = intSI_type_node;
8547 uintSI_type_internal_node = unsigned_intSI_type_node;
8548 float_type_internal_node = float_type_node;
8549 void_type_internal_node = void_type_node;
8550
8bb418a3
ZL
8551 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8552 get_identifier ("__bool char"),
8553 bool_char_type_node));
8554 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8555 get_identifier ("__bool short"),
8556 bool_short_type_node));
8557 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8558 get_identifier ("__bool int"),
8559 bool_int_type_node));
8560 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8561 get_identifier ("__pixel"),
8562 pixel_type_node));
8563
4a5eab38
PB
8564 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8565 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8566 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8567 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8568
8569 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8570 get_identifier ("__vector unsigned char"),
8571 unsigned_V16QI_type_node));
8572 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8573 get_identifier ("__vector signed char"),
8574 V16QI_type_node));
8575 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8576 get_identifier ("__vector __bool char"),
8577 bool_V16QI_type_node));
8578
8579 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8580 get_identifier ("__vector unsigned short"),
8581 unsigned_V8HI_type_node));
8582 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8583 get_identifier ("__vector signed short"),
8584 V8HI_type_node));
8585 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8586 get_identifier ("__vector __bool short"),
8587 bool_V8HI_type_node));
8588
8589 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8590 get_identifier ("__vector unsigned int"),
8591 unsigned_V4SI_type_node));
8592 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8593 get_identifier ("__vector signed int"),
8594 V4SI_type_node));
8595 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8596 get_identifier ("__vector __bool int"),
8597 bool_V4SI_type_node));
8598
8599 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8600 get_identifier ("__vector float"),
8601 V4SF_type_node));
8602 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8603 get_identifier ("__vector __pixel"),
8604 pixel_V8HI_type_node));
8605
a3170dc6 8606 if (TARGET_SPE)
3fdaa45a 8607 spe_init_builtins ();
0ac081f6
AH
8608 if (TARGET_ALTIVEC)
8609 altivec_init_builtins ();
0559cc77
DE
8610 if (TARGET_ALTIVEC || TARGET_SPE)
8611 rs6000_common_init_builtins ();
69ca3549
DE
8612
8613#if TARGET_XCOFF
8614 /* AIX libm provides clog as __clog. */
8615 if (built_in_decls [BUILT_IN_CLOG])
8616 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8617#endif
0ac081f6
AH
8618}
8619
a3170dc6
AH
8620/* Search through a set of builtins and enable the mask bits.
8621 DESC is an array of builtins.
b6d08ca1 8622 SIZE is the total number of builtins.
a3170dc6
AH
8623 START is the builtin enum at which to start.
8624 END is the builtin enum at which to end. */
0ac081f6 8625static void
a2369ed3 8626enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8627 enum rs6000_builtins start,
a2369ed3 8628 enum rs6000_builtins end)
a3170dc6
AH
8629{
8630 int i;
8631
8632 for (i = 0; i < size; ++i)
8633 if (desc[i].code == start)
8634 break;
8635
8636 if (i == size)
8637 return;
8638
8639 for (; i < size; ++i)
8640 {
8641 /* Flip all the bits on. */
8642 desc[i].mask = target_flags;
8643 if (desc[i].code == end)
8644 break;
8645 }
8646}
8647
8648static void
863d938c 8649spe_init_builtins (void)
0ac081f6 8650{
a3170dc6
AH
8651 tree endlink = void_list_node;
8652 tree puint_type_node = build_pointer_type (unsigned_type_node);
8653 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 8654 struct builtin_description *d;
0ac081f6
AH
8655 size_t i;
8656
a3170dc6
AH
8657 tree v2si_ftype_4_v2si
8658 = build_function_type
3fdaa45a
AH
8659 (opaque_V2SI_type_node,
8660 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8661 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8662 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8663 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8664 endlink)))));
8665
8666 tree v2sf_ftype_4_v2sf
8667 = build_function_type
3fdaa45a
AH
8668 (opaque_V2SF_type_node,
8669 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8670 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8671 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8672 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8673 endlink)))));
8674
8675 tree int_ftype_int_v2si_v2si
8676 = build_function_type
8677 (integer_type_node,
8678 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8679 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8680 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8681 endlink))));
8682
8683 tree int_ftype_int_v2sf_v2sf
8684 = build_function_type
8685 (integer_type_node,
8686 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8687 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8688 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8689 endlink))));
8690
8691 tree void_ftype_v2si_puint_int
8692 = build_function_type (void_type_node,
3fdaa45a 8693 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8694 tree_cons (NULL_TREE, puint_type_node,
8695 tree_cons (NULL_TREE,
8696 integer_type_node,
8697 endlink))));
8698
8699 tree void_ftype_v2si_puint_char
8700 = build_function_type (void_type_node,
3fdaa45a 8701 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8702 tree_cons (NULL_TREE, puint_type_node,
8703 tree_cons (NULL_TREE,
8704 char_type_node,
8705 endlink))));
8706
8707 tree void_ftype_v2si_pv2si_int
8708 = build_function_type (void_type_node,
3fdaa45a 8709 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8710 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8711 tree_cons (NULL_TREE,
8712 integer_type_node,
8713 endlink))));
8714
8715 tree void_ftype_v2si_pv2si_char
8716 = build_function_type (void_type_node,
3fdaa45a 8717 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8718 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8719 tree_cons (NULL_TREE,
8720 char_type_node,
8721 endlink))));
8722
8723 tree void_ftype_int
8724 = build_function_type (void_type_node,
8725 tree_cons (NULL_TREE, integer_type_node, endlink));
8726
8727 tree int_ftype_void
36e8d515 8728 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8729
8730 tree v2si_ftype_pv2si_int
3fdaa45a 8731 = build_function_type (opaque_V2SI_type_node,
6035d635 8732 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8733 tree_cons (NULL_TREE, integer_type_node,
8734 endlink)));
8735
8736 tree v2si_ftype_puint_int
3fdaa45a 8737 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8738 tree_cons (NULL_TREE, puint_type_node,
8739 tree_cons (NULL_TREE, integer_type_node,
8740 endlink)));
8741
8742 tree v2si_ftype_pushort_int
3fdaa45a 8743 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8744 tree_cons (NULL_TREE, pushort_type_node,
8745 tree_cons (NULL_TREE, integer_type_node,
8746 endlink)));
8747
00332c9f
AH
8748 tree v2si_ftype_signed_char
8749 = build_function_type (opaque_V2SI_type_node,
8750 tree_cons (NULL_TREE, signed_char_type_node,
8751 endlink));
8752
a3170dc6
AH
8753 /* The initialization of the simple binary and unary builtins is
8754 done in rs6000_common_init_builtins, but we have to enable the
8755 mask bits here manually because we have run out of `target_flags'
8756 bits. We really need to redesign this mask business. */
8757
8758 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8759 ARRAY_SIZE (bdesc_2arg),
8760 SPE_BUILTIN_EVADDW,
8761 SPE_BUILTIN_EVXOR);
8762 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8763 ARRAY_SIZE (bdesc_1arg),
8764 SPE_BUILTIN_EVABS,
8765 SPE_BUILTIN_EVSUBFUSIAAW);
8766 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8767 ARRAY_SIZE (bdesc_spe_predicates),
8768 SPE_BUILTIN_EVCMPEQ,
8769 SPE_BUILTIN_EVFSTSTLT);
8770 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8771 ARRAY_SIZE (bdesc_spe_evsel),
8772 SPE_BUILTIN_EVSEL_CMPGTS,
8773 SPE_BUILTIN_EVSEL_FSTSTEQ);
8774
36252949
AH
8775 (*lang_hooks.decls.pushdecl)
8776 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8777 opaque_V2SI_type_node));
8778
a3170dc6 8779 /* Initialize irregular SPE builtins. */
f676971a 8780
a3170dc6
AH
8781 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8782 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8783 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8784 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8785 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8786 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8787 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8788 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8789 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8790 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8791 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8792 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8793 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8794 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8795 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8796 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8797 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8798 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8799
8800 /* Loads. */
8801 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8802 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8803 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8804 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8805 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8806 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8807 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8808 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8809 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8810 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8811 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8812 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8813 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8814 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8815 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8816 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8817 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8818 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8819 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8820 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8821 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8822 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8823
8824 /* Predicates. */
8825 d = (struct builtin_description *) bdesc_spe_predicates;
8826 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8827 {
8828 tree type;
8829
8830 switch (insn_data[d->icode].operand[1].mode)
8831 {
8832 case V2SImode:
8833 type = int_ftype_int_v2si_v2si;
8834 break;
8835 case V2SFmode:
8836 type = int_ftype_int_v2sf_v2sf;
8837 break;
8838 default:
37409796 8839 gcc_unreachable ();
a3170dc6
AH
8840 }
8841
8842 def_builtin (d->mask, d->name, type, d->code);
8843 }
8844
8845 /* Evsel predicates. */
8846 d = (struct builtin_description *) bdesc_spe_evsel;
8847 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8848 {
8849 tree type;
8850
8851 switch (insn_data[d->icode].operand[1].mode)
8852 {
8853 case V2SImode:
8854 type = v2si_ftype_4_v2si;
8855 break;
8856 case V2SFmode:
8857 type = v2sf_ftype_4_v2sf;
8858 break;
8859 default:
37409796 8860 gcc_unreachable ();
a3170dc6
AH
8861 }
8862
8863 def_builtin (d->mask, d->name, type, d->code);
8864 }
8865}
8866
8867static void
863d938c 8868altivec_init_builtins (void)
a3170dc6 8869{
586de218
KG
8870 const struct builtin_description *d;
8871 const struct builtin_description_predicates *dp;
a3170dc6 8872 size_t i;
7a4eca66
DE
8873 tree ftype;
8874
a3170dc6
AH
8875 tree pfloat_type_node = build_pointer_type (float_type_node);
8876 tree pint_type_node = build_pointer_type (integer_type_node);
8877 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8878 tree pchar_type_node = build_pointer_type (char_type_node);
8879
8880 tree pvoid_type_node = build_pointer_type (void_type_node);
8881
0dbc3651
ZW
8882 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8883 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8884 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8885 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8886
8887 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8888
58646b77
PB
8889 tree int_ftype_opaque
8890 = build_function_type_list (integer_type_node,
8891 opaque_V4SI_type_node, NULL_TREE);
8892
8893 tree opaque_ftype_opaque_int
8894 = build_function_type_list (opaque_V4SI_type_node,
8895 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8896 tree opaque_ftype_opaque_opaque_int
8897 = build_function_type_list (opaque_V4SI_type_node,
8898 opaque_V4SI_type_node, opaque_V4SI_type_node,
8899 integer_type_node, NULL_TREE);
8900 tree int_ftype_int_opaque_opaque
8901 = build_function_type_list (integer_type_node,
8902 integer_type_node, opaque_V4SI_type_node,
8903 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8904 tree int_ftype_int_v4si_v4si
8905 = build_function_type_list (integer_type_node,
8906 integer_type_node, V4SI_type_node,
8907 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8908 tree v4sf_ftype_pcfloat
8909 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8910 tree void_ftype_pfloat_v4sf
b4de2f7d 8911 = build_function_type_list (void_type_node,
a3170dc6 8912 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8913 tree v4si_ftype_pcint
8914 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8915 tree void_ftype_pint_v4si
b4de2f7d
AH
8916 = build_function_type_list (void_type_node,
8917 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8918 tree v8hi_ftype_pcshort
8919 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8920 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8921 = build_function_type_list (void_type_node,
8922 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8923 tree v16qi_ftype_pcchar
8924 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8925 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8926 = build_function_type_list (void_type_node,
8927 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8928 tree void_ftype_v4si
b4de2f7d 8929 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8930 tree v8hi_ftype_void
8931 = build_function_type (V8HI_type_node, void_list_node);
8932 tree void_ftype_void
8933 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8934 tree void_ftype_int
8935 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8936
58646b77
PB
8937 tree opaque_ftype_long_pcvoid
8938 = build_function_type_list (opaque_V4SI_type_node,
8939 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8940 tree v16qi_ftype_long_pcvoid
a3170dc6 8941 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8942 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8943 tree v8hi_ftype_long_pcvoid
a3170dc6 8944 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8945 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8946 tree v4si_ftype_long_pcvoid
a3170dc6 8947 = build_function_type_list (V4SI_type_node,
b4a62fa0 8948 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8949
58646b77
PB
8950 tree void_ftype_opaque_long_pvoid
8951 = build_function_type_list (void_type_node,
8952 opaque_V4SI_type_node, long_integer_type_node,
8953 pvoid_type_node, NULL_TREE);
b4a62fa0 8954 tree void_ftype_v4si_long_pvoid
b4de2f7d 8955 = build_function_type_list (void_type_node,
b4a62fa0 8956 V4SI_type_node, long_integer_type_node,
b4de2f7d 8957 pvoid_type_node, NULL_TREE);
b4a62fa0 8958 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8959 = build_function_type_list (void_type_node,
b4a62fa0 8960 V16QI_type_node, long_integer_type_node,
b4de2f7d 8961 pvoid_type_node, NULL_TREE);
b4a62fa0 8962 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8963 = build_function_type_list (void_type_node,
b4a62fa0 8964 V8HI_type_node, long_integer_type_node,
b4de2f7d 8965 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8966 tree int_ftype_int_v8hi_v8hi
8967 = build_function_type_list (integer_type_node,
8968 integer_type_node, V8HI_type_node,
8969 V8HI_type_node, NULL_TREE);
8970 tree int_ftype_int_v16qi_v16qi
8971 = build_function_type_list (integer_type_node,
8972 integer_type_node, V16QI_type_node,
8973 V16QI_type_node, NULL_TREE);
8974 tree int_ftype_int_v4sf_v4sf
8975 = build_function_type_list (integer_type_node,
8976 integer_type_node, V4SF_type_node,
8977 V4SF_type_node, NULL_TREE);
8978 tree v4si_ftype_v4si
8979 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8980 tree v8hi_ftype_v8hi
8981 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8982 tree v16qi_ftype_v16qi
8983 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8984 tree v4sf_ftype_v4sf
8985 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8986 tree void_ftype_pcvoid_int_int
a3170dc6 8987 = build_function_type_list (void_type_node,
0dbc3651 8988 pcvoid_type_node, integer_type_node,
8bb418a3 8989 integer_type_node, NULL_TREE);
8bb418a3 8990
0dbc3651
ZW
8991 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8992 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8993 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8994 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8995 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8996 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8997 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8998 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8999 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9000 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9001 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9002 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9003 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9004 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9005 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9006 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9007 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9008 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9009 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9010 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9011 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9012 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9013 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9014 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9015 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9016 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9017 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9018 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9019 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9020 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9021 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9022 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9023 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9024 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9025 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9026 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9027 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9028 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9029 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9030 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9031 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9032 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9033 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9034 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9035 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9036 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9037
9038 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9039
9040 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9041 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9042 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9043 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9044 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9045 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9046 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9047 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9048 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9049 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9050
a3170dc6 9051 /* Add the DST variants. */
586de218 9052 d = bdesc_dst;
a3170dc6 9053 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9054 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9055
9056 /* Initialize the predicates. */
586de218 9057 dp = bdesc_altivec_preds;
a3170dc6
AH
9058 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9059 {
9060 enum machine_mode mode1;
9061 tree type;
58646b77
PB
9062 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9063 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9064
58646b77
PB
9065 if (is_overloaded)
9066 mode1 = VOIDmode;
9067 else
9068 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9069
9070 switch (mode1)
9071 {
58646b77
PB
9072 case VOIDmode:
9073 type = int_ftype_int_opaque_opaque;
9074 break;
a3170dc6
AH
9075 case V4SImode:
9076 type = int_ftype_int_v4si_v4si;
9077 break;
9078 case V8HImode:
9079 type = int_ftype_int_v8hi_v8hi;
9080 break;
9081 case V16QImode:
9082 type = int_ftype_int_v16qi_v16qi;
9083 break;
9084 case V4SFmode:
9085 type = int_ftype_int_v4sf_v4sf;
9086 break;
9087 default:
37409796 9088 gcc_unreachable ();
a3170dc6 9089 }
f676971a 9090
a3170dc6
AH
9091 def_builtin (dp->mask, dp->name, type, dp->code);
9092 }
9093
9094 /* Initialize the abs* operators. */
586de218 9095 d = bdesc_abs;
a3170dc6
AH
9096 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9097 {
9098 enum machine_mode mode0;
9099 tree type;
9100
9101 mode0 = insn_data[d->icode].operand[0].mode;
9102
9103 switch (mode0)
9104 {
9105 case V4SImode:
9106 type = v4si_ftype_v4si;
9107 break;
9108 case V8HImode:
9109 type = v8hi_ftype_v8hi;
9110 break;
9111 case V16QImode:
9112 type = v16qi_ftype_v16qi;
9113 break;
9114 case V4SFmode:
9115 type = v4sf_ftype_v4sf;
9116 break;
9117 default:
37409796 9118 gcc_unreachable ();
a3170dc6 9119 }
f676971a 9120
a3170dc6
AH
9121 def_builtin (d->mask, d->name, type, d->code);
9122 }
7ccf35ed 9123
13c62176
DN
9124 if (TARGET_ALTIVEC)
9125 {
9126 tree decl;
9127
9128 /* Initialize target builtin that implements
9129 targetm.vectorize.builtin_mask_for_load. */
9130
c79efc4d
RÁE
9131 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9132 v16qi_ftype_long_pcvoid,
9133 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9134 BUILT_IN_MD, NULL, NULL_TREE);
9135 TREE_READONLY (decl) = 1;
13c62176
DN
9136 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9137 altivec_builtin_mask_for_load = decl;
13c62176 9138 }
7a4eca66
DE
9139
9140 /* Access to the vec_init patterns. */
9141 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9142 integer_type_node, integer_type_node,
9143 integer_type_node, NULL_TREE);
9144 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9145 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9146
9147 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9148 short_integer_type_node,
9149 short_integer_type_node,
9150 short_integer_type_node,
9151 short_integer_type_node,
9152 short_integer_type_node,
9153 short_integer_type_node,
9154 short_integer_type_node, NULL_TREE);
9155 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9156 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9157
9158 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9159 char_type_node, char_type_node,
9160 char_type_node, char_type_node,
9161 char_type_node, char_type_node,
9162 char_type_node, char_type_node,
9163 char_type_node, char_type_node,
9164 char_type_node, char_type_node,
9165 char_type_node, char_type_node,
9166 char_type_node, NULL_TREE);
9167 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9168 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9169
9170 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9171 float_type_node, float_type_node,
9172 float_type_node, NULL_TREE);
9173 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9174 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9175
9176 /* Access to the vec_set patterns. */
9177 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9178 intSI_type_node,
9179 integer_type_node, NULL_TREE);
9180 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9181 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9182
9183 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9184 intHI_type_node,
9185 integer_type_node, NULL_TREE);
9186 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9187 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9188
9189 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9190 intQI_type_node,
9191 integer_type_node, NULL_TREE);
9192 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9193 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9194
9195 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9196 float_type_node,
9197 integer_type_node, NULL_TREE);
9198 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9199 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9200
9201 /* Access to the vec_extract patterns. */
9202 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9203 integer_type_node, NULL_TREE);
9204 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9205 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9206
9207 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9208 integer_type_node, NULL_TREE);
9209 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9210 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9211
9212 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9213 integer_type_node, NULL_TREE);
9214 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9215 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9216
9217 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9218 integer_type_node, NULL_TREE);
9219 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9220 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9221}
9222
9223static void
863d938c 9224rs6000_common_init_builtins (void)
a3170dc6 9225{
586de218 9226 const struct builtin_description *d;
a3170dc6
AH
9227 size_t i;
9228
9229 tree v4sf_ftype_v4sf_v4sf_v16qi
9230 = build_function_type_list (V4SF_type_node,
9231 V4SF_type_node, V4SF_type_node,
9232 V16QI_type_node, NULL_TREE);
9233 tree v4si_ftype_v4si_v4si_v16qi
9234 = build_function_type_list (V4SI_type_node,
9235 V4SI_type_node, V4SI_type_node,
9236 V16QI_type_node, NULL_TREE);
9237 tree v8hi_ftype_v8hi_v8hi_v16qi
9238 = build_function_type_list (V8HI_type_node,
9239 V8HI_type_node, V8HI_type_node,
9240 V16QI_type_node, NULL_TREE);
9241 tree v16qi_ftype_v16qi_v16qi_v16qi
9242 = build_function_type_list (V16QI_type_node,
9243 V16QI_type_node, V16QI_type_node,
9244 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9245 tree v4si_ftype_int
9246 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9247 tree v8hi_ftype_int
9248 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9249 tree v16qi_ftype_int
9250 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9251 tree v8hi_ftype_v16qi
9252 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9253 tree v4sf_ftype_v4sf
9254 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9255
9256 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9257 = build_function_type_list (opaque_V2SI_type_node,
9258 opaque_V2SI_type_node,
9259 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9260
9261 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
9262 = build_function_type_list (opaque_V2SF_type_node,
9263 opaque_V2SF_type_node,
9264 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9265
9266 tree v2si_ftype_int_int
2abe3e28 9267 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9268 integer_type_node, integer_type_node,
9269 NULL_TREE);
9270
58646b77
PB
9271 tree opaque_ftype_opaque
9272 = build_function_type_list (opaque_V4SI_type_node,
9273 opaque_V4SI_type_node, NULL_TREE);
9274
a3170dc6 9275 tree v2si_ftype_v2si
2abe3e28
AH
9276 = build_function_type_list (opaque_V2SI_type_node,
9277 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9278
9279 tree v2sf_ftype_v2sf
2abe3e28
AH
9280 = build_function_type_list (opaque_V2SF_type_node,
9281 opaque_V2SF_type_node, NULL_TREE);
f676971a 9282
a3170dc6 9283 tree v2sf_ftype_v2si
2abe3e28
AH
9284 = build_function_type_list (opaque_V2SF_type_node,
9285 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9286
9287 tree v2si_ftype_v2sf
2abe3e28
AH
9288 = build_function_type_list (opaque_V2SI_type_node,
9289 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9290
9291 tree v2si_ftype_v2si_char
2abe3e28
AH
9292 = build_function_type_list (opaque_V2SI_type_node,
9293 opaque_V2SI_type_node,
9294 char_type_node, NULL_TREE);
a3170dc6
AH
9295
9296 tree v2si_ftype_int_char
2abe3e28 9297 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9298 integer_type_node, char_type_node, NULL_TREE);
9299
9300 tree v2si_ftype_char
2abe3e28
AH
9301 = build_function_type_list (opaque_V2SI_type_node,
9302 char_type_node, NULL_TREE);
a3170dc6
AH
9303
9304 tree int_ftype_int_int
9305 = build_function_type_list (integer_type_node,
9306 integer_type_node, integer_type_node,
9307 NULL_TREE);
95385cbb 9308
58646b77
PB
9309 tree opaque_ftype_opaque_opaque
9310 = build_function_type_list (opaque_V4SI_type_node,
9311 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9312 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9313 = build_function_type_list (V4SI_type_node,
9314 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9315 tree v4sf_ftype_v4si_int
b4de2f7d 9316 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9317 V4SI_type_node, integer_type_node, NULL_TREE);
9318 tree v4si_ftype_v4sf_int
b4de2f7d 9319 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9320 V4SF_type_node, integer_type_node, NULL_TREE);
9321 tree v4si_ftype_v4si_int
b4de2f7d 9322 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9323 V4SI_type_node, integer_type_node, NULL_TREE);
9324 tree v8hi_ftype_v8hi_int
b4de2f7d 9325 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9326 V8HI_type_node, integer_type_node, NULL_TREE);
9327 tree v16qi_ftype_v16qi_int
b4de2f7d 9328 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9329 V16QI_type_node, integer_type_node, NULL_TREE);
9330 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9331 = build_function_type_list (V16QI_type_node,
9332 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9333 integer_type_node, NULL_TREE);
9334 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9335 = build_function_type_list (V8HI_type_node,
9336 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9337 integer_type_node, NULL_TREE);
9338 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9339 = build_function_type_list (V4SI_type_node,
9340 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9341 integer_type_node, NULL_TREE);
9342 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9343 = build_function_type_list (V4SF_type_node,
9344 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9345 integer_type_node, NULL_TREE);
0ac081f6 9346 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9347 = build_function_type_list (V4SF_type_node,
9348 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9349 tree opaque_ftype_opaque_opaque_opaque
9350 = build_function_type_list (opaque_V4SI_type_node,
9351 opaque_V4SI_type_node, opaque_V4SI_type_node,
9352 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9353 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9354 = build_function_type_list (V4SF_type_node,
9355 V4SF_type_node, V4SF_type_node,
9356 V4SI_type_node, NULL_TREE);
2212663f 9357 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9358 = build_function_type_list (V4SF_type_node,
9359 V4SF_type_node, V4SF_type_node,
9360 V4SF_type_node, NULL_TREE);
f676971a 9361 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9362 = build_function_type_list (V4SI_type_node,
9363 V4SI_type_node, V4SI_type_node,
9364 V4SI_type_node, NULL_TREE);
0ac081f6 9365 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9366 = build_function_type_list (V8HI_type_node,
9367 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9368 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9369 = build_function_type_list (V8HI_type_node,
9370 V8HI_type_node, V8HI_type_node,
9371 V8HI_type_node, NULL_TREE);
c4ad648e 9372 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9373 = build_function_type_list (V4SI_type_node,
9374 V8HI_type_node, V8HI_type_node,
9375 V4SI_type_node, NULL_TREE);
c4ad648e 9376 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9377 = build_function_type_list (V4SI_type_node,
9378 V16QI_type_node, V16QI_type_node,
9379 V4SI_type_node, NULL_TREE);
0ac081f6 9380 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9381 = build_function_type_list (V16QI_type_node,
9382 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9383 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9384 = build_function_type_list (V4SI_type_node,
9385 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9386 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9387 = build_function_type_list (V8HI_type_node,
9388 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9389 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9390 = build_function_type_list (V4SI_type_node,
9391 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9392 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9393 = build_function_type_list (V8HI_type_node,
9394 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9395 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9396 = build_function_type_list (V16QI_type_node,
9397 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9398 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9399 = build_function_type_list (V4SI_type_node,
9400 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9401 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9402 = build_function_type_list (V4SI_type_node,
9403 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9404 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9405 = build_function_type_list (V4SI_type_node,
9406 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9407 tree v4si_ftype_v8hi
9408 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9409 tree int_ftype_v4si_v4si
9410 = build_function_type_list (integer_type_node,
9411 V4SI_type_node, V4SI_type_node, NULL_TREE);
9412 tree int_ftype_v4sf_v4sf
9413 = build_function_type_list (integer_type_node,
9414 V4SF_type_node, V4SF_type_node, NULL_TREE);
9415 tree int_ftype_v16qi_v16qi
9416 = build_function_type_list (integer_type_node,
9417 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9418 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9419 = build_function_type_list (integer_type_node,
9420 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9421
6f317ef3 9422 /* Add the simple ternary operators. */
586de218 9423 d = bdesc_3arg;
ca7558fc 9424 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9425 {
2212663f
DB
9426 enum machine_mode mode0, mode1, mode2, mode3;
9427 tree type;
58646b77
PB
9428 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9429 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9430
58646b77
PB
9431 if (is_overloaded)
9432 {
9433 mode0 = VOIDmode;
9434 mode1 = VOIDmode;
9435 mode2 = VOIDmode;
9436 mode3 = VOIDmode;
9437 }
9438 else
9439 {
9440 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9441 continue;
f676971a 9442
58646b77
PB
9443 mode0 = insn_data[d->icode].operand[0].mode;
9444 mode1 = insn_data[d->icode].operand[1].mode;
9445 mode2 = insn_data[d->icode].operand[2].mode;
9446 mode3 = insn_data[d->icode].operand[3].mode;
9447 }
bb8df8a6 9448
2212663f
DB
9449 /* When all four are of the same mode. */
9450 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9451 {
9452 switch (mode0)
9453 {
58646b77
PB
9454 case VOIDmode:
9455 type = opaque_ftype_opaque_opaque_opaque;
9456 break;
617e0e1d
DB
9457 case V4SImode:
9458 type = v4si_ftype_v4si_v4si_v4si;
9459 break;
2212663f
DB
9460 case V4SFmode:
9461 type = v4sf_ftype_v4sf_v4sf_v4sf;
9462 break;
9463 case V8HImode:
9464 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9465 break;
2212663f
DB
9466 case V16QImode:
9467 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9468 break;
2212663f 9469 default:
37409796 9470 gcc_unreachable ();
2212663f
DB
9471 }
9472 }
9473 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 9474 {
2212663f
DB
9475 switch (mode0)
9476 {
9477 case V4SImode:
9478 type = v4si_ftype_v4si_v4si_v16qi;
9479 break;
9480 case V4SFmode:
9481 type = v4sf_ftype_v4sf_v4sf_v16qi;
9482 break;
9483 case V8HImode:
9484 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 9485 break;
2212663f
DB
9486 case V16QImode:
9487 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9488 break;
2212663f 9489 default:
37409796 9490 gcc_unreachable ();
2212663f
DB
9491 }
9492 }
f676971a 9493 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 9494 && mode3 == V4SImode)
24408032 9495 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 9496 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 9497 && mode3 == V4SImode)
24408032 9498 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 9499 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 9500 && mode3 == V4SImode)
24408032
AH
9501 type = v4sf_ftype_v4sf_v4sf_v4si;
9502
a7b376ee 9503 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
9504 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9505 && mode3 == QImode)
b9e4e5d1 9506 type = v16qi_ftype_v16qi_v16qi_int;
24408032 9507
a7b376ee 9508 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
9509 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9510 && mode3 == QImode)
b9e4e5d1 9511 type = v8hi_ftype_v8hi_v8hi_int;
24408032 9512
a7b376ee 9513 /* vint, vint, vint, 4-bit literal. */
24408032
AH
9514 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9515 && mode3 == QImode)
b9e4e5d1 9516 type = v4si_ftype_v4si_v4si_int;
24408032 9517
a7b376ee 9518 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
9519 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9520 && mode3 == QImode)
b9e4e5d1 9521 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9522
2212663f 9523 else
37409796 9524 gcc_unreachable ();
2212663f
DB
9525
9526 def_builtin (d->mask, d->name, type, d->code);
9527 }
9528
0ac081f6 9529 /* Add the simple binary operators. */
00b960c7 9530 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9531 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
9532 {
9533 enum machine_mode mode0, mode1, mode2;
9534 tree type;
58646b77
PB
9535 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9536 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9537
58646b77
PB
9538 if (is_overloaded)
9539 {
9540 mode0 = VOIDmode;
9541 mode1 = VOIDmode;
9542 mode2 = VOIDmode;
9543 }
9544 else
bb8df8a6 9545 {
58646b77
PB
9546 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9547 continue;
f676971a 9548
58646b77
PB
9549 mode0 = insn_data[d->icode].operand[0].mode;
9550 mode1 = insn_data[d->icode].operand[1].mode;
9551 mode2 = insn_data[d->icode].operand[2].mode;
9552 }
0ac081f6
AH
9553
9554 /* When all three operands are of the same mode. */
9555 if (mode0 == mode1 && mode1 == mode2)
9556 {
9557 switch (mode0)
9558 {
58646b77
PB
9559 case VOIDmode:
9560 type = opaque_ftype_opaque_opaque;
9561 break;
0ac081f6
AH
9562 case V4SFmode:
9563 type = v4sf_ftype_v4sf_v4sf;
9564 break;
9565 case V4SImode:
9566 type = v4si_ftype_v4si_v4si;
9567 break;
9568 case V16QImode:
9569 type = v16qi_ftype_v16qi_v16qi;
9570 break;
9571 case V8HImode:
9572 type = v8hi_ftype_v8hi_v8hi;
9573 break;
a3170dc6
AH
9574 case V2SImode:
9575 type = v2si_ftype_v2si_v2si;
9576 break;
9577 case V2SFmode:
9578 type = v2sf_ftype_v2sf_v2sf;
9579 break;
9580 case SImode:
9581 type = int_ftype_int_int;
9582 break;
0ac081f6 9583 default:
37409796 9584 gcc_unreachable ();
0ac081f6
AH
9585 }
9586 }
9587
9588 /* A few other combos we really don't want to do manually. */
9589
9590 /* vint, vfloat, vfloat. */
9591 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9592 type = v4si_ftype_v4sf_v4sf;
9593
9594 /* vshort, vchar, vchar. */
9595 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9596 type = v8hi_ftype_v16qi_v16qi;
9597
9598 /* vint, vshort, vshort. */
9599 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9600 type = v4si_ftype_v8hi_v8hi;
9601
9602 /* vshort, vint, vint. */
9603 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9604 type = v8hi_ftype_v4si_v4si;
9605
9606 /* vchar, vshort, vshort. */
9607 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9608 type = v16qi_ftype_v8hi_v8hi;
9609
9610 /* vint, vchar, vint. */
9611 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9612 type = v4si_ftype_v16qi_v4si;
9613
fa066a23
AH
9614 /* vint, vchar, vchar. */
9615 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9616 type = v4si_ftype_v16qi_v16qi;
9617
0ac081f6
AH
9618 /* vint, vshort, vint. */
9619 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9620 type = v4si_ftype_v8hi_v4si;
f676971a 9621
a7b376ee 9622 /* vint, vint, 5-bit literal. */
2212663f 9623 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9624 type = v4si_ftype_v4si_int;
f676971a 9625
a7b376ee 9626 /* vshort, vshort, 5-bit literal. */
2212663f 9627 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 9628 type = v8hi_ftype_v8hi_int;
f676971a 9629
a7b376ee 9630 /* vchar, vchar, 5-bit literal. */
2212663f 9631 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 9632 type = v16qi_ftype_v16qi_int;
0ac081f6 9633
a7b376ee 9634 /* vfloat, vint, 5-bit literal. */
617e0e1d 9635 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9636 type = v4sf_ftype_v4si_int;
f676971a 9637
a7b376ee 9638 /* vint, vfloat, 5-bit literal. */
617e0e1d 9639 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 9640 type = v4si_ftype_v4sf_int;
617e0e1d 9641
a3170dc6
AH
9642 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9643 type = v2si_ftype_int_int;
9644
9645 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9646 type = v2si_ftype_v2si_char;
9647
9648 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9649 type = v2si_ftype_int_char;
9650
37409796 9651 else
0ac081f6 9652 {
37409796
NS
9653 /* int, x, x. */
9654 gcc_assert (mode0 == SImode);
0ac081f6
AH
9655 switch (mode1)
9656 {
9657 case V4SImode:
9658 type = int_ftype_v4si_v4si;
9659 break;
9660 case V4SFmode:
9661 type = int_ftype_v4sf_v4sf;
9662 break;
9663 case V16QImode:
9664 type = int_ftype_v16qi_v16qi;
9665 break;
9666 case V8HImode:
9667 type = int_ftype_v8hi_v8hi;
9668 break;
9669 default:
37409796 9670 gcc_unreachable ();
0ac081f6
AH
9671 }
9672 }
9673
2212663f
DB
9674 def_builtin (d->mask, d->name, type, d->code);
9675 }
24408032 9676
2212663f
DB
9677 /* Add the simple unary operators. */
9678 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 9679 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
9680 {
9681 enum machine_mode mode0, mode1;
9682 tree type;
58646b77
PB
9683 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9684 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9685
9686 if (is_overloaded)
9687 {
9688 mode0 = VOIDmode;
9689 mode1 = VOIDmode;
9690 }
9691 else
9692 {
9693 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9694 continue;
bb8df8a6 9695
58646b77
PB
9696 mode0 = insn_data[d->icode].operand[0].mode;
9697 mode1 = insn_data[d->icode].operand[1].mode;
9698 }
2212663f
DB
9699
9700 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 9701 type = v4si_ftype_int;
2212663f 9702 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 9703 type = v8hi_ftype_int;
2212663f 9704 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 9705 type = v16qi_ftype_int;
58646b77
PB
9706 else if (mode0 == VOIDmode && mode1 == VOIDmode)
9707 type = opaque_ftype_opaque;
617e0e1d
DB
9708 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9709 type = v4sf_ftype_v4sf;
20e26713
AH
9710 else if (mode0 == V8HImode && mode1 == V16QImode)
9711 type = v8hi_ftype_v16qi;
9712 else if (mode0 == V4SImode && mode1 == V8HImode)
9713 type = v4si_ftype_v8hi;
a3170dc6
AH
9714 else if (mode0 == V2SImode && mode1 == V2SImode)
9715 type = v2si_ftype_v2si;
9716 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9717 type = v2sf_ftype_v2sf;
9718 else if (mode0 == V2SFmode && mode1 == V2SImode)
9719 type = v2sf_ftype_v2si;
9720 else if (mode0 == V2SImode && mode1 == V2SFmode)
9721 type = v2si_ftype_v2sf;
9722 else if (mode0 == V2SImode && mode1 == QImode)
9723 type = v2si_ftype_char;
2212663f 9724 else
37409796 9725 gcc_unreachable ();
2212663f 9726
0ac081f6
AH
9727 def_builtin (d->mask, d->name, type, d->code);
9728 }
9729}
9730
c15c90bb
ZW
9731static void
9732rs6000_init_libfuncs (void)
9733{
602ea4d3
JJ
9734 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9735 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 9736 {
602ea4d3
JJ
9737 /* AIX library routines for float->int conversion. */
9738 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9739 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9740 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9741 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9742 }
c15c90bb 9743
602ea4d3 9744 if (!TARGET_IEEEQUAD)
98c41d98 9745 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
9746 if (!TARGET_XL_COMPAT)
9747 {
9748 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9749 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9750 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9751 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 9752
17caeff2 9753 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
9754 {
9755 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
9756 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
9757 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
9758 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
9759 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
9760 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
9761 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
9762
9763 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
9764 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
9765 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
9766 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
9767 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
9768 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
9769 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
9770 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
9771 }
b26941b4
JM
9772
9773 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
9774 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
9775 }
9776 else
9777 {
9778 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9779 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9780 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9781 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9782 }
c9034561 9783 else
c15c90bb 9784 {
c9034561 9785 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
9786
9787 set_optab_libfunc (add_optab, TFmode, "_q_add");
9788 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9789 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9790 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9791 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9792 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9793 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9794
c9034561
ZW
9795 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9796 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9797 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9798 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9799 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9800 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9801
85363ca0
ZW
9802 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9803 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9804 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9805 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9806 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9807 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9808 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 9809 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
9810 }
9811}
fba73eb1
DE
9812
9813\f
9814/* Expand a block clear operation, and return 1 if successful. Return 0
9815 if we should let the compiler generate normal code.
9816
9817 operands[0] is the destination
9818 operands[1] is the length
57e84f18 9819 operands[3] is the alignment */
fba73eb1
DE
9820
9821int
9822expand_block_clear (rtx operands[])
9823{
9824 rtx orig_dest = operands[0];
9825 rtx bytes_rtx = operands[1];
57e84f18 9826 rtx align_rtx = operands[3];
5514620a
GK
9827 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9828 HOST_WIDE_INT align;
9829 HOST_WIDE_INT bytes;
fba73eb1
DE
9830 int offset;
9831 int clear_bytes;
5514620a 9832 int clear_step;
fba73eb1
DE
9833
9834 /* If this is not a fixed size move, just call memcpy */
9835 if (! constp)
9836 return 0;
9837
37409796
NS
9838 /* This must be a fixed size alignment */
9839 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9840 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9841
9842 /* Anything to clear? */
9843 bytes = INTVAL (bytes_rtx);
9844 if (bytes <= 0)
9845 return 1;
9846
5514620a
GK
9847 /* Use the builtin memset after a point, to avoid huge code bloat.
9848 When optimize_size, avoid any significant code bloat; calling
9849 memset is about 4 instructions, so allow for one instruction to
9850 load zero and three to do clearing. */
9851 if (TARGET_ALTIVEC && align >= 128)
9852 clear_step = 16;
9853 else if (TARGET_POWERPC64 && align >= 32)
9854 clear_step = 8;
21d818ff
NF
9855 else if (TARGET_SPE && align >= 64)
9856 clear_step = 8;
5514620a
GK
9857 else
9858 clear_step = 4;
fba73eb1 9859
5514620a
GK
9860 if (optimize_size && bytes > 3 * clear_step)
9861 return 0;
9862 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9863 return 0;
9864
9865 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9866 {
fba73eb1
DE
9867 enum machine_mode mode = BLKmode;
9868 rtx dest;
f676971a 9869
5514620a
GK
9870 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9871 {
9872 clear_bytes = 16;
9873 mode = V4SImode;
9874 }
21d818ff
NF
9875 else if (bytes >= 8 && TARGET_SPE && align >= 64)
9876 {
9877 clear_bytes = 8;
9878 mode = V2SImode;
9879 }
5514620a 9880 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
9881 /* 64-bit loads and stores require word-aligned
9882 displacements. */
9883 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9884 {
9885 clear_bytes = 8;
9886 mode = DImode;
fba73eb1 9887 }
5514620a 9888 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
9889 { /* move 4 bytes */
9890 clear_bytes = 4;
9891 mode = SImode;
fba73eb1 9892 }
ec53fc93 9893 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
9894 { /* move 2 bytes */
9895 clear_bytes = 2;
9896 mode = HImode;
fba73eb1
DE
9897 }
9898 else /* move 1 byte at a time */
9899 {
9900 clear_bytes = 1;
9901 mode = QImode;
fba73eb1 9902 }
f676971a 9903
fba73eb1 9904 dest = adjust_address (orig_dest, mode, offset);
f676971a 9905
5514620a 9906 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9907 }
9908
9909 return 1;
9910}
9911
35aff10b 9912\f
7e69e155
MM
9913/* Expand a block move operation, and return 1 if successful. Return 0
9914 if we should let the compiler generate normal code.
9915
9916 operands[0] is the destination
9917 operands[1] is the source
9918 operands[2] is the length
9919 operands[3] is the alignment */
9920
3933e0e1
MM
9921#define MAX_MOVE_REG 4
9922
7e69e155 9923int
a2369ed3 9924expand_block_move (rtx operands[])
7e69e155 9925{
b6c9286a
MM
9926 rtx orig_dest = operands[0];
9927 rtx orig_src = operands[1];
7e69e155 9928 rtx bytes_rtx = operands[2];
7e69e155 9929 rtx align_rtx = operands[3];
3933e0e1 9930 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9931 int align;
3933e0e1
MM
9932 int bytes;
9933 int offset;
7e69e155 9934 int move_bytes;
cabfd258
GK
9935 rtx stores[MAX_MOVE_REG];
9936 int num_reg = 0;
7e69e155 9937
3933e0e1 9938 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9939 if (! constp)
3933e0e1
MM
9940 return 0;
9941
37409796
NS
9942 /* This must be a fixed size alignment */
9943 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9944 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9945
7e69e155 9946 /* Anything to move? */
3933e0e1
MM
9947 bytes = INTVAL (bytes_rtx);
9948 if (bytes <= 0)
7e69e155
MM
9949 return 1;
9950
ea9982a8 9951 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9952 reg_parm_stack_space. */
ea9982a8 9953 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9954 return 0;
9955
cabfd258 9956 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9957 {
cabfd258 9958 union {
70128ad9 9959 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9960 rtx (*mov) (rtx, rtx);
cabfd258
GK
9961 } gen_func;
9962 enum machine_mode mode = BLKmode;
9963 rtx src, dest;
f676971a 9964
5514620a
GK
9965 /* Altivec first, since it will be faster than a string move
9966 when it applies, and usually not significantly larger. */
9967 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9968 {
9969 move_bytes = 16;
9970 mode = V4SImode;
9971 gen_func.mov = gen_movv4si;
9972 }
21d818ff
NF
9973 else if (TARGET_SPE && bytes >= 8 && align >= 64)
9974 {
9975 move_bytes = 8;
9976 mode = V2SImode;
9977 gen_func.mov = gen_movv2si;
9978 }
5514620a 9979 else if (TARGET_STRING
cabfd258
GK
9980 && bytes > 24 /* move up to 32 bytes at a time */
9981 && ! fixed_regs[5]
9982 && ! fixed_regs[6]
9983 && ! fixed_regs[7]
9984 && ! fixed_regs[8]
9985 && ! fixed_regs[9]
9986 && ! fixed_regs[10]
9987 && ! fixed_regs[11]
9988 && ! fixed_regs[12])
7e69e155 9989 {
cabfd258 9990 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9991 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9992 }
9993 else if (TARGET_STRING
9994 && bytes > 16 /* move up to 24 bytes at a time */
9995 && ! fixed_regs[5]
9996 && ! fixed_regs[6]
9997 && ! fixed_regs[7]
9998 && ! fixed_regs[8]
9999 && ! fixed_regs[9]
10000 && ! fixed_regs[10])
10001 {
10002 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10003 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10004 }
10005 else if (TARGET_STRING
10006 && bytes > 8 /* move up to 16 bytes at a time */
10007 && ! fixed_regs[5]
10008 && ! fixed_regs[6]
10009 && ! fixed_regs[7]
10010 && ! fixed_regs[8])
10011 {
10012 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10013 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10014 }
10015 else if (bytes >= 8 && TARGET_POWERPC64
10016 /* 64-bit loads and stores require word-aligned
10017 displacements. */
fba73eb1 10018 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10019 {
10020 move_bytes = 8;
10021 mode = DImode;
10022 gen_func.mov = gen_movdi;
10023 }
10024 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10025 { /* move up to 8 bytes at a time */
10026 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10027 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10028 }
cd7d9ca4 10029 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10030 { /* move 4 bytes */
10031 move_bytes = 4;
10032 mode = SImode;
10033 gen_func.mov = gen_movsi;
10034 }
ec53fc93 10035 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10036 { /* move 2 bytes */
10037 move_bytes = 2;
10038 mode = HImode;
10039 gen_func.mov = gen_movhi;
10040 }
10041 else if (TARGET_STRING && bytes > 1)
10042 { /* move up to 4 bytes at a time */
10043 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10044 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10045 }
10046 else /* move 1 byte at a time */
10047 {
10048 move_bytes = 1;
10049 mode = QImode;
10050 gen_func.mov = gen_movqi;
10051 }
f676971a 10052
cabfd258
GK
10053 src = adjust_address (orig_src, mode, offset);
10054 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10055
10056 if (mode != BLKmode)
cabfd258
GK
10057 {
10058 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10059
cabfd258
GK
10060 emit_insn ((*gen_func.mov) (tmp_reg, src));
10061 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10062 }
3933e0e1 10063
cabfd258
GK
10064 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10065 {
10066 int i;
10067 for (i = 0; i < num_reg; i++)
10068 emit_insn (stores[i]);
10069 num_reg = 0;
10070 }
35aff10b 10071
cabfd258 10072 if (mode == BLKmode)
7e69e155 10073 {
70128ad9 10074 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10075 patterns require zero offset. */
10076 if (!REG_P (XEXP (src, 0)))
b6c9286a 10077 {
cabfd258
GK
10078 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10079 src = replace_equiv_address (src, src_reg);
b6c9286a 10080 }
cabfd258 10081 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10082
cabfd258 10083 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10084 {
cabfd258
GK
10085 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10086 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10087 }
cabfd258 10088 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10089
70128ad9 10090 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10091 GEN_INT (move_bytes & 31),
10092 align_rtx));
7e69e155 10093 }
7e69e155
MM
10094 }
10095
10096 return 1;
10097}
10098
d62294f5 10099\f
9caa3eb2
DE
10100/* Return a string to perform a load_multiple operation.
10101 operands[0] is the vector.
10102 operands[1] is the source address.
10103 operands[2] is the first destination register. */
10104
10105const char *
a2369ed3 10106rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10107{
10108 /* We have to handle the case where the pseudo used to contain the address
10109 is assigned to one of the output registers. */
10110 int i, j;
10111 int words = XVECLEN (operands[0], 0);
10112 rtx xop[10];
10113
10114 if (XVECLEN (operands[0], 0) == 1)
10115 return "{l|lwz} %2,0(%1)";
10116
10117 for (i = 0; i < words; i++)
10118 if (refers_to_regno_p (REGNO (operands[2]) + i,
10119 REGNO (operands[2]) + i + 1, operands[1], 0))
10120 {
10121 if (i == words-1)
10122 {
10123 xop[0] = GEN_INT (4 * (words-1));
10124 xop[1] = operands[1];
10125 xop[2] = operands[2];
10126 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10127 return "";
10128 }
10129 else if (i == 0)
10130 {
10131 xop[0] = GEN_INT (4 * (words-1));
10132 xop[1] = operands[1];
10133 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10134 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10135 return "";
10136 }
10137 else
10138 {
10139 for (j = 0; j < words; j++)
10140 if (j != i)
10141 {
10142 xop[0] = GEN_INT (j * 4);
10143 xop[1] = operands[1];
10144 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10145 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10146 }
10147 xop[0] = GEN_INT (i * 4);
10148 xop[1] = operands[1];
10149 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10150 return "";
10151 }
10152 }
10153
10154 return "{lsi|lswi} %2,%1,%N0";
10155}
10156
9878760c 10157\f
a4f6c312
SS
10158/* A validation routine: say whether CODE, a condition code, and MODE
10159 match. The other alternatives either don't make sense or should
10160 never be generated. */
39a10a29 10161
48d72335 10162void
a2369ed3 10163validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10164{
37409796
NS
10165 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10166 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10167 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10168
10169 /* These don't make sense. */
37409796
NS
10170 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10171 || mode != CCUNSmode);
39a10a29 10172
37409796
NS
10173 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10174 || mode == CCUNSmode);
39a10a29 10175
37409796
NS
10176 gcc_assert (mode == CCFPmode
10177 || (code != ORDERED && code != UNORDERED
10178 && code != UNEQ && code != LTGT
10179 && code != UNGT && code != UNLT
10180 && code != UNGE && code != UNLE));
f676971a
EC
10181
10182 /* These should never be generated except for
bc9ec0e0 10183 flag_finite_math_only. */
37409796
NS
10184 gcc_assert (mode != CCFPmode
10185 || flag_finite_math_only
10186 || (code != LE && code != GE
10187 && code != UNEQ && code != LTGT
10188 && code != UNGT && code != UNLT));
39a10a29
GK
10189
10190 /* These are invalid; the information is not there. */
37409796 10191 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10192}
10193
9878760c
RK
10194\f
10195/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10196 mask required to convert the result of a rotate insn into a shift
b1765bde 10197 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10198
10199int
a2369ed3 10200includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10201{
e2c953b6
DE
10202 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10203
10204 shift_mask <<= INTVAL (shiftop);
9878760c 10205
b1765bde 10206 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10207}
10208
10209/* Similar, but for right shift. */
10210
10211int
a2369ed3 10212includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10213{
a7653a2c 10214 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10215
10216 shift_mask >>= INTVAL (shiftop);
10217
b1765bde 10218 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10219}
10220
c5059423
AM
10221/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10222 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10223 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10224
10225int
a2369ed3 10226includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10227{
c5059423
AM
10228 if (GET_CODE (andop) == CONST_INT)
10229 {
02071907 10230 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10231
c5059423 10232 c = INTVAL (andop);
02071907 10233 if (c == 0 || c == ~0)
c5059423 10234 return 0;
e2c953b6 10235
02071907 10236 shift_mask = ~0;
c5059423
AM
10237 shift_mask <<= INTVAL (shiftop);
10238
b6d08ca1 10239 /* Find the least significant one bit. */
c5059423
AM
10240 lsb = c & -c;
10241
10242 /* It must coincide with the LSB of the shift mask. */
10243 if (-lsb != shift_mask)
10244 return 0;
e2c953b6 10245
c5059423
AM
10246 /* Invert to look for the next transition (if any). */
10247 c = ~c;
10248
10249 /* Remove the low group of ones (originally low group of zeros). */
10250 c &= -lsb;
10251
10252 /* Again find the lsb, and check we have all 1's above. */
10253 lsb = c & -c;
10254 return c == -lsb;
10255 }
10256 else if (GET_CODE (andop) == CONST_DOUBLE
10257 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10258 {
02071907
AM
10259 HOST_WIDE_INT low, high, lsb;
10260 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10261
10262 low = CONST_DOUBLE_LOW (andop);
10263 if (HOST_BITS_PER_WIDE_INT < 64)
10264 high = CONST_DOUBLE_HIGH (andop);
10265
10266 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10267 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10268 return 0;
10269
10270 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10271 {
02071907 10272 shift_mask_high = ~0;
c5059423
AM
10273 if (INTVAL (shiftop) > 32)
10274 shift_mask_high <<= INTVAL (shiftop) - 32;
10275
10276 lsb = high & -high;
10277
10278 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10279 return 0;
10280
10281 high = ~high;
10282 high &= -lsb;
10283
10284 lsb = high & -high;
10285 return high == -lsb;
10286 }
10287
02071907 10288 shift_mask_low = ~0;
c5059423
AM
10289 shift_mask_low <<= INTVAL (shiftop);
10290
10291 lsb = low & -low;
10292
10293 if (-lsb != shift_mask_low)
10294 return 0;
10295
10296 if (HOST_BITS_PER_WIDE_INT < 64)
10297 high = ~high;
10298 low = ~low;
10299 low &= -lsb;
10300
10301 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10302 {
10303 lsb = high & -high;
10304 return high == -lsb;
10305 }
10306
10307 lsb = low & -low;
10308 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10309 }
10310 else
10311 return 0;
10312}
e2c953b6 10313
c5059423
AM
10314/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10315 to perform a left shift. It must have SHIFTOP or more least
c1207243 10316 significant 0's, with the remainder of the word 1's. */
e2c953b6 10317
c5059423 10318int
a2369ed3 10319includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10320{
e2c953b6 10321 if (GET_CODE (andop) == CONST_INT)
c5059423 10322 {
02071907 10323 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10324
02071907 10325 shift_mask = ~0;
c5059423
AM
10326 shift_mask <<= INTVAL (shiftop);
10327 c = INTVAL (andop);
10328
c1207243 10329 /* Find the least significant one bit. */
c5059423
AM
10330 lsb = c & -c;
10331
10332 /* It must be covered by the shift mask.
a4f6c312 10333 This test also rejects c == 0. */
c5059423
AM
10334 if ((lsb & shift_mask) == 0)
10335 return 0;
10336
10337 /* Check we have all 1's above the transition, and reject all 1's. */
10338 return c == -lsb && lsb != 1;
10339 }
10340 else if (GET_CODE (andop) == CONST_DOUBLE
10341 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10342 {
02071907 10343 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10344
10345 low = CONST_DOUBLE_LOW (andop);
10346
10347 if (HOST_BITS_PER_WIDE_INT < 64)
10348 {
02071907 10349 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10350
10351 high = CONST_DOUBLE_HIGH (andop);
10352
10353 if (low == 0)
10354 {
02071907 10355 shift_mask_high = ~0;
c5059423
AM
10356 if (INTVAL (shiftop) > 32)
10357 shift_mask_high <<= INTVAL (shiftop) - 32;
10358
10359 lsb = high & -high;
10360
10361 if ((lsb & shift_mask_high) == 0)
10362 return 0;
10363
10364 return high == -lsb;
10365 }
10366 if (high != ~0)
10367 return 0;
10368 }
10369
02071907 10370 shift_mask_low = ~0;
c5059423
AM
10371 shift_mask_low <<= INTVAL (shiftop);
10372
10373 lsb = low & -low;
10374
10375 if ((lsb & shift_mask_low) == 0)
10376 return 0;
10377
10378 return low == -lsb && lsb != 1;
10379 }
e2c953b6 10380 else
c5059423 10381 return 0;
9878760c 10382}
35068b43 10383
11ac38b2
DE
10384/* Return 1 if operands will generate a valid arguments to rlwimi
10385instruction for insert with right shift in 64-bit mode. The mask may
10386not start on the first bit or stop on the last bit because wrap-around
10387effects of instruction do not correspond to semantics of RTL insn. */
10388
10389int
10390insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10391{
429ec7dc
DE
10392 if (INTVAL (startop) > 32
10393 && INTVAL (startop) < 64
10394 && INTVAL (sizeop) > 1
10395 && INTVAL (sizeop) + INTVAL (startop) < 64
10396 && INTVAL (shiftop) > 0
10397 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10398 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10399 return 1;
10400
10401 return 0;
10402}
10403
35068b43 10404/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10405 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10406
10407int
a2369ed3 10408registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10409{
10410 /* We might have been passed a SUBREG. */
f676971a 10411 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10412 return 0;
f676971a 10413
90f81f99
AP
10414 /* We might have been passed non floating point registers. */
10415 if (!FP_REGNO_P (REGNO (reg1))
10416 || !FP_REGNO_P (REGNO (reg2)))
10417 return 0;
35068b43
RK
10418
10419 return (REGNO (reg1) == REGNO (reg2) - 1);
10420}
10421
a4f6c312
SS
10422/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10423 addr1 and addr2 must be in consecutive memory locations
10424 (addr2 == addr1 + 8). */
35068b43
RK
10425
10426int
90f81f99 10427mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10428{
90f81f99 10429 rtx addr1, addr2;
bb8df8a6
EC
10430 unsigned int reg1, reg2;
10431 int offset1, offset2;
35068b43 10432
90f81f99
AP
10433 /* The mems cannot be volatile. */
10434 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10435 return 0;
f676971a 10436
90f81f99
AP
10437 addr1 = XEXP (mem1, 0);
10438 addr2 = XEXP (mem2, 0);
10439
35068b43
RK
10440 /* Extract an offset (if used) from the first addr. */
10441 if (GET_CODE (addr1) == PLUS)
10442 {
10443 /* If not a REG, return zero. */
10444 if (GET_CODE (XEXP (addr1, 0)) != REG)
10445 return 0;
10446 else
10447 {
c4ad648e 10448 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10449 /* The offset must be constant! */
10450 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10451 return 0;
10452 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10453 }
10454 }
10455 else if (GET_CODE (addr1) != REG)
10456 return 0;
10457 else
10458 {
10459 reg1 = REGNO (addr1);
10460 /* This was a simple (mem (reg)) expression. Offset is 0. */
10461 offset1 = 0;
10462 }
10463
bb8df8a6
EC
10464 /* And now for the second addr. */
10465 if (GET_CODE (addr2) == PLUS)
10466 {
10467 /* If not a REG, return zero. */
10468 if (GET_CODE (XEXP (addr2, 0)) != REG)
10469 return 0;
10470 else
10471 {
10472 reg2 = REGNO (XEXP (addr2, 0));
10473 /* The offset must be constant. */
10474 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
10475 return 0;
10476 offset2 = INTVAL (XEXP (addr2, 1));
10477 }
10478 }
10479 else if (GET_CODE (addr2) != REG)
35068b43 10480 return 0;
bb8df8a6
EC
10481 else
10482 {
10483 reg2 = REGNO (addr2);
10484 /* This was a simple (mem (reg)) expression. Offset is 0. */
10485 offset2 = 0;
10486 }
35068b43 10487
bb8df8a6
EC
10488 /* Both of these must have the same base register. */
10489 if (reg1 != reg2)
35068b43
RK
10490 return 0;
10491
10492 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 10493 if (offset2 != offset1 + 8)
35068b43
RK
10494 return 0;
10495
10496 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
10497 instructions. */
10498 return 1;
10499}
9878760c
RK
10500\f
10501/* Return the register class of a scratch register needed to copy IN into
10502 or out of a register in CLASS in MODE. If it can be done directly,
10503 NO_REGS is returned. */
10504
10505enum reg_class
3c4774e0
R
10506rs6000_secondary_reload_class (enum reg_class class,
10507 enum machine_mode mode ATTRIBUTE_UNUSED,
10508 rtx in)
9878760c 10509{
5accd822 10510 int regno;
9878760c 10511
ab82a49f
AP
10512 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10513#if TARGET_MACHO
c4ad648e 10514 && MACHOPIC_INDIRECT
ab82a49f 10515#endif
c4ad648e 10516 ))
46fad5b7
DJ
10517 {
10518 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
10519 other than BASE_REGS for TARGET_ELF. So indicate that a
10520 register from BASE_REGS is needed as an intermediate
10521 register.
f676971a 10522
46fad5b7
DJ
10523 On Darwin, pic addresses require a load from memory, which
10524 needs a base register. */
10525 if (class != BASE_REGS
c4ad648e
AM
10526 && (GET_CODE (in) == SYMBOL_REF
10527 || GET_CODE (in) == HIGH
10528 || GET_CODE (in) == LABEL_REF
10529 || GET_CODE (in) == CONST))
10530 return BASE_REGS;
46fad5b7 10531 }
e7b7998a 10532
5accd822
DE
10533 if (GET_CODE (in) == REG)
10534 {
10535 regno = REGNO (in);
10536 if (regno >= FIRST_PSEUDO_REGISTER)
10537 {
10538 regno = true_regnum (in);
10539 if (regno >= FIRST_PSEUDO_REGISTER)
10540 regno = -1;
10541 }
10542 }
10543 else if (GET_CODE (in) == SUBREG)
10544 {
10545 regno = true_regnum (in);
10546 if (regno >= FIRST_PSEUDO_REGISTER)
10547 regno = -1;
10548 }
10549 else
10550 regno = -1;
10551
9878760c
RK
10552 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10553 into anything. */
10554 if (class == GENERAL_REGS || class == BASE_REGS
10555 || (regno >= 0 && INT_REGNO_P (regno)))
10556 return NO_REGS;
10557
10558 /* Constants, memory, and FP registers can go into FP registers. */
10559 if ((regno == -1 || FP_REGNO_P (regno))
10560 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10561 return NO_REGS;
10562
0ac081f6
AH
10563 /* Memory, and AltiVec registers can go into AltiVec registers. */
10564 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10565 && class == ALTIVEC_REGS)
10566 return NO_REGS;
10567
9878760c
RK
10568 /* We can copy among the CR registers. */
10569 if ((class == CR_REGS || class == CR0_REGS)
10570 && regno >= 0 && CR_REGNO_P (regno))
10571 return NO_REGS;
10572
10573 /* Otherwise, we need GENERAL_REGS. */
10574 return GENERAL_REGS;
10575}
10576\f
10577/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 10578 know this is a valid comparison.
9878760c
RK
10579
10580 SCC_P is 1 if this is for an scc. That means that %D will have been
10581 used instead of %C, so the bits will be in different places.
10582
b4ac57ab 10583 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
10584
10585int
a2369ed3 10586ccr_bit (rtx op, int scc_p)
9878760c
RK
10587{
10588 enum rtx_code code = GET_CODE (op);
10589 enum machine_mode cc_mode;
10590 int cc_regnum;
10591 int base_bit;
9ebbca7d 10592 rtx reg;
9878760c 10593
ec8e098d 10594 if (!COMPARISON_P (op))
9878760c
RK
10595 return -1;
10596
9ebbca7d
GK
10597 reg = XEXP (op, 0);
10598
37409796 10599 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
10600
10601 cc_mode = GET_MODE (reg);
10602 cc_regnum = REGNO (reg);
10603 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 10604
39a10a29 10605 validate_condition_mode (code, cc_mode);
c5defebb 10606
b7053a3f
GK
10607 /* When generating a sCOND operation, only positive conditions are
10608 allowed. */
37409796
NS
10609 gcc_assert (!scc_p
10610 || code == EQ || code == GT || code == LT || code == UNORDERED
10611 || code == GTU || code == LTU);
f676971a 10612
9878760c
RK
10613 switch (code)
10614 {
10615 case NE:
10616 return scc_p ? base_bit + 3 : base_bit + 2;
10617 case EQ:
10618 return base_bit + 2;
1c882ea4 10619 case GT: case GTU: case UNLE:
9878760c 10620 return base_bit + 1;
1c882ea4 10621 case LT: case LTU: case UNGE:
9878760c 10622 return base_bit;
1c882ea4
GK
10623 case ORDERED: case UNORDERED:
10624 return base_bit + 3;
9878760c
RK
10625
10626 case GE: case GEU:
39a10a29 10627 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
10628 unordered position. So test that bit. For integer, this is ! LT
10629 unless this is an scc insn. */
39a10a29 10630 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
10631
10632 case LE: case LEU:
39a10a29 10633 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 10634
9878760c 10635 default:
37409796 10636 gcc_unreachable ();
9878760c
RK
10637 }
10638}
1ff7789b 10639\f
8d30c4ee 10640/* Return the GOT register. */
1ff7789b 10641
9390387d 10642rtx
a2369ed3 10643rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 10644{
a4f6c312
SS
10645 /* The second flow pass currently (June 1999) can't update
10646 regs_ever_live without disturbing other parts of the compiler, so
10647 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
10648 if (!can_create_pseudo_p ()
10649 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 10650 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 10651
8d30c4ee 10652 current_function_uses_pic_offset_table = 1;
3cb999d8 10653
1ff7789b
MM
10654 return pic_offset_table_rtx;
10655}
a7df97e6 10656\f
e2500fed
GK
10657/* Function to init struct machine_function.
10658 This will be called, via a pointer variable,
10659 from push_function_context. */
a7df97e6 10660
e2500fed 10661static struct machine_function *
863d938c 10662rs6000_init_machine_status (void)
a7df97e6 10663{
e2500fed 10664 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 10665}
9878760c 10666\f
0ba1b2ff
AM
10667/* These macros test for integers and extract the low-order bits. */
10668#define INT_P(X) \
10669((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
10670 && GET_MODE (X) == VOIDmode)
10671
10672#define INT_LOWPART(X) \
10673 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10674
10675int
a2369ed3 10676extract_MB (rtx op)
0ba1b2ff
AM
10677{
10678 int i;
10679 unsigned long val = INT_LOWPART (op);
10680
10681 /* If the high bit is zero, the value is the first 1 bit we find
10682 from the left. */
10683 if ((val & 0x80000000) == 0)
10684 {
37409796 10685 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10686
10687 i = 1;
10688 while (((val <<= 1) & 0x80000000) == 0)
10689 ++i;
10690 return i;
10691 }
10692
10693 /* If the high bit is set and the low bit is not, or the mask is all
10694 1's, the value is zero. */
10695 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10696 return 0;
10697
10698 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10699 from the right. */
10700 i = 31;
10701 while (((val >>= 1) & 1) != 0)
10702 --i;
10703
10704 return i;
10705}
10706
10707int
a2369ed3 10708extract_ME (rtx op)
0ba1b2ff
AM
10709{
10710 int i;
10711 unsigned long val = INT_LOWPART (op);
10712
10713 /* If the low bit is zero, the value is the first 1 bit we find from
10714 the right. */
10715 if ((val & 1) == 0)
10716 {
37409796 10717 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10718
10719 i = 30;
10720 while (((val >>= 1) & 1) == 0)
10721 --i;
10722
10723 return i;
10724 }
10725
10726 /* If the low bit is set and the high bit is not, or the mask is all
10727 1's, the value is 31. */
10728 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10729 return 31;
10730
10731 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10732 from the left. */
10733 i = 0;
10734 while (((val <<= 1) & 0x80000000) != 0)
10735 ++i;
10736
10737 return i;
10738}
10739
c4501e62
JJ
10740/* Locate some local-dynamic symbol still in use by this function
10741 so that we can print its name in some tls_ld pattern. */
10742
10743static const char *
863d938c 10744rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
10745{
10746 rtx insn;
10747
10748 if (cfun->machine->some_ld_name)
10749 return cfun->machine->some_ld_name;
10750
10751 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10752 if (INSN_P (insn)
10753 && for_each_rtx (&PATTERN (insn),
10754 rs6000_get_some_local_dynamic_name_1, 0))
10755 return cfun->machine->some_ld_name;
10756
37409796 10757 gcc_unreachable ();
c4501e62
JJ
10758}
10759
10760/* Helper function for rs6000_get_some_local_dynamic_name. */
10761
10762static int
a2369ed3 10763rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
10764{
10765 rtx x = *px;
10766
10767 if (GET_CODE (x) == SYMBOL_REF)
10768 {
10769 const char *str = XSTR (x, 0);
10770 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10771 {
10772 cfun->machine->some_ld_name = str;
10773 return 1;
10774 }
10775 }
10776
10777 return 0;
10778}
10779
85b776df
AM
10780/* Write out a function code label. */
10781
10782void
10783rs6000_output_function_entry (FILE *file, const char *fname)
10784{
10785 if (fname[0] != '.')
10786 {
10787 switch (DEFAULT_ABI)
10788 {
10789 default:
37409796 10790 gcc_unreachable ();
85b776df
AM
10791
10792 case ABI_AIX:
10793 if (DOT_SYMBOLS)
10794 putc ('.', file);
10795 else
10796 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10797 break;
10798
10799 case ABI_V4:
10800 case ABI_DARWIN:
10801 break;
10802 }
10803 }
10804 if (TARGET_AIX)
10805 RS6000_OUTPUT_BASENAME (file, fname);
10806 else
10807 assemble_name (file, fname);
10808}
10809
9878760c
RK
10810/* Print an operand. Recognize special options, documented below. */
10811
38c1f2d7 10812#if TARGET_ELF
d9407988 10813#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10814#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10815#else
10816#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10817#define SMALL_DATA_REG 0
ba5e43aa
MM
10818#endif
10819
9878760c 10820void
a2369ed3 10821print_operand (FILE *file, rtx x, int code)
9878760c
RK
10822{
10823 int i;
a260abc9 10824 HOST_WIDE_INT val;
0ba1b2ff 10825 unsigned HOST_WIDE_INT uval;
9878760c
RK
10826
10827 switch (code)
10828 {
a8b3aeda 10829 case '.':
a85d226b
RK
10830 /* Write out an instruction after the call which may be replaced
10831 with glue code by the loader. This depends on the AIX version. */
10832 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10833 return;
10834
81eace42
GK
10835 /* %a is output_address. */
10836
9854d9ed
RK
10837 case 'A':
10838 /* If X is a constant integer whose low-order 5 bits are zero,
10839 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10840 in the AIX assembler where "sri" with a zero shift count
20e26713 10841 writes a trash instruction. */
9854d9ed 10842 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10843 putc ('l', file);
9854d9ed 10844 else
76229ac8 10845 putc ('r', file);
9854d9ed
RK
10846 return;
10847
10848 case 'b':
e2c953b6
DE
10849 /* If constant, low-order 16 bits of constant, unsigned.
10850 Otherwise, write normally. */
10851 if (INT_P (x))
10852 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10853 else
10854 print_operand (file, x, 0);
cad12a8d
RK
10855 return;
10856
a260abc9
DE
10857 case 'B':
10858 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10859 for 64-bit mask direction. */
9390387d 10860 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10861 return;
a260abc9 10862
81eace42
GK
10863 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10864 output_operand. */
10865
423c1189
AH
10866 case 'c':
10867 /* X is a CR register. Print the number of the GT bit of the CR. */
10868 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10869 output_operand_lossage ("invalid %%E value");
10870 else
10871 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10872 return;
10873
10874 case 'D':
cef6b86c 10875 /* Like 'J' but get to the GT bit only. */
37409796 10876 gcc_assert (GET_CODE (x) == REG);
423c1189 10877
cef6b86c
EB
10878 /* Bit 1 is GT bit. */
10879 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 10880
cef6b86c
EB
10881 /* Add one for shift count in rlinm for scc. */
10882 fprintf (file, "%d", i + 1);
423c1189
AH
10883 return;
10884
9854d9ed 10885 case 'E':
39a10a29 10886 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
10887 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10888 output_operand_lossage ("invalid %%E value");
78fbdbf7 10889 else
39a10a29 10890 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10891 return;
9854d9ed
RK
10892
10893 case 'f':
10894 /* X is a CR register. Print the shift count needed to move it
10895 to the high-order four bits. */
10896 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10897 output_operand_lossage ("invalid %%f value");
10898 else
9ebbca7d 10899 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10900 return;
10901
10902 case 'F':
10903 /* Similar, but print the count for the rotate in the opposite
10904 direction. */
10905 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10906 output_operand_lossage ("invalid %%F value");
10907 else
9ebbca7d 10908 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10909 return;
10910
10911 case 'G':
10912 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10913 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10914 if (GET_CODE (x) != CONST_INT)
10915 output_operand_lossage ("invalid %%G value");
10916 else if (INTVAL (x) >= 0)
76229ac8 10917 putc ('z', file);
9854d9ed 10918 else
76229ac8 10919 putc ('m', file);
9854d9ed 10920 return;
e2c953b6 10921
9878760c 10922 case 'h':
a4f6c312
SS
10923 /* If constant, output low-order five bits. Otherwise, write
10924 normally. */
9878760c 10925 if (INT_P (x))
5f59ecb7 10926 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10927 else
10928 print_operand (file, x, 0);
10929 return;
10930
64305719 10931 case 'H':
a4f6c312
SS
10932 /* If constant, output low-order six bits. Otherwise, write
10933 normally. */
64305719 10934 if (INT_P (x))
5f59ecb7 10935 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10936 else
10937 print_operand (file, x, 0);
10938 return;
10939
9854d9ed
RK
10940 case 'I':
10941 /* Print `i' if this is a constant, else nothing. */
9878760c 10942 if (INT_P (x))
76229ac8 10943 putc ('i', file);
9878760c
RK
10944 return;
10945
9854d9ed
RK
10946 case 'j':
10947 /* Write the bit number in CCR for jump. */
10948 i = ccr_bit (x, 0);
10949 if (i == -1)
10950 output_operand_lossage ("invalid %%j code");
9878760c 10951 else
9854d9ed 10952 fprintf (file, "%d", i);
9878760c
RK
10953 return;
10954
9854d9ed
RK
10955 case 'J':
10956 /* Similar, but add one for shift count in rlinm for scc and pass
10957 scc flag to `ccr_bit'. */
10958 i = ccr_bit (x, 1);
10959 if (i == -1)
10960 output_operand_lossage ("invalid %%J code");
10961 else
a0466a68
RK
10962 /* If we want bit 31, write a shift count of zero, not 32. */
10963 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10964 return;
10965
9854d9ed
RK
10966 case 'k':
10967 /* X must be a constant. Write the 1's complement of the
10968 constant. */
9878760c 10969 if (! INT_P (x))
9854d9ed 10970 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10971 else
10972 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10973 return;
10974
81eace42 10975 case 'K':
9ebbca7d
GK
10976 /* X must be a symbolic constant on ELF. Write an
10977 expression suitable for an 'addi' that adds in the low 16
10978 bits of the MEM. */
10979 if (GET_CODE (x) != CONST)
10980 {
10981 print_operand_address (file, x);
10982 fputs ("@l", file);
10983 }
10984 else
10985 {
10986 if (GET_CODE (XEXP (x, 0)) != PLUS
10987 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10988 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10989 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10990 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10991 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10992 fputs ("@l", file);
ed8d2920
MM
10993 /* For GNU as, there must be a non-alphanumeric character
10994 between 'l' and the number. The '-' is added by
10995 print_operand() already. */
10996 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10997 fputs ("+", file);
9ebbca7d
GK
10998 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10999 }
81eace42
GK
11000 return;
11001
11002 /* %l is output_asm_label. */
9ebbca7d 11003
9854d9ed
RK
11004 case 'L':
11005 /* Write second word of DImode or DFmode reference. Works on register
11006 or non-indexed memory only. */
11007 if (GET_CODE (x) == REG)
fb5c67a7 11008 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11009 else if (GET_CODE (x) == MEM)
11010 {
11011 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11012 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11013 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11014 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11015 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11016 UNITS_PER_WORD));
6fb5fa3c
DB
11017 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11018 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11019 UNITS_PER_WORD));
9854d9ed 11020 else
d7624dc0
RK
11021 output_address (XEXP (adjust_address_nv (x, SImode,
11022 UNITS_PER_WORD),
11023 0));
ed8908e7 11024
ba5e43aa 11025 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11026 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11027 reg_names[SMALL_DATA_REG]);
9854d9ed 11028 }
9878760c 11029 return;
f676971a 11030
9878760c
RK
11031 case 'm':
11032 /* MB value for a mask operand. */
b1765bde 11033 if (! mask_operand (x, SImode))
9878760c
RK
11034 output_operand_lossage ("invalid %%m value");
11035
0ba1b2ff 11036 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11037 return;
11038
11039 case 'M':
11040 /* ME value for a mask operand. */
b1765bde 11041 if (! mask_operand (x, SImode))
a260abc9 11042 output_operand_lossage ("invalid %%M value");
9878760c 11043
0ba1b2ff 11044 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11045 return;
11046
81eace42
GK
11047 /* %n outputs the negative of its operand. */
11048
9878760c
RK
11049 case 'N':
11050 /* Write the number of elements in the vector times 4. */
11051 if (GET_CODE (x) != PARALLEL)
11052 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11053 else
11054 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11055 return;
11056
11057 case 'O':
11058 /* Similar, but subtract 1 first. */
11059 if (GET_CODE (x) != PARALLEL)
1427100a 11060 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11061 else
11062 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11063 return;
11064
9854d9ed
RK
11065 case 'p':
11066 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11067 if (! INT_P (x)
2bfcf297 11068 || INT_LOWPART (x) < 0
9854d9ed
RK
11069 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11070 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11071 else
11072 fprintf (file, "%d", i);
9854d9ed
RK
11073 return;
11074
9878760c
RK
11075 case 'P':
11076 /* The operand must be an indirect memory reference. The result
8bb418a3 11077 is the register name. */
9878760c
RK
11078 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11079 || REGNO (XEXP (x, 0)) >= 32)
11080 output_operand_lossage ("invalid %%P value");
e2c953b6 11081 else
fb5c67a7 11082 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11083 return;
11084
dfbdccdb
GK
11085 case 'q':
11086 /* This outputs the logical code corresponding to a boolean
11087 expression. The expression may have one or both operands
39a10a29 11088 negated (if one, only the first one). For condition register
c4ad648e
AM
11089 logical operations, it will also treat the negated
11090 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11091 {
63bc1d05 11092 const char *const *t = 0;
dfbdccdb
GK
11093 const char *s;
11094 enum rtx_code code = GET_CODE (x);
11095 static const char * const tbl[3][3] = {
11096 { "and", "andc", "nor" },
11097 { "or", "orc", "nand" },
11098 { "xor", "eqv", "xor" } };
11099
11100 if (code == AND)
11101 t = tbl[0];
11102 else if (code == IOR)
11103 t = tbl[1];
11104 else if (code == XOR)
11105 t = tbl[2];
11106 else
11107 output_operand_lossage ("invalid %%q value");
11108
11109 if (GET_CODE (XEXP (x, 0)) != NOT)
11110 s = t[0];
11111 else
11112 {
11113 if (GET_CODE (XEXP (x, 1)) == NOT)
11114 s = t[2];
11115 else
11116 s = t[1];
11117 }
f676971a 11118
dfbdccdb
GK
11119 fputs (s, file);
11120 }
11121 return;
11122
2c4a9cff
DE
11123 case 'Q':
11124 if (TARGET_MFCRF)
3b6ce0af 11125 fputc (',', file);
5efb1046 11126 /* FALLTHRU */
2c4a9cff
DE
11127 else
11128 return;
11129
9854d9ed
RK
11130 case 'R':
11131 /* X is a CR register. Print the mask for `mtcrf'. */
11132 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11133 output_operand_lossage ("invalid %%R value");
11134 else
9ebbca7d 11135 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11136 return;
9854d9ed
RK
11137
11138 case 's':
11139 /* Low 5 bits of 32 - value */
11140 if (! INT_P (x))
11141 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11142 else
11143 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11144 return;
9854d9ed 11145
a260abc9 11146 case 'S':
0ba1b2ff 11147 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11148 CONST_INT 32-bit mask is considered sign-extended so any
11149 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11150 if (! mask64_operand (x, DImode))
a260abc9
DE
11151 output_operand_lossage ("invalid %%S value");
11152
0ba1b2ff 11153 uval = INT_LOWPART (x);
a260abc9 11154
0ba1b2ff 11155 if (uval & 1) /* Clear Left */
a260abc9 11156 {
f099d360
GK
11157#if HOST_BITS_PER_WIDE_INT > 64
11158 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11159#endif
0ba1b2ff 11160 i = 64;
a260abc9 11161 }
0ba1b2ff 11162 else /* Clear Right */
a260abc9 11163 {
0ba1b2ff 11164 uval = ~uval;
f099d360
GK
11165#if HOST_BITS_PER_WIDE_INT > 64
11166 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11167#endif
0ba1b2ff 11168 i = 63;
a260abc9 11169 }
0ba1b2ff
AM
11170 while (uval != 0)
11171 --i, uval >>= 1;
37409796 11172 gcc_assert (i >= 0);
0ba1b2ff
AM
11173 fprintf (file, "%d", i);
11174 return;
a260abc9 11175
a3170dc6
AH
11176 case 't':
11177 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11178 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11179
11180 /* Bit 3 is OV bit. */
11181 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11182
11183 /* If we want bit 31, write a shift count of zero, not 32. */
11184 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11185 return;
11186
cccf3bdc
DE
11187 case 'T':
11188 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11189 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11190 && REGNO (x) != CTR_REGNO))
cccf3bdc 11191 output_operand_lossage ("invalid %%T value");
1de43f85 11192 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11193 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11194 else
11195 fputs ("ctr", file);
11196 return;
11197
9854d9ed 11198 case 'u':
802a0058 11199 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11200 if (! INT_P (x))
11201 output_operand_lossage ("invalid %%u value");
e2c953b6 11202 else
f676971a 11203 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11204 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11205 return;
11206
802a0058
MM
11207 case 'v':
11208 /* High-order 16 bits of constant for use in signed operand. */
11209 if (! INT_P (x))
11210 output_operand_lossage ("invalid %%v value");
e2c953b6 11211 else
134c32f6
DE
11212 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11213 (INT_LOWPART (x) >> 16) & 0xffff);
11214 return;
802a0058 11215
9854d9ed
RK
11216 case 'U':
11217 /* Print `u' if this has an auto-increment or auto-decrement. */
11218 if (GET_CODE (x) == MEM
11219 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11220 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11221 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11222 putc ('u', file);
9854d9ed 11223 return;
9878760c 11224
e0cd0770
JC
11225 case 'V':
11226 /* Print the trap code for this operand. */
11227 switch (GET_CODE (x))
11228 {
11229 case EQ:
11230 fputs ("eq", file); /* 4 */
11231 break;
11232 case NE:
11233 fputs ("ne", file); /* 24 */
11234 break;
11235 case LT:
11236 fputs ("lt", file); /* 16 */
11237 break;
11238 case LE:
11239 fputs ("le", file); /* 20 */
11240 break;
11241 case GT:
11242 fputs ("gt", file); /* 8 */
11243 break;
11244 case GE:
11245 fputs ("ge", file); /* 12 */
11246 break;
11247 case LTU:
11248 fputs ("llt", file); /* 2 */
11249 break;
11250 case LEU:
11251 fputs ("lle", file); /* 6 */
11252 break;
11253 case GTU:
11254 fputs ("lgt", file); /* 1 */
11255 break;
11256 case GEU:
11257 fputs ("lge", file); /* 5 */
11258 break;
11259 default:
37409796 11260 gcc_unreachable ();
e0cd0770
JC
11261 }
11262 break;
11263
9854d9ed
RK
11264 case 'w':
11265 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11266 normally. */
11267 if (INT_P (x))
f676971a 11268 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11269 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11270 else
11271 print_operand (file, x, 0);
9878760c
RK
11272 return;
11273
9854d9ed 11274 case 'W':
e2c953b6 11275 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11276 val = (GET_CODE (x) == CONST_INT
11277 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11278
11279 if (val < 0)
11280 i = -1;
9854d9ed 11281 else
e2c953b6
DE
11282 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11283 if ((val <<= 1) < 0)
11284 break;
11285
11286#if HOST_BITS_PER_WIDE_INT == 32
11287 if (GET_CODE (x) == CONST_INT && i >= 0)
11288 i += 32; /* zero-extend high-part was all 0's */
11289 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11290 {
11291 val = CONST_DOUBLE_LOW (x);
11292
37409796
NS
11293 gcc_assert (val);
11294 if (val < 0)
e2c953b6
DE
11295 --i;
11296 else
11297 for ( ; i < 64; i++)
11298 if ((val <<= 1) < 0)
11299 break;
11300 }
11301#endif
11302
11303 fprintf (file, "%d", i + 1);
9854d9ed 11304 return;
9878760c 11305
9854d9ed
RK
11306 case 'X':
11307 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11308 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11309 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11310 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11311 putc ('x', file);
9854d9ed 11312 return;
9878760c 11313
9854d9ed
RK
11314 case 'Y':
11315 /* Like 'L', for third word of TImode */
11316 if (GET_CODE (x) == REG)
fb5c67a7 11317 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11318 else if (GET_CODE (x) == MEM)
9878760c 11319 {
9854d9ed
RK
11320 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11321 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11322 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11323 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11324 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11325 else
d7624dc0 11326 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11327 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11328 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11329 reg_names[SMALL_DATA_REG]);
9878760c
RK
11330 }
11331 return;
f676971a 11332
9878760c 11333 case 'z':
b4ac57ab
RS
11334 /* X is a SYMBOL_REF. Write out the name preceded by a
11335 period and without any trailing data in brackets. Used for function
4d30c363
MM
11336 names. If we are configured for System V (or the embedded ABI) on
11337 the PowerPC, do not emit the period, since those systems do not use
11338 TOCs and the like. */
37409796 11339 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11340
c4ad648e
AM
11341 /* Mark the decl as referenced so that cgraph will output the
11342 function. */
9bf6462a 11343 if (SYMBOL_REF_DECL (x))
c4ad648e 11344 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11345
85b776df 11346 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11347 if (TARGET_MACHO)
11348 {
11349 const char *name = XSTR (x, 0);
a031e781 11350#if TARGET_MACHO
3b48085e 11351 if (MACHOPIC_INDIRECT
11abc112
MM
11352 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11353 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11354#endif
11355 assemble_name (file, name);
11356 }
85b776df 11357 else if (!DOT_SYMBOLS)
9739c90c 11358 assemble_name (file, XSTR (x, 0));
85b776df
AM
11359 else
11360 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11361 return;
11362
9854d9ed
RK
11363 case 'Z':
11364 /* Like 'L', for last word of TImode. */
11365 if (GET_CODE (x) == REG)
fb5c67a7 11366 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11367 else if (GET_CODE (x) == MEM)
11368 {
11369 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11370 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11371 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11372 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11373 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11374 else
d7624dc0 11375 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11376 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11377 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11378 reg_names[SMALL_DATA_REG]);
9854d9ed 11379 }
5c23c401 11380 return;
0ac081f6 11381
a3170dc6 11382 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11383 case 'y':
11384 {
11385 rtx tmp;
11386
37409796 11387 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11388
11389 tmp = XEXP (x, 0);
11390
90d3ff1c 11391 /* Ugly hack because %y is overloaded. */
8ef65e3d 11392 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11393 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11394 || GET_MODE (x) == TFmode
11395 || GET_MODE (x) == TImode))
a3170dc6
AH
11396 {
11397 /* Handle [reg]. */
11398 if (GET_CODE (tmp) == REG)
11399 {
11400 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11401 break;
11402 }
11403 /* Handle [reg+UIMM]. */
11404 else if (GET_CODE (tmp) == PLUS &&
11405 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11406 {
11407 int x;
11408
37409796 11409 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11410
11411 x = INTVAL (XEXP (tmp, 1));
11412 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11413 break;
11414 }
11415
11416 /* Fall through. Must be [reg+reg]. */
11417 }
850e8d3d
DN
11418 if (TARGET_ALTIVEC
11419 && GET_CODE (tmp) == AND
11420 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11421 && INTVAL (XEXP (tmp, 1)) == -16)
11422 tmp = XEXP (tmp, 0);
0ac081f6 11423 if (GET_CODE (tmp) == REG)
c62f2db5 11424 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11425 else
0ac081f6 11426 {
37409796 11427 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11428 && REG_P (XEXP (tmp, 0))
11429 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11430
0ac081f6
AH
11431 if (REGNO (XEXP (tmp, 0)) == 0)
11432 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11433 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11434 else
11435 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11436 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11437 }
0ac081f6
AH
11438 break;
11439 }
f676971a 11440
9878760c
RK
11441 case 0:
11442 if (GET_CODE (x) == REG)
11443 fprintf (file, "%s", reg_names[REGNO (x)]);
11444 else if (GET_CODE (x) == MEM)
11445 {
11446 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11447 know the width from the mode. */
11448 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11449 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11450 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11451 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11452 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11453 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
11454 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11455 output_address (XEXP (XEXP (x, 0), 1));
9878760c 11456 else
a54d04b7 11457 output_address (XEXP (x, 0));
9878760c
RK
11458 }
11459 else
a54d04b7 11460 output_addr_const (file, x);
a85d226b 11461 return;
9878760c 11462
c4501e62
JJ
11463 case '&':
11464 assemble_name (file, rs6000_get_some_local_dynamic_name ());
11465 return;
11466
9878760c
RK
11467 default:
11468 output_operand_lossage ("invalid %%xn code");
11469 }
11470}
11471\f
11472/* Print the address of an operand. */
11473
11474void
a2369ed3 11475print_operand_address (FILE *file, rtx x)
9878760c
RK
11476{
11477 if (GET_CODE (x) == REG)
4697a36c 11478 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
11479 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
11480 || GET_CODE (x) == LABEL_REF)
9878760c
RK
11481 {
11482 output_addr_const (file, x);
ba5e43aa 11483 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11484 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11485 reg_names[SMALL_DATA_REG]);
37409796
NS
11486 else
11487 gcc_assert (!TARGET_TOC);
9878760c
RK
11488 }
11489 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
11490 {
9024f4b8 11491 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 11492 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
11493 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
11494 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 11495 else
4697a36c
MM
11496 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
11497 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
11498 }
11499 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
11500 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
11501 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
11502#if TARGET_ELF
11503 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11504 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
11505 {
11506 output_addr_const (file, XEXP (x, 1));
11507 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11508 }
c859cda6
DJ
11509#endif
11510#if TARGET_MACHO
11511 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11512 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
11513 {
11514 fprintf (file, "lo16(");
11515 output_addr_const (file, XEXP (x, 1));
11516 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11517 }
3cb999d8 11518#endif
4d588c14 11519 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 11520 {
2bfcf297 11521 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 11522 {
2bfcf297
DB
11523 rtx contains_minus = XEXP (x, 1);
11524 rtx minus, symref;
11525 const char *name;
f676971a 11526
9ebbca7d 11527 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 11528 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
11529 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11530 contains_minus = XEXP (contains_minus, 0);
11531
2bfcf297
DB
11532 minus = XEXP (contains_minus, 0);
11533 symref = XEXP (minus, 0);
11534 XEXP (contains_minus, 0) = symref;
11535 if (TARGET_ELF)
11536 {
11537 char *newname;
11538
11539 name = XSTR (symref, 0);
11540 newname = alloca (strlen (name) + sizeof ("@toc"));
11541 strcpy (newname, name);
11542 strcat (newname, "@toc");
11543 XSTR (symref, 0) = newname;
11544 }
11545 output_addr_const (file, XEXP (x, 1));
11546 if (TARGET_ELF)
11547 XSTR (symref, 0) = name;
9ebbca7d
GK
11548 XEXP (contains_minus, 0) = minus;
11549 }
11550 else
11551 output_addr_const (file, XEXP (x, 1));
11552
11553 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11554 }
9878760c 11555 else
37409796 11556 gcc_unreachable ();
9878760c
RK
11557}
11558\f
88cad84b 11559/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
11560 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11561 is defined. It also needs to handle DI-mode objects on 64-bit
11562 targets. */
11563
11564static bool
a2369ed3 11565rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 11566{
f4f4921e 11567#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11568 /* Special handling for SI values. */
84dcde01 11569 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11570 {
301d03af 11571 static int recurse = 0;
f676971a 11572
301d03af
RS
11573 /* For -mrelocatable, we mark all addresses that need to be fixed up
11574 in the .fixup section. */
11575 if (TARGET_RELOCATABLE
d6b5193b
RS
11576 && in_section != toc_section
11577 && in_section != text_section
4325ca90 11578 && !unlikely_text_section_p (in_section)
301d03af
RS
11579 && !recurse
11580 && GET_CODE (x) != CONST_INT
11581 && GET_CODE (x) != CONST_DOUBLE
11582 && CONSTANT_P (x))
11583 {
11584 char buf[256];
11585
11586 recurse = 1;
11587 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11588 fixuplabelno++;
11589 ASM_OUTPUT_LABEL (asm_out_file, buf);
11590 fprintf (asm_out_file, "\t.long\t(");
11591 output_addr_const (asm_out_file, x);
11592 fprintf (asm_out_file, ")@fixup\n");
11593 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11594 ASM_OUTPUT_ALIGN (asm_out_file, 2);
11595 fprintf (asm_out_file, "\t.long\t");
11596 assemble_name (asm_out_file, buf);
11597 fprintf (asm_out_file, "\n\t.previous\n");
11598 recurse = 0;
11599 return true;
11600 }
11601 /* Remove initial .'s to turn a -mcall-aixdesc function
11602 address into the address of the descriptor, not the function
11603 itself. */
11604 else if (GET_CODE (x) == SYMBOL_REF
11605 && XSTR (x, 0)[0] == '.'
11606 && DEFAULT_ABI == ABI_AIX)
11607 {
11608 const char *name = XSTR (x, 0);
11609 while (*name == '.')
11610 name++;
11611
11612 fprintf (asm_out_file, "\t.long\t%s\n", name);
11613 return true;
11614 }
11615 }
f4f4921e 11616#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
11617 return default_assemble_integer (x, size, aligned_p);
11618}
93638d7a
AM
11619
11620#ifdef HAVE_GAS_HIDDEN
11621/* Emit an assembler directive to set symbol visibility for DECL to
11622 VISIBILITY_TYPE. */
11623
5add3202 11624static void
a2369ed3 11625rs6000_assemble_visibility (tree decl, int vis)
93638d7a 11626{
93638d7a
AM
11627 /* Functions need to have their entry point symbol visibility set as
11628 well as their descriptor symbol visibility. */
85b776df
AM
11629 if (DEFAULT_ABI == ABI_AIX
11630 && DOT_SYMBOLS
11631 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 11632 {
25fdb4dc 11633 static const char * const visibility_types[] = {
c4ad648e 11634 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
11635 };
11636
11637 const char *name, *type;
93638d7a
AM
11638
11639 name = ((* targetm.strip_name_encoding)
11640 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 11641 type = visibility_types[vis];
93638d7a 11642
25fdb4dc
RH
11643 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11644 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 11645 }
25fdb4dc
RH
11646 else
11647 default_assemble_visibility (decl, vis);
93638d7a
AM
11648}
11649#endif
301d03af 11650\f
39a10a29 11651enum rtx_code
a2369ed3 11652rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
11653{
11654 /* Reversal of FP compares takes care -- an ordered compare
11655 becomes an unordered compare and vice versa. */
f676971a 11656 if (mode == CCFPmode
bc9ec0e0
GK
11657 && (!flag_finite_math_only
11658 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11659 || code == UNEQ || code == LTGT))
bab6226b 11660 return reverse_condition_maybe_unordered (code);
39a10a29 11661 else
bab6226b 11662 return reverse_condition (code);
39a10a29
GK
11663}
11664
39a10a29
GK
11665/* Generate a compare for CODE. Return a brand-new rtx that
11666 represents the result of the compare. */
a4f6c312 11667
39a10a29 11668static rtx
a2369ed3 11669rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
11670{
11671 enum machine_mode comp_mode;
11672 rtx compare_result;
11673
11674 if (rs6000_compare_fp_p)
11675 comp_mode = CCFPmode;
11676 else if (code == GTU || code == LTU
c4ad648e 11677 || code == GEU || code == LEU)
39a10a29 11678 comp_mode = CCUNSmode;
60934f9c
NS
11679 else if ((code == EQ || code == NE)
11680 && GET_CODE (rs6000_compare_op0) == SUBREG
11681 && GET_CODE (rs6000_compare_op1) == SUBREG
11682 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
11683 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
11684 /* These are unsigned values, perhaps there will be a later
11685 ordering compare that can be shared with this one.
11686 Unfortunately we cannot detect the signedness of the operands
11687 for non-subregs. */
11688 comp_mode = CCUNSmode;
39a10a29
GK
11689 else
11690 comp_mode = CCmode;
11691
11692 /* First, the compare. */
11693 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 11694
cef6b86c 11695 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 11696 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 11697 && rs6000_compare_fp_p)
a3170dc6 11698 {
64022b5d 11699 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
11700 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
11701
11702 if (op_mode == VOIDmode)
11703 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 11704
cef6b86c
EB
11705 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
11706 This explains the following mess. */
423c1189 11707
a3170dc6
AH
11708 switch (code)
11709 {
423c1189 11710 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
11711 switch (op_mode)
11712 {
11713 case SFmode:
11714 cmp = flag_unsafe_math_optimizations
11715 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
11716 rs6000_compare_op1)
11717 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
11718 rs6000_compare_op1);
11719 break;
11720
11721 case DFmode:
11722 cmp = flag_unsafe_math_optimizations
11723 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
11724 rs6000_compare_op1)
11725 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
11726 rs6000_compare_op1);
11727 break;
11728
17caeff2
JM
11729 case TFmode:
11730 cmp = flag_unsafe_math_optimizations
11731 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
11732 rs6000_compare_op1)
11733 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
11734 rs6000_compare_op1);
11735 break;
11736
37409796
NS
11737 default:
11738 gcc_unreachable ();
11739 }
a3170dc6 11740 break;
bb8df8a6 11741
423c1189 11742 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
11743 switch (op_mode)
11744 {
11745 case SFmode:
11746 cmp = flag_unsafe_math_optimizations
11747 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
11748 rs6000_compare_op1)
11749 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
11750 rs6000_compare_op1);
11751 break;
bb8df8a6 11752
37409796
NS
11753 case DFmode:
11754 cmp = flag_unsafe_math_optimizations
11755 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
11756 rs6000_compare_op1)
11757 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
11758 rs6000_compare_op1);
11759 break;
11760
17caeff2
JM
11761 case TFmode:
11762 cmp = flag_unsafe_math_optimizations
11763 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
11764 rs6000_compare_op1)
11765 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
11766 rs6000_compare_op1);
11767 break;
11768
37409796
NS
11769 default:
11770 gcc_unreachable ();
11771 }
a3170dc6 11772 break;
bb8df8a6 11773
423c1189 11774 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
11775 switch (op_mode)
11776 {
11777 case SFmode:
11778 cmp = flag_unsafe_math_optimizations
11779 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11780 rs6000_compare_op1)
11781 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11782 rs6000_compare_op1);
11783 break;
bb8df8a6 11784
37409796
NS
11785 case DFmode:
11786 cmp = flag_unsafe_math_optimizations
11787 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11788 rs6000_compare_op1)
11789 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11790 rs6000_compare_op1);
11791 break;
11792
17caeff2
JM
11793 case TFmode:
11794 cmp = flag_unsafe_math_optimizations
11795 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
11796 rs6000_compare_op1)
11797 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
11798 rs6000_compare_op1);
11799 break;
11800
37409796
NS
11801 default:
11802 gcc_unreachable ();
11803 }
a3170dc6 11804 break;
4d4cbc0e 11805 default:
37409796 11806 gcc_unreachable ();
a3170dc6
AH
11807 }
11808
11809 /* Synthesize LE and GE from LT/GT || EQ. */
11810 if (code == LE || code == GE || code == LEU || code == GEU)
11811 {
a3170dc6
AH
11812 emit_insn (cmp);
11813
11814 switch (code)
11815 {
11816 case LE: code = LT; break;
11817 case GE: code = GT; break;
11818 case LEU: code = LT; break;
11819 case GEU: code = GT; break;
37409796 11820 default: gcc_unreachable ();
a3170dc6
AH
11821 }
11822
a3170dc6
AH
11823 compare_result2 = gen_reg_rtx (CCFPmode);
11824
11825 /* Do the EQ. */
37409796
NS
11826 switch (op_mode)
11827 {
11828 case SFmode:
11829 cmp = flag_unsafe_math_optimizations
11830 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11831 rs6000_compare_op1)
11832 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11833 rs6000_compare_op1);
11834 break;
11835
11836 case DFmode:
11837 cmp = flag_unsafe_math_optimizations
11838 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11839 rs6000_compare_op1)
11840 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11841 rs6000_compare_op1);
11842 break;
11843
17caeff2
JM
11844 case TFmode:
11845 cmp = flag_unsafe_math_optimizations
11846 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
11847 rs6000_compare_op1)
11848 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
11849 rs6000_compare_op1);
11850 break;
11851
37409796
NS
11852 default:
11853 gcc_unreachable ();
11854 }
a3170dc6
AH
11855 emit_insn (cmp);
11856
a3170dc6 11857 /* OR them together. */
64022b5d
AH
11858 or_result = gen_reg_rtx (CCFPmode);
11859 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11860 compare_result2);
a3170dc6
AH
11861 compare_result = or_result;
11862 code = EQ;
11863 }
11864 else
11865 {
a3170dc6 11866 if (code == NE || code == LTGT)
a3170dc6 11867 code = NE;
423c1189
AH
11868 else
11869 code = EQ;
a3170dc6
AH
11870 }
11871
11872 emit_insn (cmp);
11873 }
11874 else
de17c25f
DE
11875 {
11876 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11877 CLOBBERs to match cmptf_internal2 pattern. */
11878 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11879 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 11880 && !TARGET_IEEEQUAD
de17c25f
DE
11881 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11882 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11883 gen_rtvec (9,
11884 gen_rtx_SET (VOIDmode,
11885 compare_result,
11886 gen_rtx_COMPARE (comp_mode,
11887 rs6000_compare_op0,
11888 rs6000_compare_op1)),
11889 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11890 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11891 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11892 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11893 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11894 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11895 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11896 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11897 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11898 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11899 {
11900 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11901 comp_mode = CCEQmode;
11902 compare_result = gen_reg_rtx (CCEQmode);
11903 if (TARGET_64BIT)
11904 emit_insn (gen_stack_protect_testdi (compare_result,
11905 rs6000_compare_op0, op1));
11906 else
11907 emit_insn (gen_stack_protect_testsi (compare_result,
11908 rs6000_compare_op0, op1));
11909 }
de17c25f
DE
11910 else
11911 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11912 gen_rtx_COMPARE (comp_mode,
11913 rs6000_compare_op0,
11914 rs6000_compare_op1)));
11915 }
f676971a 11916
ca5adc63 11917 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11918 under flag_finite_math_only we don't bother. */
39a10a29 11919 if (rs6000_compare_fp_p
e7108df9 11920 && !flag_finite_math_only
8ef65e3d 11921 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
11922 && (code == LE || code == GE
11923 || code == UNEQ || code == LTGT
11924 || code == UNGT || code == UNLT))
11925 {
11926 enum rtx_code or1, or2;
11927 rtx or1_rtx, or2_rtx, compare2_rtx;
11928 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11929
39a10a29
GK
11930 switch (code)
11931 {
11932 case LE: or1 = LT; or2 = EQ; break;
11933 case GE: or1 = GT; or2 = EQ; break;
11934 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11935 case LTGT: or1 = LT; or2 = GT; break;
11936 case UNGT: or1 = UNORDERED; or2 = GT; break;
11937 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11938 default: gcc_unreachable ();
39a10a29
GK
11939 }
11940 validate_condition_mode (or1, comp_mode);
11941 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11942 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11943 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11944 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11945 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11946 const_true_rtx);
11947 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11948
11949 compare_result = or_result;
11950 code = EQ;
11951 }
11952
11953 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11954
1c563bed 11955 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11956}
11957
11958
11959/* Emit the RTL for an sCOND pattern. */
11960
11961void
a2369ed3 11962rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
11963{
11964 rtx condition_rtx;
11965 enum machine_mode op_mode;
b7053a3f 11966 enum rtx_code cond_code;
39a10a29
GK
11967
11968 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
11969 cond_code = GET_CODE (condition_rtx);
11970
8ef65e3d 11971 if (rs6000_compare_fp_p
423c1189
AH
11972 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11973 {
11974 rtx t;
11975
11976 PUT_MODE (condition_rtx, SImode);
11977 t = XEXP (condition_rtx, 0);
11978
37409796 11979 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
11980
11981 if (cond_code == NE)
64022b5d 11982 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 11983
64022b5d 11984 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
11985 return;
11986 }
11987
b7053a3f
GK
11988 if (cond_code == NE
11989 || cond_code == GE || cond_code == LE
11990 || cond_code == GEU || cond_code == LEU
11991 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11992 {
11993 rtx not_result = gen_reg_rtx (CCEQmode);
11994 rtx not_op, rev_cond_rtx;
11995 enum machine_mode cc_mode;
f676971a 11996
b7053a3f
GK
11997 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11998
1c563bed 11999 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12000 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12001 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12002 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12003 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12004 }
39a10a29
GK
12005
12006 op_mode = GET_MODE (rs6000_compare_op0);
12007 if (op_mode == VOIDmode)
12008 op_mode = GET_MODE (rs6000_compare_op1);
12009
12010 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12011 {
12012 PUT_MODE (condition_rtx, DImode);
12013 convert_move (result, condition_rtx, 0);
12014 }
12015 else
12016 {
12017 PUT_MODE (condition_rtx, SImode);
12018 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12019 }
12020}
12021
39a10a29
GK
12022/* Emit a branch of kind CODE to location LOC. */
12023
12024void
a2369ed3 12025rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12026{
12027 rtx condition_rtx, loc_ref;
12028
12029 condition_rtx = rs6000_generate_compare (code);
12030 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12031 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12032 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12033 loc_ref, pc_rtx)));
12034}
12035
12a4e8c5
GK
12036/* Return the string to output a conditional branch to LABEL, which is
12037 the operand number of the label, or -1 if the branch is really a
f676971a 12038 conditional return.
12a4e8c5
GK
12039
12040 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12041 condition code register and its mode specifies what kind of
12042 comparison we made.
12043
a0ab749a 12044 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12045
12046 INSN is the insn. */
12047
12048char *
a2369ed3 12049output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12050{
12051 static char string[64];
12052 enum rtx_code code = GET_CODE (op);
12053 rtx cc_reg = XEXP (op, 0);
12054 enum machine_mode mode = GET_MODE (cc_reg);
12055 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12056 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12057 int really_reversed = reversed ^ need_longbranch;
12058 char *s = string;
12059 const char *ccode;
12060 const char *pred;
12061 rtx note;
12062
39a10a29
GK
12063 validate_condition_mode (code, mode);
12064
12065 /* Work out which way this really branches. We could use
12066 reverse_condition_maybe_unordered here always but this
12067 makes the resulting assembler clearer. */
12a4e8c5 12068 if (really_reversed)
de40e1df
DJ
12069 {
12070 /* Reversal of FP compares takes care -- an ordered compare
12071 becomes an unordered compare and vice versa. */
12072 if (mode == CCFPmode)
12073 code = reverse_condition_maybe_unordered (code);
12074 else
12075 code = reverse_condition (code);
12076 }
12a4e8c5 12077
8ef65e3d 12078 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12079 {
12080 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12081 to the GT bit. */
37409796
NS
12082 switch (code)
12083 {
12084 case EQ:
12085 /* Opposite of GT. */
12086 code = GT;
12087 break;
12088
12089 case NE:
12090 code = UNLE;
12091 break;
12092
12093 default:
12094 gcc_unreachable ();
12095 }
a3170dc6
AH
12096 }
12097
39a10a29 12098 switch (code)
12a4e8c5
GK
12099 {
12100 /* Not all of these are actually distinct opcodes, but
12101 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12102 case NE: case LTGT:
12103 ccode = "ne"; break;
12104 case EQ: case UNEQ:
12105 ccode = "eq"; break;
f676971a 12106 case GE: case GEU:
50a0b056 12107 ccode = "ge"; break;
f676971a 12108 case GT: case GTU: case UNGT:
50a0b056 12109 ccode = "gt"; break;
f676971a 12110 case LE: case LEU:
50a0b056 12111 ccode = "le"; break;
f676971a 12112 case LT: case LTU: case UNLT:
50a0b056 12113 ccode = "lt"; break;
12a4e8c5
GK
12114 case UNORDERED: ccode = "un"; break;
12115 case ORDERED: ccode = "nu"; break;
12116 case UNGE: ccode = "nl"; break;
12117 case UNLE: ccode = "ng"; break;
12118 default:
37409796 12119 gcc_unreachable ();
12a4e8c5 12120 }
f676971a
EC
12121
12122 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12123 The old mnemonics don't have a way to specify this information. */
f4857b9b 12124 pred = "";
12a4e8c5
GK
12125 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12126 if (note != NULL_RTX)
12127 {
12128 /* PROB is the difference from 50%. */
12129 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12130
12131 /* Only hint for highly probable/improbable branches on newer
12132 cpus as static prediction overrides processor dynamic
12133 prediction. For older cpus we may as well always hint, but
12134 assume not taken for branches that are very close to 50% as a
12135 mispredicted taken branch is more expensive than a
f676971a 12136 mispredicted not-taken branch. */
ec507f2d 12137 if (rs6000_always_hint
2c9e13f3
JH
12138 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12139 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12140 {
12141 if (abs (prob) > REG_BR_PROB_BASE / 20
12142 && ((prob > 0) ^ need_longbranch))
c4ad648e 12143 pred = "+";
f4857b9b
AM
12144 else
12145 pred = "-";
12146 }
12a4e8c5 12147 }
12a4e8c5
GK
12148
12149 if (label == NULL)
94a54f47 12150 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12151 else
94a54f47 12152 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12153
37c67319 12154 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12155 Assume they'd only be the first character.... */
37c67319
GK
12156 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12157 *s++ = '%';
94a54f47 12158 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12159
12160 if (label != NULL)
12161 {
12162 /* If the branch distance was too far, we may have to use an
12163 unconditional branch to go the distance. */
12164 if (need_longbranch)
44518ddd 12165 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12166 else
12167 s += sprintf (s, ",%s", label);
12168 }
12169
12170 return string;
12171}
50a0b056 12172
64022b5d 12173/* Return the string to flip the GT bit on a CR. */
423c1189 12174char *
64022b5d 12175output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12176{
12177 static char string[64];
12178 int a, b;
12179
37409796
NS
12180 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12181 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12182
64022b5d
AH
12183 /* GT bit. */
12184 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12185 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12186
12187 sprintf (string, "crnot %d,%d", a, b);
12188 return string;
12189}
12190
21213b4c
DP
12191/* Return insn index for the vector compare instruction for given CODE,
12192 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
12193 not available. */
12194
12195static int
94ff898d 12196get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12197 enum machine_mode dest_mode,
12198 enum machine_mode op_mode)
12199{
12200 if (!TARGET_ALTIVEC)
12201 return INSN_NOT_AVAILABLE;
12202
12203 switch (code)
12204 {
12205 case EQ:
12206 if (dest_mode == V16QImode && op_mode == V16QImode)
12207 return UNSPEC_VCMPEQUB;
12208 if (dest_mode == V8HImode && op_mode == V8HImode)
12209 return UNSPEC_VCMPEQUH;
12210 if (dest_mode == V4SImode && op_mode == V4SImode)
12211 return UNSPEC_VCMPEQUW;
12212 if (dest_mode == V4SImode && op_mode == V4SFmode)
12213 return UNSPEC_VCMPEQFP;
12214 break;
12215 case GE:
12216 if (dest_mode == V4SImode && op_mode == V4SFmode)
12217 return UNSPEC_VCMPGEFP;
12218 case GT:
12219 if (dest_mode == V16QImode && op_mode == V16QImode)
12220 return UNSPEC_VCMPGTSB;
12221 if (dest_mode == V8HImode && op_mode == V8HImode)
12222 return UNSPEC_VCMPGTSH;
12223 if (dest_mode == V4SImode && op_mode == V4SImode)
12224 return UNSPEC_VCMPGTSW;
12225 if (dest_mode == V4SImode && op_mode == V4SFmode)
12226 return UNSPEC_VCMPGTFP;
12227 break;
12228 case GTU:
12229 if (dest_mode == V16QImode && op_mode == V16QImode)
12230 return UNSPEC_VCMPGTUB;
12231 if (dest_mode == V8HImode && op_mode == V8HImode)
12232 return UNSPEC_VCMPGTUH;
12233 if (dest_mode == V4SImode && op_mode == V4SImode)
12234 return UNSPEC_VCMPGTUW;
12235 break;
12236 default:
12237 break;
12238 }
12239 return INSN_NOT_AVAILABLE;
12240}
12241
12242/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12243 DMODE is expected destination mode. This is a recursive function. */
12244
12245static rtx
12246rs6000_emit_vector_compare (enum rtx_code rcode,
12247 rtx op0, rtx op1,
12248 enum machine_mode dmode)
12249{
12250 int vec_cmp_insn;
12251 rtx mask;
12252 enum machine_mode dest_mode;
12253 enum machine_mode op_mode = GET_MODE (op1);
12254
37409796
NS
12255 gcc_assert (TARGET_ALTIVEC);
12256 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12257
12258 /* Floating point vector compare instructions uses destination V4SImode.
12259 Move destination to appropriate mode later. */
12260 if (dmode == V4SFmode)
12261 dest_mode = V4SImode;
12262 else
12263 dest_mode = dmode;
12264
12265 mask = gen_reg_rtx (dest_mode);
12266 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12267
12268 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12269 {
12270 bool swap_operands = false;
12271 bool try_again = false;
12272 switch (rcode)
12273 {
12274 case LT:
12275 rcode = GT;
12276 swap_operands = true;
12277 try_again = true;
12278 break;
12279 case LTU:
12280 rcode = GTU;
12281 swap_operands = true;
12282 try_again = true;
12283 break;
12284 case NE:
370df7db
JC
12285 case UNLE:
12286 case UNLT:
12287 case UNGE:
12288 case UNGT:
12289 /* Invert condition and try again.
12290 e.g., A != B becomes ~(A==B). */
21213b4c 12291 {
370df7db 12292 enum rtx_code rev_code;
21213b4c 12293 enum insn_code nor_code;
d1123cde 12294 rtx eq_rtx;
370df7db
JC
12295
12296 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12297 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12298 dest_mode);
94ff898d 12299
166cdb08 12300 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 12301 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12302 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12303
12304 if (dmode != dest_mode)
12305 {
12306 rtx temp = gen_reg_rtx (dest_mode);
12307 convert_move (temp, mask, 0);
12308 return temp;
12309 }
12310 return mask;
12311 }
12312 break;
12313 case GE:
12314 case GEU:
12315 case LE:
12316 case LEU:
12317 /* Try GT/GTU/LT/LTU OR EQ */
12318 {
12319 rtx c_rtx, eq_rtx;
12320 enum insn_code ior_code;
12321 enum rtx_code new_code;
12322
37409796
NS
12323 switch (rcode)
12324 {
12325 case GE:
12326 new_code = GT;
12327 break;
12328
12329 case GEU:
12330 new_code = GTU;
12331 break;
12332
12333 case LE:
12334 new_code = LT;
12335 break;
12336
12337 case LEU:
12338 new_code = LTU;
12339 break;
12340
12341 default:
12342 gcc_unreachable ();
12343 }
21213b4c
DP
12344
12345 c_rtx = rs6000_emit_vector_compare (new_code,
12346 op0, op1, dest_mode);
12347 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12348 dest_mode);
12349
166cdb08 12350 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 12351 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12352 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12353 if (dmode != dest_mode)
12354 {
12355 rtx temp = gen_reg_rtx (dest_mode);
12356 convert_move (temp, mask, 0);
12357 return temp;
12358 }
12359 return mask;
12360 }
12361 break;
12362 default:
37409796 12363 gcc_unreachable ();
21213b4c
DP
12364 }
12365
12366 if (try_again)
12367 {
12368 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12369 /* You only get two chances. */
12370 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12371 }
12372
12373 if (swap_operands)
12374 {
12375 rtx tmp;
12376 tmp = op0;
12377 op0 = op1;
12378 op1 = tmp;
12379 }
12380 }
12381
915167f5
GK
12382 emit_insn (gen_rtx_SET (VOIDmode, mask,
12383 gen_rtx_UNSPEC (dest_mode,
12384 gen_rtvec (2, op0, op1),
12385 vec_cmp_insn)));
21213b4c
DP
12386 if (dmode != dest_mode)
12387 {
12388 rtx temp = gen_reg_rtx (dest_mode);
12389 convert_move (temp, mask, 0);
12390 return temp;
12391 }
12392 return mask;
12393}
12394
12395/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
12396 valid insn doesn exist for given mode. */
12397
12398static int
12399get_vsel_insn (enum machine_mode mode)
12400{
12401 switch (mode)
12402 {
12403 case V4SImode:
12404 return UNSPEC_VSEL4SI;
12405 break;
12406 case V4SFmode:
12407 return UNSPEC_VSEL4SF;
12408 break;
12409 case V8HImode:
12410 return UNSPEC_VSEL8HI;
12411 break;
12412 case V16QImode:
12413 return UNSPEC_VSEL16QI;
12414 break;
12415 default:
12416 return INSN_NOT_AVAILABLE;
12417 break;
12418 }
12419 return INSN_NOT_AVAILABLE;
12420}
12421
12422/* Emit vector select insn where DEST is destination using
12423 operands OP1, OP2 and MASK. */
12424
12425static void
12426rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12427{
12428 rtx t, temp;
12429 enum machine_mode dest_mode = GET_MODE (dest);
12430 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12431
12432 temp = gen_reg_rtx (dest_mode);
94ff898d 12433
bb8df8a6 12434 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 12435 select op2. */
915167f5
GK
12436 t = gen_rtx_SET (VOIDmode, temp,
12437 gen_rtx_UNSPEC (dest_mode,
12438 gen_rtvec (3, op2, op1, mask),
12439 vsel_insn_index));
21213b4c
DP
12440 emit_insn (t);
12441 emit_move_insn (dest, temp);
12442 return;
12443}
12444
94ff898d 12445/* Emit vector conditional expression.
21213b4c
DP
12446 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
12447 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
12448
12449int
12450rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12451 rtx cond, rtx cc_op0, rtx cc_op1)
12452{
12453 enum machine_mode dest_mode = GET_MODE (dest);
12454 enum rtx_code rcode = GET_CODE (cond);
12455 rtx mask;
12456
12457 if (!TARGET_ALTIVEC)
12458 return 0;
12459
12460 /* Get the vector mask for the given relational operations. */
12461 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12462
12463 rs6000_emit_vector_select (dest, op1, op2, mask);
12464
12465 return 1;
12466}
12467
50a0b056
GK
12468/* Emit a conditional move: move TRUE_COND to DEST if OP of the
12469 operands of the last comparison is nonzero/true, FALSE_COND if it
12470 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 12471
50a0b056 12472int
a2369ed3 12473rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
12474{
12475 enum rtx_code code = GET_CODE (op);
12476 rtx op0 = rs6000_compare_op0;
12477 rtx op1 = rs6000_compare_op1;
12478 REAL_VALUE_TYPE c1;
3148ad6d
DJ
12479 enum machine_mode compare_mode = GET_MODE (op0);
12480 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 12481 rtx temp;
add2402e 12482 bool is_against_zero;
50a0b056 12483
a3c9585f 12484 /* These modes should always match. */
a3170dc6
AH
12485 if (GET_MODE (op1) != compare_mode
12486 /* In the isel case however, we can use a compare immediate, so
12487 op1 may be a small constant. */
12488 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 12489 return 0;
178c3eff 12490 if (GET_MODE (true_cond) != result_mode)
3148ad6d 12491 return 0;
178c3eff 12492 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
12493 return 0;
12494
50a0b056 12495 /* First, work out if the hardware can do this at all, or
a3c9585f 12496 if it's too slow.... */
50a0b056 12497 if (! rs6000_compare_fp_p)
a3170dc6
AH
12498 {
12499 if (TARGET_ISEL)
12500 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
12501 return 0;
12502 }
8ef65e3d 12503 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 12504 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 12505 return 0;
50a0b056 12506
add2402e 12507 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 12508
add2402e
GK
12509 /* A floating-point subtract might overflow, underflow, or produce
12510 an inexact result, thus changing the floating-point flags, so it
12511 can't be generated if we care about that. It's safe if one side
12512 of the construct is zero, since then no subtract will be
12513 generated. */
ebb109ad 12514 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
12515 && flag_trapping_math && ! is_against_zero)
12516 return 0;
12517
50a0b056
GK
12518 /* Eliminate half of the comparisons by switching operands, this
12519 makes the remaining code simpler. */
12520 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 12521 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
12522 {
12523 code = reverse_condition_maybe_unordered (code);
12524 temp = true_cond;
12525 true_cond = false_cond;
12526 false_cond = temp;
12527 }
12528
12529 /* UNEQ and LTGT take four instructions for a comparison with zero,
12530 it'll probably be faster to use a branch here too. */
bc9ec0e0 12531 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 12532 return 0;
f676971a 12533
50a0b056
GK
12534 if (GET_CODE (op1) == CONST_DOUBLE)
12535 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 12536
b6d08ca1 12537 /* We're going to try to implement comparisons by performing
50a0b056
GK
12538 a subtract, then comparing against zero. Unfortunately,
12539 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 12540 know that the operand is finite and the comparison
50a0b056 12541 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 12542 if (HONOR_INFINITIES (compare_mode)
50a0b056 12543 && code != GT && code != UNGE
045572c7 12544 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
12545 /* Constructs of the form (a OP b ? a : b) are safe. */
12546 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 12547 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
12548 && ! rtx_equal_p (op1, true_cond))))
12549 return 0;
add2402e 12550
50a0b056
GK
12551 /* At this point we know we can use fsel. */
12552
12553 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
12554 if (! is_against_zero)
12555 {
12556 temp = gen_reg_rtx (compare_mode);
12557 emit_insn (gen_rtx_SET (VOIDmode, temp,
12558 gen_rtx_MINUS (compare_mode, op0, op1)));
12559 op0 = temp;
12560 op1 = CONST0_RTX (compare_mode);
12561 }
50a0b056
GK
12562
12563 /* If we don't care about NaNs we can reduce some of the comparisons
12564 down to faster ones. */
bc9ec0e0 12565 if (! HONOR_NANS (compare_mode))
50a0b056
GK
12566 switch (code)
12567 {
12568 case GT:
12569 code = LE;
12570 temp = true_cond;
12571 true_cond = false_cond;
12572 false_cond = temp;
12573 break;
12574 case UNGE:
12575 code = GE;
12576 break;
12577 case UNEQ:
12578 code = EQ;
12579 break;
12580 default:
12581 break;
12582 }
12583
12584 /* Now, reduce everything down to a GE. */
12585 switch (code)
12586 {
12587 case GE:
12588 break;
12589
12590 case LE:
3148ad6d
DJ
12591 temp = gen_reg_rtx (compare_mode);
12592 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12593 op0 = temp;
12594 break;
12595
12596 case ORDERED:
3148ad6d
DJ
12597 temp = gen_reg_rtx (compare_mode);
12598 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
12599 op0 = temp;
12600 break;
12601
12602 case EQ:
3148ad6d 12603 temp = gen_reg_rtx (compare_mode);
f676971a 12604 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
12605 gen_rtx_NEG (compare_mode,
12606 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
12607 op0 = temp;
12608 break;
12609
12610 case UNGE:
bc9ec0e0 12611 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 12612 temp = gen_reg_rtx (result_mode);
50a0b056 12613 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 12614 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12615 gen_rtx_GE (VOIDmode,
12616 op0, op1),
12617 true_cond, false_cond)));
bc9ec0e0
GK
12618 false_cond = true_cond;
12619 true_cond = temp;
50a0b056 12620
3148ad6d
DJ
12621 temp = gen_reg_rtx (compare_mode);
12622 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12623 op0 = temp;
12624 break;
12625
12626 case GT:
bc9ec0e0 12627 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 12628 temp = gen_reg_rtx (result_mode);
50a0b056 12629 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 12630 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12631 gen_rtx_GE (VOIDmode,
12632 op0, op1),
12633 true_cond, false_cond)));
bc9ec0e0
GK
12634 true_cond = false_cond;
12635 false_cond = temp;
50a0b056 12636
3148ad6d
DJ
12637 temp = gen_reg_rtx (compare_mode);
12638 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12639 op0 = temp;
12640 break;
12641
12642 default:
37409796 12643 gcc_unreachable ();
50a0b056
GK
12644 }
12645
12646 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 12647 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12648 gen_rtx_GE (VOIDmode,
12649 op0, op1),
12650 true_cond, false_cond)));
12651 return 1;
12652}
12653
a3170dc6
AH
12654/* Same as above, but for ints (isel). */
12655
12656static int
a2369ed3 12657rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
12658{
12659 rtx condition_rtx, cr;
12660
12661 /* All isel implementations thus far are 32-bits. */
12662 if (GET_MODE (rs6000_compare_op0) != SImode)
12663 return 0;
12664
12665 /* We still have to do the compare, because isel doesn't do a
12666 compare, it just looks at the CRx bits set by a previous compare
12667 instruction. */
12668 condition_rtx = rs6000_generate_compare (GET_CODE (op));
12669 cr = XEXP (condition_rtx, 0);
12670
12671 if (GET_MODE (cr) == CCmode)
12672 emit_insn (gen_isel_signed (dest, condition_rtx,
12673 true_cond, false_cond, cr));
12674 else
12675 emit_insn (gen_isel_unsigned (dest, condition_rtx,
12676 true_cond, false_cond, cr));
12677
12678 return 1;
12679}
12680
12681const char *
a2369ed3 12682output_isel (rtx *operands)
a3170dc6
AH
12683{
12684 enum rtx_code code;
12685
12686 code = GET_CODE (operands[1]);
12687 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12688 {
12689 PUT_CODE (operands[1], reverse_condition (code));
12690 return "isel %0,%3,%2,%j1";
12691 }
12692 else
12693 return "isel %0,%2,%3,%j1";
12694}
12695
50a0b056 12696void
a2369ed3 12697rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
12698{
12699 enum machine_mode mode = GET_MODE (op0);
5dc8d536 12700 enum rtx_code c;
50a0b056 12701 rtx target;
5dc8d536
AH
12702
12703 if (code == SMAX || code == SMIN)
12704 c = GE;
12705 else
12706 c = GEU;
12707
50a0b056 12708 if (code == SMAX || code == UMAX)
f676971a 12709 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
12710 op0, op1, mode, 0);
12711 else
f676971a 12712 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 12713 op1, op0, mode, 0);
37409796 12714 gcc_assert (target);
50a0b056
GK
12715 if (target != dest)
12716 emit_move_insn (dest, target);
12717}
46c07df8 12718
915167f5
GK
12719/* Emit instructions to perform a load-reserved/store-conditional operation.
12720 The operation performed is an atomic
12721 (set M (CODE:MODE M OP))
12722 If not NULL, BEFORE is atomically set to M before the operation, and
12723 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 12724 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
12725 Either OP or M may be wrapped in a NOT operation. */
12726
12727void
12728rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
12729 rtx m, rtx op, rtx before_param, rtx after_param,
12730 bool sync_p)
12731{
12732 enum machine_mode used_mode;
12733 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
12734 rtx used_m;
12735 rtvec vec;
12736 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12737 rtx shift = NULL_RTX;
bb8df8a6 12738
915167f5
GK
12739 if (sync_p)
12740 emit_insn (gen_memory_barrier ());
bb8df8a6 12741
915167f5
GK
12742 if (GET_CODE (m) == NOT)
12743 used_m = XEXP (m, 0);
12744 else
12745 used_m = m;
12746
12747 /* If this is smaller than SImode, we'll have to use SImode with
12748 adjustments. */
12749 if (mode == QImode || mode == HImode)
12750 {
12751 rtx newop, oldop;
12752
12753 if (MEM_ALIGN (used_m) >= 32)
12754 {
12755 int ishift = 0;
12756 if (BYTES_BIG_ENDIAN)
12757 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 12758
915167f5
GK
12759 shift = GEN_INT (ishift);
12760 }
12761 else
12762 {
12763 rtx addrSI, aligned_addr;
a9c9d3fa 12764 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 12765
915167f5
GK
12766 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
12767 XEXP (used_m, 0)));
12768 shift = gen_reg_rtx (SImode);
12769
12770 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
12771 GEN_INT (shift_mask)));
12772 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
12773
12774 aligned_addr = expand_binop (Pmode, and_optab,
12775 XEXP (used_m, 0),
12776 GEN_INT (-4), NULL_RTX,
12777 1, OPTAB_LIB_WIDEN);
12778 used_m = change_address (used_m, SImode, aligned_addr);
12779 set_mem_align (used_m, 32);
12780 /* It's safe to keep the old alias set of USED_M, because
12781 the operation is atomic and only affects the original
12782 USED_M. */
12783 if (GET_CODE (m) == NOT)
12784 m = gen_rtx_NOT (SImode, used_m);
12785 else
12786 m = used_m;
12787 }
12788
12789 if (GET_CODE (op) == NOT)
12790 {
12791 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
12792 oldop = gen_rtx_NOT (SImode, oldop);
12793 }
12794 else
12795 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 12796
915167f5
GK
12797 switch (code)
12798 {
12799 case IOR:
12800 case XOR:
12801 newop = expand_binop (SImode, and_optab,
12802 oldop, GEN_INT (imask), NULL_RTX,
12803 1, OPTAB_LIB_WIDEN);
12804 emit_insn (gen_ashlsi3 (newop, newop, shift));
12805 break;
12806
12807 case AND:
12808 newop = expand_binop (SImode, ior_optab,
12809 oldop, GEN_INT (~imask), NULL_RTX,
12810 1, OPTAB_LIB_WIDEN);
a9c9d3fa 12811 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
12812 break;
12813
12814 case PLUS:
9f0076e5 12815 case MINUS:
915167f5
GK
12816 {
12817 rtx mask;
bb8df8a6 12818
915167f5
GK
12819 newop = expand_binop (SImode, and_optab,
12820 oldop, GEN_INT (imask), NULL_RTX,
12821 1, OPTAB_LIB_WIDEN);
12822 emit_insn (gen_ashlsi3 (newop, newop, shift));
12823
12824 mask = gen_reg_rtx (SImode);
12825 emit_move_insn (mask, GEN_INT (imask));
12826 emit_insn (gen_ashlsi3 (mask, mask, shift));
12827
9f0076e5
DE
12828 if (code == PLUS)
12829 newop = gen_rtx_PLUS (SImode, m, newop);
12830 else
12831 newop = gen_rtx_MINUS (SImode, m, newop);
12832 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
12833 newop = gen_rtx_IOR (SImode, newop,
12834 gen_rtx_AND (SImode,
12835 gen_rtx_NOT (SImode, mask),
12836 m));
12837 break;
12838 }
12839
12840 default:
12841 gcc_unreachable ();
12842 }
12843
a9c9d3fa
GK
12844 if (GET_CODE (m) == NOT)
12845 {
12846 rtx mask, xorm;
12847
12848 mask = gen_reg_rtx (SImode);
12849 emit_move_insn (mask, GEN_INT (imask));
12850 emit_insn (gen_ashlsi3 (mask, mask, shift));
12851
12852 xorm = gen_rtx_XOR (SImode, used_m, mask);
12853 /* Depending on the value of 'op', the XOR or the operation might
12854 be able to be simplified away. */
12855 newop = simplify_gen_binary (code, SImode, xorm, newop);
12856 }
915167f5
GK
12857 op = newop;
12858 used_mode = SImode;
12859 before = gen_reg_rtx (used_mode);
12860 after = gen_reg_rtx (used_mode);
12861 }
12862 else
12863 {
12864 used_mode = mode;
12865 before = before_param;
12866 after = after_param;
12867
12868 if (before == NULL_RTX)
12869 before = gen_reg_rtx (used_mode);
12870 if (after == NULL_RTX)
12871 after = gen_reg_rtx (used_mode);
12872 }
bb8df8a6 12873
9f0076e5
DE
12874 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12875 && used_mode != mode)
915167f5
GK
12876 the_op = op; /* Computed above. */
12877 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12878 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12879 else
12880 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12881
12882 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12883 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12884 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12885 gen_rtx_UNSPEC (used_mode,
12886 gen_rtvec (1, the_op),
12887 UNSPEC_SYNC_OP));
915167f5
GK
12888 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12889
9f0076e5 12890 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12891 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12892 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12893 else
12894 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12895 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12896
12897 /* Shift and mask the return values properly. */
12898 if (used_mode != mode && before_param)
12899 {
12900 emit_insn (gen_lshrsi3 (before, before, shift));
12901 convert_move (before_param, before, 1);
12902 }
12903
12904 if (used_mode != mode && after_param)
12905 {
12906 emit_insn (gen_lshrsi3 (after, after, shift));
12907 convert_move (after_param, after, 1);
12908 }
12909
12910 /* The previous sequence will end with a branch that's dependent on
12911 the conditional store, so placing an isync will ensure that no
12912 other instructions (especially, no load or store instructions)
12913 can start before the atomic operation completes. */
12914 if (sync_p)
12915 emit_insn (gen_isync ());
12916}
12917
b52110d4
DE
12918/* A subroutine of the atomic operation splitters. Jump to LABEL if
12919 COND is true. Mark the jump as unlikely to be taken. */
12920
12921static void
12922emit_unlikely_jump (rtx cond, rtx label)
12923{
12924 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12925 rtx x;
12926
12927 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12928 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12929 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12930}
12931
12932/* A subroutine of the atomic operation splitters. Emit a load-locked
12933 instruction in MODE. */
12934
12935static void
12936emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12937{
12938 rtx (*fn) (rtx, rtx) = NULL;
12939 if (mode == SImode)
12940 fn = gen_load_locked_si;
12941 else if (mode == DImode)
12942 fn = gen_load_locked_di;
12943 emit_insn (fn (reg, mem));
12944}
12945
12946/* A subroutine of the atomic operation splitters. Emit a store-conditional
12947 instruction in MODE. */
12948
12949static void
12950emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12951{
12952 rtx (*fn) (rtx, rtx, rtx) = NULL;
12953 if (mode == SImode)
12954 fn = gen_store_conditional_si;
12955 else if (mode == DImode)
12956 fn = gen_store_conditional_di;
12957
9f0076e5 12958 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12959 if (PPC405_ERRATUM77)
12960 emit_insn (gen_memory_barrier ());
12961
12962 emit_insn (fn (res, mem, val));
12963}
12964
ea2c620c 12965/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 12966 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12967 operand of the binary operator. BEFORE and AFTER are optional locations to
12968 return the value of MEM either before of after the operation. SCRATCH is
12969 a scratch register. */
12970
12971void
12972rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12973 rtx before, rtx after, rtx scratch)
12974{
12975 enum machine_mode mode = GET_MODE (mem);
12976 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12977
12978 emit_insn (gen_memory_barrier ());
12979
12980 label = gen_label_rtx ();
12981 emit_label (label);
12982 label = gen_rtx_LABEL_REF (VOIDmode, label);
12983
12984 if (before == NULL_RTX)
12985 before = scratch;
12986 emit_load_locked (mode, before, mem);
12987
12988 if (code == NOT)
12989 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12990 else if (code == AND)
12991 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12992 else
12993 x = gen_rtx_fmt_ee (code, mode, before, val);
12994
12995 if (after != NULL_RTX)
12996 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12997 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12998
12999 emit_store_conditional (mode, cond, mem, scratch);
13000
13001 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13002 emit_unlikely_jump (x, label);
13003
13004 emit_insn (gen_isync ());
13005}
13006
b52110d4
DE
13007/* Expand an atomic compare and swap operation. MEM is the memory on which
13008 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13009 value to be stored. SCRATCH is a scratch GPR. */
13010
13011void
13012rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13013 rtx scratch)
13014{
13015 enum machine_mode mode = GET_MODE (mem);
13016 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13017
13018 emit_insn (gen_memory_barrier ());
13019
13020 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13021 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13022 emit_label (XEXP (label1, 0));
13023
13024 emit_load_locked (mode, retval, mem);
13025
13026 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13027 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13028
13029 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13030 emit_unlikely_jump (x, label2);
13031
13032 emit_move_insn (scratch, newval);
13033 emit_store_conditional (mode, cond, mem, scratch);
13034
13035 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13036 emit_unlikely_jump (x, label1);
13037
13038 emit_insn (gen_isync ());
13039 emit_label (XEXP (label2, 0));
13040}
13041
13042/* Expand an atomic test and set operation. MEM is the memory on which
13043 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13044
13045void
13046rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13047{
13048 enum machine_mode mode = GET_MODE (mem);
13049 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13050
13051 emit_insn (gen_memory_barrier ());
13052
13053 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13054 emit_label (XEXP (label, 0));
13055
13056 emit_load_locked (mode, retval, mem);
13057 emit_move_insn (scratch, val);
13058 emit_store_conditional (mode, cond, mem, scratch);
13059
13060 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13061 emit_unlikely_jump (x, label);
13062
13063 emit_insn (gen_isync ());
13064}
13065
9fc75b97
DE
13066void
13067rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13068{
13069 enum machine_mode mode = GET_MODE (mem);
13070 rtx addrSI, align, wdst, shift, mask;
13071 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13072 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13073
13074 /* Shift amount for subword relative to aligned word. */
13075 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13076 shift = gen_reg_rtx (SImode);
13077 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13078 GEN_INT (shift_mask)));
13079 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13080
13081 /* Shift and mask old value into position within word. */
13082 oldval = convert_modes (SImode, mode, oldval, 1);
13083 oldval = expand_binop (SImode, and_optab,
13084 oldval, GEN_INT (imask), NULL_RTX,
13085 1, OPTAB_LIB_WIDEN);
13086 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13087
13088 /* Shift and mask new value into position within word. */
13089 newval = convert_modes (SImode, mode, newval, 1);
13090 newval = expand_binop (SImode, and_optab,
13091 newval, GEN_INT (imask), NULL_RTX,
13092 1, OPTAB_LIB_WIDEN);
13093 emit_insn (gen_ashlsi3 (newval, newval, shift));
13094
13095 /* Mask for insertion. */
13096 mask = gen_reg_rtx (SImode);
13097 emit_move_insn (mask, GEN_INT (imask));
13098 emit_insn (gen_ashlsi3 (mask, mask, shift));
13099
13100 /* Address of aligned word containing subword. */
13101 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13102 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13103 mem = change_address (mem, SImode, align);
13104 set_mem_align (mem, 32);
13105 MEM_VOLATILE_P (mem) = 1;
13106
13107 wdst = gen_reg_rtx (SImode);
13108 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13109 oldval, newval, mem));
13110
13111 emit_move_insn (dst, gen_lowpart (mode, wdst));
13112}
13113
13114void
13115rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13116 rtx oldval, rtx newval, rtx mem,
13117 rtx scratch)
13118{
13119 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13120
13121 emit_insn (gen_memory_barrier ());
13122 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13123 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13124 emit_label (XEXP (label1, 0));
13125
13126 emit_load_locked (SImode, scratch, mem);
13127
13128 /* Mask subword within loaded value for comparison with oldval.
13129 Use UNSPEC_AND to avoid clobber.*/
13130 emit_insn (gen_rtx_SET (SImode, dest,
13131 gen_rtx_UNSPEC (SImode,
13132 gen_rtvec (2, scratch, mask),
13133 UNSPEC_AND)));
13134
13135 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13136 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13137
13138 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13139 emit_unlikely_jump (x, label2);
13140
13141 /* Clear subword within loaded value for insertion of new value. */
13142 emit_insn (gen_rtx_SET (SImode, scratch,
13143 gen_rtx_AND (SImode,
13144 gen_rtx_NOT (SImode, mask), scratch)));
13145 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13146 emit_store_conditional (SImode, cond, mem, scratch);
13147
13148 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13149 emit_unlikely_jump (x, label1);
13150
13151 emit_insn (gen_isync ());
13152 emit_label (XEXP (label2, 0));
13153}
13154
13155
b52110d4 13156 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13157 multi-register moves. It will emit at most one instruction for
13158 each register that is accessed; that is, it won't emit li/lis pairs
13159 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13160 register. */
46c07df8 13161
46c07df8 13162void
a9baceb1 13163rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13164{
a9baceb1
GK
13165 /* The register number of the first register being moved. */
13166 int reg;
13167 /* The mode that is to be moved. */
13168 enum machine_mode mode;
13169 /* The mode that the move is being done in, and its size. */
13170 enum machine_mode reg_mode;
13171 int reg_mode_size;
13172 /* The number of registers that will be moved. */
13173 int nregs;
13174
13175 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13176 mode = GET_MODE (dst);
c8b622ff 13177 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13178 if (FP_REGNO_P (reg))
7393f7f8 13179 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13180 else if (ALTIVEC_REGNO_P (reg))
13181 reg_mode = V16QImode;
8521c414
JM
13182 else if (TARGET_E500_DOUBLE && mode == TFmode)
13183 reg_mode = DFmode;
a9baceb1
GK
13184 else
13185 reg_mode = word_mode;
13186 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13187
37409796 13188 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13189
a9baceb1
GK
13190 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13191 {
13192 /* Move register range backwards, if we might have destructive
13193 overlap. */
13194 int i;
13195 for (i = nregs - 1; i >= 0; i--)
f676971a 13196 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13197 simplify_gen_subreg (reg_mode, dst, mode,
13198 i * reg_mode_size),
13199 simplify_gen_subreg (reg_mode, src, mode,
13200 i * reg_mode_size)));
13201 }
46c07df8
HP
13202 else
13203 {
a9baceb1
GK
13204 int i;
13205 int j = -1;
13206 bool used_update = false;
46c07df8 13207
c1e55850 13208 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13209 {
13210 rtx breg;
3a1f863f 13211
a9baceb1
GK
13212 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13213 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13214 {
13215 rtx delta_rtx;
a9baceb1 13216 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13217 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13218 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13219 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13220 emit_insn (TARGET_32BIT
13221 ? gen_addsi3 (breg, breg, delta_rtx)
13222 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13223 src = replace_equiv_address (src, breg);
3a1f863f 13224 }
d04b6e6e 13225 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13226 {
13e2e16e 13227 rtx basereg;
c1e55850
GK
13228 basereg = gen_rtx_REG (Pmode, reg);
13229 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13230 src = replace_equiv_address (src, basereg);
c1e55850 13231 }
3a1f863f 13232
0423421f
AM
13233 breg = XEXP (src, 0);
13234 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13235 breg = XEXP (breg, 0);
13236
13237 /* If the base register we are using to address memory is
13238 also a destination reg, then change that register last. */
13239 if (REG_P (breg)
13240 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13241 && REGNO (breg) < REGNO (dst) + nregs)
13242 j = REGNO (breg) - REGNO (dst);
c4ad648e 13243 }
46c07df8 13244
a9baceb1 13245 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13246 {
13247 rtx breg;
13248
a9baceb1
GK
13249 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13250 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13251 {
13252 rtx delta_rtx;
a9baceb1 13253 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13254 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13255 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13256 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13257
13258 /* We have to update the breg before doing the store.
13259 Use store with update, if available. */
13260
13261 if (TARGET_UPDATE)
13262 {
a9baceb1 13263 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13264 emit_insn (TARGET_32BIT
13265 ? (TARGET_POWERPC64
13266 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13267 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13268 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13269 used_update = true;
3a1f863f
DE
13270 }
13271 else
a9baceb1
GK
13272 emit_insn (TARGET_32BIT
13273 ? gen_addsi3 (breg, breg, delta_rtx)
13274 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13275 dst = replace_equiv_address (dst, breg);
3a1f863f 13276 }
37409796 13277 else
d04b6e6e 13278 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13279 }
13280
46c07df8 13281 for (i = 0; i < nregs; i++)
f676971a 13282 {
3a1f863f
DE
13283 /* Calculate index to next subword. */
13284 ++j;
f676971a 13285 if (j == nregs)
3a1f863f 13286 j = 0;
46c07df8 13287
112cdef5 13288 /* If compiler already emitted move of first word by
a9baceb1 13289 store with update, no need to do anything. */
3a1f863f 13290 if (j == 0 && used_update)
a9baceb1 13291 continue;
f676971a 13292
a9baceb1
GK
13293 emit_insn (gen_rtx_SET (VOIDmode,
13294 simplify_gen_subreg (reg_mode, dst, mode,
13295 j * reg_mode_size),
13296 simplify_gen_subreg (reg_mode, src, mode,
13297 j * reg_mode_size)));
3a1f863f 13298 }
46c07df8
HP
13299 }
13300}
13301
12a4e8c5 13302\f
a4f6c312
SS
13303/* This page contains routines that are used to determine what the
13304 function prologue and epilogue code will do and write them out. */
9878760c 13305
a4f6c312
SS
13306/* Return the first fixed-point register that is required to be
13307 saved. 32 if none. */
9878760c
RK
13308
13309int
863d938c 13310first_reg_to_save (void)
9878760c
RK
13311{
13312 int first_reg;
13313
13314 /* Find lowest numbered live register. */
13315 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13316 if (df_regs_ever_live_p (first_reg)
a38d360d 13317 && (! call_used_regs[first_reg]
1db02437 13318 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13319 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13320 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13321 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13322 break;
13323
ee890fe2 13324#if TARGET_MACHO
93638d7a
AM
13325 if (flag_pic
13326 && current_function_uses_pic_offset_table
13327 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13328 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13329#endif
13330
9878760c
RK
13331 return first_reg;
13332}
13333
13334/* Similar, for FP regs. */
13335
13336int
863d938c 13337first_fp_reg_to_save (void)
9878760c
RK
13338{
13339 int first_reg;
13340
13341 /* Find lowest numbered live register. */
13342 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13343 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13344 break;
13345
13346 return first_reg;
13347}
00b960c7
AH
13348
13349/* Similar, for AltiVec regs. */
13350
13351static int
863d938c 13352first_altivec_reg_to_save (void)
00b960c7
AH
13353{
13354 int i;
13355
13356 /* Stack frame remains as is unless we are in AltiVec ABI. */
13357 if (! TARGET_ALTIVEC_ABI)
13358 return LAST_ALTIVEC_REGNO + 1;
13359
22fa69da 13360 /* On Darwin, the unwind routines are compiled without
982afe02 13361 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13362 altivec registers when necessary. */
13363 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13364 && ! TARGET_ALTIVEC)
13365 return FIRST_ALTIVEC_REGNO + 20;
13366
00b960c7
AH
13367 /* Find lowest numbered live register. */
13368 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13369 if (df_regs_ever_live_p (i))
00b960c7
AH
13370 break;
13371
13372 return i;
13373}
13374
13375/* Return a 32-bit mask of the AltiVec registers we need to set in
13376 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
13377 the 32-bit word is 0. */
13378
13379static unsigned int
863d938c 13380compute_vrsave_mask (void)
00b960c7
AH
13381{
13382 unsigned int i, mask = 0;
13383
22fa69da 13384 /* On Darwin, the unwind routines are compiled without
982afe02 13385 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13386 call-saved altivec registers when necessary. */
13387 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13388 && ! TARGET_ALTIVEC)
13389 mask |= 0xFFF;
13390
00b960c7
AH
13391 /* First, find out if we use _any_ altivec registers. */
13392 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13393 if (df_regs_ever_live_p (i))
00b960c7
AH
13394 mask |= ALTIVEC_REG_BIT (i);
13395
13396 if (mask == 0)
13397 return mask;
13398
00b960c7
AH
13399 /* Next, remove the argument registers from the set. These must
13400 be in the VRSAVE mask set by the caller, so we don't need to add
13401 them in again. More importantly, the mask we compute here is
13402 used to generate CLOBBERs in the set_vrsave insn, and we do not
13403 wish the argument registers to die. */
a6cf80f2 13404 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13405 mask &= ~ALTIVEC_REG_BIT (i);
13406
13407 /* Similarly, remove the return value from the set. */
13408 {
13409 bool yes = false;
13410 diddle_return_value (is_altivec_return_reg, &yes);
13411 if (yes)
13412 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13413 }
13414
13415 return mask;
13416}
13417
d62294f5 13418/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13419 size of prologues/epilogues by calling our own save/restore-the-world
13420 routines. */
d62294f5
FJ
13421
13422static void
f57fe068
AM
13423compute_save_world_info (rs6000_stack_t *info_ptr)
13424{
13425 info_ptr->world_save_p = 1;
13426 info_ptr->world_save_p
13427 = (WORLD_SAVE_P (info_ptr)
13428 && DEFAULT_ABI == ABI_DARWIN
13429 && ! (current_function_calls_setjmp && flag_exceptions)
13430 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13431 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13432 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13433 && info_ptr->cr_save_p);
f676971a 13434
d62294f5
FJ
13435 /* This will not work in conjunction with sibcalls. Make sure there
13436 are none. (This check is expensive, but seldom executed.) */
f57fe068 13437 if (WORLD_SAVE_P (info_ptr))
f676971a 13438 {
d62294f5
FJ
13439 rtx insn;
13440 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13441 if ( GET_CODE (insn) == CALL_INSN
13442 && SIBLING_CALL_P (insn))
13443 {
13444 info_ptr->world_save_p = 0;
13445 break;
13446 }
d62294f5 13447 }
f676971a 13448
f57fe068 13449 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13450 {
13451 /* Even if we're not touching VRsave, make sure there's room on the
13452 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13453 will attempt to save it. */
d62294f5
FJ
13454 info_ptr->vrsave_size = 4;
13455
13456 /* "Save" the VRsave register too if we're saving the world. */
13457 if (info_ptr->vrsave_mask == 0)
c4ad648e 13458 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13459
13460 /* Because the Darwin register save/restore routines only handle
c4ad648e 13461 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 13462 check. */
37409796
NS
13463 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
13464 && (info_ptr->first_altivec_reg_save
13465 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 13466 }
f676971a 13467 return;
d62294f5
FJ
13468}
13469
13470
00b960c7 13471static void
a2369ed3 13472is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
13473{
13474 bool *yes = (bool *) xyes;
13475 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
13476 *yes = true;
13477}
13478
4697a36c
MM
13479\f
13480/* Calculate the stack information for the current function. This is
13481 complicated by having two separate calling sequences, the AIX calling
13482 sequence and the V.4 calling sequence.
13483
592696dd 13484 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 13485 32-bit 64-bit
4697a36c 13486 SP----> +---------------------------------------+
a260abc9 13487 | back chain to caller | 0 0
4697a36c 13488 +---------------------------------------+
a260abc9 13489 | saved CR | 4 8 (8-11)
4697a36c 13490 +---------------------------------------+
a260abc9 13491 | saved LR | 8 16
4697a36c 13492 +---------------------------------------+
a260abc9 13493 | reserved for compilers | 12 24
4697a36c 13494 +---------------------------------------+
a260abc9 13495 | reserved for binders | 16 32
4697a36c 13496 +---------------------------------------+
a260abc9 13497 | saved TOC pointer | 20 40
4697a36c 13498 +---------------------------------------+
a260abc9 13499 | Parameter save area (P) | 24 48
4697a36c 13500 +---------------------------------------+
a260abc9 13501 | Alloca space (A) | 24+P etc.
802a0058 13502 +---------------------------------------+
a7df97e6 13503 | Local variable space (L) | 24+P+A
4697a36c 13504 +---------------------------------------+
a7df97e6 13505 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 13506 +---------------------------------------+
00b960c7
AH
13507 | Save area for AltiVec registers (W) | 24+P+A+L+X
13508 +---------------------------------------+
13509 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
13510 +---------------------------------------+
13511 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 13512 +---------------------------------------+
00b960c7
AH
13513 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
13514 +---------------------------------------+
13515 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
13516 +---------------------------------------+
13517 old SP->| back chain to caller's caller |
13518 +---------------------------------------+
13519
5376a30c
KR
13520 The required alignment for AIX configurations is two words (i.e., 8
13521 or 16 bytes).
13522
13523
4697a36c
MM
13524 V.4 stack frames look like:
13525
13526 SP----> +---------------------------------------+
13527 | back chain to caller | 0
13528 +---------------------------------------+
5eb387b8 13529 | caller's saved LR | 4
4697a36c
MM
13530 +---------------------------------------+
13531 | Parameter save area (P) | 8
13532 +---------------------------------------+
a7df97e6 13533 | Alloca space (A) | 8+P
f676971a 13534 +---------------------------------------+
a7df97e6 13535 | Varargs save area (V) | 8+P+A
f676971a 13536 +---------------------------------------+
a7df97e6 13537 | Local variable space (L) | 8+P+A+V
f676971a 13538 +---------------------------------------+
a7df97e6 13539 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 13540 +---------------------------------------+
00b960c7
AH
13541 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
13542 +---------------------------------------+
13543 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
13544 +---------------------------------------+
13545 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
13546 +---------------------------------------+
c4ad648e
AM
13547 | SPE: area for 64-bit GP registers |
13548 +---------------------------------------+
13549 | SPE alignment padding |
13550 +---------------------------------------+
00b960c7 13551 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 13552 +---------------------------------------+
00b960c7 13553 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 13554 +---------------------------------------+
00b960c7 13555 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
13556 +---------------------------------------+
13557 old SP->| back chain to caller's caller |
13558 +---------------------------------------+
b6c9286a 13559
5376a30c
KR
13560 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13561 given. (But note below and in sysv4.h that we require only 8 and
13562 may round up the size of our stack frame anyways. The historical
13563 reason is early versions of powerpc-linux which didn't properly
13564 align the stack at program startup. A happy side-effect is that
13565 -mno-eabi libraries can be used with -meabi programs.)
13566
50d440bc 13567 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
13568 the stack alignment requirements may differ. If -mno-eabi is not
13569 given, the required stack alignment is 8 bytes; if -mno-eabi is
13570 given, the required alignment is 16 bytes. (But see V.4 comment
13571 above.) */
4697a36c 13572
61b2fbe7
MM
13573#ifndef ABI_STACK_BOUNDARY
13574#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13575#endif
13576
d1d0c603 13577static rs6000_stack_t *
863d938c 13578rs6000_stack_info (void)
4697a36c 13579{
022123e6 13580 static rs6000_stack_t info;
4697a36c 13581 rs6000_stack_t *info_ptr = &info;
327e5343 13582 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 13583 int ehrd_size;
64045029 13584 int save_align;
44688022 13585 HOST_WIDE_INT non_fixed_size;
4697a36c 13586
022123e6 13587 memset (&info, 0, sizeof (info));
4697a36c 13588
c19de7aa
AH
13589 if (TARGET_SPE)
13590 {
13591 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 13592 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
13593 cfun->machine->insn_chain_scanned_p
13594 = spe_func_has_64bit_regs_p () + 1;
13595 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
13596 }
13597
a4f6c312 13598 /* Select which calling sequence. */
178274da 13599 info_ptr->abi = DEFAULT_ABI;
9878760c 13600
a4f6c312 13601 /* Calculate which registers need to be saved & save area size. */
4697a36c 13602 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 13603 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 13604 even if it currently looks like we won't. */
2bfcf297 13605 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
13606 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
13607 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
13608 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
13609 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
13610 else
13611 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 13612
a3170dc6
AH
13613 /* For the SPE, we have an additional upper 32-bits on each GPR.
13614 Ideally we should save the entire 64-bits only when the upper
13615 half is used in SIMD instructions. Since we only record
13616 registers live (not the size they are used in), this proves
13617 difficult because we'd have to traverse the instruction chain at
13618 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
13619 so we opt to save the GPRs in 64-bits always if but one register
13620 gets used in 64-bits. Otherwise, all the registers in the frame
13621 get saved in 32-bits.
a3170dc6 13622
c19de7aa 13623 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 13624 traditional GP save area will be empty. */
c19de7aa 13625 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13626 info_ptr->gp_size = 0;
13627
4697a36c
MM
13628 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13629 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13630
00b960c7
AH
13631 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13632 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13633 - info_ptr->first_altivec_reg_save);
13634
592696dd 13635 /* Does this function call anything? */
71f123ca
FS
13636 info_ptr->calls_p = (! current_function_is_leaf
13637 || cfun->machine->ra_needs_full_frame);
b6c9286a 13638
a4f6c312 13639 /* Determine if we need to save the link register. */
022123e6
AM
13640 if ((DEFAULT_ABI == ABI_AIX
13641 && current_function_profile
13642 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
13643#ifdef TARGET_RELOCATABLE
13644 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13645#endif
13646 || (info_ptr->first_fp_reg_save != 64
13647 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 13648 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 13649 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
13650 || info_ptr->calls_p
13651 || rs6000_ra_ever_killed ())
4697a36c
MM
13652 {
13653 info_ptr->lr_save_p = 1;
1de43f85 13654 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
13655 }
13656
9ebbca7d 13657 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
13658 if (df_regs_ever_live_p (CR2_REGNO)
13659 || df_regs_ever_live_p (CR3_REGNO)
13660 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
13661 {
13662 info_ptr->cr_save_p = 1;
178274da 13663 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
13664 info_ptr->cr_size = reg_size;
13665 }
13666
83720594
RH
13667 /* If the current function calls __builtin_eh_return, then we need
13668 to allocate stack space for registers that will hold data for
13669 the exception handler. */
13670 if (current_function_calls_eh_return)
13671 {
13672 unsigned int i;
13673 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
13674 continue;
a3170dc6
AH
13675
13676 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
13677 ehrd_size = i * (TARGET_SPE_ABI
13678 && info_ptr->spe_64bit_regs_used != 0
13679 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
13680 }
13681 else
13682 ehrd_size = 0;
13683
592696dd 13684 /* Determine various sizes. */
4697a36c
MM
13685 info_ptr->reg_size = reg_size;
13686 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 13687 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 13688 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 13689 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
13690 if (FRAME_GROWS_DOWNWARD)
13691 info_ptr->vars_size
5b667039
JJ
13692 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
13693 + info_ptr->parm_size,
7d5175e1 13694 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
13695 - (info_ptr->fixed_size + info_ptr->vars_size
13696 + info_ptr->parm_size);
00b960c7 13697
c19de7aa 13698 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13699 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
13700 else
13701 info_ptr->spe_gp_size = 0;
13702
4d774ff8
HP
13703 if (TARGET_ALTIVEC_ABI)
13704 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 13705 else
4d774ff8
HP
13706 info_ptr->vrsave_mask = 0;
13707
13708 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
13709 info_ptr->vrsave_size = 4;
13710 else
13711 info_ptr->vrsave_size = 0;
b6c9286a 13712
d62294f5
FJ
13713 compute_save_world_info (info_ptr);
13714
592696dd 13715 /* Calculate the offsets. */
178274da 13716 switch (DEFAULT_ABI)
4697a36c 13717 {
b6c9286a 13718 case ABI_NONE:
24d304eb 13719 default:
37409796 13720 gcc_unreachable ();
b6c9286a
MM
13721
13722 case ABI_AIX:
ee890fe2 13723 case ABI_DARWIN:
b6c9286a
MM
13724 info_ptr->fp_save_offset = - info_ptr->fp_size;
13725 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
13726
13727 if (TARGET_ALTIVEC_ABI)
13728 {
13729 info_ptr->vrsave_save_offset
13730 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
13731
982afe02 13732 /* Align stack so vector save area is on a quadword boundary.
9278121c 13733 The padding goes above the vectors. */
00b960c7
AH
13734 if (info_ptr->altivec_size != 0)
13735 info_ptr->altivec_padding_size
9278121c 13736 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
13737 else
13738 info_ptr->altivec_padding_size = 0;
13739
13740 info_ptr->altivec_save_offset
13741 = info_ptr->vrsave_save_offset
13742 - info_ptr->altivec_padding_size
13743 - info_ptr->altivec_size;
9278121c
GK
13744 gcc_assert (info_ptr->altivec_size == 0
13745 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
13746
13747 /* Adjust for AltiVec case. */
13748 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
13749 }
13750 else
13751 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
13752 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
13753 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
13754 break;
13755
13756 case ABI_V4:
b6c9286a
MM
13757 info_ptr->fp_save_offset = - info_ptr->fp_size;
13758 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 13759 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 13760
c19de7aa 13761 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
13762 {
13763 /* Align stack so SPE GPR save area is aligned on a
13764 double-word boundary. */
13765 if (info_ptr->spe_gp_size != 0)
13766 info_ptr->spe_padding_size
13767 = 8 - (-info_ptr->cr_save_offset % 8);
13768 else
13769 info_ptr->spe_padding_size = 0;
13770
13771 info_ptr->spe_gp_save_offset
13772 = info_ptr->cr_save_offset
13773 - info_ptr->spe_padding_size
13774 - info_ptr->spe_gp_size;
13775
13776 /* Adjust for SPE case. */
022123e6 13777 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 13778 }
a3170dc6 13779 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
13780 {
13781 info_ptr->vrsave_save_offset
13782 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
13783
13784 /* Align stack so vector save area is on a quadword boundary. */
13785 if (info_ptr->altivec_size != 0)
13786 info_ptr->altivec_padding_size
13787 = 16 - (-info_ptr->vrsave_save_offset % 16);
13788 else
13789 info_ptr->altivec_padding_size = 0;
13790
13791 info_ptr->altivec_save_offset
13792 = info_ptr->vrsave_save_offset
13793 - info_ptr->altivec_padding_size
13794 - info_ptr->altivec_size;
13795
13796 /* Adjust for AltiVec case. */
022123e6 13797 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
13798 }
13799 else
022123e6
AM
13800 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
13801 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
13802 info_ptr->lr_save_offset = reg_size;
13803 break;
4697a36c
MM
13804 }
13805
64045029 13806 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
13807 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
13808 + info_ptr->gp_size
13809 + info_ptr->altivec_size
13810 + info_ptr->altivec_padding_size
a3170dc6
AH
13811 + info_ptr->spe_gp_size
13812 + info_ptr->spe_padding_size
00b960c7
AH
13813 + ehrd_size
13814 + info_ptr->cr_size
022123e6 13815 + info_ptr->vrsave_size,
64045029 13816 save_align);
00b960c7 13817
44688022 13818 non_fixed_size = (info_ptr->vars_size
ff381587 13819 + info_ptr->parm_size
5b667039 13820 + info_ptr->save_size);
ff381587 13821
44688022
AM
13822 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
13823 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
13824
13825 /* Determine if we need to allocate any stack frame:
13826
a4f6c312
SS
13827 For AIX we need to push the stack if a frame pointer is needed
13828 (because the stack might be dynamically adjusted), if we are
13829 debugging, if we make calls, or if the sum of fp_save, gp_save,
13830 and local variables are more than the space needed to save all
13831 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
13832 + 18*8 = 288 (GPR13 reserved).
ff381587 13833
a4f6c312
SS
13834 For V.4 we don't have the stack cushion that AIX uses, but assume
13835 that the debugger can handle stackless frames. */
ff381587
MM
13836
13837 if (info_ptr->calls_p)
13838 info_ptr->push_p = 1;
13839
178274da 13840 else if (DEFAULT_ABI == ABI_V4)
44688022 13841 info_ptr->push_p = non_fixed_size != 0;
ff381587 13842
178274da
AM
13843 else if (frame_pointer_needed)
13844 info_ptr->push_p = 1;
13845
13846 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
13847 info_ptr->push_p = 1;
13848
ff381587 13849 else
44688022 13850 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 13851
a4f6c312 13852 /* Zero offsets if we're not saving those registers. */
8dda1a21 13853 if (info_ptr->fp_size == 0)
4697a36c
MM
13854 info_ptr->fp_save_offset = 0;
13855
8dda1a21 13856 if (info_ptr->gp_size == 0)
4697a36c
MM
13857 info_ptr->gp_save_offset = 0;
13858
00b960c7
AH
13859 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
13860 info_ptr->altivec_save_offset = 0;
13861
13862 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
13863 info_ptr->vrsave_save_offset = 0;
13864
c19de7aa
AH
13865 if (! TARGET_SPE_ABI
13866 || info_ptr->spe_64bit_regs_used == 0
13867 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
13868 info_ptr->spe_gp_save_offset = 0;
13869
c81fc13e 13870 if (! info_ptr->lr_save_p)
4697a36c
MM
13871 info_ptr->lr_save_offset = 0;
13872
c81fc13e 13873 if (! info_ptr->cr_save_p)
4697a36c
MM
13874 info_ptr->cr_save_offset = 0;
13875
13876 return info_ptr;
13877}
13878
c19de7aa
AH
13879/* Return true if the current function uses any GPRs in 64-bit SIMD
13880 mode. */
13881
13882static bool
863d938c 13883spe_func_has_64bit_regs_p (void)
c19de7aa
AH
13884{
13885 rtx insns, insn;
13886
13887 /* Functions that save and restore all the call-saved registers will
13888 need to save/restore the registers in 64-bits. */
13889 if (current_function_calls_eh_return
13890 || current_function_calls_setjmp
13891 || current_function_has_nonlocal_goto)
13892 return true;
13893
13894 insns = get_insns ();
13895
13896 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13897 {
13898 if (INSN_P (insn))
13899 {
13900 rtx i;
13901
b5a5beb9
AH
13902 /* FIXME: This should be implemented with attributes...
13903
13904 (set_attr "spe64" "true")....then,
13905 if (get_spe64(insn)) return true;
13906
13907 It's the only reliable way to do the stuff below. */
13908
c19de7aa 13909 i = PATTERN (insn);
f82f556d
AH
13910 if (GET_CODE (i) == SET)
13911 {
13912 enum machine_mode mode = GET_MODE (SET_SRC (i));
13913
13914 if (SPE_VECTOR_MODE (mode))
13915 return true;
17caeff2 13916 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
13917 return true;
13918 }
c19de7aa
AH
13919 }
13920 }
13921
13922 return false;
13923}
13924
d1d0c603 13925static void
a2369ed3 13926debug_stack_info (rs6000_stack_t *info)
9878760c 13927{
d330fd93 13928 const char *abi_string;
24d304eb 13929
c81fc13e 13930 if (! info)
4697a36c
MM
13931 info = rs6000_stack_info ();
13932
13933 fprintf (stderr, "\nStack information for function %s:\n",
13934 ((current_function_decl && DECL_NAME (current_function_decl))
13935 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13936 : "<unknown>"));
13937
24d304eb
RK
13938 switch (info->abi)
13939 {
b6c9286a
MM
13940 default: abi_string = "Unknown"; break;
13941 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 13942 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 13943 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 13944 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
13945 }
13946
13947 fprintf (stderr, "\tABI = %5s\n", abi_string);
13948
00b960c7
AH
13949 if (TARGET_ALTIVEC_ABI)
13950 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13951
a3170dc6
AH
13952 if (TARGET_SPE_ABI)
13953 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13954
4697a36c
MM
13955 if (info->first_gp_reg_save != 32)
13956 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13957
13958 if (info->first_fp_reg_save != 64)
13959 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 13960
00b960c7
AH
13961 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13962 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13963 info->first_altivec_reg_save);
13964
4697a36c
MM
13965 if (info->lr_save_p)
13966 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 13967
4697a36c
MM
13968 if (info->cr_save_p)
13969 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13970
00b960c7
AH
13971 if (info->vrsave_mask)
13972 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13973
4697a36c
MM
13974 if (info->push_p)
13975 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13976
13977 if (info->calls_p)
13978 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13979
4697a36c
MM
13980 if (info->gp_save_offset)
13981 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13982
13983 if (info->fp_save_offset)
13984 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13985
00b960c7
AH
13986 if (info->altivec_save_offset)
13987 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13988 info->altivec_save_offset);
13989
a3170dc6
AH
13990 if (info->spe_gp_save_offset)
13991 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13992 info->spe_gp_save_offset);
13993
00b960c7
AH
13994 if (info->vrsave_save_offset)
13995 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13996 info->vrsave_save_offset);
13997
4697a36c
MM
13998 if (info->lr_save_offset)
13999 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14000
14001 if (info->cr_save_offset)
14002 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14003
14004 if (info->varargs_save_offset)
14005 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14006
14007 if (info->total_size)
d1d0c603
JJ
14008 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14009 info->total_size);
4697a36c 14010
4697a36c 14011 if (info->vars_size)
d1d0c603
JJ
14012 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14013 info->vars_size);
4697a36c
MM
14014
14015 if (info->parm_size)
14016 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14017
14018 if (info->fixed_size)
14019 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14020
14021 if (info->gp_size)
14022 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14023
a3170dc6
AH
14024 if (info->spe_gp_size)
14025 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14026
4697a36c
MM
14027 if (info->fp_size)
14028 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14029
00b960c7
AH
14030 if (info->altivec_size)
14031 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14032
14033 if (info->vrsave_size)
14034 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14035
14036 if (info->altivec_padding_size)
14037 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14038 info->altivec_padding_size);
14039
a3170dc6
AH
14040 if (info->spe_padding_size)
14041 fprintf (stderr, "\tspe_padding_size = %5d\n",
14042 info->spe_padding_size);
14043
4697a36c
MM
14044 if (info->cr_size)
14045 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14046
14047 if (info->save_size)
14048 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14049
14050 if (info->reg_size != 4)
14051 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14052
14053 fprintf (stderr, "\n");
9878760c 14054}
71f123ca
FS
14055
14056rtx
a2369ed3 14057rs6000_return_addr (int count, rtx frame)
71f123ca 14058{
a4f6c312
SS
14059 /* Currently we don't optimize very well between prolog and body
14060 code and for PIC code the code can be actually quite bad, so
14061 don't try to be too clever here. */
f1384257 14062 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14063 {
14064 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14065
14066 return
14067 gen_rtx_MEM
14068 (Pmode,
14069 memory_address
14070 (Pmode,
14071 plus_constant (copy_to_reg
14072 (gen_rtx_MEM (Pmode,
14073 memory_address (Pmode, frame))),
14074 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14075 }
14076
8c29550d 14077 cfun->machine->ra_need_lr = 1;
1de43f85 14078 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14079}
14080
5e1bf043
DJ
14081/* Say whether a function is a candidate for sibcall handling or not.
14082 We do not allow indirect calls to be optimized into sibling calls.
14083 Also, we can't do it if there are any vector parameters; there's
14084 nowhere to put the VRsave code so it works; note that functions with
14085 vector parameters are required to have a prototype, so the argument
14086 type info must be available here. (The tail recursion case can work
14087 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14088static bool
a2369ed3 14089rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14090{
14091 tree type;
4977bab6 14092 if (decl)
5e1bf043
DJ
14093 {
14094 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14095 {
4977bab6 14096 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14097 type; type = TREE_CHAIN (type))
14098 {
c15b529f 14099 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14100 return false;
5e1bf043 14101 }
c4ad648e 14102 }
5e1bf043 14103 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14104 || ((*targetm.binds_local_p) (decl)
14105 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14106 {
4977bab6 14107 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14108
14109 if (!lookup_attribute ("longcall", attr_list)
14110 || lookup_attribute ("shortcall", attr_list))
4977bab6 14111 return true;
2bcc50d0 14112 }
5e1bf043 14113 }
4977bab6 14114 return false;
5e1bf043
DJ
14115}
14116
e7e64a25
AS
14117/* NULL if INSN insn is valid within a low-overhead loop.
14118 Otherwise return why doloop cannot be applied.
9419649c
DE
14119 PowerPC uses the COUNT register for branch on table instructions. */
14120
e7e64a25
AS
14121static const char *
14122rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
14123{
14124 if (CALL_P (insn))
e7e64a25 14125 return "Function call in the loop.";
9419649c
DE
14126
14127 if (JUMP_P (insn)
14128 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14129 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14130 return "Computed branch in the loop.";
9419649c 14131
e7e64a25 14132 return NULL;
9419649c
DE
14133}
14134
71f123ca 14135static int
863d938c 14136rs6000_ra_ever_killed (void)
71f123ca
FS
14137{
14138 rtx top;
5e1bf043
DJ
14139 rtx reg;
14140 rtx insn;
71f123ca 14141
dd292d0a 14142 if (current_function_is_thunk)
71f123ca 14143 return 0;
eb0424da 14144
36f7e964
AH
14145 /* regs_ever_live has LR marked as used if any sibcalls are present,
14146 but this should not force saving and restoring in the
14147 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14148 clobbers LR, so that is inappropriate. */
36f7e964 14149
5e1bf043
DJ
14150 /* Also, the prologue can generate a store into LR that
14151 doesn't really count, like this:
36f7e964 14152
5e1bf043
DJ
14153 move LR->R0
14154 bcl to set PIC register
14155 move LR->R31
14156 move R0->LR
36f7e964
AH
14157
14158 When we're called from the epilogue, we need to avoid counting
14159 this as a store. */
f676971a 14160
71f123ca
FS
14161 push_topmost_sequence ();
14162 top = get_insns ();
14163 pop_topmost_sequence ();
1de43f85 14164 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14165
5e1bf043
DJ
14166 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14167 {
14168 if (INSN_P (insn))
14169 {
022123e6
AM
14170 if (CALL_P (insn))
14171 {
14172 if (!SIBLING_CALL_P (insn))
14173 return 1;
14174 }
1de43f85 14175 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14176 return 1;
36f7e964
AH
14177 else if (set_of (reg, insn) != NULL_RTX
14178 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14179 return 1;
14180 }
14181 }
14182 return 0;
71f123ca 14183}
4697a36c 14184\f
9ebbca7d 14185/* Emit instructions needed to load the TOC register.
c7ca610e 14186 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14187 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14188
14189void
a2369ed3 14190rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14191{
6fb5fa3c 14192 rtx dest;
1db02437 14193 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14194
7f970b70 14195 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14196 {
7f970b70 14197 char buf[30];
e65a3857 14198 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14199
14200 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14201 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14202 if (flag_pic == 2)
14203 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14204 else
14205 got = rs6000_got_sym ();
14206 tmp1 = tmp2 = dest;
14207 if (!fromprolog)
14208 {
14209 tmp1 = gen_reg_rtx (Pmode);
14210 tmp2 = gen_reg_rtx (Pmode);
14211 }
6fb5fa3c
DB
14212 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14213 emit_move_insn (tmp1,
1de43f85 14214 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14215 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14216 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14217 }
14218 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14219 {
6fb5fa3c 14220 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14221 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14222 }
14223 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14224 {
14225 char buf[30];
20b71b17
AM
14226 rtx temp0 = (fromprolog
14227 ? gen_rtx_REG (Pmode, 0)
14228 : gen_reg_rtx (Pmode));
20b71b17 14229
20b71b17
AM
14230 if (fromprolog)
14231 {
ccbca5e4 14232 rtx symF, symL;
38c1f2d7 14233
20b71b17
AM
14234 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14235 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14236
20b71b17
AM
14237 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14238 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14239
6fb5fa3c
DB
14240 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14241 emit_move_insn (dest,
1de43f85 14242 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14243 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14244 }
14245 else
20b71b17
AM
14246 {
14247 rtx tocsym;
20b71b17
AM
14248
14249 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14250 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14251 emit_move_insn (dest,
1de43f85 14252 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14253 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14254 }
6fb5fa3c 14255 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14256 }
20b71b17
AM
14257 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14258 {
14259 /* This is for AIX code running in non-PIC ELF32. */
14260 char buf[30];
14261 rtx realsym;
14262 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14263 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14264
6fb5fa3c
DB
14265 emit_insn (gen_elf_high (dest, realsym));
14266 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14267 }
37409796 14268 else
9ebbca7d 14269 {
37409796 14270 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14271
9ebbca7d 14272 if (TARGET_32BIT)
6fb5fa3c 14273 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14274 else
6fb5fa3c 14275 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14276 }
14277}
14278
d1d0c603
JJ
14279/* Emit instructions to restore the link register after determining where
14280 its value has been stored. */
14281
14282void
14283rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14284{
14285 rs6000_stack_t *info = rs6000_stack_info ();
14286 rtx operands[2];
14287
14288 operands[0] = source;
14289 operands[1] = scratch;
14290
14291 if (info->lr_save_p)
14292 {
14293 rtx frame_rtx = stack_pointer_rtx;
14294 HOST_WIDE_INT sp_offset = 0;
14295 rtx tmp;
14296
14297 if (frame_pointer_needed
14298 || current_function_calls_alloca
14299 || info->total_size > 32767)
14300 {
0be76840 14301 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14302 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14303 frame_rtx = operands[1];
14304 }
14305 else if (info->push_p)
14306 sp_offset = info->total_size;
14307
14308 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14309 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14310 emit_move_insn (tmp, operands[0]);
14311 }
14312 else
1de43f85 14313 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14314}
14315
4862826d 14316static GTY(()) alias_set_type set = -1;
f103e34d 14317
4862826d 14318alias_set_type
863d938c 14319get_TOC_alias_set (void)
9ebbca7d 14320{
f103e34d
GK
14321 if (set == -1)
14322 set = new_alias_set ();
14323 return set;
f676971a 14324}
9ebbca7d 14325
c1207243 14326/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14327 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14328 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14329#if TARGET_ELF
3c9eb5f4 14330static int
f676971a 14331uses_TOC (void)
9ebbca7d 14332{
c4501e62 14333 rtx insn;
38c1f2d7 14334
c4501e62
JJ
14335 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14336 if (INSN_P (insn))
14337 {
14338 rtx pat = PATTERN (insn);
14339 int i;
9ebbca7d 14340
f676971a 14341 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14342 for (i = 0; i < XVECLEN (pat, 0); i++)
14343 {
14344 rtx sub = XVECEXP (pat, 0, i);
14345 if (GET_CODE (sub) == USE)
14346 {
14347 sub = XEXP (sub, 0);
14348 if (GET_CODE (sub) == UNSPEC
14349 && XINT (sub, 1) == UNSPEC_TOC)
14350 return 1;
14351 }
14352 }
14353 }
14354 return 0;
9ebbca7d 14355}
c954844a 14356#endif
38c1f2d7 14357
9ebbca7d 14358rtx
f676971a 14359create_TOC_reference (rtx symbol)
9ebbca7d 14360{
b3a13419 14361 if (!can_create_pseudo_p ())
6fb5fa3c 14362 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14363 return gen_rtx_PLUS (Pmode,
a8a05998 14364 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14365 gen_rtx_CONST (Pmode,
14366 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14367 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14368}
38c1f2d7 14369
fc4767bb
JJ
14370/* If _Unwind_* has been called from within the same module,
14371 toc register is not guaranteed to be saved to 40(1) on function
14372 entry. Save it there in that case. */
c7ca610e 14373
9ebbca7d 14374void
863d938c 14375rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14376{
14377 rtx mem;
14378 rtx stack_top = gen_reg_rtx (Pmode);
14379 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14380 rtx opcode = gen_reg_rtx (SImode);
14381 rtx tocompare = gen_reg_rtx (SImode);
14382 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14383
8308679f 14384 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14385 emit_move_insn (stack_top, mem);
14386
8308679f
DE
14387 mem = gen_frame_mem (Pmode,
14388 gen_rtx_PLUS (Pmode, stack_top,
14389 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14390 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14391 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14392 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14393 : 0xE8410028, SImode));
9ebbca7d 14394
fc4767bb 14395 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14396 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14397 no_toc_save_needed);
9ebbca7d 14398
8308679f
DE
14399 mem = gen_frame_mem (Pmode,
14400 gen_rtx_PLUS (Pmode, stack_top,
14401 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14402 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14403 emit_label (no_toc_save_needed);
9ebbca7d 14404}
38c1f2d7 14405\f
0be76840
DE
14406/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14407 and the change to the stack pointer. */
ba4828e0 14408
9ebbca7d 14409static void
863d938c 14410rs6000_emit_stack_tie (void)
9ebbca7d 14411{
0be76840
DE
14412 rtx mem = gen_frame_mem (BLKmode,
14413 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14414
9ebbca7d
GK
14415 emit_insn (gen_stack_tie (mem));
14416}
38c1f2d7 14417
9ebbca7d
GK
14418/* Emit the correct code for allocating stack space, as insns.
14419 If COPY_R12, make sure a copy of the old frame is left in r12.
14420 The generated code may use hard register 0 as a temporary. */
14421
14422static void
a2369ed3 14423rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14424{
9ebbca7d
GK
14425 rtx insn;
14426 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14427 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14428 rtx todec = gen_int_mode (-size, Pmode);
14429
14430 if (INTVAL (todec) != -size)
14431 {
d4ee4d25 14432 warning (0, "stack frame too large");
61168ff1
RS
14433 emit_insn (gen_trap ());
14434 return;
14435 }
a157febd
GK
14436
14437 if (current_function_limit_stack)
14438 {
14439 if (REG_P (stack_limit_rtx)
f676971a 14440 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14441 && REGNO (stack_limit_rtx) <= 31)
14442 {
5b71a4e7 14443 emit_insn (TARGET_32BIT
9ebbca7d
GK
14444 ? gen_addsi3 (tmp_reg,
14445 stack_limit_rtx,
14446 GEN_INT (size))
14447 : gen_adddi3 (tmp_reg,
14448 stack_limit_rtx,
14449 GEN_INT (size)));
5b71a4e7 14450
9ebbca7d
GK
14451 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14452 const0_rtx));
a157febd
GK
14453 }
14454 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 14455 && TARGET_32BIT
f607bc57 14456 && DEFAULT_ABI == ABI_V4)
a157febd 14457 {
9ebbca7d 14458 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
14459 gen_rtx_PLUS (Pmode,
14460 stack_limit_rtx,
9ebbca7d 14461 GEN_INT (size)));
5b71a4e7 14462
9ebbca7d
GK
14463 emit_insn (gen_elf_high (tmp_reg, toload));
14464 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
14465 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14466 const0_rtx));
a157febd
GK
14467 }
14468 else
d4ee4d25 14469 warning (0, "stack limit expression is not supported");
a157febd
GK
14470 }
14471
9ebbca7d
GK
14472 if (copy_r12 || ! TARGET_UPDATE)
14473 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
14474
38c1f2d7
MM
14475 if (TARGET_UPDATE)
14476 {
9ebbca7d 14477 if (size > 32767)
38c1f2d7 14478 {
9ebbca7d 14479 /* Need a note here so that try_split doesn't get confused. */
9390387d 14480 if (get_last_insn () == NULL_RTX)
2e040219 14481 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
14482 insn = emit_move_insn (tmp_reg, todec);
14483 try_split (PATTERN (insn), insn, 0);
14484 todec = tmp_reg;
38c1f2d7 14485 }
5b71a4e7
DE
14486
14487 insn = emit_insn (TARGET_32BIT
14488 ? gen_movsi_update (stack_reg, stack_reg,
14489 todec, stack_reg)
c4ad648e 14490 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 14491 todec, stack_reg));
38c1f2d7
MM
14492 }
14493 else
14494 {
5b71a4e7
DE
14495 insn = emit_insn (TARGET_32BIT
14496 ? gen_addsi3 (stack_reg, stack_reg, todec)
14497 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
14498 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
14499 gen_rtx_REG (Pmode, 12));
14500 }
f676971a 14501
9ebbca7d 14502 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 14503 REG_NOTES (insn) =
9ebbca7d 14504 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 14505 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
14506 gen_rtx_PLUS (Pmode, stack_reg,
14507 GEN_INT (-size))),
14508 REG_NOTES (insn));
14509}
14510
a4f6c312
SS
14511/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14512 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14513 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
14514 deduce these equivalences by itself so it wasn't necessary to hold
14515 its hand so much. */
9ebbca7d
GK
14516
14517static void
f676971a 14518rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 14519 rtx reg2, rtx rreg)
9ebbca7d
GK
14520{
14521 rtx real, temp;
14522
e56c4463
JL
14523 /* copy_rtx will not make unique copies of registers, so we need to
14524 ensure we don't have unwanted sharing here. */
14525 if (reg == reg2)
14526 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14527
14528 if (reg == rreg)
14529 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14530
9ebbca7d
GK
14531 real = copy_rtx (PATTERN (insn));
14532
89e7058f
AH
14533 if (reg2 != NULL_RTX)
14534 real = replace_rtx (real, reg2, rreg);
f676971a
EC
14535
14536 real = replace_rtx (real, reg,
9ebbca7d
GK
14537 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14538 STACK_POINTER_REGNUM),
14539 GEN_INT (val)));
f676971a 14540
9ebbca7d
GK
14541 /* We expect that 'real' is either a SET or a PARALLEL containing
14542 SETs (and possibly other stuff). In a PARALLEL, all the SETs
14543 are important so they all have to be marked RTX_FRAME_RELATED_P. */
14544
14545 if (GET_CODE (real) == SET)
14546 {
14547 rtx set = real;
f676971a 14548
9ebbca7d
GK
14549 temp = simplify_rtx (SET_SRC (set));
14550 if (temp)
14551 SET_SRC (set) = temp;
14552 temp = simplify_rtx (SET_DEST (set));
14553 if (temp)
14554 SET_DEST (set) = temp;
14555 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 14556 {
9ebbca7d
GK
14557 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14558 if (temp)
14559 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 14560 }
38c1f2d7 14561 }
37409796 14562 else
9ebbca7d
GK
14563 {
14564 int i;
37409796
NS
14565
14566 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
14567 for (i = 0; i < XVECLEN (real, 0); i++)
14568 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14569 {
14570 rtx set = XVECEXP (real, 0, i);
f676971a 14571
9ebbca7d
GK
14572 temp = simplify_rtx (SET_SRC (set));
14573 if (temp)
14574 SET_SRC (set) = temp;
14575 temp = simplify_rtx (SET_DEST (set));
14576 if (temp)
14577 SET_DEST (set) = temp;
14578 if (GET_CODE (SET_DEST (set)) == MEM)
14579 {
14580 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14581 if (temp)
14582 XEXP (SET_DEST (set), 0) = temp;
14583 }
14584 RTX_FRAME_RELATED_P (set) = 1;
14585 }
14586 }
c19de7aa
AH
14587
14588 if (TARGET_SPE)
14589 real = spe_synthesize_frame_save (real);
14590
9ebbca7d
GK
14591 RTX_FRAME_RELATED_P (insn) = 1;
14592 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14593 real,
14594 REG_NOTES (insn));
38c1f2d7
MM
14595}
14596
c19de7aa
AH
14597/* Given an SPE frame note, return a PARALLEL of SETs with the
14598 original note, plus a synthetic register save. */
14599
14600static rtx
a2369ed3 14601spe_synthesize_frame_save (rtx real)
c19de7aa
AH
14602{
14603 rtx synth, offset, reg, real2;
14604
14605 if (GET_CODE (real) != SET
14606 || GET_MODE (SET_SRC (real)) != V2SImode)
14607 return real;
14608
14609 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
14610 frame related note. The parallel contains a set of the register
41f3a930 14611 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
14612 This is so we can differentiate between 64-bit and 32-bit saves.
14613 Words cannot describe this nastiness. */
14614
37409796
NS
14615 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14616 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14617 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
14618
14619 /* Transform:
14620 (set (mem (plus (reg x) (const y)))
14621 (reg z))
14622 into:
14623 (set (mem (plus (reg x) (const y+4)))
41f3a930 14624 (reg z+1200))
c19de7aa
AH
14625 */
14626
14627 real2 = copy_rtx (real);
14628 PUT_MODE (SET_DEST (real2), SImode);
14629 reg = SET_SRC (real2);
14630 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14631 synth = copy_rtx (real2);
14632
14633 if (BYTES_BIG_ENDIAN)
14634 {
14635 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14636 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14637 }
14638
14639 reg = SET_SRC (synth);
41f3a930 14640
c19de7aa 14641 synth = replace_rtx (synth, reg,
41f3a930 14642 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
14643
14644 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14645 synth = replace_rtx (synth, offset,
14646 GEN_INT (INTVAL (offset)
14647 + (BYTES_BIG_ENDIAN ? 0 : 4)));
14648
14649 RTX_FRAME_RELATED_P (synth) = 1;
14650 RTX_FRAME_RELATED_P (real2) = 1;
14651 if (BYTES_BIG_ENDIAN)
14652 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14653 else
14654 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14655
14656 return real;
14657}
14658
00b960c7
AH
14659/* Returns an insn that has a vrsave set operation with the
14660 appropriate CLOBBERs. */
14661
14662static rtx
a2369ed3 14663generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
14664{
14665 int nclobs, i;
14666 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 14667 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 14668
a004eb82
AH
14669 clobs[0]
14670 = gen_rtx_SET (VOIDmode,
14671 vrsave,
14672 gen_rtx_UNSPEC_VOLATILE (SImode,
14673 gen_rtvec (2, reg, vrsave),
3aca4bff 14674 UNSPECV_SET_VRSAVE));
00b960c7
AH
14675
14676 nclobs = 1;
14677
9aa86737
AH
14678 /* We need to clobber the registers in the mask so the scheduler
14679 does not move sets to VRSAVE before sets of AltiVec registers.
14680
14681 However, if the function receives nonlocal gotos, reload will set
14682 all call saved registers live. We will end up with:
14683
14684 (set (reg 999) (mem))
14685 (parallel [ (set (reg vrsave) (unspec blah))
14686 (clobber (reg 999))])
14687
14688 The clobber will cause the store into reg 999 to be dead, and
14689 flow will attempt to delete an epilogue insn. In this case, we
14690 need an unspec use/set of the register. */
00b960c7
AH
14691
14692 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 14693 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
14694 {
14695 if (!epiloguep || call_used_regs [i])
14696 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14697 gen_rtx_REG (V4SImode, i));
14698 else
14699 {
14700 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
14701
14702 clobs[nclobs++]
a004eb82
AH
14703 = gen_rtx_SET (VOIDmode,
14704 reg,
14705 gen_rtx_UNSPEC (V4SImode,
14706 gen_rtvec (1, reg), 27));
9aa86737
AH
14707 }
14708 }
00b960c7
AH
14709
14710 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14711
14712 for (i = 0; i < nclobs; ++i)
14713 XVECEXP (insn, 0, i) = clobs[i];
14714
14715 return insn;
14716}
14717
89e7058f
AH
14718/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14719 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
14720
14721static void
f676971a 14722emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 14723 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
14724{
14725 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14726 rtx replacea, replaceb;
14727
14728 int_rtx = GEN_INT (offset);
14729
14730 /* Some cases that need register indexed addressing. */
14731 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 14732 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
14733 || (TARGET_SPE_ABI
14734 && SPE_VECTOR_MODE (mode)
14735 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
14736 {
14737 /* Whomever calls us must make sure r11 is available in the
c4ad648e 14738 flow path of instructions in the prologue. */
89e7058f
AH
14739 offset_rtx = gen_rtx_REG (Pmode, 11);
14740 emit_move_insn (offset_rtx, int_rtx);
14741
14742 replacea = offset_rtx;
14743 replaceb = int_rtx;
14744 }
14745 else
14746 {
14747 offset_rtx = int_rtx;
14748 replacea = NULL_RTX;
14749 replaceb = NULL_RTX;
14750 }
14751
14752 reg = gen_rtx_REG (mode, regno);
14753 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 14754 mem = gen_frame_mem (mode, addr);
89e7058f
AH
14755
14756 insn = emit_move_insn (mem, reg);
14757
14758 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14759}
14760
a3170dc6
AH
14761/* Emit an offset memory reference suitable for a frame store, while
14762 converting to a valid addressing mode. */
14763
14764static rtx
a2369ed3 14765gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
14766{
14767 rtx int_rtx, offset_rtx;
14768
14769 int_rtx = GEN_INT (offset);
14770
4d4cbc0e
AH
14771 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14772 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
14773 {
14774 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14775 emit_move_insn (offset_rtx, int_rtx);
14776 }
14777 else
14778 offset_rtx = int_rtx;
14779
0be76840 14780 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
14781}
14782
6d0a8091
DJ
14783/* Look for user-defined global regs. We should not save and restore these,
14784 and cannot use stmw/lmw if there are any in its range. */
14785
14786static bool
14787no_global_regs_above (int first_greg)
14788{
14789 int i;
14790 for (i = 0; i < 32 - first_greg; i++)
14791 if (global_regs[first_greg + i])
14792 return false;
14793 return true;
14794}
14795
699c914a
MS
14796#ifndef TARGET_FIX_AND_CONTINUE
14797#define TARGET_FIX_AND_CONTINUE 0
14798#endif
14799
52ff33d0
NF
14800/* Determine whether the gp REG is really used. */
14801
14802static bool
14803rs6000_reg_live_or_pic_offset_p (int reg)
14804{
6fb5fa3c 14805 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
14806 && (!call_used_regs[reg]
14807 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14808 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14809 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14810 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14811 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
14812}
14813
9ebbca7d
GK
14814/* Emit function prologue as insns. */
14815
9878760c 14816void
863d938c 14817rs6000_emit_prologue (void)
9878760c 14818{
4697a36c 14819 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 14820 enum machine_mode reg_mode = Pmode;
327e5343 14821 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14822 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14823 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14824 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 14825 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
14826 rtx insn;
14827 int saving_FPRs_inline;
14828 int using_store_multiple;
14829 HOST_WIDE_INT sp_offset = 0;
f676971a 14830
699c914a
MS
14831 if (TARGET_FIX_AND_CONTINUE)
14832 {
14833 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 14834 address by modifying the first 5 instructions of the function
699c914a
MS
14835 to branch to the overriding function. This is necessary to
14836 permit function pointers that point to the old function to
14837 actually forward to the new function. */
14838 emit_insn (gen_nop ());
14839 emit_insn (gen_nop ());
de2ab0ca 14840 emit_insn (gen_nop ());
699c914a
MS
14841 emit_insn (gen_nop ());
14842 emit_insn (gen_nop ());
14843 }
14844
14845 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14846 {
14847 reg_mode = V2SImode;
14848 reg_size = 8;
14849 }
a3170dc6 14850
9ebbca7d 14851 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14852 && (!TARGET_SPE_ABI
14853 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14854 && info->first_gp_reg_save < 31
14855 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14856 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 14857 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 14858 || current_function_calls_eh_return
8c29550d 14859 || cfun->machine->ra_need_lr);
9ebbca7d
GK
14860
14861 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
14862 if (! WORLD_SAVE_P (info)
14863 && info->push_p
acd0b319
AM
14864 && (DEFAULT_ABI == ABI_V4
14865 || current_function_calls_eh_return))
9ebbca7d
GK
14866 {
14867 if (info->total_size < 32767)
14868 sp_offset = info->total_size;
14869 else
14870 frame_reg_rtx = frame_ptr_rtx;
f676971a 14871 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
14872 (frame_reg_rtx != sp_reg_rtx
14873 && (info->cr_save_p
14874 || info->lr_save_p
14875 || info->first_fp_reg_save < 64
14876 || info->first_gp_reg_save < 32
14877 )));
14878 if (frame_reg_rtx != sp_reg_rtx)
14879 rs6000_emit_stack_tie ();
14880 }
14881
d62294f5 14882 /* Handle world saves specially here. */
f57fe068 14883 if (WORLD_SAVE_P (info))
d62294f5
FJ
14884 {
14885 int i, j, sz;
14886 rtx treg;
14887 rtvec p;
22fa69da 14888 rtx reg0;
d62294f5
FJ
14889
14890 /* save_world expects lr in r0. */
22fa69da 14891 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 14892 if (info->lr_save_p)
c4ad648e 14893 {
22fa69da 14894 insn = emit_move_insn (reg0,
1de43f85 14895 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
14896 RTX_FRAME_RELATED_P (insn) = 1;
14897 }
d62294f5
FJ
14898
14899 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 14900 assumptions about the offsets of various bits of the stack
992d08b1 14901 frame. */
37409796
NS
14902 gcc_assert (info->gp_save_offset == -220
14903 && info->fp_save_offset == -144
14904 && info->lr_save_offset == 8
14905 && info->cr_save_offset == 4
14906 && info->push_p
14907 && info->lr_save_p
14908 && (!current_function_calls_eh_return
14909 || info->ehrd_offset == -432)
14910 && info->vrsave_save_offset == -224
22fa69da 14911 && info->altivec_save_offset == -416);
d62294f5
FJ
14912
14913 treg = gen_rtx_REG (SImode, 11);
14914 emit_move_insn (treg, GEN_INT (-info->total_size));
14915
14916 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 14917 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
14918
14919 /* Preserve CR2 for save_world prologues */
22fa69da 14920 sz = 5;
d62294f5
FJ
14921 sz += 32 - info->first_gp_reg_save;
14922 sz += 64 - info->first_fp_reg_save;
14923 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14924 p = rtvec_alloc (sz);
14925 j = 0;
14926 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 14927 gen_rtx_REG (SImode,
1de43f85 14928 LR_REGNO));
d62294f5 14929 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14930 gen_rtx_SYMBOL_REF (Pmode,
14931 "*save_world"));
d62294f5 14932 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
14933 properly. */
14934 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14935 {
14936 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14937 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14938 GEN_INT (info->fp_save_offset
14939 + sp_offset + 8 * i));
0be76840 14940 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
14941
14942 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14943 }
d62294f5 14944 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14945 {
14946 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14947 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14948 GEN_INT (info->altivec_save_offset
14949 + sp_offset + 16 * i));
0be76840 14950 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
14951
14952 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14953 }
d62294f5 14954 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14955 {
14956 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14957 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14958 GEN_INT (info->gp_save_offset
14959 + sp_offset + reg_size * i));
0be76840 14960 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14961
14962 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14963 }
14964
14965 {
14966 /* CR register traditionally saved as CR2. */
14967 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14968 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14969 GEN_INT (info->cr_save_offset
14970 + sp_offset));
0be76840 14971 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14972
14973 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14974 }
22fa69da
GK
14975 /* Explain about use of R0. */
14976 if (info->lr_save_p)
14977 {
14978 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14979 GEN_INT (info->lr_save_offset
14980 + sp_offset));
14981 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 14982
22fa69da
GK
14983 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
14984 }
14985 /* Explain what happens to the stack pointer. */
14986 {
14987 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
14988 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
14989 }
d62294f5
FJ
14990
14991 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14992 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
14993 treg, GEN_INT (-info->total_size));
14994 sp_offset = info->total_size;
d62294f5
FJ
14995 }
14996
9ebbca7d 14997 /* If we use the link register, get it into r0. */
f57fe068 14998 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 14999 {
52ff33d0
NF
15000 rtx addr, reg, mem;
15001
f8a57be8 15002 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 15003 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 15004 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
15005
15006 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15007 GEN_INT (info->lr_save_offset + sp_offset));
15008 reg = gen_rtx_REG (Pmode, 0);
15009 mem = gen_rtx_MEM (Pmode, addr);
15010 /* This should not be of rs6000_sr_alias_set, because of
15011 __builtin_return_address. */
15012
15013 insn = emit_move_insn (mem, reg);
15014 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15015 NULL_RTX, NULL_RTX);
f8a57be8 15016 }
9ebbca7d
GK
15017
15018 /* If we need to save CR, put it into r12. */
f57fe068 15019 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 15020 {
f8a57be8 15021 rtx set;
f676971a 15022
9ebbca7d 15023 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
15024 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15025 RTX_FRAME_RELATED_P (insn) = 1;
15026 /* Now, there's no way that dwarf2out_frame_debug_expr is going
15027 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
15028 But that's OK. All we have to do is specify that _one_ condition
15029 code register is saved in this stack slot. The thrower's epilogue
15030 will then restore all the call-saved registers.
15031 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
15032 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
15033 gen_rtx_REG (SImode, CR2_REGNO));
15034 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15035 set,
15036 REG_NOTES (insn));
9ebbca7d
GK
15037 }
15038
a4f6c312
SS
15039 /* Do any required saving of fpr's. If only one or two to save, do
15040 it ourselves. Otherwise, call function. */
f57fe068 15041 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
15042 {
15043 int i;
15044 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15045 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15046 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15047 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15048 info->first_fp_reg_save + i,
15049 info->fp_save_offset + sp_offset + 8 * i,
15050 info->total_size);
9ebbca7d 15051 }
f57fe068 15052 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15053 {
15054 int i;
15055 char rname[30];
520a57c8 15056 const char *alloc_rname;
9ebbca7d
GK
15057 rtvec p;
15058 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15059
15060 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15061 gen_rtx_REG (Pmode,
1de43f85 15062 LR_REGNO));
9ebbca7d
GK
15063 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15064 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15065 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15066 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15067 gen_rtx_SYMBOL_REF (Pmode,
15068 alloc_rname));
15069 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15070 {
15071 rtx addr, reg, mem;
15072 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15073 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15074 GEN_INT (info->fp_save_offset
9ebbca7d 15075 + sp_offset + 8*i));
0be76840 15076 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15077
15078 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15079 }
15080 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15081 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15082 NULL_RTX, NULL_RTX);
15083 }
b6c9286a 15084
9ebbca7d
GK
15085 /* Save GPRs. This is done as a PARALLEL if we are using
15086 the store-multiple instructions. */
f57fe068 15087 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15088 {
308c142a 15089 rtvec p;
9ebbca7d
GK
15090 int i;
15091 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15092 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15093 {
15094 rtx addr, reg, mem;
15095 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15096 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15097 GEN_INT (info->gp_save_offset
15098 + sp_offset
9ebbca7d 15099 + reg_size * i));
0be76840 15100 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15101
15102 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15103 }
15104 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15105 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15106 NULL_RTX, NULL_RTX);
b6c9286a 15107 }
52ff33d0
NF
15108 else if (!WORLD_SAVE_P (info)
15109 && TARGET_SPE_ABI
15110 && info->spe_64bit_regs_used != 0
15111 && info->first_gp_reg_save != 32)
15112 {
15113 int i;
15114 rtx spe_save_area_ptr;
15115 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15116 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15117 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15118
15119 /* Determine whether we can address all of the registers that need
15120 to be saved with an offset from the stack pointer that fits in
15121 the small const field for SPE memory instructions. */
15122 int spe_regs_addressable_via_sp
15123 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15124 + (32 - info->first_gp_reg_save - 1) * reg_size);
15125 int spe_offset;
15126
15127 if (spe_regs_addressable_via_sp)
15128 {
15129 spe_save_area_ptr = sp_reg_rtx;
15130 spe_offset = info->spe_gp_save_offset + sp_offset;
15131 }
15132 else
15133 {
15134 /* Make r11 point to the start of the SPE save area. We need
15135 to be careful here if r11 is holding the static chain. If
15136 it is, then temporarily save it in r0. We would use r0 as
15137 our base register here, but using r0 as a base register in
15138 loads and stores means something different from what we
15139 would like. */
15140 if (using_static_chain_p)
15141 {
15142 rtx r0 = gen_rtx_REG (Pmode, 0);
15143
15144 gcc_assert (info->first_gp_reg_save > 11);
15145
15146 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15147 }
15148
15149 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15150 emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
15151 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15152
15153 spe_offset = 0;
15154 }
15155
15156 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15157 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15158 {
15159 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15160 rtx offset, addr, mem;
15161
15162 /* We're doing all this to ensure that the offset fits into
15163 the immediate offset of 'evstdd'. */
15164 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15165
15166 offset = GEN_INT (reg_size * i + spe_offset);
15167 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15168 mem = gen_rtx_MEM (V2SImode, addr);
15169
15170 insn = emit_move_insn (mem, reg);
15171
15172 rs6000_frame_related (insn, spe_save_area_ptr,
15173 info->spe_gp_save_offset
15174 + sp_offset + reg_size * i,
15175 offset, const0_rtx);
15176 }
15177
15178 /* Move the static chain pointer back. */
15179 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15180 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15181 }
f57fe068 15182 else if (!WORLD_SAVE_P (info))
b6c9286a 15183 {
9ebbca7d
GK
15184 int i;
15185 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15186 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15187 {
15188 rtx addr, reg, mem;
15189 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15190
52ff33d0
NF
15191 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15192 GEN_INT (info->gp_save_offset
15193 + sp_offset
15194 + reg_size * i));
15195 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15196
52ff33d0
NF
15197 insn = emit_move_insn (mem, reg);
15198 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15199 NULL_RTX, NULL_RTX);
15200 }
9ebbca7d
GK
15201 }
15202
83720594
RH
15203 /* ??? There's no need to emit actual instructions here, but it's the
15204 easiest way to get the frame unwind information emitted. */
22fa69da 15205 if (current_function_calls_eh_return)
83720594 15206 {
78e1b90d
DE
15207 unsigned int i, regno;
15208
fc4767bb
JJ
15209 /* In AIX ABI we need to pretend we save r2 here. */
15210 if (TARGET_AIX)
15211 {
15212 rtx addr, reg, mem;
15213
15214 reg = gen_rtx_REG (reg_mode, 2);
15215 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15216 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15217 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15218
15219 insn = emit_move_insn (mem, reg);
f676971a 15220 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15221 NULL_RTX, NULL_RTX);
15222 PATTERN (insn) = gen_blockage ();
15223 }
15224
83720594
RH
15225 for (i = 0; ; ++i)
15226 {
83720594
RH
15227 regno = EH_RETURN_DATA_REGNO (i);
15228 if (regno == INVALID_REGNUM)
15229 break;
15230
89e7058f
AH
15231 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15232 info->ehrd_offset + sp_offset
15233 + reg_size * (int) i,
15234 info->total_size);
83720594
RH
15235 }
15236 }
15237
9ebbca7d 15238 /* Save CR if we use any that must be preserved. */
f57fe068 15239 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15240 {
15241 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15242 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15243 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15244 /* See the large comment above about why CR2_REGNO is used. */
15245 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15246
9ebbca7d
GK
15247 /* If r12 was used to hold the original sp, copy cr into r0 now
15248 that it's free. */
15249 if (REGNO (frame_reg_rtx) == 12)
15250 {
f8a57be8
GK
15251 rtx set;
15252
9ebbca7d 15253 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15254 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15255 RTX_FRAME_RELATED_P (insn) = 1;
15256 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15257 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15258 set,
15259 REG_NOTES (insn));
f676971a 15260
9ebbca7d
GK
15261 }
15262 insn = emit_move_insn (mem, cr_save_rtx);
15263
f676971a 15264 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15265 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15266 }
15267
f676971a 15268 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15269 for which it was done previously. */
f57fe068 15270 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15271 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15272 {
bcb2d701 15273 if (info->total_size < 32767)
2b2c2fe5 15274 sp_offset = info->total_size;
bcb2d701
EC
15275 else
15276 frame_reg_rtx = frame_ptr_rtx;
15277 rs6000_emit_allocate_stack (info->total_size,
15278 (frame_reg_rtx != sp_reg_rtx
15279 && ((info->altivec_size != 0)
15280 || (info->vrsave_mask != 0)
15281 )));
15282 if (frame_reg_rtx != sp_reg_rtx)
15283 rs6000_emit_stack_tie ();
2b2c2fe5 15284 }
9ebbca7d
GK
15285
15286 /* Set frame pointer, if needed. */
15287 if (frame_pointer_needed)
15288 {
7d5175e1 15289 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15290 sp_reg_rtx);
15291 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15292 }
9878760c 15293
2b2c2fe5
EC
15294 /* Save AltiVec registers if needed. Save here because the red zone does
15295 not include AltiVec registers. */
15296 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15297 {
15298 int i;
15299
15300 /* There should be a non inline version of this, for when we
15301 are saving lots of vector registers. */
15302 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15303 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15304 {
15305 rtx areg, savereg, mem;
15306 int offset;
15307
15308 offset = info->altivec_save_offset + sp_offset
15309 + 16 * (i - info->first_altivec_reg_save);
15310
15311 savereg = gen_rtx_REG (V4SImode, i);
15312
15313 areg = gen_rtx_REG (Pmode, 0);
15314 emit_move_insn (areg, GEN_INT (offset));
15315
15316 /* AltiVec addressing mode is [reg+reg]. */
15317 mem = gen_frame_mem (V4SImode,
15318 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15319
15320 insn = emit_move_insn (mem, savereg);
15321
15322 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15323 areg, GEN_INT (offset));
15324 }
15325 }
15326
15327 /* VRSAVE is a bit vector representing which AltiVec registers
15328 are used. The OS uses this to determine which vector
15329 registers to save on a context switch. We need to save
15330 VRSAVE on the stack frame, add whatever AltiVec registers we
15331 used in this function, and do the corresponding magic in the
15332 epilogue. */
15333
15334 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15335 && info->vrsave_mask != 0)
15336 {
15337 rtx reg, mem, vrsave;
15338 int offset;
15339
15340 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15341 as frame_reg_rtx and r11 as the static chain pointer for
15342 nested functions. */
15343 reg = gen_rtx_REG (SImode, 0);
15344 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15345 if (TARGET_MACHO)
15346 emit_insn (gen_get_vrsave_internal (reg));
15347 else
15348 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15349
15350 if (!WORLD_SAVE_P (info))
15351 {
15352 /* Save VRSAVE. */
15353 offset = info->vrsave_save_offset + sp_offset;
15354 mem = gen_frame_mem (SImode,
15355 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15356 GEN_INT (offset)));
15357 insn = emit_move_insn (mem, reg);
15358 }
15359
15360 /* Include the registers in the mask. */
15361 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15362
15363 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15364 }
15365
1db02437 15366 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15367 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15368 || (DEFAULT_ABI == ABI_V4
15369 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15370 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15371 {
15372 /* If emit_load_toc_table will use the link register, we need to save
15373 it. We use R12 for this purpose because emit_load_toc_table
15374 can use register 0. This allows us to use a plain 'blr' to return
15375 from the procedure more often. */
15376 int save_LR_around_toc_setup = (TARGET_ELF
15377 && DEFAULT_ABI != ABI_AIX
15378 && flag_pic
15379 && ! info->lr_save_p
15380 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15381 if (save_LR_around_toc_setup)
15382 {
1de43f85 15383 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15384
c4ad648e 15385 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15386 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15387
c4ad648e 15388 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15389
c4ad648e 15390 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15391 RTX_FRAME_RELATED_P (insn) = 1;
15392 }
15393 else
15394 rs6000_emit_load_toc_table (TRUE);
15395 }
ee890fe2 15396
fcce224d 15397#if TARGET_MACHO
ee890fe2
SS
15398 if (DEFAULT_ABI == ABI_DARWIN
15399 && flag_pic && current_function_uses_pic_offset_table)
15400 {
1de43f85 15401 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15402 rtx src = machopic_function_base_sym ();
ee890fe2 15403
6d0a8091
DJ
15404 /* Save and restore LR locally around this call (in R0). */
15405 if (!info->lr_save_p)
6fb5fa3c 15406 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15407
6fb5fa3c 15408 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15409
6fb5fa3c
DB
15410 emit_move_insn (gen_rtx_REG (Pmode,
15411 RS6000_PIC_OFFSET_TABLE_REGNUM),
15412 lr);
6d0a8091
DJ
15413
15414 if (!info->lr_save_p)
6fb5fa3c 15415 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15416 }
fcce224d 15417#endif
9ebbca7d
GK
15418}
15419
9ebbca7d 15420/* Write function prologue. */
a4f6c312 15421
08c148a8 15422static void
f676971a 15423rs6000_output_function_prologue (FILE *file,
a2369ed3 15424 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15425{
15426 rs6000_stack_t *info = rs6000_stack_info ();
15427
4697a36c
MM
15428 if (TARGET_DEBUG_STACK)
15429 debug_stack_info (info);
9878760c 15430
a4f6c312
SS
15431 /* Write .extern for any function we will call to save and restore
15432 fp values. */
15433 if (info->first_fp_reg_save < 64
15434 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15435 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15436 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15437 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15438 RESTORE_FP_SUFFIX);
9878760c 15439
c764f757
RK
15440 /* Write .extern for AIX common mode routines, if needed. */
15441 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15442 {
f6709c70
JW
15443 fputs ("\t.extern __mulh\n", file);
15444 fputs ("\t.extern __mull\n", file);
15445 fputs ("\t.extern __divss\n", file);
15446 fputs ("\t.extern __divus\n", file);
15447 fputs ("\t.extern __quoss\n", file);
15448 fputs ("\t.extern __quous\n", file);
c764f757
RK
15449 common_mode_defined = 1;
15450 }
9878760c 15451
9ebbca7d 15452 if (! HAVE_prologue)
979721f8 15453 {
9ebbca7d 15454 start_sequence ();
9dda4cc8 15455
a4f6c312
SS
15456 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
15457 the "toplevel" insn chain. */
2e040219 15458 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15459 rs6000_emit_prologue ();
2e040219 15460 emit_note (NOTE_INSN_DELETED);
178c3eff 15461
a3c9585f 15462 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15463 {
15464 rtx insn;
15465 unsigned addr = 0;
15466 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15467 {
15468 INSN_ADDRESSES_NEW (insn, addr);
15469 addr += 4;
15470 }
15471 }
9dda4cc8 15472
9ebbca7d 15473 if (TARGET_DEBUG_STACK)
a4f6c312 15474 debug_rtx_list (get_insns (), 100);
c9d691e9 15475 final (get_insns (), file, FALSE);
9ebbca7d 15476 end_sequence ();
979721f8
MM
15477 }
15478
9ebbca7d
GK
15479 rs6000_pic_labelno++;
15480}
f676971a 15481
9ebbca7d 15482/* Emit function epilogue as insns.
9878760c 15483
9ebbca7d
GK
15484 At present, dwarf2out_frame_debug_expr doesn't understand
15485 register restores, so we don't bother setting RTX_FRAME_RELATED_P
15486 anywhere in the epilogue. Most of the insns below would in any case
15487 need special notes to explain where r11 is in relation to the stack. */
9878760c 15488
9ebbca7d 15489void
a2369ed3 15490rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
15491{
15492 rs6000_stack_t *info;
15493 int restoring_FPRs_inline;
15494 int using_load_multiple;
d296e02e 15495 int using_mtcr_multiple;
9ebbca7d
GK
15496 int use_backchain_to_restore_sp;
15497 int sp_offset = 0;
15498 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
15499 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 15500 enum machine_mode reg_mode = Pmode;
327e5343 15501 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15502 int i;
15503
c19de7aa
AH
15504 info = rs6000_stack_info ();
15505
15506 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15507 {
15508 reg_mode = V2SImode;
15509 reg_size = 8;
15510 }
15511
9ebbca7d 15512 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15513 && (!TARGET_SPE_ABI
15514 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15515 && info->first_gp_reg_save < 31
15516 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15517 restoring_FPRs_inline = (sibcall
83720594 15518 || current_function_calls_eh_return
9ebbca7d
GK
15519 || info->first_fp_reg_save == 64
15520 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 15521 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
15522 || current_function_calls_alloca
15523 || info->total_size > 32767);
d296e02e 15524 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
15525 || rs6000_cpu == PROCESSOR_PPC603
15526 || rs6000_cpu == PROCESSOR_PPC750
15527 || optimize_size);
15528
f57fe068 15529 if (WORLD_SAVE_P (info))
d62294f5
FJ
15530 {
15531 int i, j;
15532 char rname[30];
15533 const char *alloc_rname;
15534 rtvec p;
15535
15536 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
15537 stack slot (which is not likely to be our caller.)
15538 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
15539 rest_world is similar, except any R10 parameter is ignored.
15540 The exception-handling stuff that was here in 2.95 is no
15541 longer necessary. */
d62294f5
FJ
15542
15543 p = rtvec_alloc (9
15544 + 1
f676971a 15545 + 32 - info->first_gp_reg_save
c4ad648e
AM
15546 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
15547 + 63 + 1 - info->first_fp_reg_save);
d62294f5 15548
c4ad648e
AM
15549 strcpy (rname, ((current_function_calls_eh_return) ?
15550 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
15551 alloc_rname = ggc_strdup (rname);
15552
15553 j = 0;
15554 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
15555 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 15556 gen_rtx_REG (Pmode,
1de43f85 15557 LR_REGNO));
d62294f5 15558 RTVEC_ELT (p, j++)
c4ad648e 15559 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 15560 /* The instruction pattern requires a clobber here;
c4ad648e 15561 it is shared with the restVEC helper. */
d62294f5 15562 RTVEC_ELT (p, j++)
c4ad648e 15563 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
15564
15565 {
c4ad648e
AM
15566 /* CR register traditionally saved as CR2. */
15567 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15568 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15569 GEN_INT (info->cr_save_offset));
0be76840 15570 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15571
15572 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
15573 }
15574
15575 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15576 {
15577 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15578 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15579 GEN_INT (info->gp_save_offset
15580 + reg_size * i));
0be76840 15581 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15582
15583 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15584 }
d62294f5 15585 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15586 {
15587 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15588 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15589 GEN_INT (info->altivec_save_offset
15590 + 16 * i));
0be76840 15591 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15592
15593 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15594 }
d62294f5 15595 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
15596 {
15597 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15598 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15599 GEN_INT (info->fp_save_offset
15600 + 8 * i));
0be76840 15601 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15602
15603 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15604 }
d62294f5 15605 RTVEC_ELT (p, j++)
c4ad648e 15606 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 15607 RTVEC_ELT (p, j++)
c4ad648e 15608 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 15609 RTVEC_ELT (p, j++)
c4ad648e 15610 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 15611 RTVEC_ELT (p, j++)
c4ad648e 15612 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 15613 RTVEC_ELT (p, j++)
c4ad648e 15614 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
15615 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15616
15617 return;
15618 }
15619
2b2c2fe5 15620 /* Set sp_offset based on the stack push from the prologue. */
bcb2d701 15621 if (info->total_size < 32767)
2b2c2fe5 15622 sp_offset = info->total_size;
f676971a 15623
9aa86737
AH
15624 /* Restore AltiVec registers if needed. */
15625 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15626 {
15627 int i;
15628
15629 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15630 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15631 {
15632 rtx addr, areg, mem;
15633
15634 areg = gen_rtx_REG (Pmode, 0);
15635 emit_move_insn
15636 (areg, GEN_INT (info->altivec_save_offset
15637 + sp_offset
15638 + 16 * (i - info->first_altivec_reg_save)));
15639
15640 /* AltiVec addressing mode is [reg+reg]. */
15641 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 15642 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
15643
15644 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15645 }
15646 }
15647
15648 /* Restore VRSAVE if needed. */
44688022 15649 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 15650 && info->vrsave_mask != 0)
9aa86737
AH
15651 {
15652 rtx addr, mem, reg;
15653
15654 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15655 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 15656 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
15657 reg = gen_rtx_REG (SImode, 12);
15658 emit_move_insn (reg, mem);
15659
15660 emit_insn (generate_set_vrsave (reg, info, 1));
15661 }
15662
2b2c2fe5
EC
15663 sp_offset = 0;
15664
15665 /* If we have a frame pointer, a call to alloca, or a large stack
15666 frame, restore the old stack pointer using the backchain. Otherwise,
15667 we know what size to update it with. */
15668 if (use_backchain_to_restore_sp)
15669 {
15670 /* Under V.4, don't reset the stack pointer until after we're done
15671 loading the saved registers. */
15672 if (DEFAULT_ABI == ABI_V4)
15673 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
15674
15675 emit_move_insn (frame_reg_rtx,
15676 gen_rtx_MEM (Pmode, sp_reg_rtx));
15677 }
15678 else if (info->push_p)
15679 {
15680 if (DEFAULT_ABI == ABI_V4
15681 || current_function_calls_eh_return)
15682 sp_offset = info->total_size;
15683 else
15684 {
15685 emit_insn (TARGET_32BIT
15686 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15687 GEN_INT (info->total_size))
15688 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15689 GEN_INT (info->total_size)));
15690 }
15691 }
15692
9ebbca7d
GK
15693 /* Get the old lr if we saved it. */
15694 if (info->lr_save_p)
b6c9286a 15695 {
a3170dc6
AH
15696 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15697 info->lr_save_offset + sp_offset);
ba4828e0 15698
9ebbca7d 15699 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 15700 }
f676971a 15701
9ebbca7d
GK
15702 /* Get the old cr if we saved it. */
15703 if (info->cr_save_p)
15704 {
15705 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15706 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15707 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 15708
9ebbca7d
GK
15709 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15710 }
f676971a 15711
9ebbca7d 15712 /* Set LR here to try to overlap restores below. */
4697a36c 15713 if (info->lr_save_p)
1de43f85 15714 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 15715 gen_rtx_REG (Pmode, 0));
f676971a 15716
83720594
RH
15717 /* Load exception handler data registers, if needed. */
15718 if (current_function_calls_eh_return)
15719 {
78e1b90d
DE
15720 unsigned int i, regno;
15721
fc4767bb
JJ
15722 if (TARGET_AIX)
15723 {
15724 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15725 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15726 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15727
15728 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15729 }
15730
83720594
RH
15731 for (i = 0; ; ++i)
15732 {
a3170dc6 15733 rtx mem;
83720594
RH
15734
15735 regno = EH_RETURN_DATA_REGNO (i);
15736 if (regno == INVALID_REGNUM)
15737 break;
15738
a3170dc6
AH
15739 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15740 info->ehrd_offset + sp_offset
15741 + reg_size * (int) i);
83720594
RH
15742
15743 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15744 }
15745 }
f676971a 15746
9ebbca7d
GK
15747 /* Restore GPRs. This is done as a PARALLEL if we are using
15748 the load-multiple instructions. */
15749 if (using_load_multiple)
979721f8 15750 {
9ebbca7d
GK
15751 rtvec p;
15752 p = rtvec_alloc (32 - info->first_gp_reg_save);
15753 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 15754 {
f676971a
EC
15755 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15756 GEN_INT (info->gp_save_offset
15757 + sp_offset
9ebbca7d 15758 + reg_size * i));
0be76840 15759 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 15760
f676971a 15761 RTVEC_ELT (p, i) =
9ebbca7d
GK
15762 gen_rtx_SET (VOIDmode,
15763 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15764 mem);
979721f8 15765 }
9ebbca7d 15766 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 15767 }
52ff33d0
NF
15768 else if (TARGET_SPE_ABI
15769 && info->spe_64bit_regs_used != 0
15770 && info->first_gp_reg_save != 32)
15771 {
15772 rtx spe_save_area_ptr;
15773 /* Determine whether we can address all of the registers that need
15774 to be saved with an offset from the stack pointer that fits in
15775 the small const field for SPE memory instructions. */
15776 int spe_regs_addressable_via_sp
15777 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15778 + (32 - info->first_gp_reg_save - 1) * reg_size);
15779 int spe_offset;
15780
15781 if (spe_regs_addressable_via_sp)
15782 {
15783 spe_save_area_ptr = frame_reg_rtx;
15784 spe_offset = info->spe_gp_save_offset + sp_offset;
15785 }
15786 else
15787 {
15788 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 15789 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
15790 There's no need to worry here because the static chain is passed
15791 anew to every function. */
15792 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15793
15794 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
15795 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15796
15797 spe_offset = 0;
15798 }
15799
15800 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15801 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15802 {
15803 rtx offset, addr, mem;
15804
15805 /* We're doing all this to ensure that the immediate offset
15806 fits into the immediate field of 'evldd'. */
15807 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
15808
15809 offset = GEN_INT (spe_offset + reg_size * i);
15810 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15811 mem = gen_rtx_MEM (V2SImode, addr);
15812
15813 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15814 mem);
15815 }
15816 }
9ebbca7d
GK
15817 else
15818 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 15819 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 15820 {
f676971a
EC
15821 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15822 GEN_INT (info->gp_save_offset
15823 + sp_offset
9ebbca7d 15824 + reg_size * i));
0be76840 15825 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 15826
f676971a 15827 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 15828 info->first_gp_reg_save + i), mem);
9ebbca7d 15829 }
9878760c 15830
9ebbca7d
GK
15831 /* Restore fpr's if we need to do it without calling a function. */
15832 if (restoring_FPRs_inline)
15833 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15834 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
15835 && ! call_used_regs[info->first_fp_reg_save+i]))
15836 {
15837 rtx addr, mem;
15838 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
15839 GEN_INT (info->fp_save_offset
15840 + sp_offset
a4f6c312 15841 + 8 * i));
0be76840 15842 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15843
f676971a 15844 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
15845 info->first_fp_reg_save + i),
15846 mem);
15847 }
8d30c4ee 15848
9ebbca7d
GK
15849 /* If we saved cr, restore it here. Just those that were used. */
15850 if (info->cr_save_p)
979721f8 15851 {
9ebbca7d 15852 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 15853 int count = 0;
f676971a 15854
d296e02e 15855 if (using_mtcr_multiple)
979721f8 15856 {
9ebbca7d 15857 for (i = 0; i < 8; i++)
6fb5fa3c 15858 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 15859 count++;
37409796 15860 gcc_assert (count);
e35b9579
GK
15861 }
15862
d296e02e 15863 if (using_mtcr_multiple && count > 1)
e35b9579
GK
15864 {
15865 rtvec p;
15866 int ndx;
f676971a 15867
e35b9579 15868 p = rtvec_alloc (count);
9ebbca7d 15869
e35b9579 15870 ndx = 0;
9ebbca7d 15871 for (i = 0; i < 8; i++)
6fb5fa3c 15872 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
15873 {
15874 rtvec r = rtvec_alloc (2);
15875 RTVEC_ELT (r, 0) = r12_rtx;
15876 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 15877 RTVEC_ELT (p, ndx) =
f676971a 15878 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 15879 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 15880 ndx++;
9ebbca7d
GK
15881 }
15882 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 15883 gcc_assert (ndx == count);
979721f8
MM
15884 }
15885 else
9ebbca7d 15886 for (i = 0; i < 8; i++)
6fb5fa3c 15887 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 15888 {
f676971a 15889 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
15890 CR0_REGNO+i),
15891 r12_rtx));
979721f8 15892 }
979721f8
MM
15893 }
15894
9ebbca7d 15895 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
15896 have been done. */
15897 if (frame_reg_rtx != sp_reg_rtx)
15898 {
15899 /* This blockage is needed so that sched doesn't decide to move
15900 the sp change before the register restores. */
15901 rs6000_emit_stack_tie ();
52ff33d0
NF
15902 if (TARGET_SPE_ABI
15903 && info->spe_64bit_regs_used != 0
15904 && info->first_gp_reg_save != 32)
15905 emit_insn (gen_addsi3 (sp_reg_rtx, gen_rtx_REG (Pmode, 11),
15906 GEN_INT (-(info->spe_gp_save_offset + sp_offset))));
15907 else
15908 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
15909 }
15910 else if (sp_offset != 0)
15911 emit_insn (TARGET_32BIT
15912 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15913 GEN_INT (sp_offset))
15914 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15915 GEN_INT (sp_offset)));
b6c9286a 15916
83720594
RH
15917 if (current_function_calls_eh_return)
15918 {
15919 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 15920 emit_insn (TARGET_32BIT
83720594
RH
15921 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15922 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15923 }
15924
9ebbca7d
GK
15925 if (!sibcall)
15926 {
15927 rtvec p;
15928 if (! restoring_FPRs_inline)
15929 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15930 else
15931 p = rtvec_alloc (2);
b6c9286a 15932
e35b9579 15933 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
15934 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15935 gen_rtx_REG (Pmode,
1de43f85 15936 LR_REGNO));
9ebbca7d
GK
15937
15938 /* If we have to restore more than two FP registers, branch to the
15939 restore function. It will return to our caller. */
15940 if (! restoring_FPRs_inline)
15941 {
15942 int i;
15943 char rname[30];
520a57c8 15944 const char *alloc_rname;
979721f8 15945
f676971a 15946 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 15947 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 15948 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15949 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15950 gen_rtx_SYMBOL_REF (Pmode,
15951 alloc_rname));
b6c9286a 15952
9ebbca7d
GK
15953 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15954 {
15955 rtx addr, mem;
15956 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15957 GEN_INT (info->fp_save_offset + 8*i));
0be76840 15958 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15959
f676971a 15960 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
15961 gen_rtx_SET (VOIDmode,
15962 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15963 mem);
b6c9286a
MM
15964 }
15965 }
f676971a 15966
9ebbca7d 15967 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 15968 }
9878760c
RK
15969}
15970
15971/* Write function epilogue. */
15972
08c148a8 15973static void
f676971a 15974rs6000_output_function_epilogue (FILE *file,
a2369ed3 15975 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 15976{
9ebbca7d 15977 if (! HAVE_epilogue)
9878760c 15978 {
9ebbca7d
GK
15979 rtx insn = get_last_insn ();
15980 /* If the last insn was a BARRIER, we don't have to write anything except
15981 the trace table. */
15982 if (GET_CODE (insn) == NOTE)
15983 insn = prev_nonnote_insn (insn);
15984 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15985 {
9ebbca7d
GK
15986 /* This is slightly ugly, but at least we don't have two
15987 copies of the epilogue-emitting code. */
15988 start_sequence ();
15989
15990 /* A NOTE_INSN_DELETED is supposed to be at the start
15991 and end of the "toplevel" insn chain. */
2e040219 15992 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15993 rs6000_emit_epilogue (FALSE);
2e040219 15994 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15995
a3c9585f 15996 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15997 {
15998 rtx insn;
15999 unsigned addr = 0;
16000 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16001 {
16002 INSN_ADDRESSES_NEW (insn, addr);
16003 addr += 4;
16004 }
16005 }
16006
9ebbca7d 16007 if (TARGET_DEBUG_STACK)
a4f6c312 16008 debug_rtx_list (get_insns (), 100);
c9d691e9 16009 final (get_insns (), file, FALSE);
9ebbca7d 16010 end_sequence ();
4697a36c 16011 }
9878760c 16012 }
b4ac57ab 16013
efdba735
SH
16014#if TARGET_MACHO
16015 macho_branch_islands ();
0e5da0be
GK
16016 /* Mach-O doesn't support labels at the end of objects, so if
16017 it looks like we might want one, insert a NOP. */
16018 {
16019 rtx insn = get_last_insn ();
16020 while (insn
16021 && NOTE_P (insn)
a38e7aa5 16022 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 16023 insn = PREV_INSN (insn);
f676971a
EC
16024 if (insn
16025 && (LABEL_P (insn)
0e5da0be 16026 || (NOTE_P (insn)
a38e7aa5 16027 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
16028 fputs ("\tnop\n", file);
16029 }
16030#endif
16031
9b30bae2 16032 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
16033 on its format.
16034
16035 We don't output a traceback table if -finhibit-size-directive was
16036 used. The documentation for -finhibit-size-directive reads
16037 ``don't output a @code{.size} assembler directive, or anything
16038 else that would cause trouble if the function is split in the
16039 middle, and the two halves are placed at locations far apart in
16040 memory.'' The traceback table has this property, since it
16041 includes the offset from the start of the function to the
4d30c363
MM
16042 traceback table itself.
16043
16044 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 16045 different traceback table. */
57ac7be9 16046 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16047 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16048 {
69c75916 16049 const char *fname = NULL;
3ac88239 16050 const char *language_string = lang_hooks.name;
6041bf2f 16051 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16052 int i;
57ac7be9 16053 int optional_tbtab;
8097c268 16054 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16055
16056 if (rs6000_traceback == traceback_full)
16057 optional_tbtab = 1;
16058 else if (rs6000_traceback == traceback_part)
16059 optional_tbtab = 0;
16060 else
16061 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16062
69c75916
AM
16063 if (optional_tbtab)
16064 {
16065 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16066 while (*fname == '.') /* V.4 encodes . in the name */
16067 fname++;
16068
16069 /* Need label immediately before tbtab, so we can compute
16070 its offset from the function start. */
16071 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16072 ASM_OUTPUT_LABEL (file, fname);
16073 }
314fc5a9
ILT
16074
16075 /* The .tbtab pseudo-op can only be used for the first eight
16076 expressions, since it can't handle the possibly variable
16077 length fields that follow. However, if you omit the optional
16078 fields, the assembler outputs zeros for all optional fields
16079 anyways, giving each variable length field is minimum length
16080 (as defined in sys/debug.h). Thus we can not use the .tbtab
16081 pseudo-op at all. */
16082
16083 /* An all-zero word flags the start of the tbtab, for debuggers
16084 that have to find it by searching forward from the entry
16085 point or from the current pc. */
19d2d16f 16086 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16087
16088 /* Tbtab format type. Use format type 0. */
19d2d16f 16089 fputs ("\t.byte 0,", file);
314fc5a9 16090
5fc921c1
DE
16091 /* Language type. Unfortunately, there does not seem to be any
16092 official way to discover the language being compiled, so we
16093 use language_string.
16094 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16095 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16096 a number, so for now use 9. */
5fc921c1 16097 if (! strcmp (language_string, "GNU C"))
314fc5a9 16098 i = 0;
6de9cd9a
DN
16099 else if (! strcmp (language_string, "GNU F77")
16100 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16101 i = 1;
8b83775b 16102 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16103 i = 2;
5fc921c1
DE
16104 else if (! strcmp (language_string, "GNU Ada"))
16105 i = 3;
56438901
AM
16106 else if (! strcmp (language_string, "GNU C++")
16107 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16108 i = 9;
9517ead8
AG
16109 else if (! strcmp (language_string, "GNU Java"))
16110 i = 13;
5fc921c1
DE
16111 else if (! strcmp (language_string, "GNU Objective-C"))
16112 i = 14;
314fc5a9 16113 else
37409796 16114 gcc_unreachable ();
314fc5a9
ILT
16115 fprintf (file, "%d,", i);
16116
16117 /* 8 single bit fields: global linkage (not set for C extern linkage,
16118 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16119 from start of procedure stored in tbtab, internal function, function
16120 has controlled storage, function has no toc, function uses fp,
16121 function logs/aborts fp operations. */
16122 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16123 fprintf (file, "%d,",
16124 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16125
16126 /* 6 bitfields: function is interrupt handler, name present in
16127 proc table, function calls alloca, on condition directives
16128 (controls stack walks, 3 bits), saves condition reg, saves
16129 link reg. */
16130 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16131 set up as a frame pointer, even when there is no alloca call. */
16132 fprintf (file, "%d,",
6041bf2f
DE
16133 ((optional_tbtab << 6)
16134 | ((optional_tbtab & frame_pointer_needed) << 5)
16135 | (info->cr_save_p << 1)
16136 | (info->lr_save_p)));
314fc5a9 16137
6041bf2f 16138 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16139 (6 bits). */
16140 fprintf (file, "%d,",
4697a36c 16141 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16142
16143 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16144 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16145
6041bf2f
DE
16146 if (optional_tbtab)
16147 {
16148 /* Compute the parameter info from the function decl argument
16149 list. */
16150 tree decl;
16151 int next_parm_info_bit = 31;
314fc5a9 16152
6041bf2f
DE
16153 for (decl = DECL_ARGUMENTS (current_function_decl);
16154 decl; decl = TREE_CHAIN (decl))
16155 {
16156 rtx parameter = DECL_INCOMING_RTL (decl);
16157 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16158
6041bf2f
DE
16159 if (GET_CODE (parameter) == REG)
16160 {
ebb109ad 16161 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16162 {
16163 int bits;
16164
16165 float_parms++;
16166
37409796
NS
16167 switch (mode)
16168 {
16169 case SFmode:
16170 bits = 0x2;
16171 break;
16172
16173 case DFmode:
7393f7f8 16174 case DDmode:
37409796 16175 case TFmode:
7393f7f8 16176 case TDmode:
37409796
NS
16177 bits = 0x3;
16178 break;
16179
16180 default:
16181 gcc_unreachable ();
16182 }
6041bf2f
DE
16183
16184 /* If only one bit will fit, don't or in this entry. */
16185 if (next_parm_info_bit > 0)
16186 parm_info |= (bits << (next_parm_info_bit - 1));
16187 next_parm_info_bit -= 2;
16188 }
16189 else
16190 {
16191 fixed_parms += ((GET_MODE_SIZE (mode)
16192 + (UNITS_PER_WORD - 1))
16193 / UNITS_PER_WORD);
16194 next_parm_info_bit -= 1;
16195 }
16196 }
16197 }
16198 }
314fc5a9
ILT
16199
16200 /* Number of fixed point parameters. */
16201 /* This is actually the number of words of fixed point parameters; thus
16202 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16203 fprintf (file, "%d,", fixed_parms);
16204
16205 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16206 all on stack. */
16207 /* This is actually the number of fp registers that hold parameters;
16208 and thus the maximum value is 13. */
16209 /* Set parameters on stack bit if parameters are not in their original
16210 registers, regardless of whether they are on the stack? Xlc
16211 seems to set the bit when not optimizing. */
16212 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16213
6041bf2f
DE
16214 if (! optional_tbtab)
16215 return;
16216
314fc5a9
ILT
16217 /* Optional fields follow. Some are variable length. */
16218
16219 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16220 11 double float. */
16221 /* There is an entry for each parameter in a register, in the order that
16222 they occur in the parameter list. Any intervening arguments on the
16223 stack are ignored. If the list overflows a long (max possible length
16224 34 bits) then completely leave off all elements that don't fit. */
16225 /* Only emit this long if there was at least one parameter. */
16226 if (fixed_parms || float_parms)
16227 fprintf (file, "\t.long %d\n", parm_info);
16228
16229 /* Offset from start of code to tb table. */
19d2d16f 16230 fputs ("\t.long ", file);
314fc5a9 16231 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16232 if (TARGET_AIX)
16233 RS6000_OUTPUT_BASENAME (file, fname);
16234 else
16235 assemble_name (file, fname);
16236 putc ('-', file);
16237 rs6000_output_function_entry (file, fname);
19d2d16f 16238 putc ('\n', file);
314fc5a9
ILT
16239
16240 /* Interrupt handler mask. */
16241 /* Omit this long, since we never set the interrupt handler bit
16242 above. */
16243
16244 /* Number of CTL (controlled storage) anchors. */
16245 /* Omit this long, since the has_ctl bit is never set above. */
16246
16247 /* Displacement into stack of each CTL anchor. */
16248 /* Omit this list of longs, because there are no CTL anchors. */
16249
16250 /* Length of function name. */
69c75916
AM
16251 if (*fname == '*')
16252 ++fname;
296b8152 16253 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16254
16255 /* Function name. */
16256 assemble_string (fname, strlen (fname));
16257
16258 /* Register for alloca automatic storage; this is always reg 31.
16259 Only emit this if the alloca bit was set above. */
16260 if (frame_pointer_needed)
19d2d16f 16261 fputs ("\t.byte 31\n", file);
b1765bde
DE
16262
16263 fputs ("\t.align 2\n", file);
9b30bae2 16264 }
9878760c 16265}
17167fd8 16266\f
a4f6c312
SS
16267/* A C compound statement that outputs the assembler code for a thunk
16268 function, used to implement C++ virtual function calls with
16269 multiple inheritance. The thunk acts as a wrapper around a virtual
16270 function, adjusting the implicit object parameter before handing
16271 control off to the real function.
16272
16273 First, emit code to add the integer DELTA to the location that
16274 contains the incoming first argument. Assume that this argument
16275 contains a pointer, and is the one used to pass the `this' pointer
16276 in C++. This is the incoming argument *before* the function
16277 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16278 values of all other incoming arguments.
17167fd8
MM
16279
16280 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16281 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16282 not touch the return address. Hence returning from FUNCTION will
16283 return to whoever called the current `thunk'.
17167fd8 16284
a4f6c312
SS
16285 The effect must be as if FUNCTION had been called directly with the
16286 adjusted first argument. This macro is responsible for emitting
16287 all of the code for a thunk function; output_function_prologue()
16288 and output_function_epilogue() are not invoked.
17167fd8 16289
a4f6c312
SS
16290 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16291 been extracted from it.) It might possibly be useful on some
16292 targets, but probably not.
17167fd8 16293
a4f6c312
SS
16294 If you do not define this macro, the target-independent code in the
16295 C++ frontend will generate a less efficient heavyweight thunk that
16296 calls FUNCTION instead of jumping to it. The generic approach does
16297 not support varargs. */
17167fd8 16298
3961e8fe 16299static void
f676971a
EC
16300rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16301 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16302 tree function)
17167fd8 16303{
5b71a4e7 16304 rtx this, insn, funexp;
17167fd8 16305
5b71a4e7 16306 reload_completed = 1;
fe3ad572 16307 epilogue_completed = 1;
56a7189a 16308
5b71a4e7 16309 /* Mark the end of the (empty) prologue. */
2e040219 16310 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16311
5b71a4e7
DE
16312 /* Find the "this" pointer. If the function returns a structure,
16313 the structure return pointer is in r3. */
61f71b34 16314 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16315 this = gen_rtx_REG (Pmode, 4);
56a7189a 16316 else
5b71a4e7 16317 this = gen_rtx_REG (Pmode, 3);
17167fd8 16318
5b71a4e7
DE
16319 /* Apply the constant offset, if required. */
16320 if (delta)
16321 {
16322 rtx delta_rtx = GEN_INT (delta);
16323 emit_insn (TARGET_32BIT
16324 ? gen_addsi3 (this, this, delta_rtx)
16325 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16326 }
16327
5b71a4e7
DE
16328 /* Apply the offset from the vtable, if required. */
16329 if (vcall_offset)
17167fd8 16330 {
5b71a4e7
DE
16331 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16332 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16333
5b71a4e7 16334 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16335 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16336 {
16337 emit_insn (TARGET_32BIT
16338 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16339 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16340 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16341 }
16342 else
16343 {
16344 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16345
16346 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16347 }
5b71a4e7
DE
16348 emit_insn (TARGET_32BIT
16349 ? gen_addsi3 (this, this, tmp)
16350 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16351 }
16352
5b71a4e7
DE
16353 /* Generate a tail call to the target function. */
16354 if (!TREE_USED (function))
16355 {
16356 assemble_external (function);
16357 TREE_USED (function) = 1;
16358 }
16359 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16360 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16361
16362#if TARGET_MACHO
ab82a49f 16363 if (MACHOPIC_INDIRECT)
5b71a4e7 16364 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16365#endif
5b71a4e7
DE
16366
16367 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16368 generate sibcall RTL explicitly. */
5b71a4e7
DE
16369 insn = emit_call_insn (
16370 gen_rtx_PARALLEL (VOIDmode,
16371 gen_rtvec (4,
16372 gen_rtx_CALL (VOIDmode,
16373 funexp, const0_rtx),
16374 gen_rtx_USE (VOIDmode, const0_rtx),
16375 gen_rtx_USE (VOIDmode,
16376 gen_rtx_REG (SImode,
1de43f85 16377 LR_REGNO)),
5b71a4e7
DE
16378 gen_rtx_RETURN (VOIDmode))));
16379 SIBLING_CALL_P (insn) = 1;
16380 emit_barrier ();
16381
16382 /* Run just enough of rest_of_compilation to get the insns emitted.
16383 There's not really enough bulk here to make other passes such as
16384 instruction scheduling worth while. Note that use_thunk calls
16385 assemble_start_function and assemble_end_function. */
16386 insn = get_insns ();
55e092c4 16387 insn_locators_alloc ();
5b71a4e7
DE
16388 shorten_branches (insn);
16389 final_start_function (insn, file, 1);
c9d691e9 16390 final (insn, file, 1);
5b71a4e7
DE
16391 final_end_function ();
16392
16393 reload_completed = 0;
fe3ad572 16394 epilogue_completed = 0;
9ebbca7d 16395}
9ebbca7d
GK
16396\f
16397/* A quick summary of the various types of 'constant-pool tables'
16398 under PowerPC:
16399
f676971a 16400 Target Flags Name One table per
9ebbca7d
GK
16401 AIX (none) AIX TOC object file
16402 AIX -mfull-toc AIX TOC object file
16403 AIX -mminimal-toc AIX minimal TOC translation unit
16404 SVR4/EABI (none) SVR4 SDATA object file
16405 SVR4/EABI -fpic SVR4 pic object file
16406 SVR4/EABI -fPIC SVR4 PIC translation unit
16407 SVR4/EABI -mrelocatable EABI TOC function
16408 SVR4/EABI -maix AIX TOC object file
f676971a 16409 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16410 AIX minimal TOC translation unit
16411
16412 Name Reg. Set by entries contains:
16413 made by addrs? fp? sum?
16414
16415 AIX TOC 2 crt0 as Y option option
16416 AIX minimal TOC 30 prolog gcc Y Y option
16417 SVR4 SDATA 13 crt0 gcc N Y N
16418 SVR4 pic 30 prolog ld Y not yet N
16419 SVR4 PIC 30 prolog gcc Y option option
16420 EABI TOC 30 prolog gcc Y option option
16421
16422*/
16423
9ebbca7d
GK
16424/* Hash functions for the hash table. */
16425
16426static unsigned
a2369ed3 16427rs6000_hash_constant (rtx k)
9ebbca7d 16428{
46b33600
RH
16429 enum rtx_code code = GET_CODE (k);
16430 enum machine_mode mode = GET_MODE (k);
16431 unsigned result = (code << 3) ^ mode;
16432 const char *format;
16433 int flen, fidx;
f676971a 16434
46b33600
RH
16435 format = GET_RTX_FORMAT (code);
16436 flen = strlen (format);
16437 fidx = 0;
9ebbca7d 16438
46b33600
RH
16439 switch (code)
16440 {
16441 case LABEL_REF:
16442 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16443
16444 case CONST_DOUBLE:
16445 if (mode != VOIDmode)
16446 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16447 flen = 2;
16448 break;
16449
16450 case CODE_LABEL:
16451 fidx = 3;
16452 break;
16453
16454 default:
16455 break;
16456 }
9ebbca7d
GK
16457
16458 for (; fidx < flen; fidx++)
16459 switch (format[fidx])
16460 {
16461 case 's':
16462 {
16463 unsigned i, len;
16464 const char *str = XSTR (k, fidx);
16465 len = strlen (str);
16466 result = result * 613 + len;
16467 for (i = 0; i < len; i++)
16468 result = result * 613 + (unsigned) str[i];
17167fd8
MM
16469 break;
16470 }
9ebbca7d
GK
16471 case 'u':
16472 case 'e':
16473 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
16474 break;
16475 case 'i':
16476 case 'n':
16477 result = result * 613 + (unsigned) XINT (k, fidx);
16478 break;
16479 case 'w':
16480 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
16481 result = result * 613 + (unsigned) XWINT (k, fidx);
16482 else
16483 {
16484 size_t i;
9390387d 16485 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
16486 result = result * 613 + (unsigned) (XWINT (k, fidx)
16487 >> CHAR_BIT * i);
16488 }
16489 break;
09501938
DE
16490 case '0':
16491 break;
9ebbca7d 16492 default:
37409796 16493 gcc_unreachable ();
9ebbca7d 16494 }
46b33600 16495
9ebbca7d
GK
16496 return result;
16497}
16498
16499static unsigned
a2369ed3 16500toc_hash_function (const void *hash_entry)
9ebbca7d 16501{
f676971a 16502 const struct toc_hash_struct *thc =
a9098fd0
GK
16503 (const struct toc_hash_struct *) hash_entry;
16504 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
16505}
16506
16507/* Compare H1 and H2 for equivalence. */
16508
16509static int
a2369ed3 16510toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
16511{
16512 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
16513 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
16514
a9098fd0
GK
16515 if (((const struct toc_hash_struct *) h1)->key_mode
16516 != ((const struct toc_hash_struct *) h2)->key_mode)
16517 return 0;
16518
5692c7bc 16519 return rtx_equal_p (r1, r2);
9ebbca7d
GK
16520}
16521
28e510bd
MM
16522/* These are the names given by the C++ front-end to vtables, and
16523 vtable-like objects. Ideally, this logic should not be here;
16524 instead, there should be some programmatic way of inquiring as
16525 to whether or not an object is a vtable. */
16526
16527#define VTABLE_NAME_P(NAME) \
9390387d 16528 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
16529 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
16530 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 16531 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 16532 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
16533
16534void
a2369ed3 16535rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
16536{
16537 /* Currently C++ toc references to vtables can be emitted before it
16538 is decided whether the vtable is public or private. If this is
16539 the case, then the linker will eventually complain that there is
f676971a 16540 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
16541 we emit the TOC reference to reference the symbol and not the
16542 section. */
16543 const char *name = XSTR (x, 0);
54ee9799 16544
f676971a 16545 if (VTABLE_NAME_P (name))
54ee9799
DE
16546 {
16547 RS6000_OUTPUT_BASENAME (file, name);
16548 }
16549 else
16550 assemble_name (file, name);
28e510bd
MM
16551}
16552
a4f6c312
SS
16553/* Output a TOC entry. We derive the entry name from what is being
16554 written. */
9878760c
RK
16555
16556void
a2369ed3 16557output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
16558{
16559 char buf[256];
3cce094d 16560 const char *name = buf;
ec940faa 16561 const char *real_name;
9878760c 16562 rtx base = x;
16fdeb48 16563 HOST_WIDE_INT offset = 0;
9878760c 16564
37409796 16565 gcc_assert (!TARGET_NO_TOC);
4697a36c 16566
9ebbca7d
GK
16567 /* When the linker won't eliminate them, don't output duplicate
16568 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
16569 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
16570 CODE_LABELs. */
16571 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
16572 {
16573 struct toc_hash_struct *h;
16574 void * * found;
f676971a 16575
17211ab5 16576 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 16577 time because GGC is not initialized at that point. */
17211ab5 16578 if (toc_hash_table == NULL)
f676971a 16579 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
16580 toc_hash_eq, NULL);
16581
9ebbca7d
GK
16582 h = ggc_alloc (sizeof (*h));
16583 h->key = x;
a9098fd0 16584 h->key_mode = mode;
9ebbca7d 16585 h->labelno = labelno;
f676971a 16586
9ebbca7d
GK
16587 found = htab_find_slot (toc_hash_table, h, 1);
16588 if (*found == NULL)
16589 *found = h;
f676971a 16590 else /* This is indeed a duplicate.
9ebbca7d
GK
16591 Set this label equal to that label. */
16592 {
16593 fputs ("\t.set ", file);
16594 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
16595 fprintf (file, "%d,", labelno);
16596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 16597 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
16598 found)->labelno));
16599 return;
16600 }
16601 }
16602
16603 /* If we're going to put a double constant in the TOC, make sure it's
16604 aligned properly when strict alignment is on. */
ff1720ed
RK
16605 if (GET_CODE (x) == CONST_DOUBLE
16606 && STRICT_ALIGNMENT
a9098fd0 16607 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
16608 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
16609 ASM_OUTPUT_ALIGN (file, 3);
16610 }
16611
4977bab6 16612 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 16613
37c37a57
RK
16614 /* Handle FP constants specially. Note that if we have a minimal
16615 TOC, things we put here aren't actually in the TOC, so we can allow
16616 FP constants. */
00b79d54
BE
16617 if (GET_CODE (x) == CONST_DOUBLE &&
16618 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
16619 {
16620 REAL_VALUE_TYPE rv;
16621 long k[4];
16622
16623 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16624 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16625 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16626 else
16627 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
16628
16629 if (TARGET_64BIT)
16630 {
16631 if (TARGET_MINIMAL_TOC)
16632 fputs (DOUBLE_INT_ASM_OP, file);
16633 else
16634 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16635 k[0] & 0xffffffff, k[1] & 0xffffffff,
16636 k[2] & 0xffffffff, k[3] & 0xffffffff);
16637 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16638 k[0] & 0xffffffff, k[1] & 0xffffffff,
16639 k[2] & 0xffffffff, k[3] & 0xffffffff);
16640 return;
16641 }
16642 else
16643 {
16644 if (TARGET_MINIMAL_TOC)
16645 fputs ("\t.long ", file);
16646 else
16647 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16648 k[0] & 0xffffffff, k[1] & 0xffffffff,
16649 k[2] & 0xffffffff, k[3] & 0xffffffff);
16650 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16651 k[0] & 0xffffffff, k[1] & 0xffffffff,
16652 k[2] & 0xffffffff, k[3] & 0xffffffff);
16653 return;
16654 }
16655 }
00b79d54
BE
16656 else if (GET_CODE (x) == CONST_DOUBLE &&
16657 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 16658 {
042259f2
DE
16659 REAL_VALUE_TYPE rv;
16660 long k[2];
0adc764e 16661
042259f2 16662 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16663
16664 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16665 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16666 else
16667 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 16668
13ded975
DE
16669 if (TARGET_64BIT)
16670 {
16671 if (TARGET_MINIMAL_TOC)
2bfcf297 16672 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16673 else
2f0552b6
AM
16674 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16675 k[0] & 0xffffffff, k[1] & 0xffffffff);
16676 fprintf (file, "0x%lx%08lx\n",
16677 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16678 return;
16679 }
1875cc88 16680 else
13ded975
DE
16681 {
16682 if (TARGET_MINIMAL_TOC)
2bfcf297 16683 fputs ("\t.long ", file);
13ded975 16684 else
2f0552b6
AM
16685 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16686 k[0] & 0xffffffff, k[1] & 0xffffffff);
16687 fprintf (file, "0x%lx,0x%lx\n",
16688 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16689 return;
16690 }
9878760c 16691 }
00b79d54
BE
16692 else if (GET_CODE (x) == CONST_DOUBLE &&
16693 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 16694 {
042259f2
DE
16695 REAL_VALUE_TYPE rv;
16696 long l;
9878760c 16697
042259f2 16698 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16699 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16700 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16701 else
16702 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 16703
31bfaa0b
DE
16704 if (TARGET_64BIT)
16705 {
16706 if (TARGET_MINIMAL_TOC)
2bfcf297 16707 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 16708 else
2f0552b6
AM
16709 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16710 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
16711 return;
16712 }
042259f2 16713 else
31bfaa0b
DE
16714 {
16715 if (TARGET_MINIMAL_TOC)
2bfcf297 16716 fputs ("\t.long ", file);
31bfaa0b 16717 else
2f0552b6
AM
16718 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16719 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
16720 return;
16721 }
042259f2 16722 }
f176e826 16723 else if (GET_MODE (x) == VOIDmode
a9098fd0 16724 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 16725 {
e2c953b6 16726 unsigned HOST_WIDE_INT low;
042259f2
DE
16727 HOST_WIDE_INT high;
16728
16729 if (GET_CODE (x) == CONST_DOUBLE)
16730 {
16731 low = CONST_DOUBLE_LOW (x);
16732 high = CONST_DOUBLE_HIGH (x);
16733 }
16734 else
16735#if HOST_BITS_PER_WIDE_INT == 32
16736 {
16737 low = INTVAL (x);
0858c623 16738 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
16739 }
16740#else
16741 {
c4ad648e
AM
16742 low = INTVAL (x) & 0xffffffff;
16743 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
16744 }
16745#endif
9878760c 16746
a9098fd0
GK
16747 /* TOC entries are always Pmode-sized, but since this
16748 is a bigendian machine then if we're putting smaller
16749 integer constants in the TOC we have to pad them.
16750 (This is still a win over putting the constants in
16751 a separate constant pool, because then we'd have
02a4ec28
FS
16752 to have both a TOC entry _and_ the actual constant.)
16753
16754 For a 32-bit target, CONST_INT values are loaded and shifted
16755 entirely within `low' and can be stored in one TOC entry. */
16756
37409796
NS
16757 /* It would be easy to make this work, but it doesn't now. */
16758 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
16759
16760 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
16761 {
16762#if HOST_BITS_PER_WIDE_INT == 32
16763 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16764 POINTER_SIZE, &low, &high, 0);
16765#else
16766 low |= high << 32;
16767 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16768 high = (HOST_WIDE_INT) low >> 32;
16769 low &= 0xffffffff;
16770#endif
16771 }
a9098fd0 16772
13ded975
DE
16773 if (TARGET_64BIT)
16774 {
16775 if (TARGET_MINIMAL_TOC)
2bfcf297 16776 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16777 else
2f0552b6
AM
16778 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16779 (long) high & 0xffffffff, (long) low & 0xffffffff);
16780 fprintf (file, "0x%lx%08lx\n",
16781 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
16782 return;
16783 }
1875cc88 16784 else
13ded975 16785 {
02a4ec28
FS
16786 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16787 {
16788 if (TARGET_MINIMAL_TOC)
2bfcf297 16789 fputs ("\t.long ", file);
02a4ec28 16790 else
2bfcf297 16791 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
16792 (long) high & 0xffffffff, (long) low & 0xffffffff);
16793 fprintf (file, "0x%lx,0x%lx\n",
16794 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 16795 }
13ded975 16796 else
02a4ec28
FS
16797 {
16798 if (TARGET_MINIMAL_TOC)
2bfcf297 16799 fputs ("\t.long ", file);
02a4ec28 16800 else
2f0552b6
AM
16801 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16802 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 16803 }
13ded975
DE
16804 return;
16805 }
9878760c
RK
16806 }
16807
16808 if (GET_CODE (x) == CONST)
16809 {
37409796 16810 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 16811
9878760c
RK
16812 base = XEXP (XEXP (x, 0), 0);
16813 offset = INTVAL (XEXP (XEXP (x, 0), 1));
16814 }
f676971a 16815
37409796
NS
16816 switch (GET_CODE (base))
16817 {
16818 case SYMBOL_REF:
16819 name = XSTR (base, 0);
16820 break;
16821
16822 case LABEL_REF:
16823 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16824 CODE_LABEL_NUMBER (XEXP (base, 0)));
16825 break;
16826
16827 case CODE_LABEL:
16828 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16829 break;
16830
16831 default:
16832 gcc_unreachable ();
16833 }
9878760c 16834
772c5265 16835 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 16836 if (TARGET_MINIMAL_TOC)
2bfcf297 16837 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
16838 else
16839 {
b6c9286a 16840 fprintf (file, "\t.tc %s", real_name);
9878760c 16841
1875cc88 16842 if (offset < 0)
16fdeb48 16843 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 16844 else if (offset)
16fdeb48 16845 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 16846
19d2d16f 16847 fputs ("[TC],", file);
1875cc88 16848 }
581bc4de
MM
16849
16850 /* Currently C++ toc references to vtables can be emitted before it
16851 is decided whether the vtable is public or private. If this is
16852 the case, then the linker will eventually complain that there is
16853 a TOC reference to an unknown section. Thus, for vtables only,
16854 we emit the TOC reference to reference the symbol and not the
16855 section. */
28e510bd 16856 if (VTABLE_NAME_P (name))
581bc4de 16857 {
54ee9799 16858 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 16859 if (offset < 0)
16fdeb48 16860 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 16861 else if (offset > 0)
16fdeb48 16862 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
16863 }
16864 else
16865 output_addr_const (file, x);
19d2d16f 16866 putc ('\n', file);
9878760c
RK
16867}
16868\f
16869/* Output an assembler pseudo-op to write an ASCII string of N characters
16870 starting at P to FILE.
16871
16872 On the RS/6000, we have to do this using the .byte operation and
16873 write out special characters outside the quoted string.
16874 Also, the assembler is broken; very long strings are truncated,
a4f6c312 16875 so we must artificially break them up early. */
9878760c
RK
16876
16877void
a2369ed3 16878output_ascii (FILE *file, const char *p, int n)
9878760c
RK
16879{
16880 char c;
16881 int i, count_string;
d330fd93
KG
16882 const char *for_string = "\t.byte \"";
16883 const char *for_decimal = "\t.byte ";
16884 const char *to_close = NULL;
9878760c
RK
16885
16886 count_string = 0;
16887 for (i = 0; i < n; i++)
16888 {
16889 c = *p++;
16890 if (c >= ' ' && c < 0177)
16891 {
16892 if (for_string)
16893 fputs (for_string, file);
16894 putc (c, file);
16895
16896 /* Write two quotes to get one. */
16897 if (c == '"')
16898 {
16899 putc (c, file);
16900 ++count_string;
16901 }
16902
16903 for_string = NULL;
16904 for_decimal = "\"\n\t.byte ";
16905 to_close = "\"\n";
16906 ++count_string;
16907
16908 if (count_string >= 512)
16909 {
16910 fputs (to_close, file);
16911
16912 for_string = "\t.byte \"";
16913 for_decimal = "\t.byte ";
16914 to_close = NULL;
16915 count_string = 0;
16916 }
16917 }
16918 else
16919 {
16920 if (for_decimal)
16921 fputs (for_decimal, file);
16922 fprintf (file, "%d", c);
16923
16924 for_string = "\n\t.byte \"";
16925 for_decimal = ", ";
16926 to_close = "\n";
16927 count_string = 0;
16928 }
16929 }
16930
16931 /* Now close the string if we have written one. Then end the line. */
16932 if (to_close)
9ebbca7d 16933 fputs (to_close, file);
9878760c
RK
16934}
16935\f
16936/* Generate a unique section name for FILENAME for a section type
16937 represented by SECTION_DESC. Output goes into BUF.
16938
16939 SECTION_DESC can be any string, as long as it is different for each
16940 possible section type.
16941
16942 We name the section in the same manner as xlc. The name begins with an
16943 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
16944 names) with the last period replaced by the string SECTION_DESC. If
16945 FILENAME does not contain a period, SECTION_DESC is appended to the end of
16946 the name. */
9878760c
RK
16947
16948void
f676971a 16949rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 16950 const char *section_desc)
9878760c 16951{
9ebbca7d 16952 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
16953 char *p;
16954 int len;
9878760c
RK
16955
16956 after_last_slash = filename;
16957 for (q = filename; *q; q++)
11e5fe42
RK
16958 {
16959 if (*q == '/')
16960 after_last_slash = q + 1;
16961 else if (*q == '.')
16962 last_period = q;
16963 }
9878760c 16964
11e5fe42 16965 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 16966 *buf = (char *) xmalloc (len);
9878760c
RK
16967
16968 p = *buf;
16969 *p++ = '_';
16970
16971 for (q = after_last_slash; *q; q++)
16972 {
11e5fe42 16973 if (q == last_period)
c4ad648e 16974 {
9878760c
RK
16975 strcpy (p, section_desc);
16976 p += strlen (section_desc);
e3981aab 16977 break;
c4ad648e 16978 }
9878760c 16979
e9a780ec 16980 else if (ISALNUM (*q))
c4ad648e 16981 *p++ = *q;
9878760c
RK
16982 }
16983
11e5fe42 16984 if (last_period == 0)
9878760c
RK
16985 strcpy (p, section_desc);
16986 else
16987 *p = '\0';
16988}
e165f3f0 16989\f
a4f6c312 16990/* Emit profile function. */
411707f4 16991
411707f4 16992void
a2369ed3 16993output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16994{
858081ad
AH
16995 /* Non-standard profiling for kernels, which just saves LR then calls
16996 _mcount without worrying about arg saves. The idea is to change
16997 the function prologue as little as possible as it isn't easy to
16998 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16999 if (TARGET_PROFILE_KERNEL)
17000 return;
17001
8480e480
CC
17002 if (DEFAULT_ABI == ABI_AIX)
17003 {
9739c90c
JJ
17004#ifndef NO_PROFILE_COUNTERS
17005# define NO_PROFILE_COUNTERS 0
17006#endif
f676971a 17007 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
17008 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
17009 else
17010 {
17011 char buf[30];
17012 const char *label_name;
17013 rtx fun;
411707f4 17014
9739c90c
JJ
17015 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
17016 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
17017 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 17018
9739c90c
JJ
17019 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
17020 fun, Pmode);
17021 }
8480e480 17022 }
ee890fe2
SS
17023 else if (DEFAULT_ABI == ABI_DARWIN)
17024 {
d5fa86ba 17025 const char *mcount_name = RS6000_MCOUNT;
1de43f85 17026 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
17027
17028 /* Be conservative and always set this, at least for now. */
17029 current_function_uses_pic_offset_table = 1;
17030
17031#if TARGET_MACHO
17032 /* For PIC code, set up a stub and collect the caller's address
17033 from r0, which is where the prologue puts it. */
11abc112
MM
17034 if (MACHOPIC_INDIRECT
17035 && current_function_uses_pic_offset_table)
17036 caller_addr_regno = 0;
ee890fe2
SS
17037#endif
17038 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
17039 0, VOIDmode, 1,
17040 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
17041 }
411707f4
CC
17042}
17043
a4f6c312 17044/* Write function profiler code. */
e165f3f0
RK
17045
17046void
a2369ed3 17047output_function_profiler (FILE *file, int labelno)
e165f3f0 17048{
3daf36a4 17049 char buf[100];
e165f3f0 17050
38c1f2d7 17051 switch (DEFAULT_ABI)
3daf36a4 17052 {
38c1f2d7 17053 default:
37409796 17054 gcc_unreachable ();
38c1f2d7
MM
17055
17056 case ABI_V4:
09eeeacb
AM
17057 if (!TARGET_32BIT)
17058 {
d4ee4d25 17059 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17060 return;
17061 }
ffcfcb5f 17062 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17063 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17064 if (NO_PROFILE_COUNTERS)
17065 {
17066 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17067 reg_names[0], reg_names[1]);
17068 }
17069 else if (TARGET_SECURE_PLT && flag_pic)
17070 {
17071 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17072 reg_names[0], reg_names[1]);
17073 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17074 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17075 reg_names[12], reg_names[12]);
17076 assemble_name (file, buf);
17077 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17078 assemble_name (file, buf);
17079 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17080 }
17081 else if (flag_pic == 1)
38c1f2d7 17082 {
dfdfa60f 17083 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17084 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17085 reg_names[0], reg_names[1]);
17167fd8 17086 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17087 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17088 assemble_name (file, buf);
17167fd8 17089 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17090 }
9ebbca7d 17091 else if (flag_pic > 1)
38c1f2d7 17092 {
71625f3d
AM
17093 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17094 reg_names[0], reg_names[1]);
9ebbca7d 17095 /* Now, we need to get the address of the label. */
71625f3d 17096 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17097 assemble_name (file, buf);
9ebbca7d
GK
17098 fputs ("-.\n1:", file);
17099 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17100 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17101 reg_names[0], reg_names[11]);
17102 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17103 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17104 }
38c1f2d7
MM
17105 else
17106 {
17167fd8 17107 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17108 assemble_name (file, buf);
dfdfa60f 17109 fputs ("@ha\n", file);
71625f3d
AM
17110 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17111 reg_names[0], reg_names[1]);
a260abc9 17112 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17113 assemble_name (file, buf);
17167fd8 17114 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17115 }
17116
50d440bc 17117 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17118 fprintf (file, "\tbl %s%s\n",
17119 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17120 break;
17121
17122 case ABI_AIX:
ee890fe2 17123 case ABI_DARWIN:
ffcfcb5f
AM
17124 if (!TARGET_PROFILE_KERNEL)
17125 {
a3c9585f 17126 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17127 }
17128 else
17129 {
37409796 17130 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17131
17132 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17133 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17134
6de9cd9a 17135 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17136 {
17137 asm_fprintf (file, "\tstd %s,24(%s)\n",
17138 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17139 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17140 asm_fprintf (file, "\tld %s,24(%s)\n",
17141 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17142 }
17143 else
17144 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17145 }
38c1f2d7
MM
17146 break;
17147 }
e165f3f0 17148}
a251ffd0 17149
b54cf83a 17150\f
44cd321e
PS
17151
17152/* The following variable value is the last issued insn. */
17153
17154static rtx last_scheduled_insn;
17155
17156/* The following variable helps to balance issuing of load and
17157 store instructions */
17158
17159static int load_store_pendulum;
17160
b54cf83a
DE
17161/* Power4 load update and store update instructions are cracked into a
17162 load or store and an integer insn which are executed in the same cycle.
17163 Branches have their own dispatch slot which does not count against the
17164 GCC issue rate, but it changes the program flow so there are no other
17165 instructions to issue in this cycle. */
17166
17167static int
f676971a
EC
17168rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17169 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17170 rtx insn, int more)
b54cf83a 17171{
44cd321e 17172 last_scheduled_insn = insn;
b54cf83a
DE
17173 if (GET_CODE (PATTERN (insn)) == USE
17174 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17175 {
17176 cached_can_issue_more = more;
17177 return cached_can_issue_more;
17178 }
17179
17180 if (insn_terminates_group_p (insn, current_group))
17181 {
17182 cached_can_issue_more = 0;
17183 return cached_can_issue_more;
17184 }
b54cf83a 17185
d296e02e
AP
17186 /* If no reservation, but reach here */
17187 if (recog_memoized (insn) < 0)
17188 return more;
17189
ec507f2d 17190 if (rs6000_sched_groups)
b54cf83a 17191 {
cbe26ab8 17192 if (is_microcoded_insn (insn))
44cd321e 17193 cached_can_issue_more = 0;
cbe26ab8 17194 else if (is_cracked_insn (insn))
44cd321e
PS
17195 cached_can_issue_more = more > 2 ? more - 2 : 0;
17196 else
17197 cached_can_issue_more = more - 1;
17198
17199 return cached_can_issue_more;
b54cf83a 17200 }
165b263e 17201
d296e02e
AP
17202 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17203 return 0;
17204
44cd321e
PS
17205 cached_can_issue_more = more - 1;
17206 return cached_can_issue_more;
b54cf83a
DE
17207}
17208
a251ffd0
TG
17209/* Adjust the cost of a scheduling dependency. Return the new cost of
17210 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
17211
c237e94a 17212static int
0a4f0294 17213rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17214{
44cd321e 17215 enum attr_type attr_type;
a251ffd0 17216
44cd321e 17217 if (! recog_memoized (insn))
a251ffd0
TG
17218 return 0;
17219
44cd321e 17220 switch (REG_NOTE_KIND (link))
a251ffd0 17221 {
44cd321e
PS
17222 case REG_DEP_TRUE:
17223 {
17224 /* Data dependency; DEP_INSN writes a register that INSN reads
17225 some cycles later. */
17226
17227 /* Separate a load from a narrower, dependent store. */
17228 if (rs6000_sched_groups
17229 && GET_CODE (PATTERN (insn)) == SET
17230 && GET_CODE (PATTERN (dep_insn)) == SET
17231 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17232 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17233 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17234 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17235 return cost + 14;
17236
17237 attr_type = get_attr_type (insn);
17238
17239 switch (attr_type)
17240 {
17241 case TYPE_JMPREG:
17242 /* Tell the first scheduling pass about the latency between
17243 a mtctr and bctr (and mtlr and br/blr). The first
17244 scheduling pass will not know about this latency since
17245 the mtctr instruction, which has the latency associated
17246 to it, will be generated by reload. */
17247 return TARGET_POWER ? 5 : 4;
17248 case TYPE_BRANCH:
17249 /* Leave some extra cycles between a compare and its
17250 dependent branch, to inhibit expensive mispredicts. */
17251 if ((rs6000_cpu_attr == CPU_PPC603
17252 || rs6000_cpu_attr == CPU_PPC604
17253 || rs6000_cpu_attr == CPU_PPC604E
17254 || rs6000_cpu_attr == CPU_PPC620
17255 || rs6000_cpu_attr == CPU_PPC630
17256 || rs6000_cpu_attr == CPU_PPC750
17257 || rs6000_cpu_attr == CPU_PPC7400
17258 || rs6000_cpu_attr == CPU_PPC7450
17259 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17260 || rs6000_cpu_attr == CPU_POWER5
17261 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17262 && recog_memoized (dep_insn)
17263 && (INSN_CODE (dep_insn) >= 0))
982afe02 17264
44cd321e
PS
17265 switch (get_attr_type (dep_insn))
17266 {
17267 case TYPE_CMP:
17268 case TYPE_COMPARE:
17269 case TYPE_DELAYED_COMPARE:
17270 case TYPE_IMUL_COMPARE:
17271 case TYPE_LMUL_COMPARE:
17272 case TYPE_FPCOMPARE:
17273 case TYPE_CR_LOGICAL:
17274 case TYPE_DELAYED_CR:
17275 return cost + 2;
17276 default:
17277 break;
17278 }
17279 break;
17280
17281 case TYPE_STORE:
17282 case TYPE_STORE_U:
17283 case TYPE_STORE_UX:
17284 case TYPE_FPSTORE:
17285 case TYPE_FPSTORE_U:
17286 case TYPE_FPSTORE_UX:
17287 if ((rs6000_cpu == PROCESSOR_POWER6)
17288 && recog_memoized (dep_insn)
17289 && (INSN_CODE (dep_insn) >= 0))
17290 {
17291
17292 if (GET_CODE (PATTERN (insn)) != SET)
17293 /* If this happens, we have to extend this to schedule
17294 optimally. Return default for now. */
17295 return cost;
17296
17297 /* Adjust the cost for the case where the value written
17298 by a fixed point operation is used as the address
17299 gen value on a store. */
17300 switch (get_attr_type (dep_insn))
17301 {
17302 case TYPE_LOAD:
17303 case TYPE_LOAD_U:
17304 case TYPE_LOAD_UX:
17305 case TYPE_CNTLZ:
17306 {
17307 if (! store_data_bypass_p (dep_insn, insn))
17308 return 4;
17309 break;
17310 }
17311 case TYPE_LOAD_EXT:
17312 case TYPE_LOAD_EXT_U:
17313 case TYPE_LOAD_EXT_UX:
17314 case TYPE_VAR_SHIFT_ROTATE:
17315 case TYPE_VAR_DELAYED_COMPARE:
17316 {
17317 if (! store_data_bypass_p (dep_insn, insn))
17318 return 6;
17319 break;
17320 }
17321 case TYPE_INTEGER:
17322 case TYPE_COMPARE:
17323 case TYPE_FAST_COMPARE:
17324 case TYPE_EXTS:
17325 case TYPE_SHIFT:
17326 case TYPE_INSERT_WORD:
17327 case TYPE_INSERT_DWORD:
17328 case TYPE_FPLOAD_U:
17329 case TYPE_FPLOAD_UX:
17330 case TYPE_STORE_U:
17331 case TYPE_STORE_UX:
17332 case TYPE_FPSTORE_U:
17333 case TYPE_FPSTORE_UX:
17334 {
17335 if (! store_data_bypass_p (dep_insn, insn))
17336 return 3;
17337 break;
17338 }
17339 case TYPE_IMUL:
17340 case TYPE_IMUL2:
17341 case TYPE_IMUL3:
17342 case TYPE_LMUL:
17343 case TYPE_IMUL_COMPARE:
17344 case TYPE_LMUL_COMPARE:
17345 {
17346 if (! store_data_bypass_p (dep_insn, insn))
17347 return 17;
17348 break;
17349 }
17350 case TYPE_IDIV:
17351 {
17352 if (! store_data_bypass_p (dep_insn, insn))
17353 return 45;
17354 break;
17355 }
17356 case TYPE_LDIV:
17357 {
17358 if (! store_data_bypass_p (dep_insn, insn))
17359 return 57;
17360 break;
17361 }
17362 default:
17363 break;
17364 }
17365 }
17366 break;
17367
17368 case TYPE_LOAD:
17369 case TYPE_LOAD_U:
17370 case TYPE_LOAD_UX:
17371 case TYPE_LOAD_EXT:
17372 case TYPE_LOAD_EXT_U:
17373 case TYPE_LOAD_EXT_UX:
17374 if ((rs6000_cpu == PROCESSOR_POWER6)
17375 && recog_memoized (dep_insn)
17376 && (INSN_CODE (dep_insn) >= 0))
17377 {
17378
17379 /* Adjust the cost for the case where the value written
17380 by a fixed point instruction is used within the address
17381 gen portion of a subsequent load(u)(x) */
17382 switch (get_attr_type (dep_insn))
17383 {
17384 case TYPE_LOAD:
17385 case TYPE_LOAD_U:
17386 case TYPE_LOAD_UX:
17387 case TYPE_CNTLZ:
17388 {
17389 if (set_to_load_agen (dep_insn, insn))
17390 return 4;
17391 break;
17392 }
17393 case TYPE_LOAD_EXT:
17394 case TYPE_LOAD_EXT_U:
17395 case TYPE_LOAD_EXT_UX:
17396 case TYPE_VAR_SHIFT_ROTATE:
17397 case TYPE_VAR_DELAYED_COMPARE:
17398 {
17399 if (set_to_load_agen (dep_insn, insn))
17400 return 6;
17401 break;
17402 }
17403 case TYPE_INTEGER:
17404 case TYPE_COMPARE:
17405 case TYPE_FAST_COMPARE:
17406 case TYPE_EXTS:
17407 case TYPE_SHIFT:
17408 case TYPE_INSERT_WORD:
17409 case TYPE_INSERT_DWORD:
17410 case TYPE_FPLOAD_U:
17411 case TYPE_FPLOAD_UX:
17412 case TYPE_STORE_U:
17413 case TYPE_STORE_UX:
17414 case TYPE_FPSTORE_U:
17415 case TYPE_FPSTORE_UX:
17416 {
17417 if (set_to_load_agen (dep_insn, insn))
17418 return 3;
17419 break;
17420 }
17421 case TYPE_IMUL:
17422 case TYPE_IMUL2:
17423 case TYPE_IMUL3:
17424 case TYPE_LMUL:
17425 case TYPE_IMUL_COMPARE:
17426 case TYPE_LMUL_COMPARE:
17427 {
17428 if (set_to_load_agen (dep_insn, insn))
17429 return 17;
17430 break;
17431 }
17432 case TYPE_IDIV:
17433 {
17434 if (set_to_load_agen (dep_insn, insn))
17435 return 45;
17436 break;
17437 }
17438 case TYPE_LDIV:
17439 {
17440 if (set_to_load_agen (dep_insn, insn))
17441 return 57;
17442 break;
17443 }
17444 default:
17445 break;
17446 }
17447 }
17448 break;
17449
17450 case TYPE_FPLOAD:
17451 if ((rs6000_cpu == PROCESSOR_POWER6)
17452 && recog_memoized (dep_insn)
17453 && (INSN_CODE (dep_insn) >= 0)
17454 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17455 return 2;
17456
17457 default:
17458 break;
17459 }
c9dbf840 17460
a251ffd0 17461 /* Fall out to return default cost. */
44cd321e
PS
17462 }
17463 break;
17464
17465 case REG_DEP_OUTPUT:
17466 /* Output dependency; DEP_INSN writes a register that INSN writes some
17467 cycles later. */
17468 if ((rs6000_cpu == PROCESSOR_POWER6)
17469 && recog_memoized (dep_insn)
17470 && (INSN_CODE (dep_insn) >= 0))
17471 {
17472 attr_type = get_attr_type (insn);
17473
17474 switch (attr_type)
17475 {
17476 case TYPE_FP:
17477 if (get_attr_type (dep_insn) == TYPE_FP)
17478 return 1;
17479 break;
17480 case TYPE_FPLOAD:
17481 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
17482 return 2;
17483 break;
17484 default:
17485 break;
17486 }
17487 }
17488 case REG_DEP_ANTI:
17489 /* Anti dependency; DEP_INSN reads a register that INSN writes some
17490 cycles later. */
17491 return 0;
17492
17493 default:
17494 gcc_unreachable ();
a251ffd0
TG
17495 }
17496
17497 return cost;
17498}
b6c9286a 17499
cbe26ab8 17500/* The function returns a true if INSN is microcoded.
839a4992 17501 Return false otherwise. */
cbe26ab8
DN
17502
17503static bool
17504is_microcoded_insn (rtx insn)
17505{
17506 if (!insn || !INSN_P (insn)
17507 || GET_CODE (PATTERN (insn)) == USE
17508 || GET_CODE (PATTERN (insn)) == CLOBBER)
17509 return false;
17510
d296e02e
AP
17511 if (rs6000_cpu_attr == CPU_CELL)
17512 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
17513
ec507f2d 17514 if (rs6000_sched_groups)
cbe26ab8
DN
17515 {
17516 enum attr_type type = get_attr_type (insn);
17517 if (type == TYPE_LOAD_EXT_U
17518 || type == TYPE_LOAD_EXT_UX
17519 || type == TYPE_LOAD_UX
17520 || type == TYPE_STORE_UX
17521 || type == TYPE_MFCR)
c4ad648e 17522 return true;
cbe26ab8
DN
17523 }
17524
17525 return false;
17526}
17527
cbe26ab8
DN
17528/* The function returns true if INSN is cracked into 2 instructions
17529 by the processor (and therefore occupies 2 issue slots). */
17530
17531static bool
17532is_cracked_insn (rtx insn)
17533{
17534 if (!insn || !INSN_P (insn)
17535 || GET_CODE (PATTERN (insn)) == USE
17536 || GET_CODE (PATTERN (insn)) == CLOBBER)
17537 return false;
17538
ec507f2d 17539 if (rs6000_sched_groups)
cbe26ab8
DN
17540 {
17541 enum attr_type type = get_attr_type (insn);
17542 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
17543 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
17544 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
17545 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
17546 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
17547 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
17548 || type == TYPE_IDIV || type == TYPE_LDIV
17549 || type == TYPE_INSERT_WORD)
17550 return true;
cbe26ab8
DN
17551 }
17552
17553 return false;
17554}
17555
17556/* The function returns true if INSN can be issued only from
a3c9585f 17557 the branch slot. */
cbe26ab8
DN
17558
17559static bool
17560is_branch_slot_insn (rtx insn)
17561{
17562 if (!insn || !INSN_P (insn)
17563 || GET_CODE (PATTERN (insn)) == USE
17564 || GET_CODE (PATTERN (insn)) == CLOBBER)
17565 return false;
17566
ec507f2d 17567 if (rs6000_sched_groups)
cbe26ab8
DN
17568 {
17569 enum attr_type type = get_attr_type (insn);
17570 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 17571 return true;
cbe26ab8
DN
17572 return false;
17573 }
17574
17575 return false;
17576}
79ae11c4 17577
44cd321e
PS
17578/* The function returns true if out_inst sets a value that is
17579 used in the address generation computation of in_insn */
17580static bool
17581set_to_load_agen (rtx out_insn, rtx in_insn)
17582{
17583 rtx out_set, in_set;
17584
17585 /* For performance reasons, only handle the simple case where
17586 both loads are a single_set. */
17587 out_set = single_set (out_insn);
17588 if (out_set)
17589 {
17590 in_set = single_set (in_insn);
17591 if (in_set)
17592 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
17593 }
17594
17595 return false;
17596}
17597
17598/* The function returns true if the target storage location of
17599 out_insn is adjacent to the target storage location of in_insn */
17600/* Return 1 if memory locations are adjacent. */
17601
17602static bool
17603adjacent_mem_locations (rtx insn1, rtx insn2)
17604{
17605
e3a0e200
PB
17606 rtx a = get_store_dest (PATTERN (insn1));
17607 rtx b = get_store_dest (PATTERN (insn2));
17608
44cd321e
PS
17609 if ((GET_CODE (XEXP (a, 0)) == REG
17610 || (GET_CODE (XEXP (a, 0)) == PLUS
17611 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
17612 && (GET_CODE (XEXP (b, 0)) == REG
17613 || (GET_CODE (XEXP (b, 0)) == PLUS
17614 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
17615 {
17616 HOST_WIDE_INT val0 = 0, val1 = 0;
17617 rtx reg0, reg1;
17618 int val_diff;
17619
17620 if (GET_CODE (XEXP (a, 0)) == PLUS)
17621 {
17622 reg0 = XEXP (XEXP (a, 0), 0);
17623 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
17624 }
17625 else
17626 reg0 = XEXP (a, 0);
17627
17628 if (GET_CODE (XEXP (b, 0)) == PLUS)
17629 {
17630 reg1 = XEXP (XEXP (b, 0), 0);
17631 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
17632 }
17633 else
17634 reg1 = XEXP (b, 0);
17635
17636 val_diff = val1 - val0;
17637
17638 return ((REGNO (reg0) == REGNO (reg1))
17639 && (val_diff == INTVAL (MEM_SIZE (a))
17640 || val_diff == -INTVAL (MEM_SIZE (b))));
17641 }
17642
17643 return false;
17644}
17645
a4f6c312 17646/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
17647 priority INSN_PRIORITY (INSN). Increase the priority to execute the
17648 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
17649 define this macro if you do not need to adjust the scheduling
17650 priorities of insns. */
bef84347 17651
c237e94a 17652static int
a2369ed3 17653rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 17654{
a4f6c312
SS
17655 /* On machines (like the 750) which have asymmetric integer units,
17656 where one integer unit can do multiply and divides and the other
17657 can't, reduce the priority of multiply/divide so it is scheduled
17658 before other integer operations. */
bef84347
VM
17659
17660#if 0
2c3c49de 17661 if (! INSN_P (insn))
bef84347
VM
17662 return priority;
17663
17664 if (GET_CODE (PATTERN (insn)) == USE)
17665 return priority;
17666
17667 switch (rs6000_cpu_attr) {
17668 case CPU_PPC750:
17669 switch (get_attr_type (insn))
17670 {
17671 default:
17672 break;
17673
17674 case TYPE_IMUL:
17675 case TYPE_IDIV:
3cb999d8
DE
17676 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
17677 priority, priority);
bef84347
VM
17678 if (priority >= 0 && priority < 0x01000000)
17679 priority >>= 3;
17680 break;
17681 }
17682 }
17683#endif
17684
44cd321e 17685 if (insn_must_be_first_in_group (insn)
79ae11c4 17686 && reload_completed
f676971a 17687 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
17688 && rs6000_sched_restricted_insns_priority)
17689 {
17690
c4ad648e
AM
17691 /* Prioritize insns that can be dispatched only in the first
17692 dispatch slot. */
79ae11c4 17693 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
17694 /* Attach highest priority to insn. This means that in
17695 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 17696 precede 'priority' (critical path) considerations. */
f676971a 17697 return current_sched_info->sched_max_insns_priority;
79ae11c4 17698 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 17699 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
17700 haifa-sched.c:ready_sort(), only 'priority' (critical path)
17701 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
17702 return (priority + 1);
17703 }
79ae11c4 17704
44cd321e
PS
17705 if (rs6000_cpu == PROCESSOR_POWER6
17706 && ((load_store_pendulum == -2 && is_load_insn (insn))
17707 || (load_store_pendulum == 2 && is_store_insn (insn))))
17708 /* Attach highest priority to insn if the scheduler has just issued two
17709 stores and this instruction is a load, or two loads and this instruction
17710 is a store. Power6 wants loads and stores scheduled alternately
17711 when possible */
17712 return current_sched_info->sched_max_insns_priority;
17713
bef84347
VM
17714 return priority;
17715}
17716
d296e02e
AP
17717/* Return true if the instruction is nonpipelined on the Cell. */
17718static bool
17719is_nonpipeline_insn (rtx insn)
17720{
17721 enum attr_type type;
17722 if (!insn || !INSN_P (insn)
17723 || GET_CODE (PATTERN (insn)) == USE
17724 || GET_CODE (PATTERN (insn)) == CLOBBER)
17725 return false;
17726
17727 type = get_attr_type (insn);
17728 if (type == TYPE_IMUL
17729 || type == TYPE_IMUL2
17730 || type == TYPE_IMUL3
17731 || type == TYPE_LMUL
17732 || type == TYPE_IDIV
17733 || type == TYPE_LDIV
17734 || type == TYPE_SDIV
17735 || type == TYPE_DDIV
17736 || type == TYPE_SSQRT
17737 || type == TYPE_DSQRT
17738 || type == TYPE_MFCR
17739 || type == TYPE_MFCRF
17740 || type == TYPE_MFJMPR)
17741 {
17742 return true;
17743 }
17744 return false;
17745}
17746
17747
a4f6c312
SS
17748/* Return how many instructions the machine can issue per cycle. */
17749
c237e94a 17750static int
863d938c 17751rs6000_issue_rate (void)
b6c9286a 17752{
3317bab1
DE
17753 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
17754 if (!reload_completed)
17755 return 1;
17756
b6c9286a 17757 switch (rs6000_cpu_attr) {
3cb999d8
DE
17758 case CPU_RIOS1: /* ? */
17759 case CPU_RS64A:
17760 case CPU_PPC601: /* ? */
ed947a96 17761 case CPU_PPC7450:
3cb999d8 17762 return 3;
b54cf83a 17763 case CPU_PPC440:
b6c9286a 17764 case CPU_PPC603:
bef84347 17765 case CPU_PPC750:
ed947a96 17766 case CPU_PPC7400:
be12c2b0 17767 case CPU_PPC8540:
d296e02e 17768 case CPU_CELL:
f676971a 17769 return 2;
3cb999d8 17770 case CPU_RIOS2:
b6c9286a 17771 case CPU_PPC604:
19684119 17772 case CPU_PPC604E:
b6c9286a 17773 case CPU_PPC620:
3cb999d8 17774 case CPU_PPC630:
b6c9286a 17775 return 4;
cbe26ab8 17776 case CPU_POWER4:
ec507f2d 17777 case CPU_POWER5:
44cd321e 17778 case CPU_POWER6:
cbe26ab8 17779 return 5;
b6c9286a
MM
17780 default:
17781 return 1;
17782 }
17783}
17784
be12c2b0
VM
17785/* Return how many instructions to look ahead for better insn
17786 scheduling. */
17787
17788static int
863d938c 17789rs6000_use_sched_lookahead (void)
be12c2b0
VM
17790{
17791 if (rs6000_cpu_attr == CPU_PPC8540)
17792 return 4;
d296e02e
AP
17793 if (rs6000_cpu_attr == CPU_CELL)
17794 return (reload_completed ? 8 : 0);
be12c2b0
VM
17795 return 0;
17796}
17797
d296e02e
AP
17798/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
17799static int
17800rs6000_use_sched_lookahead_guard (rtx insn)
17801{
17802 if (rs6000_cpu_attr != CPU_CELL)
17803 return 1;
17804
17805 if (insn == NULL_RTX || !INSN_P (insn))
17806 abort ();
982afe02 17807
d296e02e
AP
17808 if (!reload_completed
17809 || is_nonpipeline_insn (insn)
17810 || is_microcoded_insn (insn))
17811 return 0;
17812
17813 return 1;
17814}
17815
569fa502
DN
17816/* Determine is PAT refers to memory. */
17817
17818static bool
17819is_mem_ref (rtx pat)
17820{
17821 const char * fmt;
17822 int i, j;
17823 bool ret = false;
17824
17825 if (GET_CODE (pat) == MEM)
17826 return true;
17827
17828 /* Recursively process the pattern. */
17829 fmt = GET_RTX_FORMAT (GET_CODE (pat));
17830
17831 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
17832 {
17833 if (fmt[i] == 'e')
17834 ret |= is_mem_ref (XEXP (pat, i));
17835 else if (fmt[i] == 'E')
17836 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
17837 ret |= is_mem_ref (XVECEXP (pat, i, j));
17838 }
17839
17840 return ret;
17841}
17842
17843/* Determine if PAT is a PATTERN of a load insn. */
f676971a 17844
569fa502
DN
17845static bool
17846is_load_insn1 (rtx pat)
17847{
17848 if (!pat || pat == NULL_RTX)
17849 return false;
17850
17851 if (GET_CODE (pat) == SET)
17852 return is_mem_ref (SET_SRC (pat));
17853
17854 if (GET_CODE (pat) == PARALLEL)
17855 {
17856 int i;
17857
17858 for (i = 0; i < XVECLEN (pat, 0); i++)
17859 if (is_load_insn1 (XVECEXP (pat, 0, i)))
17860 return true;
17861 }
17862
17863 return false;
17864}
17865
17866/* Determine if INSN loads from memory. */
17867
17868static bool
17869is_load_insn (rtx insn)
17870{
17871 if (!insn || !INSN_P (insn))
17872 return false;
17873
17874 if (GET_CODE (insn) == CALL_INSN)
17875 return false;
17876
17877 return is_load_insn1 (PATTERN (insn));
17878}
17879
17880/* Determine if PAT is a PATTERN of a store insn. */
17881
17882static bool
17883is_store_insn1 (rtx pat)
17884{
17885 if (!pat || pat == NULL_RTX)
17886 return false;
17887
17888 if (GET_CODE (pat) == SET)
17889 return is_mem_ref (SET_DEST (pat));
17890
17891 if (GET_CODE (pat) == PARALLEL)
17892 {
17893 int i;
17894
17895 for (i = 0; i < XVECLEN (pat, 0); i++)
17896 if (is_store_insn1 (XVECEXP (pat, 0, i)))
17897 return true;
17898 }
17899
17900 return false;
17901}
17902
17903/* Determine if INSN stores to memory. */
17904
17905static bool
17906is_store_insn (rtx insn)
17907{
17908 if (!insn || !INSN_P (insn))
17909 return false;
17910
17911 return is_store_insn1 (PATTERN (insn));
17912}
17913
e3a0e200
PB
17914/* Return the dest of a store insn. */
17915
17916static rtx
17917get_store_dest (rtx pat)
17918{
17919 gcc_assert (is_store_insn1 (pat));
17920
17921 if (GET_CODE (pat) == SET)
17922 return SET_DEST (pat);
17923 else if (GET_CODE (pat) == PARALLEL)
17924 {
17925 int i;
17926
17927 for (i = 0; i < XVECLEN (pat, 0); i++)
17928 {
17929 rtx inner_pat = XVECEXP (pat, 0, i);
17930 if (GET_CODE (inner_pat) == SET
17931 && is_mem_ref (SET_DEST (inner_pat)))
17932 return inner_pat;
17933 }
17934 }
17935 /* We shouldn't get here, because we should have either a simple
17936 store insn or a store with update which are covered above. */
17937 gcc_unreachable();
17938}
17939
569fa502
DN
17940/* Returns whether the dependence between INSN and NEXT is considered
17941 costly by the given target. */
17942
17943static bool
b198261f 17944rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 17945{
b198261f
MK
17946 rtx insn;
17947 rtx next;
17948
aabcd309 17949 /* If the flag is not enabled - no dependence is considered costly;
f676971a 17950 allow all dependent insns in the same group.
569fa502
DN
17951 This is the most aggressive option. */
17952 if (rs6000_sched_costly_dep == no_dep_costly)
17953 return false;
17954
f676971a 17955 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
17956 do not allow dependent instructions in the same group.
17957 This is the most conservative option. */
17958 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 17959 return true;
569fa502 17960
b198261f
MK
17961 insn = DEP_PRO (dep);
17962 next = DEP_CON (dep);
17963
f676971a
EC
17964 if (rs6000_sched_costly_dep == store_to_load_dep_costly
17965 && is_load_insn (next)
569fa502
DN
17966 && is_store_insn (insn))
17967 /* Prevent load after store in the same group. */
17968 return true;
17969
17970 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 17971 && is_load_insn (next)
569fa502 17972 && is_store_insn (insn)
e2f6ff94 17973 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
17974 /* Prevent load after store in the same group if it is a true
17975 dependence. */
569fa502 17976 return true;
f676971a
EC
17977
17978 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
17979 and will not be scheduled in the same group. */
17980 if (rs6000_sched_costly_dep <= max_dep_latency
17981 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
17982 return true;
17983
17984 return false;
17985}
17986
f676971a 17987/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
17988 skipping any "non-active" insns - insns that will not actually occupy
17989 an issue slot. Return NULL_RTX if such an insn is not found. */
17990
17991static rtx
17992get_next_active_insn (rtx insn, rtx tail)
17993{
f489aff8 17994 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
17995 return NULL_RTX;
17996
f489aff8 17997 while (1)
cbe26ab8 17998 {
f489aff8
AM
17999 insn = NEXT_INSN (insn);
18000 if (insn == NULL_RTX || insn == tail)
18001 return NULL_RTX;
cbe26ab8 18002
f489aff8
AM
18003 if (CALL_P (insn)
18004 || JUMP_P (insn)
18005 || (NONJUMP_INSN_P (insn)
18006 && GET_CODE (PATTERN (insn)) != USE
18007 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 18008 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
18009 break;
18010 }
18011 return insn;
cbe26ab8
DN
18012}
18013
44cd321e
PS
18014/* We are about to begin issuing insns for this clock cycle. */
18015
18016static int
18017rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
18018 rtx *ready ATTRIBUTE_UNUSED,
18019 int *pn_ready ATTRIBUTE_UNUSED,
18020 int clock_var ATTRIBUTE_UNUSED)
18021{
d296e02e
AP
18022 int n_ready = *pn_ready;
18023
44cd321e
PS
18024 if (sched_verbose)
18025 fprintf (dump, "// rs6000_sched_reorder :\n");
18026
d296e02e
AP
18027 /* Reorder the ready list, if the second to last ready insn
18028 is a nonepipeline insn. */
18029 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
18030 {
18031 if (is_nonpipeline_insn (ready[n_ready - 1])
18032 && (recog_memoized (ready[n_ready - 2]) > 0))
18033 /* Simply swap first two insns. */
18034 {
18035 rtx tmp = ready[n_ready - 1];
18036 ready[n_ready - 1] = ready[n_ready - 2];
18037 ready[n_ready - 2] = tmp;
18038 }
18039 }
18040
44cd321e
PS
18041 if (rs6000_cpu == PROCESSOR_POWER6)
18042 load_store_pendulum = 0;
18043
18044 return rs6000_issue_rate ();
18045}
18046
18047/* Like rs6000_sched_reorder, but called after issuing each insn. */
18048
18049static int
18050rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18051 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18052{
18053 if (sched_verbose)
18054 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18055
18056 /* For Power6, we need to handle some special cases to try and keep the
18057 store queue from overflowing and triggering expensive flushes.
18058
18059 This code monitors how load and store instructions are being issued
18060 and skews the ready list one way or the other to increase the likelihood
18061 that a desired instruction is issued at the proper time.
18062
18063 A couple of things are done. First, we maintain a "load_store_pendulum"
18064 to track the current state of load/store issue.
18065
18066 - If the pendulum is at zero, then no loads or stores have been
18067 issued in the current cycle so we do nothing.
18068
18069 - If the pendulum is 1, then a single load has been issued in this
18070 cycle and we attempt to locate another load in the ready list to
18071 issue with it.
18072
2f8e468b 18073 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18074 issued in this cycle, so we increase the priority of the first load
18075 in the ready list to increase it's likelihood of being chosen first
18076 in the next cycle.
18077
18078 - If the pendulum is -1, then a single store has been issued in this
18079 cycle and we attempt to locate another store in the ready list to
18080 issue with it, preferring a store to an adjacent memory location to
18081 facilitate store pairing in the store queue.
18082
18083 - If the pendulum is 2, then two loads have already been
18084 issued in this cycle, so we increase the priority of the first store
18085 in the ready list to increase it's likelihood of being chosen first
18086 in the next cycle.
18087
18088 - If the pendulum < -2 or > 2, then do nothing.
18089
18090 Note: This code covers the most common scenarios. There exist non
18091 load/store instructions which make use of the LSU and which
18092 would need to be accounted for to strictly model the behavior
18093 of the machine. Those instructions are currently unaccounted
18094 for to help minimize compile time overhead of this code.
18095 */
18096 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18097 {
18098 int pos;
18099 int i;
18100 rtx tmp;
18101
18102 if (is_store_insn (last_scheduled_insn))
18103 /* Issuing a store, swing the load_store_pendulum to the left */
18104 load_store_pendulum--;
18105 else if (is_load_insn (last_scheduled_insn))
18106 /* Issuing a load, swing the load_store_pendulum to the right */
18107 load_store_pendulum++;
18108 else
18109 return cached_can_issue_more;
18110
18111 /* If the pendulum is balanced, or there is only one instruction on
18112 the ready list, then all is well, so return. */
18113 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18114 return cached_can_issue_more;
18115
18116 if (load_store_pendulum == 1)
18117 {
18118 /* A load has been issued in this cycle. Scan the ready list
18119 for another load to issue with it */
18120 pos = *pn_ready-1;
18121
18122 while (pos >= 0)
18123 {
18124 if (is_load_insn (ready[pos]))
18125 {
18126 /* Found a load. Move it to the head of the ready list,
18127 and adjust it's priority so that it is more likely to
18128 stay there */
18129 tmp = ready[pos];
18130 for (i=pos; i<*pn_ready-1; i++)
18131 ready[i] = ready[i + 1];
18132 ready[*pn_ready-1] = tmp;
18133 if INSN_PRIORITY_KNOWN (tmp)
18134 INSN_PRIORITY (tmp)++;
18135 break;
18136 }
18137 pos--;
18138 }
18139 }
18140 else if (load_store_pendulum == -2)
18141 {
18142 /* Two stores have been issued in this cycle. Increase the
18143 priority of the first load in the ready list to favor it for
18144 issuing in the next cycle. */
18145 pos = *pn_ready-1;
18146
18147 while (pos >= 0)
18148 {
18149 if (is_load_insn (ready[pos])
18150 && INSN_PRIORITY_KNOWN (ready[pos]))
18151 {
18152 INSN_PRIORITY (ready[pos])++;
18153
18154 /* Adjust the pendulum to account for the fact that a load
18155 was found and increased in priority. This is to prevent
18156 increasing the priority of multiple loads */
18157 load_store_pendulum--;
18158
18159 break;
18160 }
18161 pos--;
18162 }
18163 }
18164 else if (load_store_pendulum == -1)
18165 {
18166 /* A store has been issued in this cycle. Scan the ready list for
18167 another store to issue with it, preferring a store to an adjacent
18168 memory location */
18169 int first_store_pos = -1;
18170
18171 pos = *pn_ready-1;
18172
18173 while (pos >= 0)
18174 {
18175 if (is_store_insn (ready[pos]))
18176 {
18177 /* Maintain the index of the first store found on the
18178 list */
18179 if (first_store_pos == -1)
18180 first_store_pos = pos;
18181
18182 if (is_store_insn (last_scheduled_insn)
18183 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18184 {
18185 /* Found an adjacent store. Move it to the head of the
18186 ready list, and adjust it's priority so that it is
18187 more likely to stay there */
18188 tmp = ready[pos];
18189 for (i=pos; i<*pn_ready-1; i++)
18190 ready[i] = ready[i + 1];
18191 ready[*pn_ready-1] = tmp;
18192 if INSN_PRIORITY_KNOWN (tmp)
18193 INSN_PRIORITY (tmp)++;
18194 first_store_pos = -1;
18195
18196 break;
18197 };
18198 }
18199 pos--;
18200 }
18201
18202 if (first_store_pos >= 0)
18203 {
18204 /* An adjacent store wasn't found, but a non-adjacent store was,
18205 so move the non-adjacent store to the front of the ready
18206 list, and adjust its priority so that it is more likely to
18207 stay there. */
18208 tmp = ready[first_store_pos];
18209 for (i=first_store_pos; i<*pn_ready-1; i++)
18210 ready[i] = ready[i + 1];
18211 ready[*pn_ready-1] = tmp;
18212 if INSN_PRIORITY_KNOWN (tmp)
18213 INSN_PRIORITY (tmp)++;
18214 }
18215 }
18216 else if (load_store_pendulum == 2)
18217 {
18218 /* Two loads have been issued in this cycle. Increase the priority
18219 of the first store in the ready list to favor it for issuing in
18220 the next cycle. */
18221 pos = *pn_ready-1;
18222
18223 while (pos >= 0)
18224 {
18225 if (is_store_insn (ready[pos])
18226 && INSN_PRIORITY_KNOWN (ready[pos]))
18227 {
18228 INSN_PRIORITY (ready[pos])++;
18229
18230 /* Adjust the pendulum to account for the fact that a store
18231 was found and increased in priority. This is to prevent
18232 increasing the priority of multiple stores */
18233 load_store_pendulum++;
18234
18235 break;
18236 }
18237 pos--;
18238 }
18239 }
18240 }
18241
18242 return cached_can_issue_more;
18243}
18244
839a4992 18245/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18246 of group WHICH_GROUP.
18247
18248 If WHICH_GROUP == current_group, this function will return true if INSN
18249 causes the termination of the current group (i.e, the dispatch group to
18250 which INSN belongs). This means that INSN will be the last insn in the
18251 group it belongs to.
18252
18253 If WHICH_GROUP == previous_group, this function will return true if INSN
18254 causes the termination of the previous group (i.e, the dispatch group that
18255 precedes the group to which INSN belongs). This means that INSN will be
18256 the first insn in the group it belongs to). */
18257
18258static bool
18259insn_terminates_group_p (rtx insn, enum group_termination which_group)
18260{
44cd321e 18261 bool first, last;
cbe26ab8
DN
18262
18263 if (! insn)
18264 return false;
569fa502 18265
44cd321e
PS
18266 first = insn_must_be_first_in_group (insn);
18267 last = insn_must_be_last_in_group (insn);
cbe26ab8 18268
44cd321e 18269 if (first && last)
cbe26ab8
DN
18270 return true;
18271
18272 if (which_group == current_group)
44cd321e 18273 return last;
cbe26ab8 18274 else if (which_group == previous_group)
44cd321e
PS
18275 return first;
18276
18277 return false;
18278}
18279
18280
18281static bool
18282insn_must_be_first_in_group (rtx insn)
18283{
18284 enum attr_type type;
18285
18286 if (!insn
18287 || insn == NULL_RTX
18288 || GET_CODE (insn) == NOTE
18289 || GET_CODE (PATTERN (insn)) == USE
18290 || GET_CODE (PATTERN (insn)) == CLOBBER)
18291 return false;
18292
18293 switch (rs6000_cpu)
cbe26ab8 18294 {
44cd321e
PS
18295 case PROCESSOR_POWER5:
18296 if (is_cracked_insn (insn))
18297 return true;
18298 case PROCESSOR_POWER4:
18299 if (is_microcoded_insn (insn))
18300 return true;
18301
18302 if (!rs6000_sched_groups)
18303 return false;
18304
18305 type = get_attr_type (insn);
18306
18307 switch (type)
18308 {
18309 case TYPE_MFCR:
18310 case TYPE_MFCRF:
18311 case TYPE_MTCR:
18312 case TYPE_DELAYED_CR:
18313 case TYPE_CR_LOGICAL:
18314 case TYPE_MTJMPR:
18315 case TYPE_MFJMPR:
18316 case TYPE_IDIV:
18317 case TYPE_LDIV:
18318 case TYPE_LOAD_L:
18319 case TYPE_STORE_C:
18320 case TYPE_ISYNC:
18321 case TYPE_SYNC:
18322 return true;
18323 default:
18324 break;
18325 }
18326 break;
18327 case PROCESSOR_POWER6:
18328 type = get_attr_type (insn);
18329
18330 switch (type)
18331 {
18332 case TYPE_INSERT_DWORD:
18333 case TYPE_EXTS:
18334 case TYPE_CNTLZ:
18335 case TYPE_SHIFT:
18336 case TYPE_VAR_SHIFT_ROTATE:
18337 case TYPE_TRAP:
18338 case TYPE_IMUL:
18339 case TYPE_IMUL2:
18340 case TYPE_IMUL3:
18341 case TYPE_LMUL:
18342 case TYPE_IDIV:
18343 case TYPE_INSERT_WORD:
18344 case TYPE_DELAYED_COMPARE:
18345 case TYPE_IMUL_COMPARE:
18346 case TYPE_LMUL_COMPARE:
18347 case TYPE_FPCOMPARE:
18348 case TYPE_MFCR:
18349 case TYPE_MTCR:
18350 case TYPE_MFJMPR:
18351 case TYPE_MTJMPR:
18352 case TYPE_ISYNC:
18353 case TYPE_SYNC:
18354 case TYPE_LOAD_L:
18355 case TYPE_STORE_C:
18356 case TYPE_LOAD_U:
18357 case TYPE_LOAD_UX:
18358 case TYPE_LOAD_EXT_UX:
18359 case TYPE_STORE_U:
18360 case TYPE_STORE_UX:
18361 case TYPE_FPLOAD_U:
18362 case TYPE_FPLOAD_UX:
18363 case TYPE_FPSTORE_U:
18364 case TYPE_FPSTORE_UX:
18365 return true;
18366 default:
18367 break;
18368 }
18369 break;
18370 default:
18371 break;
18372 }
18373
18374 return false;
18375}
18376
18377static bool
18378insn_must_be_last_in_group (rtx insn)
18379{
18380 enum attr_type type;
18381
18382 if (!insn
18383 || insn == NULL_RTX
18384 || GET_CODE (insn) == NOTE
18385 || GET_CODE (PATTERN (insn)) == USE
18386 || GET_CODE (PATTERN (insn)) == CLOBBER)
18387 return false;
18388
18389 switch (rs6000_cpu) {
18390 case PROCESSOR_POWER4:
18391 case PROCESSOR_POWER5:
18392 if (is_microcoded_insn (insn))
18393 return true;
18394
18395 if (is_branch_slot_insn (insn))
18396 return true;
18397
18398 break;
18399 case PROCESSOR_POWER6:
18400 type = get_attr_type (insn);
18401
18402 switch (type)
18403 {
18404 case TYPE_EXTS:
18405 case TYPE_CNTLZ:
18406 case TYPE_SHIFT:
18407 case TYPE_VAR_SHIFT_ROTATE:
18408 case TYPE_TRAP:
18409 case TYPE_IMUL:
18410 case TYPE_IMUL2:
18411 case TYPE_IMUL3:
18412 case TYPE_LMUL:
18413 case TYPE_IDIV:
18414 case TYPE_DELAYED_COMPARE:
18415 case TYPE_IMUL_COMPARE:
18416 case TYPE_LMUL_COMPARE:
18417 case TYPE_FPCOMPARE:
18418 case TYPE_MFCR:
18419 case TYPE_MTCR:
18420 case TYPE_MFJMPR:
18421 case TYPE_MTJMPR:
18422 case TYPE_ISYNC:
18423 case TYPE_SYNC:
18424 case TYPE_LOAD_L:
18425 case TYPE_STORE_C:
18426 return true;
18427 default:
18428 break;
cbe26ab8 18429 }
44cd321e
PS
18430 break;
18431 default:
18432 break;
18433 }
cbe26ab8
DN
18434
18435 return false;
18436}
18437
839a4992 18438/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18439 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18440
18441static bool
18442is_costly_group (rtx *group_insns, rtx next_insn)
18443{
18444 int i;
cbe26ab8
DN
18445 int issue_rate = rs6000_issue_rate ();
18446
18447 for (i = 0; i < issue_rate; i++)
18448 {
e2f6ff94
MK
18449 sd_iterator_def sd_it;
18450 dep_t dep;
cbe26ab8 18451 rtx insn = group_insns[i];
b198261f 18452
cbe26ab8 18453 if (!insn)
c4ad648e 18454 continue;
b198261f 18455
e2f6ff94 18456 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 18457 {
b198261f
MK
18458 rtx next = DEP_CON (dep);
18459
18460 if (next == next_insn
18461 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
18462 return true;
c4ad648e 18463 }
cbe26ab8
DN
18464 }
18465
18466 return false;
18467}
18468
f676971a 18469/* Utility of the function redefine_groups.
cbe26ab8
DN
18470 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
18471 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
18472 to keep it "far" (in a separate group) from GROUP_INSNS, following
18473 one of the following schemes, depending on the value of the flag
18474 -minsert_sched_nops = X:
18475 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 18476 in order to force NEXT_INSN into a separate group.
f676971a
EC
18477 (2) X < sched_finish_regroup_exact: insert exactly X nops.
18478 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
18479 insertion (has a group just ended, how many vacant issue slots remain in the
18480 last group, and how many dispatch groups were encountered so far). */
18481
f676971a 18482static int
c4ad648e
AM
18483force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
18484 rtx next_insn, bool *group_end, int can_issue_more,
18485 int *group_count)
cbe26ab8
DN
18486{
18487 rtx nop;
18488 bool force;
18489 int issue_rate = rs6000_issue_rate ();
18490 bool end = *group_end;
18491 int i;
18492
18493 if (next_insn == NULL_RTX)
18494 return can_issue_more;
18495
18496 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
18497 return can_issue_more;
18498
18499 force = is_costly_group (group_insns, next_insn);
18500 if (!force)
18501 return can_issue_more;
18502
18503 if (sched_verbose > 6)
18504 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 18505 *group_count ,can_issue_more);
cbe26ab8
DN
18506
18507 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
18508 {
18509 if (*group_end)
c4ad648e 18510 can_issue_more = 0;
cbe26ab8
DN
18511
18512 /* Since only a branch can be issued in the last issue_slot, it is
18513 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
18514 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
18515 in this case the last nop will start a new group and the branch
18516 will be forced to the new group. */
cbe26ab8 18517 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 18518 can_issue_more--;
cbe26ab8
DN
18519
18520 while (can_issue_more > 0)
c4ad648e 18521 {
9390387d 18522 nop = gen_nop ();
c4ad648e
AM
18523 emit_insn_before (nop, next_insn);
18524 can_issue_more--;
18525 }
cbe26ab8
DN
18526
18527 *group_end = true;
18528 return 0;
f676971a 18529 }
cbe26ab8
DN
18530
18531 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
18532 {
18533 int n_nops = rs6000_sched_insert_nops;
18534
f676971a 18535 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 18536 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 18537 if (can_issue_more == 0)
c4ad648e 18538 can_issue_more = issue_rate;
cbe26ab8
DN
18539 can_issue_more--;
18540 if (can_issue_more == 0)
c4ad648e
AM
18541 {
18542 can_issue_more = issue_rate - 1;
18543 (*group_count)++;
18544 end = true;
18545 for (i = 0; i < issue_rate; i++)
18546 {
18547 group_insns[i] = 0;
18548 }
18549 }
cbe26ab8
DN
18550
18551 while (n_nops > 0)
c4ad648e
AM
18552 {
18553 nop = gen_nop ();
18554 emit_insn_before (nop, next_insn);
18555 if (can_issue_more == issue_rate - 1) /* new group begins */
18556 end = false;
18557 can_issue_more--;
18558 if (can_issue_more == 0)
18559 {
18560 can_issue_more = issue_rate - 1;
18561 (*group_count)++;
18562 end = true;
18563 for (i = 0; i < issue_rate; i++)
18564 {
18565 group_insns[i] = 0;
18566 }
18567 }
18568 n_nops--;
18569 }
cbe26ab8
DN
18570
18571 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 18572 can_issue_more++;
cbe26ab8 18573
c4ad648e
AM
18574 /* Is next_insn going to start a new group? */
18575 *group_end
18576 = (end
cbe26ab8
DN
18577 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18578 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18579 || (can_issue_more < issue_rate &&
c4ad648e 18580 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18581 if (*group_end && end)
c4ad648e 18582 (*group_count)--;
cbe26ab8
DN
18583
18584 if (sched_verbose > 6)
c4ad648e
AM
18585 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
18586 *group_count, can_issue_more);
f676971a
EC
18587 return can_issue_more;
18588 }
cbe26ab8
DN
18589
18590 return can_issue_more;
18591}
18592
18593/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 18594 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
18595 form in practice. It tries to achieve this synchronization by forcing the
18596 estimated processor grouping on the compiler (as opposed to the function
18597 'pad_goups' which tries to force the scheduler's grouping on the processor).
18598
18599 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
18600 examines the (estimated) dispatch groups that will be formed by the processor
18601 dispatcher. It marks these group boundaries to reflect the estimated
18602 processor grouping, overriding the grouping that the scheduler had marked.
18603 Depending on the value of the flag '-minsert-sched-nops' this function can
18604 force certain insns into separate groups or force a certain distance between
18605 them by inserting nops, for example, if there exists a "costly dependence"
18606 between the insns.
18607
18608 The function estimates the group boundaries that the processor will form as
0fa2e4df 18609 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
18610 each insn. A subsequent insn will start a new group if one of the following
18611 4 cases applies:
18612 - no more vacant issue slots remain in the current dispatch group.
18613 - only the last issue slot, which is the branch slot, is vacant, but the next
18614 insn is not a branch.
18615 - only the last 2 or less issue slots, including the branch slot, are vacant,
18616 which means that a cracked insn (which occupies two issue slots) can't be
18617 issued in this group.
f676971a 18618 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
18619 start a new group. */
18620
18621static int
18622redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18623{
18624 rtx insn, next_insn;
18625 int issue_rate;
18626 int can_issue_more;
18627 int slot, i;
18628 bool group_end;
18629 int group_count = 0;
18630 rtx *group_insns;
18631
18632 /* Initialize. */
18633 issue_rate = rs6000_issue_rate ();
18634 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 18635 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
18636 {
18637 group_insns[i] = 0;
18638 }
18639 can_issue_more = issue_rate;
18640 slot = 0;
18641 insn = get_next_active_insn (prev_head_insn, tail);
18642 group_end = false;
18643
18644 while (insn != NULL_RTX)
18645 {
18646 slot = (issue_rate - can_issue_more);
18647 group_insns[slot] = insn;
18648 can_issue_more =
c4ad648e 18649 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 18650 if (insn_terminates_group_p (insn, current_group))
c4ad648e 18651 can_issue_more = 0;
cbe26ab8
DN
18652
18653 next_insn = get_next_active_insn (insn, tail);
18654 if (next_insn == NULL_RTX)
c4ad648e 18655 return group_count + 1;
cbe26ab8 18656
c4ad648e
AM
18657 /* Is next_insn going to start a new group? */
18658 group_end
18659 = (can_issue_more == 0
18660 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18661 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18662 || (can_issue_more < issue_rate &&
18663 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18664
f676971a 18665 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
18666 next_insn, &group_end, can_issue_more,
18667 &group_count);
cbe26ab8
DN
18668
18669 if (group_end)
c4ad648e
AM
18670 {
18671 group_count++;
18672 can_issue_more = 0;
18673 for (i = 0; i < issue_rate; i++)
18674 {
18675 group_insns[i] = 0;
18676 }
18677 }
cbe26ab8
DN
18678
18679 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 18680 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 18681 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 18682 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
18683
18684 insn = next_insn;
18685 if (can_issue_more == 0)
c4ad648e
AM
18686 can_issue_more = issue_rate;
18687 } /* while */
cbe26ab8
DN
18688
18689 return group_count;
18690}
18691
18692/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
18693 dispatch group boundaries that the scheduler had marked. Pad with nops
18694 any dispatch groups which have vacant issue slots, in order to force the
18695 scheduler's grouping on the processor dispatcher. The function
18696 returns the number of dispatch groups found. */
18697
18698static int
18699pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18700{
18701 rtx insn, next_insn;
18702 rtx nop;
18703 int issue_rate;
18704 int can_issue_more;
18705 int group_end;
18706 int group_count = 0;
18707
18708 /* Initialize issue_rate. */
18709 issue_rate = rs6000_issue_rate ();
18710 can_issue_more = issue_rate;
18711
18712 insn = get_next_active_insn (prev_head_insn, tail);
18713 next_insn = get_next_active_insn (insn, tail);
18714
18715 while (insn != NULL_RTX)
18716 {
18717 can_issue_more =
18718 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
18719
18720 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
18721
18722 if (next_insn == NULL_RTX)
c4ad648e 18723 break;
cbe26ab8
DN
18724
18725 if (group_end)
c4ad648e
AM
18726 {
18727 /* If the scheduler had marked group termination at this location
18728 (between insn and next_indn), and neither insn nor next_insn will
18729 force group termination, pad the group with nops to force group
18730 termination. */
18731 if (can_issue_more
18732 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
18733 && !insn_terminates_group_p (insn, current_group)
18734 && !insn_terminates_group_p (next_insn, previous_group))
18735 {
9390387d 18736 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
18737 can_issue_more--;
18738
18739 while (can_issue_more)
18740 {
18741 nop = gen_nop ();
18742 emit_insn_before (nop, next_insn);
18743 can_issue_more--;
18744 }
18745 }
18746
18747 can_issue_more = issue_rate;
18748 group_count++;
18749 }
cbe26ab8
DN
18750
18751 insn = next_insn;
18752 next_insn = get_next_active_insn (insn, tail);
18753 }
18754
18755 return group_count;
18756}
18757
44cd321e
PS
18758/* We're beginning a new block. Initialize data structures as necessary. */
18759
18760static void
18761rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
18762 int sched_verbose ATTRIBUTE_UNUSED,
18763 int max_ready ATTRIBUTE_UNUSED)
982afe02 18764{
44cd321e
PS
18765 last_scheduled_insn = NULL_RTX;
18766 load_store_pendulum = 0;
18767}
18768
cbe26ab8
DN
18769/* The following function is called at the end of scheduling BB.
18770 After reload, it inserts nops at insn group bundling. */
18771
18772static void
38f391a5 18773rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
18774{
18775 int n_groups;
18776
18777 if (sched_verbose)
18778 fprintf (dump, "=== Finishing schedule.\n");
18779
ec507f2d 18780 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
18781 {
18782 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 18783 return;
cbe26ab8
DN
18784
18785 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
18786 n_groups = pad_groups (dump, sched_verbose,
18787 current_sched_info->prev_head,
18788 current_sched_info->next_tail);
cbe26ab8 18789 else
c4ad648e
AM
18790 n_groups = redefine_groups (dump, sched_verbose,
18791 current_sched_info->prev_head,
18792 current_sched_info->next_tail);
cbe26ab8
DN
18793
18794 if (sched_verbose >= 6)
18795 {
18796 fprintf (dump, "ngroups = %d\n", n_groups);
18797 print_rtl (dump, current_sched_info->prev_head);
18798 fprintf (dump, "Done finish_sched\n");
18799 }
18800 }
18801}
b6c9286a 18802\f
b6c9286a
MM
18803/* Length in units of the trampoline for entering a nested function. */
18804
18805int
863d938c 18806rs6000_trampoline_size (void)
b6c9286a
MM
18807{
18808 int ret = 0;
18809
18810 switch (DEFAULT_ABI)
18811 {
18812 default:
37409796 18813 gcc_unreachable ();
b6c9286a
MM
18814
18815 case ABI_AIX:
8f802bfb 18816 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
18817 break;
18818
4dabc42d 18819 case ABI_DARWIN:
b6c9286a 18820 case ABI_V4:
03a7e1a5 18821 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 18822 break;
b6c9286a
MM
18823 }
18824
18825 return ret;
18826}
18827
18828/* Emit RTL insns to initialize the variable parts of a trampoline.
18829 FNADDR is an RTX for the address of the function's pure code.
18830 CXT is an RTX for the static chain value for the function. */
18831
18832void
a2369ed3 18833rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 18834{
8bd04c56 18835 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 18836 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
18837
18838 switch (DEFAULT_ABI)
18839 {
18840 default:
37409796 18841 gcc_unreachable ();
b6c9286a 18842
8bd04c56 18843/* Macros to shorten the code expansions below. */
9613eaff 18844#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 18845#define MEM_PLUS(addr,offset) \
9613eaff 18846 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 18847
b6c9286a
MM
18848 /* Under AIX, just build the 3 word function descriptor */
18849 case ABI_AIX:
8bd04c56 18850 {
9613eaff
SH
18851 rtx fn_reg = gen_reg_rtx (Pmode);
18852 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 18853 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 18854 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
18855 emit_move_insn (MEM_DEREF (addr), fn_reg);
18856 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
18857 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
18858 }
b6c9286a
MM
18859 break;
18860
4dabc42d
TC
18861 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
18862 case ABI_DARWIN:
b6c9286a 18863 case ABI_V4:
9613eaff 18864 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 18865 FALSE, VOIDmode, 4,
9613eaff 18866 addr, Pmode,
eaf1bcf1 18867 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
18868 fnaddr, Pmode,
18869 ctx_reg, Pmode);
b6c9286a 18870 break;
b6c9286a
MM
18871 }
18872
18873 return;
18874}
7509c759
MM
18875
18876\f
91d231cb 18877/* Table of valid machine attributes. */
a4f6c312 18878
91d231cb 18879const struct attribute_spec rs6000_attribute_table[] =
7509c759 18880{
91d231cb 18881 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 18882 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
18883 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
18884 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
18885 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
18886 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
18887#ifdef SUBTARGET_ATTRIBUTE_TABLE
18888 SUBTARGET_ATTRIBUTE_TABLE,
18889#endif
a5c76ee6 18890 { NULL, 0, 0, false, false, false, NULL }
91d231cb 18891};
7509c759 18892
8bb418a3
ZL
18893/* Handle the "altivec" attribute. The attribute may have
18894 arguments as follows:
f676971a 18895
8bb418a3
ZL
18896 __attribute__((altivec(vector__)))
18897 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
18898 __attribute__((altivec(bool__))) (always followed by 'unsigned')
18899
18900 and may appear more than once (e.g., 'vector bool char') in a
18901 given declaration. */
18902
18903static tree
f90ac3f0
UP
18904rs6000_handle_altivec_attribute (tree *node,
18905 tree name ATTRIBUTE_UNUSED,
18906 tree args,
8bb418a3
ZL
18907 int flags ATTRIBUTE_UNUSED,
18908 bool *no_add_attrs)
18909{
18910 tree type = *node, result = NULL_TREE;
18911 enum machine_mode mode;
18912 int unsigned_p;
18913 char altivec_type
18914 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
18915 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
18916 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 18917 : '?');
8bb418a3
ZL
18918
18919 while (POINTER_TYPE_P (type)
18920 || TREE_CODE (type) == FUNCTION_TYPE
18921 || TREE_CODE (type) == METHOD_TYPE
18922 || TREE_CODE (type) == ARRAY_TYPE)
18923 type = TREE_TYPE (type);
18924
18925 mode = TYPE_MODE (type);
18926
f90ac3f0
UP
18927 /* Check for invalid AltiVec type qualifiers. */
18928 if (type == long_unsigned_type_node || type == long_integer_type_node)
18929 {
18930 if (TARGET_64BIT)
18931 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
18932 else if (rs6000_warn_altivec_long)
d4ee4d25 18933 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
18934 }
18935 else if (type == long_long_unsigned_type_node
18936 || type == long_long_integer_type_node)
18937 error ("use of %<long long%> in AltiVec types is invalid");
18938 else if (type == double_type_node)
18939 error ("use of %<double%> in AltiVec types is invalid");
18940 else if (type == long_double_type_node)
18941 error ("use of %<long double%> in AltiVec types is invalid");
18942 else if (type == boolean_type_node)
18943 error ("use of boolean types in AltiVec types is invalid");
18944 else if (TREE_CODE (type) == COMPLEX_TYPE)
18945 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
18946 else if (DECIMAL_FLOAT_MODE_P (mode))
18947 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
18948
18949 switch (altivec_type)
18950 {
18951 case 'v':
8df83eae 18952 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
18953 switch (mode)
18954 {
c4ad648e
AM
18955 case SImode:
18956 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
18957 break;
18958 case HImode:
18959 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
18960 break;
18961 case QImode:
18962 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
18963 break;
18964 case SFmode: result = V4SF_type_node; break;
18965 /* If the user says 'vector int bool', we may be handed the 'bool'
18966 attribute _before_ the 'vector' attribute, and so select the
18967 proper type in the 'b' case below. */
18968 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
18969 result = type;
18970 default: break;
8bb418a3
ZL
18971 }
18972 break;
18973 case 'b':
18974 switch (mode)
18975 {
c4ad648e
AM
18976 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
18977 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
18978 case QImode: case V16QImode: result = bool_V16QI_type_node;
18979 default: break;
8bb418a3
ZL
18980 }
18981 break;
18982 case 'p':
18983 switch (mode)
18984 {
c4ad648e
AM
18985 case V8HImode: result = pixel_V8HI_type_node;
18986 default: break;
8bb418a3
ZL
18987 }
18988 default: break;
18989 }
18990
7958a2a6
FJ
18991 if (result && result != type && TYPE_READONLY (type))
18992 result = build_qualified_type (result, TYPE_QUAL_CONST);
18993
8bb418a3
ZL
18994 *no_add_attrs = true; /* No need to hang on to the attribute. */
18995
f90ac3f0 18996 if (result)
8bb418a3
ZL
18997 *node = reconstruct_complex_type (*node, result);
18998
18999 return NULL_TREE;
19000}
19001
f18eca82
ZL
19002/* AltiVec defines four built-in scalar types that serve as vector
19003 elements; we must teach the compiler how to mangle them. */
19004
19005static const char *
608063c3 19006rs6000_mangle_type (tree type)
f18eca82 19007{
608063c3
JB
19008 type = TYPE_MAIN_VARIANT (type);
19009
19010 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
19011 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
19012 return NULL;
19013
f18eca82
ZL
19014 if (type == bool_char_type_node) return "U6__boolc";
19015 if (type == bool_short_type_node) return "U6__bools";
19016 if (type == pixel_type_node) return "u7__pixel";
19017 if (type == bool_int_type_node) return "U6__booli";
19018
337bde91
DE
19019 /* Mangle IBM extended float long double as `g' (__float128) on
19020 powerpc*-linux where long-double-64 previously was the default. */
19021 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
19022 && TARGET_ELF
19023 && TARGET_LONG_DOUBLE_128
19024 && !TARGET_IEEEQUAD)
19025 return "g";
19026
f18eca82
ZL
19027 /* For all other types, use normal C++ mangling. */
19028 return NULL;
19029}
19030
a5c76ee6
ZW
19031/* Handle a "longcall" or "shortcall" attribute; arguments as in
19032 struct attribute_spec.handler. */
a4f6c312 19033
91d231cb 19034static tree
f676971a
EC
19035rs6000_handle_longcall_attribute (tree *node, tree name,
19036 tree args ATTRIBUTE_UNUSED,
19037 int flags ATTRIBUTE_UNUSED,
a2369ed3 19038 bool *no_add_attrs)
91d231cb
JM
19039{
19040 if (TREE_CODE (*node) != FUNCTION_TYPE
19041 && TREE_CODE (*node) != FIELD_DECL
19042 && TREE_CODE (*node) != TYPE_DECL)
19043 {
5c498b10 19044 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19045 IDENTIFIER_POINTER (name));
19046 *no_add_attrs = true;
19047 }
6a4cee5f 19048
91d231cb 19049 return NULL_TREE;
7509c759
MM
19050}
19051
a5c76ee6
ZW
19052/* Set longcall attributes on all functions declared when
19053 rs6000_default_long_calls is true. */
19054static void
a2369ed3 19055rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19056{
19057 if (rs6000_default_long_calls
19058 && (TREE_CODE (type) == FUNCTION_TYPE
19059 || TREE_CODE (type) == METHOD_TYPE))
19060 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19061 NULL_TREE,
19062 TYPE_ATTRIBUTES (type));
16d6f994
EC
19063
19064#if TARGET_MACHO
19065 darwin_set_default_type_attributes (type);
19066#endif
a5c76ee6
ZW
19067}
19068
3cb999d8
DE
19069/* Return a reference suitable for calling a function with the
19070 longcall attribute. */
a4f6c312 19071
9390387d 19072rtx
a2369ed3 19073rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19074{
d330fd93 19075 const char *call_name;
6a4cee5f
MM
19076 tree node;
19077
19078 if (GET_CODE (call_ref) != SYMBOL_REF)
19079 return call_ref;
19080
19081 /* System V adds '.' to the internal name, so skip them. */
19082 call_name = XSTR (call_ref, 0);
19083 if (*call_name == '.')
19084 {
19085 while (*call_name == '.')
19086 call_name++;
19087
19088 node = get_identifier (call_name);
39403d82 19089 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19090 }
19091
19092 return force_reg (Pmode, call_ref);
19093}
7509c759 19094\f
77ccdfed
EC
19095#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19096#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19097#endif
19098
19099/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19100 struct attribute_spec.handler. */
19101static tree
19102rs6000_handle_struct_attribute (tree *node, tree name,
19103 tree args ATTRIBUTE_UNUSED,
19104 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19105{
19106 tree *type = NULL;
19107 if (DECL_P (*node))
19108 {
19109 if (TREE_CODE (*node) == TYPE_DECL)
19110 type = &TREE_TYPE (*node);
19111 }
19112 else
19113 type = node;
19114
19115 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19116 || TREE_CODE (*type) == UNION_TYPE)))
19117 {
19118 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19119 *no_add_attrs = true;
19120 }
19121
19122 else if ((is_attribute_p ("ms_struct", name)
19123 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19124 || ((is_attribute_p ("gcc_struct", name)
19125 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19126 {
19127 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19128 IDENTIFIER_POINTER (name));
19129 *no_add_attrs = true;
19130 }
19131
19132 return NULL_TREE;
19133}
19134
19135static bool
19136rs6000_ms_bitfield_layout_p (tree record_type)
19137{
19138 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19139 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19140 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19141}
19142\f
b64a1b53
RH
19143#ifdef USING_ELFOS_H
19144
d6b5193b 19145/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19146
d6b5193b
RS
19147static void
19148rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19149{
19150 if (DEFAULT_ABI == ABI_AIX
19151 && TARGET_MINIMAL_TOC
19152 && !TARGET_RELOCATABLE)
19153 {
19154 if (!toc_initialized)
19155 {
19156 toc_initialized = 1;
19157 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19158 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19159 fprintf (asm_out_file, "\t.tc ");
19160 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19161 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19162 fprintf (asm_out_file, "\n");
19163
19164 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19165 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19166 fprintf (asm_out_file, " = .+32768\n");
19167 }
19168 else
19169 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19170 }
19171 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19172 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19173 else
19174 {
19175 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19176 if (!toc_initialized)
19177 {
19178 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19179 fprintf (asm_out_file, " = .+32768\n");
19180 toc_initialized = 1;
19181 }
19182 }
19183}
19184
19185/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19186
b64a1b53 19187static void
d6b5193b
RS
19188rs6000_elf_asm_init_sections (void)
19189{
19190 toc_section
19191 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19192
19193 sdata2_section
19194 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19195 SDATA2_SECTION_ASM_OP);
19196}
19197
19198/* Implement TARGET_SELECT_RTX_SECTION. */
19199
19200static section *
f676971a 19201rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19202 unsigned HOST_WIDE_INT align)
7509c759 19203{
a9098fd0 19204 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19205 return toc_section;
7509c759 19206 else
d6b5193b 19207 return default_elf_select_rtx_section (mode, x, align);
7509c759 19208}
d9407988 19209\f
d1908feb
JJ
19210/* For a SYMBOL_REF, set generic flags and then perform some
19211 target-specific processing.
19212
d1908feb
JJ
19213 When the AIX ABI is requested on a non-AIX system, replace the
19214 function name with the real name (with a leading .) rather than the
19215 function descriptor name. This saves a lot of overriding code to
19216 read the prefixes. */
d9407988 19217
fb49053f 19218static void
a2369ed3 19219rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19220{
d1908feb 19221 default_encode_section_info (decl, rtl, first);
b2003250 19222
d1908feb
JJ
19223 if (first
19224 && TREE_CODE (decl) == FUNCTION_DECL
19225 && !TARGET_AIX
19226 && DEFAULT_ABI == ABI_AIX)
d9407988 19227 {
c6a2438a 19228 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19229 size_t len = strlen (XSTR (sym_ref, 0));
19230 char *str = alloca (len + 2);
19231 str[0] = '.';
19232 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19233 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19234 }
d9407988
MM
19235}
19236
c1b7d95a 19237bool
a2369ed3 19238rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
19239{
19240 if (rs6000_sdata == SDATA_NONE)
19241 return false;
19242
7482ad25
AF
19243 /* We want to merge strings, so we never consider them small data. */
19244 if (TREE_CODE (decl) == STRING_CST)
19245 return false;
19246
19247 /* Functions are never in the small data area. */
19248 if (TREE_CODE (decl) == FUNCTION_DECL)
19249 return false;
19250
0e5dbd9b
DE
19251 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19252 {
19253 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
19254 if (strcmp (section, ".sdata") == 0
19255 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
19256 || strcmp (section, ".sbss") == 0
19257 || strcmp (section, ".sbss2") == 0
19258 || strcmp (section, ".PPC.EMB.sdata0") == 0
19259 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19260 return true;
19261 }
19262 else
19263 {
19264 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19265
19266 if (size > 0
307b599c 19267 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19268 /* If it's not public, and we're not going to reference it there,
19269 there's no need to put it in the small data section. */
0e5dbd9b
DE
19270 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19271 return true;
19272 }
19273
19274 return false;
19275}
19276
b91da81f 19277#endif /* USING_ELFOS_H */
aacd3885
RS
19278\f
19279/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19280
aacd3885
RS
19281static bool
19282rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
19283{
19284 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19285}
a6c2a102 19286\f
000034eb 19287/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19288 ADDR can be effectively incremented by incrementing REG.
19289
19290 r0 is special and we must not select it as an address
19291 register by this routine since our caller will try to
19292 increment the returned register via an "la" instruction. */
000034eb 19293
9390387d 19294rtx
a2369ed3 19295find_addr_reg (rtx addr)
000034eb
DE
19296{
19297 while (GET_CODE (addr) == PLUS)
19298 {
02441cd6
JL
19299 if (GET_CODE (XEXP (addr, 0)) == REG
19300 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19301 addr = XEXP (addr, 0);
02441cd6
JL
19302 else if (GET_CODE (XEXP (addr, 1)) == REG
19303 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19304 addr = XEXP (addr, 1);
19305 else if (CONSTANT_P (XEXP (addr, 0)))
19306 addr = XEXP (addr, 1);
19307 else if (CONSTANT_P (XEXP (addr, 1)))
19308 addr = XEXP (addr, 0);
19309 else
37409796 19310 gcc_unreachable ();
000034eb 19311 }
37409796
NS
19312 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19313 return addr;
000034eb
DE
19314}
19315
a6c2a102 19316void
a2369ed3 19317rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19318{
19319 fatal_insn ("bad address", op);
19320}
c8023011 19321
ee890fe2
SS
19322#if TARGET_MACHO
19323
efdba735 19324static tree branch_island_list = 0;
ee890fe2 19325
efdba735
SH
19326/* Remember to generate a branch island for far calls to the given
19327 function. */
ee890fe2 19328
f676971a 19329static void
c4ad648e
AM
19330add_compiler_branch_island (tree label_name, tree function_name,
19331 int line_number)
ee890fe2 19332{
efdba735 19333 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19334 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19335 TREE_CHAIN (branch_island) = branch_island_list;
19336 branch_island_list = branch_island;
ee890fe2
SS
19337}
19338
efdba735
SH
19339#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19340#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19341#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19342 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19343
efdba735
SH
19344/* Generate far-jump branch islands for everything on the
19345 branch_island_list. Invoked immediately after the last instruction
19346 of the epilogue has been emitted; the branch-islands must be
19347 appended to, and contiguous with, the function body. Mach-O stubs
19348 are generated in machopic_output_stub(). */
ee890fe2 19349
efdba735
SH
19350static void
19351macho_branch_islands (void)
19352{
19353 char tmp_buf[512];
19354 tree branch_island;
19355
19356 for (branch_island = branch_island_list;
19357 branch_island;
19358 branch_island = TREE_CHAIN (branch_island))
19359 {
19360 const char *label =
19361 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19362 const char *name =
11abc112 19363 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19364 char name_buf[512];
19365 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19366 if (name[0] == '*' || name[0] == '&')
19367 strcpy (name_buf, name+1);
19368 else
19369 {
19370 name_buf[0] = '_';
19371 strcpy (name_buf+1, name);
19372 }
19373 strcpy (tmp_buf, "\n");
19374 strcat (tmp_buf, label);
ee890fe2 19375#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19376 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19377 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19378#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19379 if (flag_pic)
19380 {
19381 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19382 strcat (tmp_buf, label);
19383 strcat (tmp_buf, "_pic\n");
19384 strcat (tmp_buf, label);
19385 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19386
efdba735
SH
19387 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19388 strcat (tmp_buf, name_buf);
19389 strcat (tmp_buf, " - ");
19390 strcat (tmp_buf, label);
19391 strcat (tmp_buf, "_pic)\n");
f676971a 19392
efdba735 19393 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19394
efdba735
SH
19395 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19396 strcat (tmp_buf, name_buf);
19397 strcat (tmp_buf, " - ");
19398 strcat (tmp_buf, label);
19399 strcat (tmp_buf, "_pic)\n");
f676971a 19400
efdba735
SH
19401 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19402 }
19403 else
19404 {
19405 strcat (tmp_buf, ":\nlis r12,hi16(");
19406 strcat (tmp_buf, name_buf);
19407 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19408 strcat (tmp_buf, name_buf);
19409 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19410 }
19411 output_asm_insn (tmp_buf, 0);
ee890fe2 19412#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19413 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19414 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19415#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19416 }
ee890fe2 19417
efdba735 19418 branch_island_list = 0;
ee890fe2
SS
19419}
19420
19421/* NO_PREVIOUS_DEF checks in the link list whether the function name is
19422 already there or not. */
19423
efdba735 19424static int
a2369ed3 19425no_previous_def (tree function_name)
ee890fe2 19426{
efdba735
SH
19427 tree branch_island;
19428 for (branch_island = branch_island_list;
19429 branch_island;
19430 branch_island = TREE_CHAIN (branch_island))
19431 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19432 return 0;
19433 return 1;
19434}
19435
19436/* GET_PREV_LABEL gets the label name from the previous definition of
19437 the function. */
19438
efdba735 19439static tree
a2369ed3 19440get_prev_label (tree function_name)
ee890fe2 19441{
efdba735
SH
19442 tree branch_island;
19443 for (branch_island = branch_island_list;
19444 branch_island;
19445 branch_island = TREE_CHAIN (branch_island))
19446 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19447 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19448 return 0;
19449}
19450
75b1b789
MS
19451#ifndef DARWIN_LINKER_GENERATES_ISLANDS
19452#define DARWIN_LINKER_GENERATES_ISLANDS 0
19453#endif
19454
19455/* KEXTs still need branch islands. */
19456#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
19457 || flag_mkernel || flag_apple_kext)
19458
ee890fe2 19459/* INSN is either a function call or a millicode call. It may have an
f676971a 19460 unconditional jump in its delay slot.
ee890fe2
SS
19461
19462 CALL_DEST is the routine we are calling. */
19463
19464char *
c4ad648e
AM
19465output_call (rtx insn, rtx *operands, int dest_operand_number,
19466 int cookie_operand_number)
ee890fe2
SS
19467{
19468 static char buf[256];
75b1b789
MS
19469 if (DARWIN_GENERATE_ISLANDS
19470 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 19471 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
19472 {
19473 tree labelname;
efdba735 19474 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 19475
ee890fe2
SS
19476 if (no_previous_def (funname))
19477 {
ee890fe2
SS
19478 rtx label_rtx = gen_label_rtx ();
19479 char *label_buf, temp_buf[256];
19480 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
19481 CODE_LABEL_NUMBER (label_rtx));
19482 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
19483 labelname = get_identifier (label_buf);
a38e7aa5 19484 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
19485 }
19486 else
19487 labelname = get_prev_label (funname);
19488
efdba735
SH
19489 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
19490 instruction will reach 'foo', otherwise link as 'bl L42'".
19491 "L42" should be a 'branch island', that will do a far jump to
19492 'foo'. Branch islands are generated in
19493 macho_branch_islands(). */
ee890fe2 19494 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 19495 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
19496 }
19497 else
efdba735
SH
19498 sprintf (buf, "bl %%z%d", dest_operand_number);
19499 return buf;
ee890fe2
SS
19500}
19501
ee890fe2
SS
19502/* Generate PIC and indirect symbol stubs. */
19503
19504void
a2369ed3 19505machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
19506{
19507 unsigned int length;
a4f6c312
SS
19508 char *symbol_name, *lazy_ptr_name;
19509 char *local_label_0;
ee890fe2
SS
19510 static int label = 0;
19511
df56a27f 19512 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 19513 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 19514
ee890fe2 19515
ee890fe2
SS
19516 length = strlen (symb);
19517 symbol_name = alloca (length + 32);
19518 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
19519
19520 lazy_ptr_name = alloca (length + 32);
19521 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
19522
ee890fe2 19523 if (flag_pic == 2)
56c779bc 19524 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 19525 else
56c779bc 19526 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
19527
19528 if (flag_pic == 2)
19529 {
d974312d
DJ
19530 fprintf (file, "\t.align 5\n");
19531
19532 fprintf (file, "%s:\n", stub);
19533 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19534
876455fa 19535 label++;
89da1f32 19536 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 19537 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 19538
ee890fe2
SS
19539 fprintf (file, "\tmflr r0\n");
19540 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
19541 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
19542 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
19543 lazy_ptr_name, local_label_0);
19544 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
19545 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
19546 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
19547 lazy_ptr_name, local_label_0);
19548 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
19549 fprintf (file, "\tbctr\n");
19550 }
19551 else
d974312d
DJ
19552 {
19553 fprintf (file, "\t.align 4\n");
19554
19555 fprintf (file, "%s:\n", stub);
19556 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19557
19558 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
19559 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
19560 (TARGET_64BIT ? "ldu" : "lwzu"),
19561 lazy_ptr_name);
d974312d
DJ
19562 fprintf (file, "\tmtctr r12\n");
19563 fprintf (file, "\tbctr\n");
19564 }
f676971a 19565
56c779bc 19566 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
19567 fprintf (file, "%s:\n", lazy_ptr_name);
19568 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
19569 fprintf (file, "%sdyld_stub_binding_helper\n",
19570 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
19571}
19572
19573/* Legitimize PIC addresses. If the address is already
19574 position-independent, we return ORIG. Newly generated
19575 position-independent addresses go into a reg. This is REG if non
19576 zero, otherwise we allocate register(s) as necessary. */
19577
4fbbe694 19578#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
19579
19580rtx
f676971a 19581rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 19582 rtx reg)
ee890fe2
SS
19583{
19584 rtx base, offset;
19585
19586 if (reg == NULL && ! reload_in_progress && ! reload_completed)
19587 reg = gen_reg_rtx (Pmode);
19588
19589 if (GET_CODE (orig) == CONST)
19590 {
37409796
NS
19591 rtx reg_temp;
19592
ee890fe2
SS
19593 if (GET_CODE (XEXP (orig, 0)) == PLUS
19594 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
19595 return orig;
19596
37409796 19597 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 19598
37409796
NS
19599 /* Use a different reg for the intermediate value, as
19600 it will be marked UNCHANGING. */
b3a13419 19601 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
19602 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
19603 Pmode, reg_temp);
19604 offset =
19605 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
19606 Pmode, reg);
bb8df8a6 19607
ee890fe2
SS
19608 if (GET_CODE (offset) == CONST_INT)
19609 {
19610 if (SMALL_INT (offset))
ed8908e7 19611 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
19612 else if (! reload_in_progress && ! reload_completed)
19613 offset = force_reg (Pmode, offset);
19614 else
c859cda6
DJ
19615 {
19616 rtx mem = force_const_mem (Pmode, orig);
19617 return machopic_legitimize_pic_address (mem, Pmode, reg);
19618 }
ee890fe2 19619 }
f1c25d3b 19620 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
19621 }
19622
19623 /* Fall back on generic machopic code. */
19624 return machopic_legitimize_pic_address (orig, mode, reg);
19625}
19626
c4e18b1c
GK
19627/* Output a .machine directive for the Darwin assembler, and call
19628 the generic start_file routine. */
19629
19630static void
19631rs6000_darwin_file_start (void)
19632{
94ff898d 19633 static const struct
c4e18b1c
GK
19634 {
19635 const char *arg;
19636 const char *name;
19637 int if_set;
19638 } mapping[] = {
55dbfb48 19639 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
19640 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
19641 { "power4", "ppc970", 0 },
19642 { "G5", "ppc970", 0 },
19643 { "7450", "ppc7450", 0 },
19644 { "7400", "ppc7400", MASK_ALTIVEC },
19645 { "G4", "ppc7400", 0 },
19646 { "750", "ppc750", 0 },
19647 { "740", "ppc750", 0 },
19648 { "G3", "ppc750", 0 },
19649 { "604e", "ppc604e", 0 },
19650 { "604", "ppc604", 0 },
19651 { "603e", "ppc603", 0 },
19652 { "603", "ppc603", 0 },
19653 { "601", "ppc601", 0 },
19654 { NULL, "ppc", 0 } };
19655 const char *cpu_id = "";
19656 size_t i;
94ff898d 19657
9390387d 19658 rs6000_file_start ();
192d0f89 19659 darwin_file_start ();
c4e18b1c
GK
19660
19661 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
19662 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
19663 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
19664 && rs6000_select[i].string[0] != '\0')
19665 cpu_id = rs6000_select[i].string;
19666
19667 /* Look through the mapping array. Pick the first name that either
19668 matches the argument, has a bit set in IF_SET that is also set
19669 in the target flags, or has a NULL name. */
19670
19671 i = 0;
19672 while (mapping[i].arg != NULL
19673 && strcmp (mapping[i].arg, cpu_id) != 0
19674 && (mapping[i].if_set & target_flags) == 0)
19675 i++;
19676
19677 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
19678}
19679
ee890fe2 19680#endif /* TARGET_MACHO */
7c262518
RH
19681
19682#if TARGET_ELF
9b580a0b
RH
19683static int
19684rs6000_elf_reloc_rw_mask (void)
7c262518 19685{
9b580a0b
RH
19686 if (flag_pic)
19687 return 3;
19688 else if (DEFAULT_ABI == ABI_AIX)
19689 return 2;
19690 else
19691 return 0;
7c262518 19692}
d9f6800d
RH
19693
19694/* Record an element in the table of global constructors. SYMBOL is
19695 a SYMBOL_REF of the function to be called; PRIORITY is a number
19696 between 0 and MAX_INIT_PRIORITY.
19697
19698 This differs from default_named_section_asm_out_constructor in
19699 that we have special handling for -mrelocatable. */
19700
19701static void
a2369ed3 19702rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
19703{
19704 const char *section = ".ctors";
19705 char buf[16];
19706
19707 if (priority != DEFAULT_INIT_PRIORITY)
19708 {
19709 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
19710 /* Invert the numbering so the linker puts us in the proper
19711 order; constructors are run from right to left, and the
19712 linker sorts in increasing order. */
19713 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19714 section = buf;
19715 }
19716
d6b5193b 19717 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19718 assemble_align (POINTER_SIZE);
d9f6800d
RH
19719
19720 if (TARGET_RELOCATABLE)
19721 {
19722 fputs ("\t.long (", asm_out_file);
19723 output_addr_const (asm_out_file, symbol);
19724 fputs (")@fixup\n", asm_out_file);
19725 }
19726 else
c8af3574 19727 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
19728}
19729
19730static void
a2369ed3 19731rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
19732{
19733 const char *section = ".dtors";
19734 char buf[16];
19735
19736 if (priority != DEFAULT_INIT_PRIORITY)
19737 {
19738 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
19739 /* Invert the numbering so the linker puts us in the proper
19740 order; constructors are run from right to left, and the
19741 linker sorts in increasing order. */
19742 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19743 section = buf;
19744 }
19745
d6b5193b 19746 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19747 assemble_align (POINTER_SIZE);
d9f6800d
RH
19748
19749 if (TARGET_RELOCATABLE)
19750 {
19751 fputs ("\t.long (", asm_out_file);
19752 output_addr_const (asm_out_file, symbol);
19753 fputs (")@fixup\n", asm_out_file);
19754 }
19755 else
c8af3574 19756 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 19757}
9739c90c
JJ
19758
19759void
a2369ed3 19760rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
19761{
19762 if (TARGET_64BIT)
19763 {
19764 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
19765 ASM_OUTPUT_LABEL (file, name);
19766 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
19767 rs6000_output_function_entry (file, name);
19768 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
19769 if (DOT_SYMBOLS)
9739c90c 19770 {
85b776df 19771 fputs ("\t.size\t", file);
9739c90c 19772 assemble_name (file, name);
85b776df
AM
19773 fputs (",24\n\t.type\t.", file);
19774 assemble_name (file, name);
19775 fputs (",@function\n", file);
19776 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
19777 {
19778 fputs ("\t.globl\t.", file);
19779 assemble_name (file, name);
19780 putc ('\n', file);
19781 }
9739c90c 19782 }
85b776df
AM
19783 else
19784 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 19785 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
19786 rs6000_output_function_entry (file, name);
19787 fputs (":\n", file);
9739c90c
JJ
19788 return;
19789 }
19790
19791 if (TARGET_RELOCATABLE
7f970b70 19792 && !TARGET_SECURE_PLT
9739c90c 19793 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 19794 && uses_TOC ())
9739c90c
JJ
19795 {
19796 char buf[256];
19797
19798 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
19799
19800 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
19801 fprintf (file, "\t.long ");
19802 assemble_name (file, buf);
19803 putc ('-', file);
19804 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
19805 assemble_name (file, buf);
19806 putc ('\n', file);
19807 }
19808
19809 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
19810 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
19811
19812 if (DEFAULT_ABI == ABI_AIX)
19813 {
19814 const char *desc_name, *orig_name;
19815
19816 orig_name = (*targetm.strip_name_encoding) (name);
19817 desc_name = orig_name;
19818 while (*desc_name == '.')
19819 desc_name++;
19820
19821 if (TREE_PUBLIC (decl))
19822 fprintf (file, "\t.globl %s\n", desc_name);
19823
19824 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19825 fprintf (file, "%s:\n", desc_name);
19826 fprintf (file, "\t.long %s\n", orig_name);
19827 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
19828 if (DEFAULT_ABI == ABI_AIX)
19829 fputs ("\t.long 0\n", file);
19830 fprintf (file, "\t.previous\n");
19831 }
19832 ASM_OUTPUT_LABEL (file, name);
19833}
1334b570
AM
19834
19835static void
19836rs6000_elf_end_indicate_exec_stack (void)
19837{
19838 if (TARGET_32BIT)
19839 file_end_indicate_exec_stack ();
19840}
7c262518
RH
19841#endif
19842
cbaaba19 19843#if TARGET_XCOFF
0d5817b2
DE
19844static void
19845rs6000_xcoff_asm_output_anchor (rtx symbol)
19846{
19847 char buffer[100];
19848
19849 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
19850 SYMBOL_REF_BLOCK_OFFSET (symbol));
19851 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
19852}
19853
7c262518 19854static void
a2369ed3 19855rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
19856{
19857 fputs (GLOBAL_ASM_OP, stream);
19858 RS6000_OUTPUT_BASENAME (stream, name);
19859 putc ('\n', stream);
19860}
19861
d6b5193b
RS
19862/* A get_unnamed_decl callback, used for read-only sections. PTR
19863 points to the section string variable. */
19864
19865static void
19866rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
19867{
19868 fprintf (asm_out_file, "\t.csect %s[RO],3\n",
19869 *(const char *const *) directive);
19870}
19871
19872/* Likewise for read-write sections. */
19873
19874static void
19875rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
19876{
19877 fprintf (asm_out_file, "\t.csect %s[RW],3\n",
19878 *(const char *const *) directive);
19879}
19880
19881/* A get_unnamed_section callback, used for switching to toc_section. */
19882
19883static void
19884rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19885{
19886 if (TARGET_MINIMAL_TOC)
19887 {
19888 /* toc_section is always selected at least once from
19889 rs6000_xcoff_file_start, so this is guaranteed to
19890 always be defined once and only once in each file. */
19891 if (!toc_initialized)
19892 {
19893 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
19894 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
19895 toc_initialized = 1;
19896 }
19897 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
19898 (TARGET_32BIT ? "" : ",3"));
19899 }
19900 else
19901 fputs ("\t.toc\n", asm_out_file);
19902}
19903
19904/* Implement TARGET_ASM_INIT_SECTIONS. */
19905
19906static void
19907rs6000_xcoff_asm_init_sections (void)
19908{
19909 read_only_data_section
19910 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19911 &xcoff_read_only_section_name);
19912
19913 private_data_section
19914 = get_unnamed_section (SECTION_WRITE,
19915 rs6000_xcoff_output_readwrite_section_asm_op,
19916 &xcoff_private_data_section_name);
19917
19918 read_only_private_data_section
19919 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19920 &xcoff_private_data_section_name);
19921
19922 toc_section
19923 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
19924
19925 readonly_data_section = read_only_data_section;
19926 exception_section = data_section;
19927}
19928
9b580a0b
RH
19929static int
19930rs6000_xcoff_reloc_rw_mask (void)
19931{
19932 return 3;
19933}
19934
b275d088 19935static void
c18a5b6c
MM
19936rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
19937 tree decl ATTRIBUTE_UNUSED)
7c262518 19938{
0e5dbd9b
DE
19939 int smclass;
19940 static const char * const suffix[3] = { "PR", "RO", "RW" };
19941
19942 if (flags & SECTION_CODE)
19943 smclass = 0;
19944 else if (flags & SECTION_WRITE)
19945 smclass = 2;
19946 else
19947 smclass = 1;
19948
5b5198f7 19949 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 19950 (flags & SECTION_CODE) ? "." : "",
5b5198f7 19951 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 19952}
ae46c4e0 19953
d6b5193b 19954static section *
f676971a 19955rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 19956 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 19957{
9b580a0b 19958 if (decl_readonly_section (decl, reloc))
ae46c4e0 19959 {
0e5dbd9b 19960 if (TREE_PUBLIC (decl))
d6b5193b 19961 return read_only_data_section;
ae46c4e0 19962 else
d6b5193b 19963 return read_only_private_data_section;
ae46c4e0
RH
19964 }
19965 else
19966 {
0e5dbd9b 19967 if (TREE_PUBLIC (decl))
d6b5193b 19968 return data_section;
ae46c4e0 19969 else
d6b5193b 19970 return private_data_section;
ae46c4e0
RH
19971 }
19972}
19973
19974static void
a2369ed3 19975rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
19976{
19977 const char *name;
ae46c4e0 19978
5b5198f7
DE
19979 /* Use select_section for private and uninitialized data. */
19980 if (!TREE_PUBLIC (decl)
19981 || DECL_COMMON (decl)
0e5dbd9b
DE
19982 || DECL_INITIAL (decl) == NULL_TREE
19983 || DECL_INITIAL (decl) == error_mark_node
19984 || (flag_zero_initialized_in_bss
19985 && initializer_zerop (DECL_INITIAL (decl))))
19986 return;
19987
19988 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
19989 name = (*targetm.strip_name_encoding) (name);
19990 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 19991}
b64a1b53 19992
fb49053f
RH
19993/* Select section for constant in constant pool.
19994
19995 On RS/6000, all constants are in the private read-only data area.
19996 However, if this is being placed in the TOC it must be output as a
19997 toc entry. */
19998
d6b5193b 19999static section *
f676971a 20000rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 20001 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
20002{
20003 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20004 return toc_section;
b64a1b53 20005 else
d6b5193b 20006 return read_only_private_data_section;
b64a1b53 20007}
772c5265
RH
20008
20009/* Remove any trailing [DS] or the like from the symbol name. */
20010
20011static const char *
a2369ed3 20012rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
20013{
20014 size_t len;
20015 if (*name == '*')
20016 name++;
20017 len = strlen (name);
20018 if (name[len - 1] == ']')
20019 return ggc_alloc_string (name, len - 4);
20020 else
20021 return name;
20022}
20023
5add3202
DE
20024/* Section attributes. AIX is always PIC. */
20025
20026static unsigned int
a2369ed3 20027rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 20028{
5b5198f7 20029 unsigned int align;
9b580a0b 20030 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
20031
20032 /* Align to at least UNIT size. */
20033 if (flags & SECTION_CODE)
20034 align = MIN_UNITS_PER_WORD;
20035 else
20036 /* Increase alignment of large objects if not already stricter. */
20037 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
20038 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
20039 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
20040
20041 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 20042}
a5fe455b 20043
1bc7c5b6
ZW
20044/* Output at beginning of assembler file.
20045
20046 Initialize the section names for the RS/6000 at this point.
20047
20048 Specify filename, including full path, to assembler.
20049
20050 We want to go into the TOC section so at least one .toc will be emitted.
20051 Also, in order to output proper .bs/.es pairs, we need at least one static
20052 [RW] section emitted.
20053
20054 Finally, declare mcount when profiling to make the assembler happy. */
20055
20056static void
863d938c 20057rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20058{
20059 rs6000_gen_section_name (&xcoff_bss_section_name,
20060 main_input_filename, ".bss_");
20061 rs6000_gen_section_name (&xcoff_private_data_section_name,
20062 main_input_filename, ".rw_");
20063 rs6000_gen_section_name (&xcoff_read_only_section_name,
20064 main_input_filename, ".ro_");
20065
20066 fputs ("\t.file\t", asm_out_file);
20067 output_quoted_string (asm_out_file, main_input_filename);
20068 fputc ('\n', asm_out_file);
1bc7c5b6 20069 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20070 switch_to_section (private_data_section);
20071 switch_to_section (text_section);
1bc7c5b6
ZW
20072 if (profile_flag)
20073 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20074 rs6000_file_start ();
20075}
20076
a5fe455b
ZW
20077/* Output at end of assembler file.
20078 On the RS/6000, referencing data should automatically pull in text. */
20079
20080static void
863d938c 20081rs6000_xcoff_file_end (void)
a5fe455b 20082{
d6b5193b 20083 switch_to_section (text_section);
a5fe455b 20084 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20085 switch_to_section (data_section);
a5fe455b
ZW
20086 fputs (TARGET_32BIT
20087 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20088 asm_out_file);
20089}
f1384257 20090#endif /* TARGET_XCOFF */
0e5dbd9b 20091
3c50106f
RH
20092/* Compute a (partial) cost for rtx X. Return true if the complete
20093 cost has been computed, and false if subexpressions should be
20094 scanned. In either case, *TOTAL contains the cost result. */
20095
20096static bool
1494c534 20097rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20098{
f0517163
RS
20099 enum machine_mode mode = GET_MODE (x);
20100
3c50106f
RH
20101 switch (code)
20102 {
30a555d9 20103 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20104 case CONST_INT:
066cd967
DE
20105 if (((outer_code == SET
20106 || outer_code == PLUS
20107 || outer_code == MINUS)
279bb624
DE
20108 && (satisfies_constraint_I (x)
20109 || satisfies_constraint_L (x)))
066cd967 20110 || (outer_code == AND
279bb624
DE
20111 && (satisfies_constraint_K (x)
20112 || (mode == SImode
20113 ? satisfies_constraint_L (x)
20114 : satisfies_constraint_J (x))
1990cd79
AM
20115 || mask_operand (x, mode)
20116 || (mode == DImode
20117 && mask64_operand (x, DImode))))
22e54023 20118 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20119 && (satisfies_constraint_K (x)
20120 || (mode == SImode
20121 ? satisfies_constraint_L (x)
20122 : satisfies_constraint_J (x))))
066cd967
DE
20123 || outer_code == ASHIFT
20124 || outer_code == ASHIFTRT
20125 || outer_code == LSHIFTRT
20126 || outer_code == ROTATE
20127 || outer_code == ROTATERT
d5861a7a 20128 || outer_code == ZERO_EXTRACT
066cd967 20129 || (outer_code == MULT
279bb624 20130 && satisfies_constraint_I (x))
22e54023
DE
20131 || ((outer_code == DIV || outer_code == UDIV
20132 || outer_code == MOD || outer_code == UMOD)
20133 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20134 || (outer_code == COMPARE
279bb624
DE
20135 && (satisfies_constraint_I (x)
20136 || satisfies_constraint_K (x)))
22e54023 20137 || (outer_code == EQ
279bb624
DE
20138 && (satisfies_constraint_I (x)
20139 || satisfies_constraint_K (x)
20140 || (mode == SImode
20141 ? satisfies_constraint_L (x)
20142 : satisfies_constraint_J (x))))
22e54023 20143 || (outer_code == GTU
279bb624 20144 && satisfies_constraint_I (x))
22e54023 20145 || (outer_code == LTU
279bb624 20146 && satisfies_constraint_P (x)))
066cd967
DE
20147 {
20148 *total = 0;
20149 return true;
20150 }
20151 else if ((outer_code == PLUS
4ae234b0 20152 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20153 || (outer_code == MINUS
4ae234b0 20154 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20155 || ((outer_code == SET
20156 || outer_code == IOR
20157 || outer_code == XOR)
20158 && (INTVAL (x)
20159 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20160 {
20161 *total = COSTS_N_INSNS (1);
20162 return true;
20163 }
20164 /* FALLTHRU */
20165
20166 case CONST_DOUBLE:
f6fe3a22 20167 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20168 {
f6fe3a22
DE
20169 if ((outer_code == IOR || outer_code == XOR)
20170 && CONST_DOUBLE_HIGH (x) == 0
20171 && (CONST_DOUBLE_LOW (x)
20172 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20173 {
20174 *total = 0;
20175 return true;
20176 }
20177 else if ((outer_code == AND && and64_2_operand (x, DImode))
20178 || ((outer_code == SET
20179 || outer_code == IOR
20180 || outer_code == XOR)
20181 && CONST_DOUBLE_HIGH (x) == 0))
20182 {
20183 *total = COSTS_N_INSNS (1);
20184 return true;
20185 }
066cd967
DE
20186 }
20187 /* FALLTHRU */
20188
3c50106f 20189 case CONST:
066cd967 20190 case HIGH:
3c50106f 20191 case SYMBOL_REF:
066cd967
DE
20192 case MEM:
20193 /* When optimizing for size, MEM should be slightly more expensive
20194 than generating address, e.g., (plus (reg) (const)).
c112cf2b 20195 L1 cache latency is about two instructions. */
066cd967 20196 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20197 return true;
20198
30a555d9
DE
20199 case LABEL_REF:
20200 *total = 0;
20201 return true;
20202
3c50106f 20203 case PLUS:
f0517163 20204 if (mode == DFmode)
066cd967
DE
20205 {
20206 if (GET_CODE (XEXP (x, 0)) == MULT)
20207 {
20208 /* FNMA accounted in outer NEG. */
20209 if (outer_code == NEG)
20210 *total = rs6000_cost->dmul - rs6000_cost->fp;
20211 else
20212 *total = rs6000_cost->dmul;
20213 }
20214 else
20215 *total = rs6000_cost->fp;
20216 }
f0517163 20217 else if (mode == SFmode)
066cd967
DE
20218 {
20219 /* FNMA accounted in outer NEG. */
20220 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20221 *total = 0;
20222 else
20223 *total = rs6000_cost->fp;
20224 }
f0517163 20225 else
066cd967
DE
20226 *total = COSTS_N_INSNS (1);
20227 return false;
3c50106f 20228
52190329 20229 case MINUS:
f0517163 20230 if (mode == DFmode)
066cd967 20231 {
762c919f
JM
20232 if (GET_CODE (XEXP (x, 0)) == MULT
20233 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20234 {
20235 /* FNMA accounted in outer NEG. */
20236 if (outer_code == NEG)
762c919f 20237 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20238 else
20239 *total = rs6000_cost->dmul;
20240 }
20241 else
20242 *total = rs6000_cost->fp;
20243 }
f0517163 20244 else if (mode == SFmode)
066cd967
DE
20245 {
20246 /* FNMA accounted in outer NEG. */
20247 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20248 *total = 0;
20249 else
20250 *total = rs6000_cost->fp;
20251 }
f0517163 20252 else
c4ad648e 20253 *total = COSTS_N_INSNS (1);
066cd967 20254 return false;
3c50106f
RH
20255
20256 case MULT:
c9dbf840 20257 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20258 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20259 {
8b897cfa
RS
20260 if (INTVAL (XEXP (x, 1)) >= -256
20261 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20262 *total = rs6000_cost->mulsi_const9;
8b897cfa 20263 else
06a67bdd 20264 *total = rs6000_cost->mulsi_const;
3c50106f 20265 }
066cd967
DE
20266 /* FMA accounted in outer PLUS/MINUS. */
20267 else if ((mode == DFmode || mode == SFmode)
20268 && (outer_code == PLUS || outer_code == MINUS))
20269 *total = 0;
f0517163 20270 else if (mode == DFmode)
06a67bdd 20271 *total = rs6000_cost->dmul;
f0517163 20272 else if (mode == SFmode)
06a67bdd 20273 *total = rs6000_cost->fp;
f0517163 20274 else if (mode == DImode)
06a67bdd 20275 *total = rs6000_cost->muldi;
8b897cfa 20276 else
06a67bdd 20277 *total = rs6000_cost->mulsi;
066cd967 20278 return false;
3c50106f
RH
20279
20280 case DIV:
20281 case MOD:
f0517163
RS
20282 if (FLOAT_MODE_P (mode))
20283 {
06a67bdd
RS
20284 *total = mode == DFmode ? rs6000_cost->ddiv
20285 : rs6000_cost->sdiv;
066cd967 20286 return false;
f0517163 20287 }
5efb1046 20288 /* FALLTHRU */
3c50106f
RH
20289
20290 case UDIV:
20291 case UMOD:
627b6fe2
DJ
20292 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20293 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20294 {
20295 if (code == DIV || code == MOD)
20296 /* Shift, addze */
20297 *total = COSTS_N_INSNS (2);
20298 else
20299 /* Shift */
20300 *total = COSTS_N_INSNS (1);
20301 }
c4ad648e 20302 else
627b6fe2
DJ
20303 {
20304 if (GET_MODE (XEXP (x, 1)) == DImode)
20305 *total = rs6000_cost->divdi;
20306 else
20307 *total = rs6000_cost->divsi;
20308 }
20309 /* Add in shift and subtract for MOD. */
20310 if (code == MOD || code == UMOD)
20311 *total += COSTS_N_INSNS (2);
066cd967 20312 return false;
3c50106f 20313
32f56aad 20314 case CTZ:
3c50106f
RH
20315 case FFS:
20316 *total = COSTS_N_INSNS (4);
066cd967 20317 return false;
3c50106f 20318
32f56aad
DE
20319 case POPCOUNT:
20320 *total = COSTS_N_INSNS (6);
20321 return false;
20322
06a67bdd 20323 case NOT:
066cd967
DE
20324 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20325 {
20326 *total = 0;
20327 return false;
20328 }
20329 /* FALLTHRU */
20330
20331 case AND:
32f56aad 20332 case CLZ:
066cd967
DE
20333 case IOR:
20334 case XOR:
d5861a7a
DE
20335 case ZERO_EXTRACT:
20336 *total = COSTS_N_INSNS (1);
20337 return false;
20338
066cd967
DE
20339 case ASHIFT:
20340 case ASHIFTRT:
20341 case LSHIFTRT:
20342 case ROTATE:
20343 case ROTATERT:
d5861a7a 20344 /* Handle mul_highpart. */
066cd967
DE
20345 if (outer_code == TRUNCATE
20346 && GET_CODE (XEXP (x, 0)) == MULT)
20347 {
20348 if (mode == DImode)
20349 *total = rs6000_cost->muldi;
20350 else
20351 *total = rs6000_cost->mulsi;
20352 return true;
20353 }
d5861a7a
DE
20354 else if (outer_code == AND)
20355 *total = 0;
20356 else
20357 *total = COSTS_N_INSNS (1);
20358 return false;
20359
20360 case SIGN_EXTEND:
20361 case ZERO_EXTEND:
20362 if (GET_CODE (XEXP (x, 0)) == MEM)
20363 *total = 0;
20364 else
20365 *total = COSTS_N_INSNS (1);
066cd967 20366 return false;
06a67bdd 20367
066cd967
DE
20368 case COMPARE:
20369 case NEG:
20370 case ABS:
20371 if (!FLOAT_MODE_P (mode))
20372 {
20373 *total = COSTS_N_INSNS (1);
20374 return false;
20375 }
20376 /* FALLTHRU */
20377
20378 case FLOAT:
20379 case UNSIGNED_FLOAT:
20380 case FIX:
20381 case UNSIGNED_FIX:
06a67bdd
RS
20382 case FLOAT_TRUNCATE:
20383 *total = rs6000_cost->fp;
066cd967 20384 return false;
06a67bdd 20385
a2af5043
DJ
20386 case FLOAT_EXTEND:
20387 if (mode == DFmode)
20388 *total = 0;
20389 else
20390 *total = rs6000_cost->fp;
20391 return false;
20392
06a67bdd
RS
20393 case UNSPEC:
20394 switch (XINT (x, 1))
20395 {
20396 case UNSPEC_FRSP:
20397 *total = rs6000_cost->fp;
20398 return true;
20399
20400 default:
20401 break;
20402 }
20403 break;
20404
20405 case CALL:
20406 case IF_THEN_ELSE:
20407 if (optimize_size)
20408 {
20409 *total = COSTS_N_INSNS (1);
20410 return true;
20411 }
066cd967
DE
20412 else if (FLOAT_MODE_P (mode)
20413 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20414 {
20415 *total = rs6000_cost->fp;
20416 return false;
20417 }
06a67bdd
RS
20418 break;
20419
c0600ecd
DE
20420 case EQ:
20421 case GTU:
20422 case LTU:
22e54023
DE
20423 /* Carry bit requires mode == Pmode.
20424 NEG or PLUS already counted so only add one. */
20425 if (mode == Pmode
20426 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20427 {
22e54023
DE
20428 *total = COSTS_N_INSNS (1);
20429 return true;
20430 }
20431 if (outer_code == SET)
20432 {
20433 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20434 {
22e54023 20435 *total = COSTS_N_INSNS (2);
c0600ecd 20436 return true;
c0600ecd 20437 }
22e54023
DE
20438 else if (mode == Pmode)
20439 {
20440 *total = COSTS_N_INSNS (3);
20441 return false;
20442 }
20443 }
20444 /* FALLTHRU */
20445
20446 case GT:
20447 case LT:
20448 case UNORDERED:
20449 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
20450 {
20451 *total = COSTS_N_INSNS (2);
20452 return true;
c0600ecd 20453 }
22e54023
DE
20454 /* CC COMPARE. */
20455 if (outer_code == COMPARE)
20456 {
20457 *total = 0;
20458 return true;
20459 }
20460 break;
c0600ecd 20461
3c50106f 20462 default:
06a67bdd 20463 break;
3c50106f 20464 }
06a67bdd
RS
20465
20466 return false;
3c50106f
RH
20467}
20468
34bb030a
DE
20469/* A C expression returning the cost of moving data from a register of class
20470 CLASS1 to one of CLASS2. */
20471
20472int
f676971a 20473rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 20474 enum reg_class from, enum reg_class to)
34bb030a
DE
20475{
20476 /* Moves from/to GENERAL_REGS. */
20477 if (reg_classes_intersect_p (to, GENERAL_REGS)
20478 || reg_classes_intersect_p (from, GENERAL_REGS))
20479 {
20480 if (! reg_classes_intersect_p (to, GENERAL_REGS))
20481 from = to;
20482
20483 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
20484 return (rs6000_memory_move_cost (mode, from, 0)
20485 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
20486
c4ad648e
AM
20487 /* It's more expensive to move CR_REGS than CR0_REGS because of the
20488 shift. */
34bb030a
DE
20489 else if (from == CR_REGS)
20490 return 4;
20491
20492 else
c4ad648e 20493 /* A move will cost one instruction per GPR moved. */
c8b622ff 20494 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
20495 }
20496
c4ad648e 20497 /* Moving between two similar registers is just one instruction. */
34bb030a 20498 else if (reg_classes_intersect_p (to, from))
7393f7f8 20499 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 20500
c4ad648e 20501 /* Everything else has to go through GENERAL_REGS. */
34bb030a 20502 else
f676971a 20503 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
20504 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
20505}
20506
20507/* A C expressions returning the cost of moving data of MODE from a register to
20508 or from memory. */
20509
20510int
f676971a 20511rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 20512 int in ATTRIBUTE_UNUSED)
34bb030a
DE
20513{
20514 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 20515 return 4 * hard_regno_nregs[0][mode];
34bb030a 20516 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 20517 return 4 * hard_regno_nregs[32][mode];
34bb030a 20518 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 20519 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
20520 else
20521 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
20522}
20523
ef765ea9
DE
20524/* Newton-Raphson approximation of single-precision floating point divide n/d.
20525 Assumes no trapping math and finite arguments. */
20526
20527void
20528rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
20529{
20530 rtx x0, e0, e1, y1, u0, v0, one;
20531
20532 x0 = gen_reg_rtx (SFmode);
20533 e0 = gen_reg_rtx (SFmode);
20534 e1 = gen_reg_rtx (SFmode);
20535 y1 = gen_reg_rtx (SFmode);
20536 u0 = gen_reg_rtx (SFmode);
20537 v0 = gen_reg_rtx (SFmode);
20538 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
20539
20540 /* x0 = 1./d estimate */
20541 emit_insn (gen_rtx_SET (VOIDmode, x0,
20542 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
20543 UNSPEC_FRES)));
20544 /* e0 = 1. - d * x0 */
20545 emit_insn (gen_rtx_SET (VOIDmode, e0,
20546 gen_rtx_MINUS (SFmode, one,
20547 gen_rtx_MULT (SFmode, d, x0))));
20548 /* e1 = e0 + e0 * e0 */
20549 emit_insn (gen_rtx_SET (VOIDmode, e1,
20550 gen_rtx_PLUS (SFmode,
20551 gen_rtx_MULT (SFmode, e0, e0), e0)));
20552 /* y1 = x0 + e1 * x0 */
20553 emit_insn (gen_rtx_SET (VOIDmode, y1,
20554 gen_rtx_PLUS (SFmode,
20555 gen_rtx_MULT (SFmode, e1, x0), x0)));
20556 /* u0 = n * y1 */
20557 emit_insn (gen_rtx_SET (VOIDmode, u0,
20558 gen_rtx_MULT (SFmode, n, y1)));
20559 /* v0 = n - d * u0 */
20560 emit_insn (gen_rtx_SET (VOIDmode, v0,
20561 gen_rtx_MINUS (SFmode, n,
20562 gen_rtx_MULT (SFmode, d, u0))));
20563 /* res = u0 + v0 * y1 */
20564 emit_insn (gen_rtx_SET (VOIDmode, res,
20565 gen_rtx_PLUS (SFmode,
20566 gen_rtx_MULT (SFmode, v0, y1), u0)));
20567}
20568
20569/* Newton-Raphson approximation of double-precision floating point divide n/d.
20570 Assumes no trapping math and finite arguments. */
20571
20572void
20573rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
20574{
20575 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
20576
20577 x0 = gen_reg_rtx (DFmode);
20578 e0 = gen_reg_rtx (DFmode);
20579 e1 = gen_reg_rtx (DFmode);
20580 e2 = gen_reg_rtx (DFmode);
20581 y1 = gen_reg_rtx (DFmode);
20582 y2 = gen_reg_rtx (DFmode);
20583 y3 = gen_reg_rtx (DFmode);
20584 u0 = gen_reg_rtx (DFmode);
20585 v0 = gen_reg_rtx (DFmode);
20586 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
20587
20588 /* x0 = 1./d estimate */
20589 emit_insn (gen_rtx_SET (VOIDmode, x0,
20590 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
20591 UNSPEC_FRES)));
20592 /* e0 = 1. - d * x0 */
20593 emit_insn (gen_rtx_SET (VOIDmode, e0,
20594 gen_rtx_MINUS (DFmode, one,
20595 gen_rtx_MULT (SFmode, d, x0))));
20596 /* y1 = x0 + e0 * x0 */
20597 emit_insn (gen_rtx_SET (VOIDmode, y1,
20598 gen_rtx_PLUS (DFmode,
20599 gen_rtx_MULT (DFmode, e0, x0), x0)));
20600 /* e1 = e0 * e0 */
20601 emit_insn (gen_rtx_SET (VOIDmode, e1,
20602 gen_rtx_MULT (DFmode, e0, e0)));
20603 /* y2 = y1 + e1 * y1 */
20604 emit_insn (gen_rtx_SET (VOIDmode, y2,
20605 gen_rtx_PLUS (DFmode,
20606 gen_rtx_MULT (DFmode, e1, y1), y1)));
20607 /* e2 = e1 * e1 */
20608 emit_insn (gen_rtx_SET (VOIDmode, e2,
20609 gen_rtx_MULT (DFmode, e1, e1)));
20610 /* y3 = y2 + e2 * y2 */
20611 emit_insn (gen_rtx_SET (VOIDmode, y3,
20612 gen_rtx_PLUS (DFmode,
20613 gen_rtx_MULT (DFmode, e2, y2), y2)));
20614 /* u0 = n * y3 */
20615 emit_insn (gen_rtx_SET (VOIDmode, u0,
20616 gen_rtx_MULT (DFmode, n, y3)));
20617 /* v0 = n - d * u0 */
20618 emit_insn (gen_rtx_SET (VOIDmode, v0,
20619 gen_rtx_MINUS (DFmode, n,
20620 gen_rtx_MULT (DFmode, d, u0))));
20621 /* res = u0 + v0 * y3 */
20622 emit_insn (gen_rtx_SET (VOIDmode, res,
20623 gen_rtx_PLUS (DFmode,
20624 gen_rtx_MULT (DFmode, v0, y3), u0)));
20625}
20626
565ef4ba
RS
20627
20628/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
20629 target, and SRC is the argument operand. */
20630
20631void
20632rs6000_emit_popcount (rtx dst, rtx src)
20633{
20634 enum machine_mode mode = GET_MODE (dst);
20635 rtx tmp1, tmp2;
20636
20637 tmp1 = gen_reg_rtx (mode);
20638
20639 if (mode == SImode)
20640 {
20641 emit_insn (gen_popcntbsi2 (tmp1, src));
20642 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
20643 NULL_RTX, 0);
20644 tmp2 = force_reg (SImode, tmp2);
20645 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
20646 }
20647 else
20648 {
20649 emit_insn (gen_popcntbdi2 (tmp1, src));
20650 tmp2 = expand_mult (DImode, tmp1,
20651 GEN_INT ((HOST_WIDE_INT)
20652 0x01010101 << 32 | 0x01010101),
20653 NULL_RTX, 0);
20654 tmp2 = force_reg (DImode, tmp2);
20655 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
20656 }
20657}
20658
20659
20660/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
20661 target, and SRC is the argument operand. */
20662
20663void
20664rs6000_emit_parity (rtx dst, rtx src)
20665{
20666 enum machine_mode mode = GET_MODE (dst);
20667 rtx tmp;
20668
20669 tmp = gen_reg_rtx (mode);
20670 if (mode == SImode)
20671 {
20672 /* Is mult+shift >= shift+xor+shift+xor? */
20673 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
20674 {
20675 rtx tmp1, tmp2, tmp3, tmp4;
20676
20677 tmp1 = gen_reg_rtx (SImode);
20678 emit_insn (gen_popcntbsi2 (tmp1, src));
20679
20680 tmp2 = gen_reg_rtx (SImode);
20681 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
20682 tmp3 = gen_reg_rtx (SImode);
20683 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
20684
20685 tmp4 = gen_reg_rtx (SImode);
20686 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
20687 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
20688 }
20689 else
20690 rs6000_emit_popcount (tmp, src);
20691 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
20692 }
20693 else
20694 {
20695 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
20696 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
20697 {
20698 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
20699
20700 tmp1 = gen_reg_rtx (DImode);
20701 emit_insn (gen_popcntbdi2 (tmp1, src));
20702
20703 tmp2 = gen_reg_rtx (DImode);
20704 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
20705 tmp3 = gen_reg_rtx (DImode);
20706 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
20707
20708 tmp4 = gen_reg_rtx (DImode);
20709 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
20710 tmp5 = gen_reg_rtx (DImode);
20711 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
20712
20713 tmp6 = gen_reg_rtx (DImode);
20714 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
20715 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
20716 }
20717 else
20718 rs6000_emit_popcount (tmp, src);
20719 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
20720 }
20721}
20722
ded9bf77
AH
20723/* Return an RTX representing where to find the function value of a
20724 function returning MODE. */
20725static rtx
20726rs6000_complex_function_value (enum machine_mode mode)
20727{
20728 unsigned int regno;
20729 rtx r1, r2;
20730 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 20731 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 20732
18f63bfa
AH
20733 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
20734 regno = FP_ARG_RETURN;
354ed18f
AH
20735 else
20736 {
18f63bfa 20737 regno = GP_ARG_RETURN;
ded9bf77 20738
18f63bfa
AH
20739 /* 32-bit is OK since it'll go in r3/r4. */
20740 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
20741 return gen_rtx_REG (mode, regno);
20742 }
20743
18f63bfa
AH
20744 if (inner_bytes >= 8)
20745 return gen_rtx_REG (mode, regno);
20746
ded9bf77
AH
20747 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
20748 const0_rtx);
20749 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 20750 GEN_INT (inner_bytes));
ded9bf77
AH
20751 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
20752}
20753
a6ebc39a
AH
20754/* Define how to find the value returned by a function.
20755 VALTYPE is the data type of the value (as a tree).
20756 If the precise function being called is known, FUNC is its FUNCTION_DECL;
20757 otherwise, FUNC is 0.
20758
20759 On the SPE, both FPs and vectors are returned in r3.
20760
20761 On RS/6000 an integer value is in r3 and a floating-point value is in
20762 fp1, unless -msoft-float. */
20763
20764rtx
586de218 20765rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
20766{
20767 enum machine_mode mode;
2a8fa26c 20768 unsigned int regno;
a6ebc39a 20769
594a51fe
SS
20770 /* Special handling for structs in darwin64. */
20771 if (rs6000_darwin64_abi
20772 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
20773 && TREE_CODE (valtype) == RECORD_TYPE
20774 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
20775 {
20776 CUMULATIVE_ARGS valcum;
20777 rtx valret;
20778
0b5383eb 20779 valcum.words = 0;
594a51fe
SS
20780 valcum.fregno = FP_ARG_MIN_REG;
20781 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
20782 /* Do a trial code generation as if this were going to be passed as
20783 an argument; if any part goes in memory, we return NULL. */
20784 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
20785 if (valret)
20786 return valret;
20787 /* Otherwise fall through to standard ABI rules. */
20788 }
20789
0e67400a
FJ
20790 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
20791 {
20792 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
20793 return gen_rtx_PARALLEL (DImode,
20794 gen_rtvec (2,
20795 gen_rtx_EXPR_LIST (VOIDmode,
20796 gen_rtx_REG (SImode, GP_ARG_RETURN),
20797 const0_rtx),
20798 gen_rtx_EXPR_LIST (VOIDmode,
20799 gen_rtx_REG (SImode,
20800 GP_ARG_RETURN + 1),
20801 GEN_INT (4))));
20802 }
0f086e42
FJ
20803 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
20804 {
20805 return gen_rtx_PARALLEL (DCmode,
20806 gen_rtvec (4,
20807 gen_rtx_EXPR_LIST (VOIDmode,
20808 gen_rtx_REG (SImode, GP_ARG_RETURN),
20809 const0_rtx),
20810 gen_rtx_EXPR_LIST (VOIDmode,
20811 gen_rtx_REG (SImode,
20812 GP_ARG_RETURN + 1),
20813 GEN_INT (4)),
20814 gen_rtx_EXPR_LIST (VOIDmode,
20815 gen_rtx_REG (SImode,
20816 GP_ARG_RETURN + 2),
20817 GEN_INT (8)),
20818 gen_rtx_EXPR_LIST (VOIDmode,
20819 gen_rtx_REG (SImode,
20820 GP_ARG_RETURN + 3),
20821 GEN_INT (12))));
20822 }
602ea4d3 20823
7348aa7f
FXC
20824 mode = TYPE_MODE (valtype);
20825 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 20826 || POINTER_TYPE_P (valtype))
b78d48dd 20827 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 20828
00b79d54 20829 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20830 {
20831 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20832 {
20833 switch (mode)
20834 {
20835 default:
20836 gcc_unreachable ();
20837 case SDmode:
20838 regno = GP_ARG_RETURN;
20839 break;
20840 case DDmode:
20841 regno = FP_ARG_RETURN;
20842 break;
20843 case TDmode:
20844 /* Use f2:f3 specified by the ABI. */
20845 regno = FP_ARG_RETURN + 1;
20846 break;
20847 }
20848 }
20849 else
20850 regno = GP_ARG_RETURN;
20851 }
00b79d54 20852 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 20853 regno = FP_ARG_RETURN;
ded9bf77 20854 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 20855 && targetm.calls.split_complex_arg)
ded9bf77 20856 return rs6000_complex_function_value (mode);
44688022 20857 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 20858 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 20859 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 20860 regno = ALTIVEC_ARG_RETURN;
18f63bfa 20861 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20862 && (mode == DFmode || mode == DCmode
20863 || mode == TFmode || mode == TCmode))
18f63bfa 20864 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
20865 else
20866 regno = GP_ARG_RETURN;
20867
20868 return gen_rtx_REG (mode, regno);
20869}
20870
ded9bf77
AH
20871/* Define how to find the value returned by a library function
20872 assuming the value has mode MODE. */
20873rtx
20874rs6000_libcall_value (enum machine_mode mode)
20875{
20876 unsigned int regno;
20877
2e6c9641
FJ
20878 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
20879 {
20880 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
20881 return gen_rtx_PARALLEL (DImode,
20882 gen_rtvec (2,
20883 gen_rtx_EXPR_LIST (VOIDmode,
20884 gen_rtx_REG (SImode, GP_ARG_RETURN),
20885 const0_rtx),
20886 gen_rtx_EXPR_LIST (VOIDmode,
20887 gen_rtx_REG (SImode,
20888 GP_ARG_RETURN + 1),
20889 GEN_INT (4))));
20890 }
20891
00b79d54 20892 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20893 {
20894 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20895 {
20896 switch (mode)
20897 {
20898 default:
20899 gcc_unreachable ();
20900 case SDmode:
20901 regno = GP_ARG_RETURN;
20902 break;
20903 case DDmode:
20904 regno = FP_ARG_RETURN;
20905 break;
20906 case TDmode:
20907 /* Use f2:f3 specified by the ABI. */
20908 regno = FP_ARG_RETURN + 1;
20909 break;
20910 }
20911 }
20912 else
20913 regno = GP_ARG_RETURN;
20914 }
00b79d54 20915 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
20916 && TARGET_HARD_FLOAT && TARGET_FPRS)
20917 regno = FP_ARG_RETURN;
44688022
AM
20918 else if (ALTIVEC_VECTOR_MODE (mode)
20919 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 20920 regno = ALTIVEC_ARG_RETURN;
42ba5130 20921 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 20922 return rs6000_complex_function_value (mode);
18f63bfa 20923 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20924 && (mode == DFmode || mode == DCmode
20925 || mode == TFmode || mode == TCmode))
18f63bfa 20926 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
20927 else
20928 regno = GP_ARG_RETURN;
20929
20930 return gen_rtx_REG (mode, regno);
20931}
20932
d1d0c603
JJ
20933/* Define the offset between two registers, FROM to be eliminated and its
20934 replacement TO, at the start of a routine. */
20935HOST_WIDE_INT
20936rs6000_initial_elimination_offset (int from, int to)
20937{
20938 rs6000_stack_t *info = rs6000_stack_info ();
20939 HOST_WIDE_INT offset;
20940
7d5175e1 20941 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 20942 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
20943 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20944 {
20945 offset = info->push_p ? 0 : -info->total_size;
20946 if (FRAME_GROWS_DOWNWARD)
5b667039 20947 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
20948 }
20949 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
20950 offset = FRAME_GROWS_DOWNWARD
5b667039 20951 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
20952 : 0;
20953 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
20954 offset = info->total_size;
20955 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20956 offset = info->push_p ? info->total_size : 0;
20957 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
20958 offset = 0;
20959 else
37409796 20960 gcc_unreachable ();
d1d0c603
JJ
20961
20962 return offset;
20963}
20964
58646b77 20965/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 20966
c8e4f0e9 20967static bool
58646b77 20968rs6000_is_opaque_type (tree type)
62e1dfcf 20969{
58646b77 20970 return (type == opaque_V2SI_type_node
2abe3e28 20971 || type == opaque_V2SF_type_node
58646b77
PB
20972 || type == opaque_p_V2SI_type_node
20973 || type == opaque_V4SI_type_node);
62e1dfcf
NC
20974}
20975
96714395 20976static rtx
a2369ed3 20977rs6000_dwarf_register_span (rtx reg)
96714395
AH
20978{
20979 unsigned regno;
20980
4d4cbc0e
AH
20981 if (TARGET_SPE
20982 && (SPE_VECTOR_MODE (GET_MODE (reg))
20983 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
20984 ;
20985 else
96714395
AH
20986 return NULL_RTX;
20987
20988 regno = REGNO (reg);
20989
20990 /* The duality of the SPE register size wreaks all kinds of havoc.
20991 This is a way of distinguishing r0 in 32-bits from r0 in
20992 64-bits. */
20993 return
20994 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
20995 BYTES_BIG_ENDIAN
20996 ? gen_rtvec (2,
20997 gen_rtx_REG (SImode, regno + 1200),
20998 gen_rtx_REG (SImode, regno))
20999 : gen_rtvec (2,
21000 gen_rtx_REG (SImode, regno),
21001 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
21002}
21003
37ea0b7e
JM
21004/* Fill in sizes for SPE register high parts in table used by unwinder. */
21005
21006static void
21007rs6000_init_dwarf_reg_sizes_extra (tree address)
21008{
21009 if (TARGET_SPE)
21010 {
21011 int i;
21012 enum machine_mode mode = TYPE_MODE (char_type_node);
21013 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
21014 rtx mem = gen_rtx_MEM (BLKmode, addr);
21015 rtx value = gen_int_mode (4, mode);
21016
21017 for (i = 1201; i < 1232; i++)
21018 {
21019 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
21020 HOST_WIDE_INT offset
21021 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
21022
21023 emit_move_insn (adjust_address (mem, mode, offset), value);
21024 }
21025 }
21026}
21027
93c9d1ba
AM
21028/* Map internal gcc register numbers to DWARF2 register numbers. */
21029
21030unsigned int
21031rs6000_dbx_register_number (unsigned int regno)
21032{
21033 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
21034 return regno;
21035 if (regno == MQ_REGNO)
21036 return 100;
1de43f85 21037 if (regno == LR_REGNO)
93c9d1ba 21038 return 108;
1de43f85 21039 if (regno == CTR_REGNO)
93c9d1ba
AM
21040 return 109;
21041 if (CR_REGNO_P (regno))
21042 return regno - CR0_REGNO + 86;
21043 if (regno == XER_REGNO)
21044 return 101;
21045 if (ALTIVEC_REGNO_P (regno))
21046 return regno - FIRST_ALTIVEC_REGNO + 1124;
21047 if (regno == VRSAVE_REGNO)
21048 return 356;
21049 if (regno == VSCR_REGNO)
21050 return 67;
21051 if (regno == SPE_ACC_REGNO)
21052 return 99;
21053 if (regno == SPEFSCR_REGNO)
21054 return 612;
21055 /* SPE high reg number. We get these values of regno from
21056 rs6000_dwarf_register_span. */
37409796
NS
21057 gcc_assert (regno >= 1200 && regno < 1232);
21058 return regno;
93c9d1ba
AM
21059}
21060
93f90be6 21061/* target hook eh_return_filter_mode */
f676971a 21062static enum machine_mode
93f90be6
FJ
21063rs6000_eh_return_filter_mode (void)
21064{
21065 return TARGET_32BIT ? SImode : word_mode;
21066}
21067
00b79d54
BE
21068/* Target hook for scalar_mode_supported_p. */
21069static bool
21070rs6000_scalar_mode_supported_p (enum machine_mode mode)
21071{
21072 if (DECIMAL_FLOAT_MODE_P (mode))
21073 return true;
21074 else
21075 return default_scalar_mode_supported_p (mode);
21076}
21077
f676971a
EC
21078/* Target hook for vector_mode_supported_p. */
21079static bool
21080rs6000_vector_mode_supported_p (enum machine_mode mode)
21081{
21082
21083 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21084 return true;
21085
21086 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21087 return true;
21088
21089 else
21090 return false;
21091}
21092
bb8df8a6
EC
21093/* Target hook for invalid_arg_for_unprototyped_fn. */
21094static const char *
4d3e6fae
FJ
21095invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
21096{
21097 return (!rs6000_darwin64_abi
21098 && typelist == 0
21099 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21100 && (funcdecl == NULL_TREE
21101 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21102 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21103 ? N_("AltiVec argument passed to unprototyped function")
21104 : NULL;
21105}
21106
3aebbe5f
JJ
21107/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
21108 setup by using __stack_chk_fail_local hidden function instead of
21109 calling __stack_chk_fail directly. Otherwise it is better to call
21110 __stack_chk_fail directly. */
21111
21112static tree
21113rs6000_stack_protect_fail (void)
21114{
21115 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21116 ? default_hidden_stack_protect_fail ()
21117 : default_external_stack_protect_fail ();
21118}
21119
17211ab5 21120#include "gt-rs6000.h"