/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs  */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
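/* Editor's note (not part of the original source): a sketch of how this
   structure is typically consumed.  rs6000_stack_info (declared later in
   this file) computes and returns a filled-in rs6000_stack_t describing
   the current function's frame, roughly along the lines of

     rs6000_stack_t *info = rs6000_stack_info ();
     if (info->push_p)
       ...allocate info->total_size bytes of stack...

   The real call sites live in the prologue/epilogue code further down;
   the snippet above is only illustrative.  */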

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero to use isel instructions.  */
int rs6000_isel;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue. This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool spe_abi;                 /* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;             /* True if -mabi=altivec/no-altivec used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;                  /* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;      /* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;      /* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */
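/* Editor's note (not part of the original source): COSTS_N_INSNS is GCC's
   generic macro for expressing a cost of N instructions, so an entry such
   as

     COSTS_N_INSNS (19),     /* divsi */

   in a table below says an SImode divide is roughly as expensive as 19
   add instructions on that processor.  The tables themselves are kept
   exactly as in the original source.  */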

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,                  /* cache line size */
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,                  /* cache line size */
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  6,                    /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on E300C2 and E300C3 cores.  */
static const
struct processor_costs ppce300c2c3_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  16,                   /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on PPCE500MC processors.  */
static const
struct processor_costs ppce500mc_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (14),   /* divsi */
  COSTS_N_INSNS (14),   /* divdi */
  COSTS_N_INSNS (8),    /* fp */
  COSTS_N_INSNS (10),   /* dmul */
  COSTS_N_INSNS (36),   /* sdiv */
  COSTS_N_INSNS (66),   /* ddiv */
  64,                   /* cache line size */
  32,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  8,                    /* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  2048,                 /* l2 cache */
  16,                   /* prefetch streams */
};

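/* Editor's note (not part of the original source): rs6000_cost, declared
   above, is presumably pointed at one of these tables once the -mcpu= /
   -mtune= selection has been processed in rs6000_override_options later
   in this file, along the lines of

     case PROCESSOR_POWER6:
       rs6000_cost = &power6_cost;
       break;

   This is a hedged sketch of the usual pattern, not a quotation of the
   actual switch statement.  */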
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
	(enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx, rtx);
static bool adjacent_mem_locations (rtx, rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx, enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static void * rs6000_alloc_sched_context (void);
static void rs6000_init_sched_context (void *, bool);
static void rs6000_set_sched_context (void *);
static void rs6000_free_sched_context (void *);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);
static tree rs6000_builtin_vec_perm (tree, tree *);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
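/* Editor's note (not part of the original source): toc_hash_table maps a
   (key, key_mode) pair to the label number of an already-emitted TOC
   entry, via the toc_hash_function and toc_hash_eq routines declared
   above, so that identical constants can share a single TOC slot rather
   than each getting their own entry.  That is the apparent intent; the
   TOC output code later in the file is the authoritative reference.  */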

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr", "ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
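/* Editor's note (not part of the original source): for illustration,
   with the mask layout defined just above,

     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO)     == 0x80000000
     ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 1) == 0x40000000

   i.e. %v0 occupies the most significant bit of the 32-bit VRSAVE mask
   and each successive AltiVec register takes the next lower bit.  */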

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
#undef TARGET_SCHED_INIT_SCHED_CONTEXT
#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
#undef TARGET_SCHED_SET_SCHED_CONTEXT
#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
#undef TARGET_SCHED_FREE_SCHED_CONTEXT
#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
        && (mode != TDmode || (regno % 2) == 0)
        && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	   && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}
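/* Editor's note (not part of the original source): the table filled in
   above is what keeps later queries cheap; a HARD_REGNO_MODE_OK-style
   check reduces to a single array lookup, roughly

     rs6000_hard_regno_mode_ok_p[(int) mode][regno]

   (the actual macro lives in rs6000.h, so the exact spelling there may
   differ).  */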
1357
e4cad568
GK
1358#if TARGET_MACHO
1359/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1360
1361static void
1362darwin_rs6000_override_options (void)
1363{
1364 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1365 off. */
1366 rs6000_altivec_abi = 1;
1367 TARGET_ALTIVEC_VRSAVE = 1;
1368 if (DEFAULT_ABI == ABI_DARWIN)
1369 {
1370 if (MACHO_DYNAMIC_NO_PIC_P)
1371 {
1372 if (flag_pic)
1373 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1374 flag_pic = 0;
1375 }
1376 else if (flag_pic == 1)
1377 {
1378 flag_pic = 2;
1379 }
1380 }
1381 if (TARGET_64BIT && ! TARGET_POWERPC64)
1382 {
1383 target_flags |= MASK_POWERPC64;
1384 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1385 }
1386 if (flag_mkernel)
1387 {
1388 rs6000_default_long_calls = 1;
1389 target_flags |= MASK_SOFT_FLOAT;
1390 }
1391
1392 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1393 Altivec. */
1394 if (!flag_mkernel && !flag_apple_kext
1395 && TARGET_64BIT
1396 && ! (target_flags_explicit & MASK_ALTIVEC))
1397 target_flags |= MASK_ALTIVEC;
1398
 1399   /* Unless the user (not the configurer) has explicitly overridden
 1400      it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to the
 1401      G4 unless targeting the kernel.  */
1402 if (!flag_mkernel
1403 && !flag_apple_kext
1404 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1405 && ! (target_flags_explicit & MASK_ALTIVEC)
1406 && ! rs6000_select[1].string)
1407 {
1408 target_flags |= MASK_ALTIVEC;
1409 }
1410}
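/* Behavior sketch for the Darwin overrides above (editor's note, not part
   of the port): "-m64" without an explicit -maltivec/-mno-altivec turns on
   MASK_ALTIVEC, since the 64-bit Darwin ABI includes AltiVec; "-m64" when
   PowerPC64 is not already enabled forces MASK_POWERPC64 on with a warning;
   and "-mkernel" selects long calls and soft float.  */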
1411#endif
1412
c1e55850
GK
1413/* If not otherwise specified by a target, make 'long double' equivalent to
1414 'double'. */
1415
1416#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1417#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1418#endif
1419
5248c961
RK
1420/* Override command line options. Mostly we process the processor
1421 type and sometimes adjust other TARGET_ options. */
1422
1423void
d779d0dc 1424rs6000_override_options (const char *default_cpu)
5248c961 1425{
c4d38ccb 1426 size_t i, j;
8e3f41e7 1427 struct rs6000_cpu_select *ptr;
66188a7e 1428 int set_masks;
5248c961 1429
66188a7e 1430 /* Simplifications for entries below. */
85638c0d 1431
66188a7e
GK
1432 enum {
1433 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1434 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1435 };
85638c0d 1436
66188a7e
GK
1437 /* This table occasionally claims that a processor does not support
 1438      a particular feature even though it does, because the feature is slower
1439 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1440 complete description of the processor's support.
66188a7e
GK
1441
1442 Please keep this list in order, and don't forget to update the
1443 documentation in invoke.texi when adding a new processor or
1444 flag. */
5248c961
RK
1445 static struct ptt
1446 {
8b60264b
KG
1447 const char *const name; /* Canonical processor name. */
1448 const enum processor_type processor; /* Processor type enum value. */
1449 const int target_enable; /* Target flags to enable. */
8b60264b 1450 } const processor_target_table[]
66188a7e 1451 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1452 {"403", PROCESSOR_PPC403,
66188a7e 1453 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1454 {"405", PROCESSOR_PPC405,
716019c0
JM
1455 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1456 {"405fp", PROCESSOR_PPC405,
1457 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1458 {"440", PROCESSOR_PPC440,
716019c0
JM
1459 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1460 {"440fp", PROCESSOR_PPC440,
1461 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1462 {"464", PROCESSOR_PPC440,
1463 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1464 {"464fp", PROCESSOR_PPC440,
1465 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1466 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1467 {"601", PROCESSOR_PPC601,
66188a7e
GK
1468 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1469 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1471 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1472 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1473 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1474 {"620", PROCESSOR_PPC620,
1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1476 {"630", PROCESSOR_PPC630,
1477 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1478 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1479 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1480 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1481 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1482 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1483 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1484 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1485 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1486 /* 8548 has a dummy entry for now. */
a45bce6e 1487 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1488 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1489 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1490 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1491 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1492 {"970", PROCESSOR_POWER4,
66188a7e 1493 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1494 {"cell", PROCESSOR_CELL,
1495 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1496 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1497 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1498 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1499 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1500 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1501 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1502 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1503 {"power2", PROCESSOR_POWER,
1504 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1505 {"power3", PROCESSOR_PPC630,
1506 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1507 {"power4", PROCESSOR_POWER4,
9a8d7941 1508 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1509 | MASK_MFCRF},
ec507f2d 1510 {"power5", PROCESSOR_POWER5,
9a8d7941 1511 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1512 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1513 {"power5+", PROCESSOR_POWER5,
9a8d7941 1514 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1515 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1516 {"power6", PROCESSOR_POWER6,
0783d48d
DE
1517 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1518 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1519 {"power6x", PROCESSOR_POWER6,
0783d48d
DE
1520 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1521 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1522 | MASK_MFPGPR},
d40c9e33
PB
1523 {"power7", PROCESSOR_POWER5,
1524 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1525 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
66188a7e
GK
1526 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1527 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1528 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1529 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1530 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1531 {"rios2", PROCESSOR_RIOS2,
1532 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1533 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1534 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1535 {"rs64", PROCESSOR_RS64A,
1536 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1537 };
5248c961 1538
ca7558fc 1539 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1540
66188a7e
GK
1541 /* Some OSs don't support saving the high part of 64-bit registers on
1542 context switch. Other OSs don't support saving Altivec registers.
1543 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1544 settings; if the user wants either, the user must explicitly specify
1545 them and we won't interfere with the user's specification. */
1546
1547 enum {
1548 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1549 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1550 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1551 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1552 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1553 };
0d1fbc8c 1554
c4ad648e 1555 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1556#ifdef OS_MISSING_POWERPC64
1557 if (OS_MISSING_POWERPC64)
1558 set_masks &= ~MASK_POWERPC64;
1559#endif
1560#ifdef OS_MISSING_ALTIVEC
1561 if (OS_MISSING_ALTIVEC)
1562 set_masks &= ~MASK_ALTIVEC;
1563#endif
1564
768875a8
AM
 1565   /* Don't let the processor default override flags that were given explicitly.  */
1566 set_masks &= ~target_flags_explicit;
957211c3 1567
a4f6c312 1568 /* Identify the processor type. */
8e3f41e7 1569 rs6000_select[0].string = default_cpu;
3cb999d8 1570 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1571
b6a1cbae 1572 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1573 {
8e3f41e7
MM
1574 ptr = &rs6000_select[i];
1575 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1576 {
8e3f41e7
MM
1577 for (j = 0; j < ptt_size; j++)
1578 if (! strcmp (ptr->string, processor_target_table[j].name))
1579 {
1580 if (ptr->set_tune_p)
1581 rs6000_cpu = processor_target_table[j].processor;
1582
1583 if (ptr->set_arch_p)
1584 {
66188a7e
GK
1585 target_flags &= ~set_masks;
1586 target_flags |= (processor_target_table[j].target_enable
1587 & set_masks);
8e3f41e7
MM
1588 }
1589 break;
1590 }
1591
4406229e 1592 if (j == ptt_size)
8e3f41e7 1593 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1594 }
1595 }
8a61d227 1596
edae5fe3
DE
1597 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1598 && !rs6000_explicit_options.isel)
a3170dc6
AH
1599 rs6000_isel = 1;
1600
edae5fe3
DE
1601 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1602 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1603 {
1604 if (TARGET_ALTIVEC)
1605 error ("AltiVec not supported in this target");
1606 if (TARGET_SPE)
 1607 	  error ("SPE not supported in this target");
1608 }
1609
25696a75 1610 /* Disable Cell microcode if we are optimizing for the Cell
c921bad8
AP
1611 and not optimizing for size. */
1612 if (rs6000_gen_cell_microcode == -1)
1613 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1614 && !optimize_size);
1615
dff9f1b6 1616 /* If we are optimizing big endian systems for space, use the load/store
c921bad8
AP
 1617      multiple and string instructions, but only if we are allowed to
 1618      generate Cell microcode (these instructions are microcoded on Cell).  */
 1619   if (BYTES_BIG_ENDIAN && optimize_size && rs6000_gen_cell_microcode)
957211c3 1620 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1621
a4f6c312
SS
1622 /* Don't allow -mmultiple or -mstring on little endian systems
1623 unless the cpu is a 750, because the hardware doesn't support the
 1624      instructions used in little endian mode, and using them causes an
 1625      alignment trap.  The 750 does not cause an alignment trap (except when
 1626      the target is unaligned).  */
bef84347 1627
b21fb038 1628 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1629 {
1630 if (TARGET_MULTIPLE)
1631 {
1632 target_flags &= ~MASK_MULTIPLE;
b21fb038 1633 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1634 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1635 }
1636
1637 if (TARGET_STRING)
1638 {
1639 target_flags &= ~MASK_STRING;
b21fb038 1640 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1641 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1642 }
1643 }
3933e0e1 1644
38c1f2d7
MM
1645 /* Set debug flags */
1646 if (rs6000_debug_name)
1647 {
bfc79d3b 1648 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1649 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1650 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1651 rs6000_debug_stack = 1;
bfc79d3b 1652 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1653 rs6000_debug_arg = 1;
1654 else
c725bd79 1655 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1656 }
1657
57ac7be9
AM
1658 if (rs6000_traceback_name)
1659 {
1660 if (! strncmp (rs6000_traceback_name, "full", 4))
1661 rs6000_traceback = traceback_full;
1662 else if (! strncmp (rs6000_traceback_name, "part", 4))
1663 rs6000_traceback = traceback_part;
1664 else if (! strncmp (rs6000_traceback_name, "no", 2))
1665 rs6000_traceback = traceback_none;
1666 else
9e637a26 1667 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1668 rs6000_traceback_name);
1669 }
1670
78f5898b
AH
1671 if (!rs6000_explicit_options.long_double)
1672 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1673
602ea4d3 1674#ifndef POWERPC_LINUX
d3603e8c 1675 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1676 rs6000_ieeequad = 1;
1677#endif
1678
0db747be
DE
1679 /* Enable Altivec ABI for AIX -maltivec. */
1680 if (TARGET_XCOFF && TARGET_ALTIVEC)
1681 rs6000_altivec_abi = 1;
1682
a2db2771
JJ
1683 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1684 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1685 be explicitly overridden in either case. */
1686 if (TARGET_ELF)
6d0ef01e 1687 {
a2db2771
JJ
1688 if (!rs6000_explicit_options.altivec_abi
1689 && (TARGET_64BIT || TARGET_ALTIVEC))
1690 rs6000_altivec_abi = 1;
1691
1692 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1693 if (!rs6000_explicit_options.vrsave)
1694 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1695 }
1696
594a51fe
SS
1697 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1698 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1699 {
1700 rs6000_darwin64_abi = 1;
9c7956fd 1701#if TARGET_MACHO
6ac49599 1702 darwin_one_byte_bool = 1;
9c7956fd 1703#endif
d9168963
SS
1704 /* Default to natural alignment, for better performance. */
1705 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1706 }
1707
194c524a
DE
1708 /* Place FP constants in the constant pool instead of TOC
1709 if section anchors enabled. */
1710 if (flag_section_anchors)
1711 TARGET_NO_FP_IN_TOC = 1;
1712
c4501e62
JJ
1713 /* Handle -mtls-size option. */
1714 rs6000_parse_tls_size_option ();
1715
a7ae18e2
AH
1716#ifdef SUBTARGET_OVERRIDE_OPTIONS
1717 SUBTARGET_OVERRIDE_OPTIONS;
1718#endif
1719#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1720 SUBSUBTARGET_OVERRIDE_OPTIONS;
1721#endif
4d4cbc0e
AH
1722#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1723 SUB3TARGET_OVERRIDE_OPTIONS;
1724#endif
a7ae18e2 1725
edae5fe3 1726 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1727 {
edae5fe3 1728 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1729 MASK_STRING above when optimizing for size. */
1730 if ((target_flags & MASK_STRING) != 0)
1731 target_flags = target_flags & ~MASK_STRING;
1732 }
1733 else if (rs6000_select[1].string != NULL)
1734 {
1735 /* For the powerpc-eabispe configuration, we set all these by
1736 default, so let's unset them if we manually set another
1737 CPU that is not the E500. */
a2db2771 1738 if (!rs6000_explicit_options.spe_abi)
5da702b1 1739 rs6000_spe_abi = 0;
78f5898b 1740 if (!rs6000_explicit_options.spe)
5da702b1 1741 rs6000_spe = 0;
78f5898b 1742 if (!rs6000_explicit_options.float_gprs)
5da702b1 1743 rs6000_float_gprs = 0;
78f5898b 1744 if (!rs6000_explicit_options.isel)
5da702b1
AH
1745 rs6000_isel = 0;
1746 }
b5044283 1747
eca0d5e8
JM
1748 /* Detect invalid option combinations with E500. */
1749 CHECK_E500_OPTIONS;
1750
ec507f2d 1751 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1752 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1753 && rs6000_cpu != PROCESSOR_POWER6
1754 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1755 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1756 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1757 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1758 || rs6000_cpu == PROCESSOR_POWER5
1759 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1760
ec507f2d
DE
1761 rs6000_sched_restricted_insns_priority
1762 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1763
569fa502 1764 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1765 rs6000_sched_costly_dep
1766 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1767
569fa502
DN
1768 if (rs6000_sched_costly_dep_str)
1769 {
f676971a 1770 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1771 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1772 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1773 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1774 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1775 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1776 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1777 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1778 else
c4ad648e 1779 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1780 }
1781
1782 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1783 rs6000_sched_insert_nops
1784 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1785
cbe26ab8
DN
1786 if (rs6000_sched_insert_nops_str)
1787 {
1788 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1789 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1790 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1791 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1792 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1793 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1794 else
c4ad648e 1795 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1796 }
1797
c81bebd7 1798#ifdef TARGET_REGNAMES
a4f6c312
SS
1799 /* If the user desires alternate register names, copy in the
1800 alternate names now. */
c81bebd7 1801 if (TARGET_REGNAMES)
4e135bdd 1802 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1803#endif
1804
df01da37 1805 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1806 If -maix-struct-return or -msvr4-struct-return was explicitly
1807 used, don't override with the ABI default. */
df01da37
DE
1808 if (!rs6000_explicit_options.aix_struct_ret)
1809 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1810
602ea4d3 1811 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1812 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1813
f676971a 1814 if (TARGET_TOC)
9ebbca7d 1815 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1816
301d03af
RS
1817 /* We can only guarantee the availability of DI pseudo-ops when
1818 assembling for 64-bit targets. */
ae6c1efd 1819 if (!TARGET_64BIT)
301d03af
RS
1820 {
1821 targetm.asm_out.aligned_op.di = NULL;
1822 targetm.asm_out.unaligned_op.di = NULL;
1823 }
1824
1494c534
DE
1825 /* Set branch target alignment, if not optimizing for size. */
1826 if (!optimize_size)
1827 {
d296e02e
AP
 1828       /* Cell wants 8-byte alignment for dual issue.  */
1829 if (rs6000_cpu == PROCESSOR_CELL)
1830 {
1831 if (align_functions <= 0)
1832 align_functions = 8;
1833 if (align_jumps <= 0)
1834 align_jumps = 8;
1835 if (align_loops <= 0)
1836 align_loops = 8;
1837 }
44cd321e 1838 if (rs6000_align_branch_targets)
1494c534
DE
1839 {
1840 if (align_functions <= 0)
1841 align_functions = 16;
1842 if (align_jumps <= 0)
1843 align_jumps = 16;
1844 if (align_loops <= 0)
1845 align_loops = 16;
1846 }
1847 if (align_jumps_max_skip <= 0)
1848 align_jumps_max_skip = 15;
1849 if (align_loops_max_skip <= 0)
1850 align_loops_max_skip = 15;
1851 }
2792d578 1852
71f123ca
FS
1853 /* Arrange to save and restore machine status around nested functions. */
1854 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1855
1856 /* We should always be splitting complex arguments, but we can't break
1857 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1858 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1859 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1860
1861 /* Initialize rs6000_cost with the appropriate target costs. */
1862 if (optimize_size)
1863 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1864 else
1865 switch (rs6000_cpu)
1866 {
1867 case PROCESSOR_RIOS1:
1868 rs6000_cost = &rios1_cost;
1869 break;
1870
1871 case PROCESSOR_RIOS2:
1872 rs6000_cost = &rios2_cost;
1873 break;
1874
1875 case PROCESSOR_RS64A:
1876 rs6000_cost = &rs64a_cost;
1877 break;
1878
1879 case PROCESSOR_MPCCORE:
1880 rs6000_cost = &mpccore_cost;
1881 break;
1882
1883 case PROCESSOR_PPC403:
1884 rs6000_cost = &ppc403_cost;
1885 break;
1886
1887 case PROCESSOR_PPC405:
1888 rs6000_cost = &ppc405_cost;
1889 break;
1890
1891 case PROCESSOR_PPC440:
1892 rs6000_cost = &ppc440_cost;
1893 break;
1894
1895 case PROCESSOR_PPC601:
1896 rs6000_cost = &ppc601_cost;
1897 break;
1898
1899 case PROCESSOR_PPC603:
1900 rs6000_cost = &ppc603_cost;
1901 break;
1902
1903 case PROCESSOR_PPC604:
1904 rs6000_cost = &ppc604_cost;
1905 break;
1906
1907 case PROCESSOR_PPC604e:
1908 rs6000_cost = &ppc604e_cost;
1909 break;
1910
1911 case PROCESSOR_PPC620:
8b897cfa
RS
1912 rs6000_cost = &ppc620_cost;
1913 break;
1914
f0517163
RS
1915 case PROCESSOR_PPC630:
1916 rs6000_cost = &ppc630_cost;
1917 break;
1918
982afe02 1919 case PROCESSOR_CELL:
d296e02e
AP
1920 rs6000_cost = &ppccell_cost;
1921 break;
1922
8b897cfa
RS
1923 case PROCESSOR_PPC750:
1924 case PROCESSOR_PPC7400:
1925 rs6000_cost = &ppc750_cost;
1926 break;
1927
1928 case PROCESSOR_PPC7450:
1929 rs6000_cost = &ppc7450_cost;
1930 break;
1931
1932 case PROCESSOR_PPC8540:
1933 rs6000_cost = &ppc8540_cost;
1934 break;
1935
fa41c305
EW
1936 case PROCESSOR_PPCE300C2:
1937 case PROCESSOR_PPCE300C3:
1938 rs6000_cost = &ppce300c2c3_cost;
1939 break;
1940
edae5fe3
DE
1941 case PROCESSOR_PPCE500MC:
1942 rs6000_cost = &ppce500mc_cost;
1943 break;
1944
8b897cfa
RS
1945 case PROCESSOR_POWER4:
1946 case PROCESSOR_POWER5:
1947 rs6000_cost = &power4_cost;
1948 break;
1949
44cd321e
PS
1950 case PROCESSOR_POWER6:
1951 rs6000_cost = &power6_cost;
1952 break;
1953
8b897cfa 1954 default:
37409796 1955 gcc_unreachable ();
8b897cfa 1956 }
0b11da67
DE
1957
1958 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1959 set_param_value ("simultaneous-prefetches",
1960 rs6000_cost->simultaneous_prefetches);
1961 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1962 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1963 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1964 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1965 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1966 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1967
1968 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1969 can be optimized to ap = __builtin_next_arg (0). */
1970 if (DEFAULT_ABI != ABI_V4)
1971 targetm.expand_builtin_va_start = NULL;
696e45ba
ME
1972
1973 /* Set up single/double float flags.
 1974      If TARGET_HARD_FLOAT is set, but neither single nor double is set,
1975 then set both flags. */
1976 if (TARGET_HARD_FLOAT && TARGET_FPRS
1977 && rs6000_single_float == 0 && rs6000_double_float == 0)
1978 rs6000_single_float = rs6000_double_float = 1;
1979
1980 /* Reset single and double FP flags if target is E500. */
1981 if (TARGET_E500)
1982 {
1983 rs6000_single_float = rs6000_double_float = 0;
1984 if (TARGET_E500_SINGLE)
1985 rs6000_single_float = 1;
1986 if (TARGET_E500_DOUBLE)
1987 rs6000_single_float = rs6000_double_float = 1;
1988 }
1989
001b9eb6
PH
1990 /* If not explicitly specified via option, decide whether to generate indexed
1991 load/store instructions. */
1992 if (TARGET_AVOID_XFORM == -1)
1993 /* Avoid indexed addressing when targeting Power6 in order to avoid
1994 the DERAT mispredict penalty. */
1995 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
1996
696e45ba 1997 rs6000_init_hard_regno_mode_ok ();
5248c961 1998}
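/* Worked example for the cpu handling above (editor's note, derived from
   processor_target_table): "-mcpu=power5" selects PROCESSOR_POWER5 for both
   tuning and architecture, clears whatever bits of set_masks the user did
   not set explicitly, and ORs in

     POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
     | MASK_MFCRF | MASK_POPCNTB

   (still filtered through set_masks, so MASK_POWERPC64 stays off where
   OS_MISSING_POWERPC64 says so).  "-mtune=power6" alone only changes
   rs6000_cpu and leaves target_flags untouched.  */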
5accd822 1999
7ccf35ed
DN
2000/* Implement targetm.vectorize.builtin_mask_for_load. */
2001static tree
2002rs6000_builtin_mask_for_load (void)
2003{
2004 if (TARGET_ALTIVEC)
2005 return altivec_builtin_mask_for_load;
2006 else
2007 return 0;
2008}
2009
7910ae0c
DN
2010/* Implement targetm.vectorize.builtin_conversion.
2011 Returns a decl of a function that implements conversion of an integer vector
2012 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2013 side of the conversion.
2014 Return NULL_TREE if it is not available. */
f57d17f1
TM
2015static tree
2016rs6000_builtin_conversion (enum tree_code code, tree type)
2017{
2018 if (!TARGET_ALTIVEC)
2019 return NULL_TREE;
982afe02 2020
f57d17f1
TM
2021 switch (code)
2022 {
7910ae0c
DN
2023 case FIX_TRUNC_EXPR:
2024 switch (TYPE_MODE (type))
2025 {
2026 case V4SImode:
2027 return TYPE_UNSIGNED (type)
2028 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2029 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2030 default:
2031 return NULL_TREE;
2032 }
2033
f57d17f1
TM
2034 case FLOAT_EXPR:
2035 switch (TYPE_MODE (type))
2036 {
2037 case V4SImode:
7910ae0c
DN
2038 return TYPE_UNSIGNED (type)
2039 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2040 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2041 default:
2042 return NULL_TREE;
2043 }
7910ae0c 2044
f57d17f1
TM
2045 default:
2046 return NULL_TREE;
2047 }
2048}
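/* Example of the mapping above (editor's sketch): asking for FLOAT_EXPR on
   an unsigned V4SImode type returns the vcfux builtin decl and on a signed
   type the vcfsx decl, so the vectorizer can convert int vectors to float
   vectors with one AltiVec instruction; FIX_TRUNC_EXPR maps the reverse
   direction onto vctuxs/vctsxs.  */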
2049
89d67cca
DN
2050/* Implement targetm.vectorize.builtin_mul_widen_even. */
2051static tree
2052rs6000_builtin_mul_widen_even (tree type)
2053{
2054 if (!TARGET_ALTIVEC)
2055 return NULL_TREE;
2056
2057 switch (TYPE_MODE (type))
2058 {
2059 case V8HImode:
7910ae0c
DN
2060 return TYPE_UNSIGNED (type)
2061 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2062 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2063
2064 case V16QImode:
7910ae0c
DN
2065 return TYPE_UNSIGNED (type)
2066 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2067 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2068 default:
2069 return NULL_TREE;
2070 }
2071}
2072
2073/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2074static tree
2075rs6000_builtin_mul_widen_odd (tree type)
2076{
2077 if (!TARGET_ALTIVEC)
2078 return NULL_TREE;
2079
2080 switch (TYPE_MODE (type))
2081 {
2082 case V8HImode:
7910ae0c
DN
2083 return TYPE_UNSIGNED (type)
2084 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2085 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2086
2087 case V16QImode:
7910ae0c
DN
2088 return TYPE_UNSIGNED (type)
2089 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2090 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2091 default:
2092 return NULL_TREE;
2093 }
2094}
2095
5b900a4c
DN
2096
2097/* Return true iff a data reference of TYPE can reach vector alignment (16)
2098   after applying N iterations.  This routine does not determine
2099   how many iterations are required to reach the desired alignment.  */
2100
2101static bool
3101faab 2102rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2103{
2104 if (is_packed)
2105 return false;
2106
2107 if (TARGET_32BIT)
2108 {
2109 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2110 return true;
2111
2112 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2113 return true;
2114
2115 return false;
2116 }
2117 else
2118 {
2119 if (TARGET_MACHO)
2120 return false;
2121
2122 /* Assuming that all other types are naturally aligned. CHECKME! */
2123 return true;
2124 }
2125}
2126
0fca40f5
IR
2127/* Implement targetm.vectorize.builtin_vec_perm. */
2128tree
2129rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2130{
2131 tree d;
2132
2133 *mask_element_type = unsigned_char_type_node;
2134
2135 switch (TYPE_MODE (type))
2136 {
2137 case V16QImode:
2138 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2139 break;
2140
2141 case V8HImode:
2142 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2143 break;
2144
2145 case V4SImode:
2146 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2147 break;
2148
2149 case V4SFmode:
2150 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2151 break;
2152
2153 default:
2154 return NULL_TREE;
2155 }
2156
2157 gcc_assert (d);
2158 return d;
2159}
2160
5da702b1
AH
2161/* Handle generic options of the form -mfoo=yes/no.
2162 NAME is the option name.
2163 VALUE is the option value.
2164 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2165 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2166static void
5da702b1 2167rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2168{
5da702b1 2169 if (value == 0)
993f19a8 2170 return;
5da702b1
AH
2171 else if (!strcmp (value, "yes"))
2172 *flag = 1;
2173 else if (!strcmp (value, "no"))
2174 *flag = 0;
08b57fb3 2175 else
5da702b1 2176 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2177}
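/* Usage sketch (editor's note): the option handlers below invoke this as,
   for example,

     rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));

   so "-mvrsave=yes" stores 1, "-mvrsave=no" stores 0, and any other value
   is diagnosed with the "unknown -mvrsave= option" error above.  */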
2178
c4501e62
JJ
2179/* Validate and record the size specified with the -mtls-size option. */
2180
2181static void
863d938c 2182rs6000_parse_tls_size_option (void)
c4501e62
JJ
2183{
2184 if (rs6000_tls_size_string == 0)
2185 return;
2186 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2187 rs6000_tls_size = 16;
2188 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2189 rs6000_tls_size = 32;
2190 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2191 rs6000_tls_size = 64;
2192 else
9e637a26 2193 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2194}
2195
5accd822 2196void
a2369ed3 2197optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2198{
2e3f0db6
DJ
2199 if (DEFAULT_ABI == ABI_DARWIN)
2200 /* The Darwin libraries never set errno, so we might as well
2201 avoid calling them when that's the only reason we would. */
2202 flag_errno_math = 0;
59d6560b
DE
2203
2204 /* Double growth factor to counter reduced min jump length. */
2205 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2206
2207 /* Enable section anchors by default.
2208 Skip section anchors for Objective C and Objective C++
2f3b0d4a
ST
2209 until front-ends fixed. */
2210 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2211 flag_section_anchors = 2;
5accd822 2212}
78f5898b 2213
0bb7b92e
ME
2214static enum fpu_type_t
2215rs6000_parse_fpu_option (const char *option)
2216{
 2217  if (!strcmp ("none", option)) return FPU_NONE;
 2218  if (!strcmp ("sp_lite", option)) return FPU_SF_LITE;
 2219  if (!strcmp ("dp_lite", option)) return FPU_DF_LITE;
 2220  if (!strcmp ("sp_full", option)) return FPU_SF_FULL;
 2221  if (!strcmp ("dp_full", option)) return FPU_DF_FULL;
 2222  error ("unknown value %s for -mfpu", option);
2223 return FPU_NONE;
2224}
2225
78f5898b
AH
2226/* Implement TARGET_HANDLE_OPTION. */
2227
2228static bool
2229rs6000_handle_option (size_t code, const char *arg, int value)
2230{
0bb7b92e
ME
2231 enum fpu_type_t fpu_type = FPU_NONE;
2232
78f5898b
AH
2233 switch (code)
2234 {
2235 case OPT_mno_power:
2236 target_flags &= ~(MASK_POWER | MASK_POWER2
2237 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2238 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2239 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2240 break;
2241 case OPT_mno_powerpc:
2242 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2243 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2244 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2245 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2246 break;
2247 case OPT_mfull_toc:
d2894ab5
DE
2248 target_flags &= ~MASK_MINIMAL_TOC;
2249 TARGET_NO_FP_IN_TOC = 0;
2250 TARGET_NO_SUM_IN_TOC = 0;
2251 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2252#ifdef TARGET_USES_SYSV4_OPT
 2253      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
2254 just the same as -mminimal-toc. */
2255 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2256 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2257#endif
2258 break;
2259
2260#ifdef TARGET_USES_SYSV4_OPT
2261 case OPT_mtoc:
2262 /* Make -mtoc behave like -mminimal-toc. */
2263 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2264 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2265 break;
2266#endif
2267
2268#ifdef TARGET_USES_AIX64_OPT
2269 case OPT_maix64:
2270#else
2271 case OPT_m64:
2272#endif
2c9c9afd
AM
2273 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2274 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2275 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2276 break;
2277
2278#ifdef TARGET_USES_AIX64_OPT
2279 case OPT_maix32:
2280#else
2281 case OPT_m32:
2282#endif
2283 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2284 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2285 break;
2286
2287 case OPT_minsert_sched_nops_:
2288 rs6000_sched_insert_nops_str = arg;
2289 break;
2290
2291 case OPT_mminimal_toc:
2292 if (value == 1)
2293 {
d2894ab5
DE
2294 TARGET_NO_FP_IN_TOC = 0;
2295 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2296 }
2297 break;
2298
2299 case OPT_mpower:
2300 if (value == 1)
c2dba4ab
AH
2301 {
2302 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2303 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2304 }
78f5898b
AH
2305 break;
2306
2307 case OPT_mpower2:
2308 if (value == 1)
c2dba4ab
AH
2309 {
2310 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2311 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2312 }
78f5898b
AH
2313 break;
2314
2315 case OPT_mpowerpc_gpopt:
2316 case OPT_mpowerpc_gfxopt:
2317 if (value == 1)
c2dba4ab
AH
2318 {
2319 target_flags |= MASK_POWERPC;
2320 target_flags_explicit |= MASK_POWERPC;
2321 }
78f5898b
AH
2322 break;
2323
df01da37
DE
2324 case OPT_maix_struct_return:
2325 case OPT_msvr4_struct_return:
2326 rs6000_explicit_options.aix_struct_ret = true;
2327 break;
2328
b5e3caf2
BE
2329 case OPT_mvrsave:
2330 rs6000_explicit_options.vrsave = true;
2331 TARGET_ALTIVEC_VRSAVE = value;
2332 break;
2333
78f5898b 2334 case OPT_mvrsave_:
a2db2771 2335 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2336 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2337 break;
78f5898b 2338
94f4765c
NF
2339 case OPT_misel:
2340 rs6000_explicit_options.isel = true;
2341 rs6000_isel = value;
2342 break;
2343
78f5898b
AH
2344 case OPT_misel_:
2345 rs6000_explicit_options.isel = true;
2346 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2347 break;
2348
94f4765c
NF
2349 case OPT_mspe:
2350 rs6000_explicit_options.spe = true;
2351 rs6000_spe = value;
2352 break;
2353
78f5898b
AH
2354 case OPT_mspe_:
2355 rs6000_explicit_options.spe = true;
2356 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2357 break;
2358
2359 case OPT_mdebug_:
2360 rs6000_debug_name = arg;
2361 break;
2362
2363#ifdef TARGET_USES_SYSV4_OPT
2364 case OPT_mcall_:
2365 rs6000_abi_name = arg;
2366 break;
2367
2368 case OPT_msdata_:
2369 rs6000_sdata_name = arg;
2370 break;
2371
2372 case OPT_mtls_size_:
2373 rs6000_tls_size_string = arg;
2374 break;
2375
2376 case OPT_mrelocatable:
2377 if (value == 1)
c2dba4ab 2378 {
e0bf274f
AM
2379 target_flags |= MASK_MINIMAL_TOC;
2380 target_flags_explicit |= MASK_MINIMAL_TOC;
2381 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2382 }
78f5898b
AH
2383 break;
2384
2385 case OPT_mrelocatable_lib:
2386 if (value == 1)
c2dba4ab 2387 {
e0bf274f
AM
2388 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2389 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2390 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2391 }
78f5898b 2392 else
c2dba4ab
AH
2393 {
2394 target_flags &= ~MASK_RELOCATABLE;
2395 target_flags_explicit |= MASK_RELOCATABLE;
2396 }
78f5898b
AH
2397 break;
2398#endif
2399
2400 case OPT_mabi_:
78f5898b
AH
2401 if (!strcmp (arg, "altivec"))
2402 {
a2db2771 2403 rs6000_explicit_options.altivec_abi = true;
78f5898b 2404 rs6000_altivec_abi = 1;
a2db2771
JJ
2405
2406 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2407 rs6000_spe_abi = 0;
2408 }
2409 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2410 {
a2db2771 2411 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2412 rs6000_altivec_abi = 0;
2413 }
78f5898b
AH
2414 else if (! strcmp (arg, "spe"))
2415 {
a2db2771 2416 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2417 rs6000_spe_abi = 1;
2418 rs6000_altivec_abi = 0;
2419 if (!TARGET_SPE_ABI)
2420 error ("not configured for ABI: '%s'", arg);
2421 }
2422 else if (! strcmp (arg, "no-spe"))
d3603e8c 2423 {
a2db2771 2424 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2425 rs6000_spe_abi = 0;
2426 }
78f5898b
AH
2427
 2428       /* These are here for testing during development only; please do not
 2429 	 document them in the manual.  */
2430 else if (! strcmp (arg, "d64"))
2431 {
2432 rs6000_darwin64_abi = 1;
2433 warning (0, "Using darwin64 ABI");
2434 }
2435 else if (! strcmp (arg, "d32"))
2436 {
2437 rs6000_darwin64_abi = 0;
2438 warning (0, "Using old darwin ABI");
2439 }
2440
602ea4d3
JJ
2441 else if (! strcmp (arg, "ibmlongdouble"))
2442 {
d3603e8c 2443 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2444 rs6000_ieeequad = 0;
2445 warning (0, "Using IBM extended precision long double");
2446 }
2447 else if (! strcmp (arg, "ieeelongdouble"))
2448 {
d3603e8c 2449 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2450 rs6000_ieeequad = 1;
2451 warning (0, "Using IEEE extended precision long double");
2452 }
2453
78f5898b
AH
2454 else
2455 {
2456 error ("unknown ABI specified: '%s'", arg);
2457 return false;
2458 }
2459 break;
2460
2461 case OPT_mcpu_:
2462 rs6000_select[1].string = arg;
2463 break;
2464
2465 case OPT_mtune_:
2466 rs6000_select[2].string = arg;
2467 break;
2468
2469 case OPT_mtraceback_:
2470 rs6000_traceback_name = arg;
2471 break;
2472
2473 case OPT_mfloat_gprs_:
2474 rs6000_explicit_options.float_gprs = true;
2475 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2476 rs6000_float_gprs = 1;
2477 else if (! strcmp (arg, "double"))
2478 rs6000_float_gprs = 2;
2479 else if (! strcmp (arg, "no"))
2480 rs6000_float_gprs = 0;
2481 else
2482 {
2483 error ("invalid option for -mfloat-gprs: '%s'", arg);
2484 return false;
2485 }
2486 break;
2487
2488 case OPT_mlong_double_:
2489 rs6000_explicit_options.long_double = true;
2490 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2491 if (value != 64 && value != 128)
2492 {
2493 error ("Unknown switch -mlong-double-%s", arg);
2494 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2495 return false;
2496 }
2497 else
2498 rs6000_long_double_type_size = value;
2499 break;
2500
2501 case OPT_msched_costly_dep_:
2502 rs6000_sched_costly_dep_str = arg;
2503 break;
2504
2505 case OPT_malign_:
2506 rs6000_explicit_options.alignment = true;
2507 if (! strcmp (arg, "power"))
2508 {
2509 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2510 some C library functions, so warn about it. The flag may be
2511 useful for performance studies from time to time though, so
2512 don't disable it entirely. */
2513 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2514 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2515 " it is incompatible with the installed C and C++ libraries");
2516 rs6000_alignment_flags = MASK_ALIGN_POWER;
2517 }
2518 else if (! strcmp (arg, "natural"))
2519 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2520 else
2521 {
2522 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2523 return false;
2524 }
2525 break;
696e45ba
ME
2526
2527 case OPT_msingle_float:
2528 if (!TARGET_SINGLE_FPU)
2529 warning (0, "-msingle-float option equivalent to -mhard-float");
2530 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2531 rs6000_double_float = 0;
2532 target_flags &= ~MASK_SOFT_FLOAT;
2533 target_flags_explicit |= MASK_SOFT_FLOAT;
2534 break;
2535
2536 case OPT_mdouble_float:
2537 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2538 rs6000_single_float = 1;
2539 target_flags &= ~MASK_SOFT_FLOAT;
2540 target_flags_explicit |= MASK_SOFT_FLOAT;
2541 break;
2542
2543 case OPT_msimple_fpu:
2544 if (!TARGET_SINGLE_FPU)
2545 warning (0, "-msimple-fpu option ignored");
2546 break;
2547
2548 case OPT_mhard_float:
 2549       /* -mhard-float implies -msingle-float and -mdouble-float.  */
2550 rs6000_single_float = rs6000_double_float = 1;
2551 break;
2552
2553 case OPT_msoft_float:
 2554       /* -msoft-float implies -mno-single-float and -mno-double-float.  */
2555 rs6000_single_float = rs6000_double_float = 0;
2556 break;
0bb7b92e
ME
2557
2558 case OPT_mfpu_:
 2559       fpu_type = rs6000_parse_fpu_option (arg);
2560 if (fpu_type != FPU_NONE)
2561 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2562 {
2563 target_flags &= ~MASK_SOFT_FLOAT;
2564 target_flags_explicit |= MASK_SOFT_FLOAT;
2565 rs6000_xilinx_fpu = 1;
2566 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2567 rs6000_single_float = 1;
2568 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2569 rs6000_single_float = rs6000_double_float = 1;
2570 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2571 rs6000_simple_fpu = 1;
2572 }
2573 else
2574 {
2575 /* -mfpu=none is equivalent to -msoft-float */
2576 target_flags |= MASK_SOFT_FLOAT;
2577 target_flags_explicit |= MASK_SOFT_FLOAT;
2578 rs6000_single_float = rs6000_double_float = 0;
2579 }
2580 break;
78f5898b
AH
2581 }
2582 return true;
2583}
3cfa4909
MM
2584\f
2585/* Do anything needed at the start of the asm file. */
2586
1bc7c5b6 2587static void
863d938c 2588rs6000_file_start (void)
3cfa4909 2589{
c4d38ccb 2590 size_t i;
3cfa4909 2591 char buffer[80];
d330fd93 2592 const char *start = buffer;
3cfa4909 2593 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2594 const char *default_cpu = TARGET_CPU_DEFAULT;
2595 FILE *file = asm_out_file;
2596
2597 default_file_start ();
2598
2599#ifdef TARGET_BI_ARCH
2600 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2601 default_cpu = 0;
2602#endif
3cfa4909
MM
2603
2604 if (flag_verbose_asm)
2605 {
2606 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2607 rs6000_select[0].string = default_cpu;
2608
b6a1cbae 2609 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2610 {
2611 ptr = &rs6000_select[i];
2612 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2613 {
2614 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2615 start = "";
2616 }
2617 }
2618
9c6b4ed9 2619 if (PPC405_ERRATUM77)
b0bfee6e 2620 {
9c6b4ed9 2621 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2622 start = "";
2623 }
b0bfee6e 2624
b91da81f 2625#ifdef USING_ELFOS_H
3cfa4909
MM
2626 switch (rs6000_sdata)
2627 {
2628 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2629 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2630 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2631 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2632 }
2633
2634 if (rs6000_sdata && g_switch_value)
2635 {
307b599c
MK
2636 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2637 g_switch_value);
3cfa4909
MM
2638 start = "";
2639 }
2640#endif
2641
2642 if (*start == '\0')
949ea356 2643 putc ('\n', file);
3cfa4909 2644 }
b723e82f 2645
e51917ae
JM
2646#ifdef HAVE_AS_GNU_ATTRIBUTE
2647 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2648 {
2649 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
2650 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2651 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2652 : 2));
aaa42494
DJ
2653 fprintf (file, "\t.gnu_attribute 8, %d\n",
2654 (TARGET_ALTIVEC_ABI ? 2
2655 : TARGET_SPE_ABI ? 3
2656 : 1));
f9fd1e77
NF
2657 fprintf (file, "\t.gnu_attribute 12, %d\n",
2658 aix_struct_return ? 2 : 1);
2659
aaa42494 2660 }
e51917ae
JM
2661#endif
2662
b723e82f
JJ
2663 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2664 {
d6b5193b
RS
2665 switch_to_section (toc_section);
2666 switch_to_section (text_section);
b723e82f 2667 }
3cfa4909 2668}
c4e18b1c 2669
5248c961 2670\f
a0ab749a 2671/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2672
2673int
863d938c 2674direct_return (void)
9878760c 2675{
4697a36c
MM
2676 if (reload_completed)
2677 {
2678 rs6000_stack_t *info = rs6000_stack_info ();
2679
2680 if (info->first_gp_reg_save == 32
2681 && info->first_fp_reg_save == 64
00b960c7 2682 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2683 && ! info->lr_save_p
2684 && ! info->cr_save_p
00b960c7 2685 && info->vrsave_mask == 0
c81fc13e 2686 && ! info->push_p)
4697a36c
MM
2687 return 1;
2688 }
2689
2690 return 0;
9878760c
RK
2691}
2692
4e74d8ec
MM
2693/* Return the number of instructions it takes to form a constant in an
2694 integer register. */
2695
48d72335 2696int
a2369ed3 2697num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2698{
2699 /* signed constant loadable with {cal|addi} */
547b216d 2700 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2701 return 1;
2702
4e74d8ec 2703 /* constant loadable with {cau|addis} */
547b216d
DE
2704 else if ((value & 0xffff) == 0
2705 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2706 return 1;
2707
5f59ecb7 2708#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2709 else if (TARGET_POWERPC64)
4e74d8ec 2710 {
a65c591c
DE
2711 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2712 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2713
a65c591c 2714 if (high == 0 || high == -1)
4e74d8ec
MM
2715 return 2;
2716
a65c591c 2717 high >>= 1;
4e74d8ec 2718
a65c591c 2719 if (low == 0)
4e74d8ec 2720 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2721 else
2722 return (num_insns_constant_wide (high)
e396202a 2723 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2724 }
2725#endif
2726
2727 else
2728 return 2;
2729}
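/* Worked examples for num_insns_constant_wide above (editor's note; the
   counts follow directly from the cases in the function):

     0x7fff              -> 1  fits the signed 16-bit addi/li case
     0x12340000          -> 1  low 16 bits clear: a single addis/lis
     0x12345678          -> 2  lis + ori
     0x1234567800000000  -> 3  on a 64-bit target: low == 0, so the result
                               is num_insns_constant_wide (high) + 1,
                               i.e. build 0x12345678 and shift it up.

   num_insns_constant below applies the same counting to both halves of a
   CONST_DOUBLE after converting floats to their bit patterns.  */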
2730
2731int
a2369ed3 2732num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2733{
37409796 2734 HOST_WIDE_INT low, high;
bb8df8a6 2735
37409796 2736 switch (GET_CODE (op))
0d30d435 2737 {
37409796 2738 case CONST_INT:
0d30d435 2739#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2740 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2741 && mask64_operand (op, mode))
c4ad648e 2742 return 2;
0d30d435
DE
2743 else
2744#endif
2745 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2746
37409796 2747 case CONST_DOUBLE:
e41b2a33 2748 if (mode == SFmode || mode == SDmode)
37409796
NS
2749 {
2750 long l;
2751 REAL_VALUE_TYPE rv;
bb8df8a6 2752
37409796 2753 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2754 if (DECIMAL_FLOAT_MODE_P (mode))
2755 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2756 else
2757 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2758 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2759 }
a260abc9 2760
37409796
NS
2761 if (mode == VOIDmode || mode == DImode)
2762 {
2763 high = CONST_DOUBLE_HIGH (op);
2764 low = CONST_DOUBLE_LOW (op);
2765 }
2766 else
2767 {
2768 long l[2];
2769 REAL_VALUE_TYPE rv;
bb8df8a6 2770
37409796 2771 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2772 if (DECIMAL_FLOAT_MODE_P (mode))
2773 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2774 else
2775 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2776 high = l[WORDS_BIG_ENDIAN == 0];
2777 low = l[WORDS_BIG_ENDIAN != 0];
2778 }
47ad8c61 2779
37409796
NS
2780 if (TARGET_32BIT)
2781 return (num_insns_constant_wide (low)
2782 + num_insns_constant_wide (high));
2783 else
2784 {
2785 if ((high == 0 && low >= 0)
2786 || (high == -1 && low < 0))
2787 return num_insns_constant_wide (low);
bb8df8a6 2788
1990cd79 2789 else if (mask64_operand (op, mode))
37409796 2790 return 2;
bb8df8a6 2791
37409796
NS
2792 else if (low == 0)
2793 return num_insns_constant_wide (high) + 1;
bb8df8a6 2794
37409796
NS
2795 else
2796 return (num_insns_constant_wide (high)
2797 + num_insns_constant_wide (low) + 1);
2798 }
bb8df8a6 2799
37409796
NS
2800 default:
2801 gcc_unreachable ();
4e74d8ec 2802 }
4e74d8ec
MM
2803}
2804
0972012c
RS
2805/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2806 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2807 corresponding element of the vector, but for V4SFmode and V2SFmode,
2808 the corresponding "float" is interpreted as an SImode integer. */
2809
847535b6 2810HOST_WIDE_INT
0972012c
RS
2811const_vector_elt_as_int (rtx op, unsigned int elt)
2812{
2813 rtx tmp = CONST_VECTOR_ELT (op, elt);
2814 if (GET_MODE (op) == V4SFmode
2815 || GET_MODE (op) == V2SFmode)
2816 tmp = gen_lowpart (SImode, tmp);
2817 return INTVAL (tmp);
2818}
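/* Example (editor's note): for a V4SFmode CONST_VECTOR whose elements are
   the float 1.0, this returns 0x3f800000 -- the IEEE single-precision bit
   pattern reinterpreted as an SImode integer -- which is the value the
   vspltis checks below actually operate on.  */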
452a7d36 2819
77ccdfed 2820/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2821 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2822 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2823 all items are set to the same value and contain COPIES replicas of the
2824 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2825 operand and the others are set to the value of the operand's msb. */
2826
2827static bool
2828vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2829{
66180ff3
PB
2830 enum machine_mode mode = GET_MODE (op);
2831 enum machine_mode inner = GET_MODE_INNER (mode);
2832
2833 unsigned i;
2834 unsigned nunits = GET_MODE_NUNITS (mode);
2835 unsigned bitsize = GET_MODE_BITSIZE (inner);
2836 unsigned mask = GET_MODE_MASK (inner);
2837
0972012c 2838 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2839 HOST_WIDE_INT splat_val = val;
2840 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2841
2842 /* Construct the value to be splatted, if possible. If not, return 0. */
2843 for (i = 2; i <= copies; i *= 2)
452a7d36 2844 {
66180ff3
PB
2845 HOST_WIDE_INT small_val;
2846 bitsize /= 2;
2847 small_val = splat_val >> bitsize;
2848 mask >>= bitsize;
2849 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2850 return false;
2851 splat_val = small_val;
2852 }
c4ad648e 2853
66180ff3
PB
2854 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2855 if (EASY_VECTOR_15 (splat_val))
2856 ;
2857
2858 /* Also check if we can splat, and then add the result to itself. Do so if
2859 the value is positive, of if the splat instruction is using OP's mode;
2860 for splat_val < 0, the splat and the add should use the same mode. */
2861 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2862 && (splat_val >= 0 || (step == 1 && copies == 1)))
2863 ;
2864
2865 else
2866 return false;
2867
2868 /* Check if VAL is present in every STEP-th element, and the
2869 other elements are filled with its most significant bit. */
2870 for (i = 0; i < nunits - 1; ++i)
2871 {
2872 HOST_WIDE_INT desired_val;
2873 if (((i + 1) & (step - 1)) == 0)
2874 desired_val = val;
2875 else
2876 desired_val = msb_val;
2877
0972012c 2878 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2879 return false;
452a7d36 2880 }
66180ff3
PB
2881
2882 return true;
452a7d36
HP
2883}
2884
69ef87e2 2885
77ccdfed 2886/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2887 with a vspltisb, vspltish or vspltisw. */
2888
2889bool
2890easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2891{
66180ff3 2892 unsigned step, copies;
d744e06e 2893
66180ff3
PB
2894 if (mode == VOIDmode)
2895 mode = GET_MODE (op);
2896 else if (mode != GET_MODE (op))
2897 return false;
d744e06e 2898
66180ff3
PB
2899 /* Start with a vspltisw. */
2900 step = GET_MODE_NUNITS (mode) / 4;
2901 copies = 1;
2902
2903 if (vspltis_constant (op, step, copies))
2904 return true;
2905
2906 /* Then try with a vspltish. */
2907 if (step == 1)
2908 copies <<= 1;
2909 else
2910 step >>= 1;
2911
2912 if (vspltis_constant (op, step, copies))
2913 return true;
2914
2915 /* And finally a vspltisb. */
2916 if (step == 1)
2917 copies <<= 1;
2918 else
2919 step >>= 1;
2920
2921 if (vspltis_constant (op, step, copies))
2922 return true;
2923
2924 return false;
d744e06e
AH
2925}
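/* Examples (editor's note, following the splat rules above): a V4SImode
   vector of four 5s is easy (vspltisw 5); a V8HImode vector of eight -2s
   is easy (vspltish -2); and a V4SImode vector whose words are all
   0x00070007 is easy too, because splatting the halfword 7 with vspltish
   produces exactly that word pattern -- that is what the STEP/COPIES walk
   detects.  The immediate range is -16..15, extended only by the
   splat-and-add trick for small even values, so a splat of 99 is not easy
   and is loaded from the constant pool elsewhere.  */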
2926
66180ff3
PB
2927/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2928 result is OP. Abort if it is not possible. */
d744e06e 2929
f676971a 2930rtx
66180ff3 2931gen_easy_altivec_constant (rtx op)
452a7d36 2932{
66180ff3
PB
2933 enum machine_mode mode = GET_MODE (op);
2934 int nunits = GET_MODE_NUNITS (mode);
2935 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2936 unsigned step = nunits / 4;
2937 unsigned copies = 1;
2938
2939 /* Start with a vspltisw. */
2940 if (vspltis_constant (op, step, copies))
2941 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2942
2943 /* Then try with a vspltish. */
2944 if (step == 1)
2945 copies <<= 1;
2946 else
2947 step >>= 1;
2948
2949 if (vspltis_constant (op, step, copies))
2950 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2951
2952 /* And finally a vspltisb. */
2953 if (step == 1)
2954 copies <<= 1;
2955 else
2956 step >>= 1;
2957
2958 if (vspltis_constant (op, step, copies))
2959 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2960
2961 gcc_unreachable ();
d744e06e
AH
2962}
2963
2964const char *
a2369ed3 2965output_vec_const_move (rtx *operands)
d744e06e
AH
2966{
2967 int cst, cst2;
2968 enum machine_mode mode;
2969 rtx dest, vec;
2970
2971 dest = operands[0];
2972 vec = operands[1];
d744e06e 2973 mode = GET_MODE (dest);
69ef87e2 2974
d744e06e
AH
2975 if (TARGET_ALTIVEC)
2976 {
66180ff3 2977 rtx splat_vec;
d744e06e
AH
2978 if (zero_constant (vec, mode))
2979 return "vxor %0,%0,%0";
37409796 2980
66180ff3
PB
2981 splat_vec = gen_easy_altivec_constant (vec);
2982 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2983 operands[1] = XEXP (splat_vec, 0);
2984 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2985 return "#";
bb8df8a6 2986
66180ff3 2987 switch (GET_MODE (splat_vec))
98ef3137 2988 {
37409796 2989 case V4SImode:
66180ff3 2990 return "vspltisw %0,%1";
c4ad648e 2991
37409796 2992 case V8HImode:
66180ff3 2993 return "vspltish %0,%1";
c4ad648e 2994
37409796 2995 case V16QImode:
66180ff3 2996 return "vspltisb %0,%1";
bb8df8a6 2997
37409796
NS
2998 default:
2999 gcc_unreachable ();
98ef3137 3000 }
69ef87e2
AH
3001 }
3002
37409796 3003 gcc_assert (TARGET_SPE);
bb8df8a6 3004
37409796
NS
3005 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3006 pattern of V1DI, V4HI, and V2SF.
3007
3008 FIXME: We should probably return # and add post reload
3009 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
3010 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3011 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3012 operands[1] = CONST_VECTOR_ELT (vec, 0);
3013 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
3014 if (cst == cst2)
3015 return "li %0,%1\n\tevmergelo %0,%0,%0";
3016 else
3017 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
3018}
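/* Example outputs (editor's note): an all-zero AltiVec vector yields
   "vxor %0,%0,%0"; a V4SImode splat of 5 rewrites operands[1] to the
   scalar 5 and yields "vspltisw %0,%1"; a constant that needs the
   splat-and-add trick prints "#" and is handled by a later splitter.
   On SPE, the V2SImode constant { 3, 3 } becomes
   "li %0,3\n\tevmergelo %0,%0,%0".  */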
3019
f5027409
RE
3020/* Initialize the paired-single vector TARGET to VALS.  */
3021
3022void
3023paired_expand_vector_init (rtx target, rtx vals)
3024{
3025 enum machine_mode mode = GET_MODE (target);
3026 int n_elts = GET_MODE_NUNITS (mode);
3027 int n_var = 0;
0a2aaacc 3028 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3029 int i;
3030
3031 for (i = 0; i < n_elts; ++i)
3032 {
3033 x = XVECEXP (vals, 0, i);
3034 if (!CONSTANT_P (x))
3035 ++n_var;
3036 }
3037 if (n_var == 0)
3038 {
3039 /* Load from constant pool. */
3040 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3041 return;
3042 }
3043
3044 if (n_var == 2)
3045 {
3046 /* The vector is initialized only with non-constants. */
0a2aaacc 3047 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3048 XVECEXP (vals, 0, 1));
3049
0a2aaacc 3050 emit_move_insn (target, new_rtx);
f5027409
RE
3051 return;
3052 }
3053
3054 /* One field is non-constant and the other one is a constant. Load the
 3055      constant from the constant pool and use the ps_merge instruction to
3056 construct the whole vector. */
3057 op1 = XVECEXP (vals, 0, 0);
3058 op2 = XVECEXP (vals, 0, 1);
3059
3060 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3061
3062 tmp = gen_reg_rtx (GET_MODE (constant_op));
3063 emit_move_insn (tmp, constant_op);
3064
3065 if (CONSTANT_P (op1))
0a2aaacc 3066 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3067 else
0a2aaacc 3068 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3069
0a2aaacc 3070 emit_move_insn (target, new_rtx);
f5027409
RE
3071}
3072
e2e95f45
RE
3073void
3074paired_expand_vector_move (rtx operands[])
3075{
3076 rtx op0 = operands[0], op1 = operands[1];
3077
3078 emit_move_insn (op0, op1);
3079}
3080
 3081/* Emit vector compare for code RCODE.  DEST is the destination, OP1 and
 3082   OP2 are the two VEC_COND_EXPR operands, and CC_OP0 and CC_OP1 are the
 3083   two operands of the comparison RCODE.  This is a recursive
 3084   function.  */
3085
3086static void
3087paired_emit_vector_compare (enum rtx_code rcode,
3088 rtx dest, rtx op0, rtx op1,
3089 rtx cc_op0, rtx cc_op1)
3090{
3091 rtx tmp = gen_reg_rtx (V2SFmode);
3092 rtx tmp1, max, min, equal_zero;
3093
3094 gcc_assert (TARGET_PAIRED_FLOAT);
3095 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3096
3097 switch (rcode)
3098 {
3099 case LT:
3100 case LTU:
3101 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3102 return;
3103 case GE:
3104 case GEU:
3105 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3106 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3107 return;
3108 case LE:
3109 case LEU:
3110 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3111 return;
3112 case GT:
3113 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3114 return;
3115 case EQ:
3116 tmp1 = gen_reg_rtx (V2SFmode);
3117 max = gen_reg_rtx (V2SFmode);
3118 min = gen_reg_rtx (V2SFmode);
3119 equal_zero = gen_reg_rtx (V2SFmode);
3120
3121 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3122 emit_insn (gen_selv2sf4
3123 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3124 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3125 emit_insn (gen_selv2sf4
3126 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3127 emit_insn (gen_subv2sf3 (tmp1, min, max));
3128 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3129 return;
3130 case NE:
3131 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3132 return;
3133 case UNLE:
3134 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3135 return;
3136 case UNLT:
3137 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3138 return;
3139 case UNGE:
3140 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3141 return;
3142 case UNGT:
3143 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3144 return;
3145 default:
3146 gcc_unreachable ();
3147 }
3148
3149 return;
3150}
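/* Sketch of the reduction used above (illustrative): GE/GEU is the only
   case emitted directly -- tmp = cc_op0 - cc_op1 via subv2sf3, and selv2sf4
   presumably picks op0 in lanes where tmp >= 0.0 and op1 otherwise.  Every
   other code is rewritten in terms of it: LT swaps OP0 and OP1, LE swaps
   CC_OP0 and CC_OP1, and EQ combines both orderings through the min/max
   temporaries so that only lanes equal in both directions select op0.  */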
3151
3152/* Emit vector conditional expression.
3153 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3154 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3155
3156int
3157paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3158 rtx cond, rtx cc_op0, rtx cc_op1)
3159{
3160 enum rtx_code rcode = GET_CODE (cond);
3161
3162 if (!TARGET_PAIRED_FLOAT)
3163 return 0;
3164
3165 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3166
3167 return 1;
3168}
3169
7a4eca66
DE
3170/* Initialize vector TARGET to VALS. */
3171
3172void
3173rs6000_expand_vector_init (rtx target, rtx vals)
3174{
3175 enum machine_mode mode = GET_MODE (target);
3176 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3177 int n_elts = GET_MODE_NUNITS (mode);
3178 int n_var = 0, one_var = -1;
3179 bool all_same = true, all_const_zero = true;
3180 rtx x, mem;
3181 int i;
3182
3183 for (i = 0; i < n_elts; ++i)
3184 {
3185 x = XVECEXP (vals, 0, i);
3186 if (!CONSTANT_P (x))
3187 ++n_var, one_var = i;
3188 else if (x != CONST0_RTX (inner_mode))
3189 all_const_zero = false;
3190
3191 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3192 all_same = false;
3193 }
3194
3195 if (n_var == 0)
3196 {
501fb355 3197 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3198 if (mode != V4SFmode && all_const_zero)
3199 {
3200 /* Zero register. */
3201 emit_insn (gen_rtx_SET (VOIDmode, target,
3202 gen_rtx_XOR (mode, target, target)));
3203 return;
3204 }
501fb355 3205 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3206 {
3207 /* Splat immediate. */
501fb355 3208 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3209 return;
3210 }
3211 else if (all_same)
3212 ; /* Splat vector element. */
3213 else
3214 {
3215 /* Load from constant pool. */
501fb355 3216 emit_move_insn (target, const_vec);
7a4eca66
DE
3217 return;
3218 }
3219 }
3220
3221 /* Store value to stack temp. Load vector element. Splat. */
3222 if (all_same)
3223 {
3224 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3225 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3226 XVECEXP (vals, 0, 0));
3227 x = gen_rtx_UNSPEC (VOIDmode,
3228 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3229 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3230 gen_rtvec (2,
3231 gen_rtx_SET (VOIDmode,
3232 target, mem),
3233 x)));
3234 x = gen_rtx_VEC_SELECT (inner_mode, target,
3235 gen_rtx_PARALLEL (VOIDmode,
3236 gen_rtvec (1, const0_rtx)));
3237 emit_insn (gen_rtx_SET (VOIDmode, target,
3238 gen_rtx_VEC_DUPLICATE (mode, x)));
3239 return;
3240 }
3241
3242 /* One field is non-constant. Load constant then overwrite
3243 varying field. */
3244 if (n_var == 1)
3245 {
3246 rtx copy = copy_rtx (vals);
3247
57b51d4d 3248 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3249 varying element. */
3250 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3251 rs6000_expand_vector_init (target, copy);
3252
3253 /* Insert variable. */
3254 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3255 return;
3256 }
3257
3258 /* Construct the vector in memory one field at a time
3259 and load the whole vector. */
3260 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3261 for (i = 0; i < n_elts; i++)
3262 emit_move_insn (adjust_address_nv (mem, inner_mode,
3263 i * GET_MODE_SIZE (inner_mode)),
3264 XVECEXP (vals, 0, i));
3265 emit_move_insn (target, mem);
3266}
3267
3268/* Set field ELT of TARGET to VAL. */
3269
3270void
3271rs6000_expand_vector_set (rtx target, rtx val, int elt)
3272{
3273 enum machine_mode mode = GET_MODE (target);
3274 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3275 rtx reg = gen_reg_rtx (mode);
3276 rtx mask, mem, x;
3277 int width = GET_MODE_SIZE (inner_mode);
3278 int i;
3279
3280 /* Load single variable value. */
3281 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3282 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3283 x = gen_rtx_UNSPEC (VOIDmode,
3284 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3285 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3286 gen_rtvec (2,
3287 gen_rtx_SET (VOIDmode,
3288 reg, mem),
3289 x)));
3290
3291 /* Linear sequence. */
3292 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3293 for (i = 0; i < 16; ++i)
3294 XVECEXP (mask, 0, i) = GEN_INT (i);
3295
3296 /* Set permute mask to insert element into target. */
3297 for (i = 0; i < width; ++i)
3298 XVECEXP (mask, 0, elt*width + i)
3299 = GEN_INT (i + 0x10);
3300 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3301 x = gen_rtx_UNSPEC (mode,
3302 gen_rtvec (3, target, reg,
3303 force_reg (V16QImode, x)),
3304 UNSPEC_VPERM);
3305 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3306}
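/* Worked example of the permute mask built above (illustrative): for
   V4SImode and ELT == 2, WIDTH is 4, so mask bytes 8..11 become 0x10..0x13
   while the rest stay 0..15.  The vperm therefore copies TARGET unchanged
   except for its third word, which is taken from the first word of REG,
   where the UNSPEC_LVE load is assumed to have placed VAL.  */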
3307
3308/* Extract field ELT from VEC into TARGET. */
3309
3310void
3311rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3312{
3313 enum machine_mode mode = GET_MODE (vec);
3314 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3315 rtx mem, x;
3316
3317 /* Allocate mode-sized buffer. */
3318 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3319
3320 /* Add offset to field within buffer matching vector element. */
3321 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3322
3323 /* Store single field into mode-sized buffer. */
3324 x = gen_rtx_UNSPEC (VOIDmode,
3325 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3326 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3327 gen_rtvec (2,
3328 gen_rtx_SET (VOIDmode,
3329 mem, vec),
3330 x)));
3331 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3332}
3333
0ba1b2ff
AM
3334/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3335 implement ANDing by the mask IN. */
3336void
a2369ed3 3337build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3338{
3339#if HOST_BITS_PER_WIDE_INT >= 64
3340 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3341 int shift;
3342
37409796 3343 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3344
3345 c = INTVAL (in);
3346 if (c & 1)
3347 {
3348 /* Assume c initially something like 0x00fff000000fffff. The idea
3349 is to rotate the word so that the middle ^^^^^^ group of zeros
3350 is at the MS end and can be cleared with an rldicl mask. We then
3351 rotate back and clear off the MS ^^ group of zeros with a
3352 second rldicl. */
3353 c = ~c; /* c == 0xff000ffffff00000 */
3354 lsb = c & -c; /* lsb == 0x0000000000100000 */
3355 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3356 c = ~c; /* c == 0x00fff000000fffff */
3357 c &= -lsb; /* c == 0x00fff00000000000 */
3358 lsb = c & -c; /* lsb == 0x0000100000000000 */
3359 c = ~c; /* c == 0xff000fffffffffff */
3360 c &= -lsb; /* c == 0xff00000000000000 */
3361 shift = 0;
3362 while ((lsb >>= 1) != 0)
3363 shift++; /* shift == 44 on exit from loop */
3364 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3365 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3366 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3367 }
3368 else
0ba1b2ff
AM
3369 {
3370 /* Assume c initially something like 0xff000f0000000000. The idea
3371 is to rotate the word so that the ^^^ middle group of zeros
3372 is at the LS end and can be cleared with an rldicr mask. We then
3373 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3374 a second rldicr. */
3375 lsb = c & -c; /* lsb == 0x0000010000000000 */
3376 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3377 c = ~c; /* c == 0x00fff0ffffffffff */
3378 c &= -lsb; /* c == 0x00fff00000000000 */
3379 lsb = c & -c; /* lsb == 0x0000100000000000 */
3380 c = ~c; /* c == 0xff000fffffffffff */
3381 c &= -lsb; /* c == 0xff00000000000000 */
3382 shift = 0;
3383 while ((lsb >>= 1) != 0)
3384 shift++; /* shift == 44 on exit from loop */
3385 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3386 m1 >>= shift; /* m1 == 0x0000000000000fff */
3387 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3388 }
3389
3390 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3391 masks will be all 1's. We are guaranteed more than one transition. */
3392 out[0] = GEN_INT (64 - shift);
3393 out[1] = GEN_INT (m1);
3394 out[2] = GEN_INT (shift);
3395 out[3] = GEN_INT (m2);
3396#else
045572c7
GK
3397 (void)in;
3398 (void)out;
37409796 3399 gcc_unreachable ();
0ba1b2ff 3400#endif
a260abc9
DE
3401}
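/* Worked example (using the values traced in the comments above): for
   IN == 0x00fff000000fffff the loop exits with shift == 44,
   m1 == 0x000000ffffffffff and m2 == 0x00ffffffffffffff, so the outputs
   become out[0] = 20, out[1] = m1, out[2] = 44, out[3] = m2 -- the rotate
   amounts and masks consumed by the two rotate-and-mask insns described
   in the comment above.  */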
3402
54b695e7 3403/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3404
3405bool
54b695e7
AH
3406invalid_e500_subreg (rtx op, enum machine_mode mode)
3407{
61c76239
JM
3408 if (TARGET_E500_DOUBLE)
3409 {
17caeff2 3410 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3411 subreg:TI and reg:TF. Decimal float modes are like integer
3412 modes (only low part of each register used) for this
3413 purpose. */
61c76239 3414 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3415 && (mode == SImode || mode == DImode || mode == TImode
3416 || mode == DDmode || mode == TDmode)
61c76239 3417 && REG_P (SUBREG_REG (op))
17caeff2 3418 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3419 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3420 return true;
3421
17caeff2
JM
3422 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3423 reg:TI. */
61c76239 3424 if (GET_CODE (op) == SUBREG
4f011e1e 3425 && (mode == DFmode || mode == TFmode)
61c76239 3426 && REG_P (SUBREG_REG (op))
17caeff2 3427 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3428 || GET_MODE (SUBREG_REG (op)) == TImode
3429 || GET_MODE (SUBREG_REG (op)) == DDmode
3430 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3431 return true;
3432 }
54b695e7 3433
61c76239
JM
3434 if (TARGET_SPE
3435 && GET_CODE (op) == SUBREG
3436 && mode == SImode
54b695e7 3437 && REG_P (SUBREG_REG (op))
14502dad 3438 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3439 return true;
3440
3441 return false;
3442}
3443
58182de3 3444/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3445 field is an FP double while the FP fields remain word aligned. */
3446
19d66194 3447unsigned int
fa5b0972
AM
3448rs6000_special_round_type_align (tree type, unsigned int computed,
3449 unsigned int specified)
95727fb8 3450{
fa5b0972 3451 unsigned int align = MAX (computed, specified);
95727fb8 3452 tree field = TYPE_FIELDS (type);
95727fb8 3453
bb8df8a6 3454 /* Skip all non-field decls.  */
85962ac8 3455 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3456 field = TREE_CHAIN (field);
3457
fa5b0972
AM
3458 if (field != NULL && field != type)
3459 {
3460 type = TREE_TYPE (field);
3461 while (TREE_CODE (type) == ARRAY_TYPE)
3462 type = TREE_TYPE (type);
3463
3464 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3465 align = MAX (align, 64);
3466 }
95727fb8 3467
fa5b0972 3468 return align;
95727fb8
AP
3469}
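/* Illustrative effect (a sketch of the rule above): for a record such as
     struct s { double d; char c; };
   the first FIELD_DECL has DFmode, so the record's alignment is raised to
   at least 64 bits, while the double field itself keeps the word alignment
   described in the comment above.  */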
3470
58182de3
GK
3471/* Darwin increases record alignment to the natural alignment of
3472 the first field. */
3473
3474unsigned int
3475darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3476 unsigned int specified)
3477{
3478 unsigned int align = MAX (computed, specified);
3479
3480 if (TYPE_PACKED (type))
3481 return align;
3482
3483 /* Find the first field, looking down into aggregates. */
3484 do {
3485 tree field = TYPE_FIELDS (type);
 3487 /* Skip all non-field decls.  */
3487 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3488 field = TREE_CHAIN (field);
3489 if (! field)
3490 break;
3491 type = TREE_TYPE (field);
3492 while (TREE_CODE (type) == ARRAY_TYPE)
3493 type = TREE_TYPE (type);
3494 } while (AGGREGATE_TYPE_P (type));
3495
3496 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3497 align = MAX (align, TYPE_ALIGN (type));
3498
3499 return align;
3500}
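/* Illustrative effect: for
     struct s { double d; char c; };
   the loop stops at the leading double field and, assuming TYPE_ALIGN of
   double is 64 on Darwin, the whole record is aligned to 64 bits; a leading
   nested struct or array is descended into first.  */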
3501
a4f6c312 3502/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3503
3504int
f676971a 3505small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3506 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3507{
38c1f2d7 3508#if TARGET_ELF
5f59ecb7 3509 rtx sym_ref;
7509c759 3510
d9407988 3511 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3512 return 0;
a54d04b7 3513
f607bc57 3514 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3515 return 0;
3516
2aa42e6e
NF
3517 /* Vector and float memory instructions have a limited offset on the
3518 SPE, so using a vector or float variable directly as an operand is
3519 not useful. */
3520 if (TARGET_SPE
3521 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3522 return 0;
3523
88228c4b
MM
3524 if (GET_CODE (op) == SYMBOL_REF)
3525 sym_ref = op;
3526
3527 else if (GET_CODE (op) != CONST
3528 || GET_CODE (XEXP (op, 0)) != PLUS
3529 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3530 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3531 return 0;
3532
88228c4b 3533 else
dbf55e53
MM
3534 {
3535 rtx sum = XEXP (op, 0);
3536 HOST_WIDE_INT summand;
3537
3538 /* We have to be careful here, because it is the referenced address
c4ad648e 3539 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3540 summand = INTVAL (XEXP (sum, 1));
307b599c 3541 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3542 return 0;
dbf55e53
MM
3543
3544 sym_ref = XEXP (sum, 0);
3545 }
88228c4b 3546
20bfcd69 3547 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3548#else
3549 return 0;
3550#endif
7509c759 3551}
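/* Illustrative examples of accepted operands (when the ABI is V.4 and
   rs6000_sdata selects a small-data section): a plain (symbol_ref) whose
   SYMBOL_REF_SMALL_P flag is set, or
   (const (plus (symbol_ref ...) (const_int N))) provided 0 <= N <=
   g_switch_value, so the referenced address stays within 32k of
   _SDA_BASE_.  */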
46c07df8 3552
3a1f863f 3553/* Return true if either operand is a general purpose register. */
46c07df8 3554
3a1f863f
DE
3555bool
3556gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3557{
3a1f863f
DE
3558 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3559 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3560}
3561
9ebbca7d 3562\f
4d588c14
RH
3563/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3564
4d588c14 3565static bool
a2369ed3 3566constant_pool_expr_p (rtx op)
9ebbca7d 3567{
2e4316da
RS
3568 rtx base, offset;
3569
3570 split_const (op, &base, &offset);
3571 return (GET_CODE (base) == SYMBOL_REF
3572 && CONSTANT_POOL_ADDRESS_P (base)
3573 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
9ebbca7d
GK
3574}
3575
48d72335 3576bool
a2369ed3 3577toc_relative_expr_p (rtx op)
9ebbca7d 3578{
2e4316da
RS
3579 rtx base, offset;
3580
3581 if (GET_CODE (op) != CONST)
3582 return false;
3583
3584 split_const (op, &base, &offset);
3585 return (GET_CODE (base) == UNSPEC
3586 && XINT (base, 1) == UNSPEC_TOCREL);
4d588c14
RH
3587}
3588
4d588c14 3589bool
a2369ed3 3590legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3591{
3592 return (TARGET_TOC
3593 && GET_CODE (x) == PLUS
3594 && GET_CODE (XEXP (x, 0)) == REG
3595 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 3596 && toc_relative_expr_p (XEXP (x, 1)));
4d588c14
RH
3597}
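/* Illustrative shape of an address accepted above:
     (plus (reg 2) (const (unspec [(symbol_ref ...)] UNSPEC_TOCREL)))
   i.e. the TOC pointer (normally GPR 2) plus a TOC-relative reference,
   possibly with a constant offset folded into the CONST.  */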
3598
d04b6e6e
EB
3599static bool
3600legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3601{
3602 return (DEFAULT_ABI == ABI_V4
3603 && !flag_pic && !TARGET_TOC
3604 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3605 && small_data_operand (x, mode));
3606}
3607
60cdabab
DE
3608/* SPE offset addressing is limited to 5-bits worth of double words. */
3609#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
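/* The mask test above accepts exactly the doubleword-aligned offsets
   0, 8, 16, ..., 248 (32 slots, i.e. 5 bits worth of doublewords): for
   example SPE_CONST_OFFSET_OK (248) is true, while SPE_CONST_OFFSET_OK (4)
   and SPE_CONST_OFFSET_OK (256) both leave bits outside 0xf8 set and are
   rejected.  */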
3610
76d2b81d
DJ
3611bool
3612rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3613{
3614 unsigned HOST_WIDE_INT offset, extra;
3615
3616 if (GET_CODE (x) != PLUS)
3617 return false;
3618 if (GET_CODE (XEXP (x, 0)) != REG)
3619 return false;
3620 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3621 return false;
60cdabab
DE
3622 if (legitimate_constant_pool_address_p (x))
3623 return true;
4d588c14
RH
3624 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3625 return false;
3626
3627 offset = INTVAL (XEXP (x, 1));
3628 extra = 0;
3629 switch (mode)
3630 {
3631 case V16QImode:
3632 case V8HImode:
3633 case V4SFmode:
3634 case V4SImode:
7a4eca66 3635 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3636 constant offset zero should not occur due to canonicalization. */
3637 return false;
4d588c14
RH
3638
3639 case V4HImode:
3640 case V2SImode:
3641 case V1DImode:
3642 case V2SFmode:
d42a3bae 3643 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3644 constant offset zero should not occur due to canonicalization. */
d42a3bae 3645 if (TARGET_PAIRED_FLOAT)
1a23970d 3646 return false;
4d588c14
RH
3647 /* SPE vector modes. */
3648 return SPE_CONST_OFFSET_OK (offset);
3649
3650 case DFmode:
4d4cbc0e
AH
3651 if (TARGET_E500_DOUBLE)
3652 return SPE_CONST_OFFSET_OK (offset);
3653
4f011e1e 3654 case DDmode:
4d588c14 3655 case DImode:
54b695e7
AH
3656 /* On e500v2, we may have:
3657
3658 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3659
3660 Which gets addressed with evldd instructions. */
3661 if (TARGET_E500_DOUBLE)
3662 return SPE_CONST_OFFSET_OK (offset);
3663
7393f7f8 3664 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3665 extra = 4;
3666 else if (offset & 3)
3667 return false;
3668 break;
3669
3670 case TFmode:
17caeff2
JM
3671 if (TARGET_E500_DOUBLE)
3672 return (SPE_CONST_OFFSET_OK (offset)
3673 && SPE_CONST_OFFSET_OK (offset + 8));
3674
4f011e1e 3675 case TDmode:
4d588c14 3676 case TImode:
7393f7f8 3677 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3678 extra = 12;
3679 else if (offset & 3)
3680 return false;
3681 else
3682 extra = 8;
3683 break;
3684
3685 default:
3686 break;
3687 }
3688
b1917422
AM
3689 offset += 0x8000;
3690 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3691}
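/* Worked example of the final range check above (illustrative): the biased
   offset must fit a signed 16-bit displacement, with EXTRA reserving room
   for the trailing words of a multi-word access.  For DImode on a 32-bit
   target EXTRA is 4, so an offset of 32764 is rejected: 32764 + 0x8000 is
   below 0x10000, but 32764 + 4 + 0x8000 is not, i.e. the second word would
   need an out-of-range displacement.  */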
3692
6fb5fa3c 3693bool
a2369ed3 3694legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3695{
3696 rtx op0, op1;
3697
3698 if (GET_CODE (x) != PLUS)
3699 return false;
850e8d3d 3700
4d588c14
RH
3701 op0 = XEXP (x, 0);
3702 op1 = XEXP (x, 1);
3703
bf00cc0f 3704 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3705 replaced with proper base and index regs. */
3706 if (!strict
3707 && reload_in_progress
3708 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3709 && REG_P (op1))
3710 return true;
3711
3712 return (REG_P (op0) && REG_P (op1)
3713 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3714 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3715 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3716 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3717}
3718
001b9eb6
PH
3719bool
3720avoiding_indexed_address_p (enum machine_mode mode)
3721{
3722 /* Avoid indexed addressing for modes that have non-indexed
3723 load/store instruction forms. */
3724 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
3725}
3726
48d72335 3727inline bool
a2369ed3 3728legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3729{
3730 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3731}
3732
48d72335 3733bool
4c81e946
FJ
3734macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3735{
c4ad648e 3736 if (!TARGET_MACHO || !flag_pic
9390387d 3737 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3738 return false;
3739 x = XEXP (x, 0);
4c81e946
FJ
3740
3741 if (GET_CODE (x) != LO_SUM)
3742 return false;
3743 if (GET_CODE (XEXP (x, 0)) != REG)
3744 return false;
3745 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3746 return false;
3747 x = XEXP (x, 1);
3748
3749 return CONSTANT_P (x);
3750}
3751
4d588c14 3752static bool
a2369ed3 3753legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3754{
3755 if (GET_CODE (x) != LO_SUM)
3756 return false;
3757 if (GET_CODE (XEXP (x, 0)) != REG)
3758 return false;
3759 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3760 return false;
54b695e7 3761 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3762 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3763 || mode == DDmode || mode == TDmode
17caeff2 3764 || mode == DImode))
f82f556d 3765 return false;
4d588c14
RH
3766 x = XEXP (x, 1);
3767
8622e235 3768 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3769 {
a29077da 3770 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3771 return false;
3772 if (TARGET_TOC)
3773 return false;
3774 if (GET_MODE_NUNITS (mode) != 1)
3775 return false;
5e5f01b9 3776 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3777 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 3778 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 3779 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3780 return false;
3781
3782 return CONSTANT_P (x);
3783 }
3784
3785 return false;
3786}
3787
3788
9ebbca7d
GK
3789/* Try machine-dependent ways of modifying an illegitimate address
3790 to be legitimate. If we find one, return the new, valid address.
3791 This is used from only one place: `memory_address' in explow.c.
3792
a4f6c312
SS
3793 OLDX is the address as it was before break_out_memory_refs was
3794 called. In some cases it is useful to look at this to decide what
3795 needs to be done.
9ebbca7d 3796
a4f6c312 3797 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3798
a4f6c312
SS
3799 It is always safe for this function to do nothing. It exists to
3800 recognize opportunities to optimize the output.
9ebbca7d
GK
3801
3802 On RS/6000, first check for the sum of a register with a constant
3803 integer that is out of range. If so, generate code to add the
3804 constant with the low-order 16 bits masked to the register and force
3805 this result into another register (this can be done with `cau').
3806 Then generate an address of REG+(CONST&0xffff), allowing for the
3807 possibility of bit 16 being a one.
3808
3809 Then check for the sum of a register and something not constant, try to
3810 load the other things into a register and return the sum. */
4d588c14 3811
9ebbca7d 3812rtx
a2369ed3
DJ
3813rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3814 enum machine_mode mode)
0ac081f6 3815{
c4501e62
JJ
3816 if (GET_CODE (x) == SYMBOL_REF)
3817 {
3818 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3819 if (model != 0)
3820 return rs6000_legitimize_tls_address (x, model);
3821 }
3822
f676971a 3823 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3824 && GET_CODE (XEXP (x, 0)) == REG
3825 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb 3826 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
7da13f1d
NF
3827 && !((TARGET_POWERPC64
3828 && (mode == DImode || mode == TImode)
3829 && (INTVAL (XEXP (x, 1)) & 3) != 0)
3830 || SPE_VECTOR_MODE (mode)
efc05e3c 3831 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3832 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3833 || mode == DImode || mode == DDmode
3834 || mode == TDmode))))
f676971a 3835 {
9ebbca7d
GK
3836 HOST_WIDE_INT high_int, low_int;
3837 rtx sum;
a65c591c
DE
3838 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3839 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3840 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3841 GEN_INT (high_int)), 0);
3842 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3843 }
f676971a 3844 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3845 && GET_CODE (XEXP (x, 0)) == REG
3846 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3847 && GET_MODE_NUNITS (mode) == 1
696e45ba 3848 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 3849 || TARGET_POWERPC64
efc05e3c 3850 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3851 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3852 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 3853 && !avoiding_indexed_address_p (mode)
efc05e3c
PB
3854 && mode != TImode
3855 && mode != TFmode
3856 && mode != TDmode)
9ebbca7d
GK
3857 {
3858 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3859 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3860 }
0ac081f6
AH
3861 else if (ALTIVEC_VECTOR_MODE (mode))
3862 {
3863 rtx reg;
3864
3865 /* Make sure both operands are registers. */
3866 if (GET_CODE (x) == PLUS)
9f85ed45 3867 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3868 force_reg (Pmode, XEXP (x, 1)));
3869
3870 reg = force_reg (Pmode, x);
3871 return reg;
3872 }
4d4cbc0e 3873 else if (SPE_VECTOR_MODE (mode)
17caeff2 3874 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3875 || mode == DDmode || mode == TDmode
54b695e7 3876 || mode == DImode)))
a3170dc6 3877 {
54b695e7
AH
3878 if (mode == DImode)
3879 return NULL_RTX;
a3170dc6
AH
3880 /* We accept [reg + reg] and [reg + OFFSET]. */
3881
3882 if (GET_CODE (x) == PLUS)
61dd226f
NF
3883 {
3884 rtx op1 = XEXP (x, 0);
3885 rtx op2 = XEXP (x, 1);
3886 rtx y;
3887
3888 op1 = force_reg (Pmode, op1);
3889
3890 if (GET_CODE (op2) != REG
3891 && (GET_CODE (op2) != CONST_INT
3892 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3893 || (GET_MODE_SIZE (mode) > 8
3894 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3895 op2 = force_reg (Pmode, op2);
3896
3897 /* We can't always do [reg + reg] for these, because [reg +
3898 reg + offset] is not a legitimate addressing mode. */
3899 y = gen_rtx_PLUS (Pmode, op1, op2);
3900
4f011e1e 3901 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3902 return force_reg (Pmode, y);
3903 else
3904 return y;
3905 }
a3170dc6
AH
3906
3907 return force_reg (Pmode, x);
3908 }
f1384257
AM
3909 else if (TARGET_ELF
3910 && TARGET_32BIT
3911 && TARGET_NO_TOC
3912 && ! flag_pic
9ebbca7d 3913 && GET_CODE (x) != CONST_INT
f676971a 3914 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3915 && CONSTANT_P (x)
6ac7bf2c
GK
3916 && GET_MODE_NUNITS (mode) == 1
3917 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 3918 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3919 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3920 {
3921 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3922 emit_insn (gen_elf_high (reg, x));
3923 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3924 }
ee890fe2
SS
3925 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3926 && ! flag_pic
ab82a49f
AP
3927#if TARGET_MACHO
3928 && ! MACHO_DYNAMIC_NO_PIC_P
3929#endif
ee890fe2 3930 && GET_CODE (x) != CONST_INT
f676971a 3931 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3932 && CONSTANT_P (x)
506a7bc8 3933 && GET_MODE_NUNITS (mode) == 1
696e45ba 3934 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3935 || (mode != DFmode && mode != DDmode))
f676971a 3936 && mode != DImode
ee890fe2
SS
3937 && mode != TImode)
3938 {
3939 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3940 emit_insn (gen_macho_high (reg, x));
3941 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3942 }
f676971a 3943 else if (TARGET_TOC
0cdc04e8 3944 && GET_CODE (x) == SYMBOL_REF
4d588c14 3945 && constant_pool_expr_p (x)
a9098fd0 3946 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3947 {
3948 return create_TOC_reference (x);
3949 }
3950 else
3951 return NULL_RTX;
3952}
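/* Worked example of the first case above (illustrative): for
   (plus (reg) (const_int 0x12345)) the biased value 0x1a345 is outside the
   16-bit range, so low_int becomes 0x2345 and high_int 0x10000; the high
   part is added to the register separately and the returned address is
   (plus (reg') (const_int 0x2345)), which a D-form load or store can use
   directly.  */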
258bfae2 3953
fdbe66f2 3954/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3955 We need to emit DTP-relative relocations. */
3956
fdbe66f2 3957static void
c973d557
JJ
3958rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3959{
3960 switch (size)
3961 {
3962 case 4:
3963 fputs ("\t.long\t", file);
3964 break;
3965 case 8:
3966 fputs (DOUBLE_INT_ASM_OP, file);
3967 break;
3968 default:
37409796 3969 gcc_unreachable ();
c973d557
JJ
3970 }
3971 output_addr_const (file, x);
3972 fputs ("@dtprel+0x8000", file);
3973}
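/* Illustrative output of the function above: for size 4 it prints a line
   of the form
        .long   sym@dtprel+0x8000
   and for size 8 the same operand prefixed by DOUBLE_INT_ASM_OP.  */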
3974
c4501e62
JJ
3975/* Construct the SYMBOL_REF for the tls_get_addr function. */
3976
3977static GTY(()) rtx rs6000_tls_symbol;
3978static rtx
863d938c 3979rs6000_tls_get_addr (void)
c4501e62
JJ
3980{
3981 if (!rs6000_tls_symbol)
3982 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3983
3984 return rs6000_tls_symbol;
3985}
3986
3987/* Construct the SYMBOL_REF for TLS GOT references. */
3988
3989static GTY(()) rtx rs6000_got_symbol;
3990static rtx
863d938c 3991rs6000_got_sym (void)
c4501e62
JJ
3992{
3993 if (!rs6000_got_symbol)
3994 {
3995 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3996 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3997 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3998 }
c4501e62
JJ
3999
4000 return rs6000_got_symbol;
4001}
4002
4003/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4004 this (thread-local) address. */
4005
4006static rtx
a2369ed3 4007rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
4008{
4009 rtx dest, insn;
4010
4011 dest = gen_reg_rtx (Pmode);
4012 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4013 {
4014 rtx tlsreg;
4015
4016 if (TARGET_64BIT)
4017 {
4018 tlsreg = gen_rtx_REG (Pmode, 13);
4019 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4020 }
4021 else
4022 {
4023 tlsreg = gen_rtx_REG (Pmode, 2);
4024 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4025 }
4026 emit_insn (insn);
4027 }
4028 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4029 {
4030 rtx tlsreg, tmp;
4031
4032 tmp = gen_reg_rtx (Pmode);
4033 if (TARGET_64BIT)
4034 {
4035 tlsreg = gen_rtx_REG (Pmode, 13);
4036 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4037 }
4038 else
4039 {
4040 tlsreg = gen_rtx_REG (Pmode, 2);
4041 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4042 }
4043 emit_insn (insn);
4044 if (TARGET_64BIT)
4045 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4046 else
4047 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4048 emit_insn (insn);
4049 }
4050 else
4051 {
4052 rtx r3, got, tga, tmp1, tmp2, eqv;
4053
4fed8f8f
AM
4054 /* We currently use relocations like @got@tlsgd for tls, which
4055 means the linker will handle allocation of tls entries, placing
4056 them in the .got section. So use a pointer to the .got section,
4057 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4058 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4059 if (TARGET_64BIT)
972f427b 4060 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
4061 else
4062 {
4063 if (flag_pic == 1)
4064 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4065 else
4066 {
4067 rtx gsym = rs6000_got_sym ();
4068 got = gen_reg_rtx (Pmode);
4069 if (flag_pic == 0)
4070 rs6000_emit_move (got, gsym, Pmode);
4071 else
4072 {
e65a3857 4073 rtx tmp3, mem;
c4501e62
JJ
4074 rtx first, last;
4075
c4501e62
JJ
4076 tmp1 = gen_reg_rtx (Pmode);
4077 tmp2 = gen_reg_rtx (Pmode);
4078 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4079 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4080
e65a3857
DE
4081 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4082 emit_move_insn (tmp1,
1de43f85 4083 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
4084 emit_move_insn (tmp2, mem);
4085 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4086 last = emit_move_insn (got, tmp3);
bd94cb6e 4087 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4088 }
4089 }
4090 }
4091
4092 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4093 {
4094 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4095 tga = rs6000_tls_get_addr ();
4096
4097 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4098 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4099 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4100 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4101 else if (DEFAULT_ABI == ABI_V4)
4102 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4103 else
02135bc1
SB
4104 gcc_unreachable ();
4105
c4501e62 4106 start_sequence ();
c4501e62 4107 insn = emit_call_insn (insn);
becfd6e5 4108 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4109 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4110 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4111 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4112 insn = get_insns ();
4113 end_sequence ();
4114 emit_libcall_block (insn, dest, r3, addr);
4115 }
4116 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4117 {
4118 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4119 tga = rs6000_tls_get_addr ();
4120
4121 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4122 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4123 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4124 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4125 else if (DEFAULT_ABI == ABI_V4)
4126 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4127 else
02135bc1
SB
4128 gcc_unreachable ();
4129
c4501e62 4130 start_sequence ();
c4501e62 4131 insn = emit_call_insn (insn);
becfd6e5 4132 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4133 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4134 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4135 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4136 insn = get_insns ();
4137 end_sequence ();
4138 tmp1 = gen_reg_rtx (Pmode);
4139 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4140 UNSPEC_TLSLD);
4141 emit_libcall_block (insn, tmp1, r3, eqv);
4142 if (rs6000_tls_size == 16)
4143 {
4144 if (TARGET_64BIT)
4145 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4146 else
4147 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4148 }
4149 else if (rs6000_tls_size == 32)
4150 {
4151 tmp2 = gen_reg_rtx (Pmode);
4152 if (TARGET_64BIT)
4153 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4154 else
4155 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4156 emit_insn (insn);
4157 if (TARGET_64BIT)
4158 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4159 else
4160 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4161 }
4162 else
4163 {
4164 tmp2 = gen_reg_rtx (Pmode);
4165 if (TARGET_64BIT)
4166 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4167 else
4168 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4169 emit_insn (insn);
4170 insn = gen_rtx_SET (Pmode, dest,
4171 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4172 }
4173 emit_insn (insn);
4174 }
4175 else
4176 {
a7b376ee 4177 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4178 tmp2 = gen_reg_rtx (Pmode);
4179 if (TARGET_64BIT)
4180 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4181 else
4182 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4183 emit_insn (insn);
4184 if (TARGET_64BIT)
4185 insn = gen_tls_tls_64 (dest, tmp2, addr);
4186 else
4187 insn = gen_tls_tls_32 (dest, tmp2, addr);
4188 emit_insn (insn);
4189 }
4190 }
4191
4192 return dest;
4193}
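/* Summary of the cases handled above (illustrative): local-exec with
   -mtls-size=16 adds the tprel offset directly to the thread pointer
   (r13 on 64-bit, r2 on 32-bit); -mtls-size=32 uses a tprel_ha/tprel_lo
   pair instead.  The global- and local-dynamic models call __tls_get_addr
   with an argument built from the GOT/TOC pointer, and the remaining case
   (initial-exec, or 64-bit offset local-exec) loads the tprel value from
   the GOT and adds the thread pointer.  */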
4194
c4501e62
JJ
4195/* Return 1 if X contains a thread-local symbol. */
4196
4197bool
a2369ed3 4198rs6000_tls_referenced_p (rtx x)
c4501e62 4199{
cd413cab
AP
4200 if (! TARGET_HAVE_TLS)
4201 return false;
4202
c4501e62
JJ
4203 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4204}
4205
4206/* Return 1 if *X is a thread-local symbol. This is the same as
4207 rs6000_tls_symbol_ref except for the type of the unused argument. */
4208
9390387d 4209static int
a2369ed3 4210rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4211{
4212 return RS6000_SYMBOL_REF_TLS_P (*x);
4213}
4214
24ea750e
DJ
4215/* The convention appears to be to define this wherever it is used.
4216 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4217 is now used here. */
4218#ifndef REG_MODE_OK_FOR_BASE_P
4219#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4220#endif
4221
4222/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4223 replace the input X, or the original X if no replacement is called for.
4224 The output parameter *WIN is 1 if the calling macro should goto WIN,
4225 0 if it should not.
4226
4227 For RS/6000, we wish to handle large displacements off a base
 4228 register by splitting the addend across an addi/addis pair and the mem insn.
 4229 This cuts the number of extra insns needed from 3 to 1.
4230
4231 On Darwin, we use this to generate code for floating point constants.
4232 A movsf_low is generated so we wind up with 2 instructions rather than 3.
08a6a74b
RS
4233 The Darwin code is inside #if TARGET_MACHO because only then are the
4234 machopic_* functions defined. */
24ea750e 4235rtx
f676971a 4236rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4237 int opnum, int type,
4238 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4239{
f676971a 4240 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4241 if (GET_CODE (x) == PLUS
4242 && GET_CODE (XEXP (x, 0)) == PLUS
4243 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4244 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4245 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4246 {
4247 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4248 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4249 opnum, (enum reload_type)type);
24ea750e
DJ
4250 *win = 1;
4251 return x;
4252 }
3deb2758 4253
24ea750e
DJ
4254#if TARGET_MACHO
4255 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4256 && GET_CODE (x) == LO_SUM
4257 && GET_CODE (XEXP (x, 0)) == PLUS
4258 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4259 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4260 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4261 && machopic_operand_p (XEXP (x, 1)))
24ea750e
DJ
4262 {
4263 /* Result of previous invocation of this function on Darwin
6f317ef3 4264 floating point constant. */
24ea750e 4265 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4266 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4267 opnum, (enum reload_type)type);
24ea750e
DJ
4268 *win = 1;
4269 return x;
4270 }
4271#endif
4937d02d
DE
4272
4273 /* Force ld/std non-word aligned offset into base register by wrapping
4274 in offset 0. */
4275 if (GET_CODE (x) == PLUS
4276 && GET_CODE (XEXP (x, 0)) == REG
4277 && REGNO (XEXP (x, 0)) < 32
4278 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4279 && GET_CODE (XEXP (x, 1)) == CONST_INT
4280 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4281 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4282 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4283 && TARGET_POWERPC64)
4284 {
4285 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4286 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4287 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4288 opnum, (enum reload_type) type);
4289 *win = 1;
4290 return x;
4291 }
4292
24ea750e
DJ
4293 if (GET_CODE (x) == PLUS
4294 && GET_CODE (XEXP (x, 0)) == REG
4295 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4296 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4297 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4298 && !SPE_VECTOR_MODE (mode)
17caeff2 4299 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4300 || mode == DDmode || mode == TDmode
54b695e7 4301 || mode == DImode))
78c875e8 4302 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4303 {
4304 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4305 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4306 HOST_WIDE_INT high
c4ad648e 4307 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4308
4309 /* Check for 32-bit overflow. */
4310 if (high + low != val)
c4ad648e 4311 {
24ea750e
DJ
4312 *win = 0;
4313 return x;
4314 }
4315
4316 /* Reload the high part into a base reg; leave the low part
c4ad648e 4317 in the mem directly. */
24ea750e
DJ
4318
4319 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4320 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4321 GEN_INT (high)),
4322 GEN_INT (low));
24ea750e
DJ
4323
4324 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4325 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4326 opnum, (enum reload_type)type);
24ea750e
DJ
4327 *win = 1;
4328 return x;
4329 }
4937d02d 4330
24ea750e 4331 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4332 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4333 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4334#if TARGET_MACHO
4335 && DEFAULT_ABI == ABI_DARWIN
a29077da 4336 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4337#else
4338 && DEFAULT_ABI == ABI_V4
4339 && !flag_pic
4340#endif
7393f7f8 4341 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4342 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4343 without fprs. */
0d8c1c97 4344 && mode != TFmode
7393f7f8 4345 && mode != TDmode
7b5d92b2 4346 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4347 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4348 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4349 {
8308679f 4350#if TARGET_MACHO
a29077da
GK
4351 if (flag_pic)
4352 {
08a6a74b 4353 rtx offset = machopic_gen_offset (x);
a29077da
GK
4354 x = gen_rtx_LO_SUM (GET_MODE (x),
4355 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4356 gen_rtx_HIGH (Pmode, offset)), offset);
4357 }
4358 else
8308679f 4359#endif
a29077da 4360 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4361 gen_rtx_HIGH (Pmode, x), x);
a29077da 4362
24ea750e 4363 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4364 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4365 opnum, (enum reload_type)type);
24ea750e
DJ
4366 *win = 1;
4367 return x;
4368 }
4937d02d 4369
dec1f3aa
DE
4370 /* Reload an offset address wrapped by an AND that represents the
4371 masking of the lower bits. Strip the outer AND and let reload
4372 convert the offset address into an indirect address. */
4373 if (TARGET_ALTIVEC
4374 && ALTIVEC_VECTOR_MODE (mode)
4375 && GET_CODE (x) == AND
4376 && GET_CODE (XEXP (x, 0)) == PLUS
4377 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4378 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4379 && GET_CODE (XEXP (x, 1)) == CONST_INT
4380 && INTVAL (XEXP (x, 1)) == -16)
4381 {
4382 x = XEXP (x, 0);
4383 *win = 1;
4384 return x;
4385 }
4386
24ea750e 4387 if (TARGET_TOC
0cdc04e8 4388 && GET_CODE (x) == SYMBOL_REF
4d588c14 4389 && constant_pool_expr_p (x)
c1f11548 4390 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4391 {
194c524a 4392 x = create_TOC_reference (x);
24ea750e
DJ
4393 *win = 1;
4394 return x;
4395 }
4396 *win = 0;
4397 return x;
f676971a 4398}
24ea750e 4399
258bfae2
FS
4400/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4401 that is a valid memory address for an instruction.
4402 The MODE argument is the machine mode for the MEM expression
4403 that wants to use this address.
4404
 4405 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4406 refers to a constant pool entry of an address (or the sum of it
4407 plus a constant), a short (16-bit signed) constant plus a register,
4408 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4409 auto-increment. For DFmode, DDmode and DImode with a constant plus
 4410 register, we must ensure that both words are addressable, or on
 4411 PowerPC64 that the offset is word aligned.
258bfae2 4412
4d4447b5 4413 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4414 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4415 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4416 during assembly output. */
4417int
a2369ed3 4418rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4419{
850e8d3d
DN
4420 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4421 if (TARGET_ALTIVEC
4422 && ALTIVEC_VECTOR_MODE (mode)
4423 && GET_CODE (x) == AND
4424 && GET_CODE (XEXP (x, 1)) == CONST_INT
4425 && INTVAL (XEXP (x, 1)) == -16)
4426 x = XEXP (x, 0);
4427
c4501e62
JJ
4428 if (RS6000_SYMBOL_REF_TLS_P (x))
4429 return 0;
4d588c14 4430 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4431 return 1;
4432 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4433 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4434 && !SPE_VECTOR_MODE (mode)
429ec7dc 4435 && mode != TFmode
7393f7f8 4436 && mode != TDmode
54b695e7 4437 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4438 && !(TARGET_E500_DOUBLE
4439 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4440 && TARGET_UPDATE
4d588c14 4441 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4442 return 1;
d04b6e6e 4443 if (legitimate_small_data_p (mode, x))
258bfae2 4444 return 1;
4d588c14 4445 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4446 return 1;
4447 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4448 if (! reg_ok_strict
4449 && GET_CODE (x) == PLUS
4450 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4451 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4452 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4453 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4454 return 1;
76d2b81d 4455 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4456 return 1;
4457 if (mode != TImode
76d2b81d 4458 && mode != TFmode
7393f7f8 4459 && mode != TDmode
a3170dc6
AH
4460 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4461 || TARGET_POWERPC64
4f011e1e
JM
4462 || (mode != DFmode && mode != DDmode)
4463 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4464 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4465 && !avoiding_indexed_address_p (mode)
4d588c14 4466 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4467 return 1;
6fb5fa3c
DB
4468 if (GET_CODE (x) == PRE_MODIFY
4469 && mode != TImode
4470 && mode != TFmode
4471 && mode != TDmode
696e45ba 4472 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4473 || TARGET_POWERPC64
4d4447b5 4474 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4475 && (TARGET_POWERPC64 || mode != DImode)
4476 && !ALTIVEC_VECTOR_MODE (mode)
4477 && !SPE_VECTOR_MODE (mode)
4478 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4479 && !(TARGET_E500_DOUBLE
4480 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4481 && TARGET_UPDATE
4482 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4483 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
001b9eb6
PH
4484 || (!avoiding_indexed_address_p (mode)
4485 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
6fb5fa3c
DB
4486 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4487 return 1;
4d588c14 4488 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4489 return 1;
4490 return 0;
4491}
4d588c14
RH
4492
4493/* Go to LABEL if ADDR (a legitimate address expression)
4494 has an effect that depends on the machine mode it is used for.
4495
4496 On the RS/6000 this is true of all integral offsets (since AltiVec
4497 modes don't allow them) or is a pre-increment or decrement.
4498
4499 ??? Except that due to conceptual problems in offsettable_address_p
4500 we can't really report the problems of integral offsets. So leave
f676971a 4501 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4502 sub-words of a TFmode operand, which is what we had before. */
4503
4504bool
a2369ed3 4505rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4506{
4507 switch (GET_CODE (addr))
4508 {
4509 case PLUS:
4510 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4511 {
4512 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4513 return val + 12 + 0x8000 >= 0x10000;
4514 }
4515 break;
4516
4517 case LO_SUM:
4518 return true;
4519
619fe064 4520 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4521 case PRE_MODIFY:
4522 return TARGET_UPDATE;
4d588c14
RH
4523
4524 default:
4525 break;
4526 }
4527
4528 return false;
4529}
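/* Worked example for the PLUS case above (illustrative): for
   (plus (reg) (const_int 32760)) the test computes
   32760 + 12 + 0x8000 == 0x10004 >= 0x10000, so the address is reported as
   mode-dependent -- the trailing words of a TFmode access at that offset
   would not fit the displacement field.  This is the conservatism that
   rs6000_offsettable_memref_p below works around.  */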
d8ecbcdb 4530
944258eb
RS
4531/* Implement FIND_BASE_TERM. */
4532
4533rtx
4534rs6000_find_base_term (rtx op)
4535{
4536 rtx base, offset;
4537
4538 split_const (op, &base, &offset);
4539 if (GET_CODE (base) == UNSPEC)
4540 switch (XINT (base, 1))
4541 {
4542 case UNSPEC_TOCREL:
4543 case UNSPEC_MACHOPIC_OFFSET:
4544 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4545 for aliasing purposes. */
4546 return XVECEXP (base, 0, 0);
4547 }
4548
4549 return op;
4550}
4551
d04b6e6e
EB
4552/* More elaborate version of recog's offsettable_memref_p predicate
4553 that works around the ??? note of rs6000_mode_dependent_address.
4554 In particular it accepts
4555
4556 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4557
 4558 in 32-bit mode, which the recog predicate rejects. */
4559
4560bool
4561rs6000_offsettable_memref_p (rtx op)
4562{
4563 if (!MEM_P (op))
4564 return false;
4565
4566 /* First mimic offsettable_memref_p. */
4567 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4568 return true;
4569
4570 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4571 the latter predicate knows nothing about the mode of the memory
4572 reference and, therefore, assumes that it is the largest supported
4573 mode (TFmode). As a consequence, legitimate offsettable memory
4574 references are rejected. rs6000_legitimate_offset_address_p contains
4575 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4576 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4577}
4578
d8ecbcdb
AH
4579/* Return number of consecutive hard regs needed starting at reg REGNO
4580 to hold something of mode MODE.
4581 This is ordinarily the length in words of a value of mode MODE
4582 but can be less for certain modes in special long registers.
4583
4584 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4585 scalar instructions. The upper 32 bits are only available to the
4586 SIMD instructions.
4587
4588 POWER and PowerPC GPRs hold 32 bits worth;
 4589 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4590
4591int
4592rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4593{
4594 if (FP_REGNO_P (regno))
4595 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4596
4597 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4598 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4599
4600 if (ALTIVEC_REGNO_P (regno))
4601 return
4602 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4603
8521c414
JM
4604 /* The value returned for SCmode in the E500 double case is 2 for
4605 ABI compatibility; storing an SCmode value in a single register
4606 would require function_arg and rs6000_spe_function_arg to handle
4607 SCmode so as to pass the value correctly in a pair of
4608 registers. */
4f011e1e
JM
4609 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4610 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4611 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4612
d8ecbcdb
AH
4613 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4614}
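/* Illustrative values (assuming the usual UNITS_PER_WORD == 4 for 32-bit,
   UNITS_PER_FP_WORD == 8 and UNITS_PER_ALTIVEC_WORD == 16): a DFmode value
   needs 2 GPRs on a 32-bit target but only 1 FPR, and a V4SImode value
   fits in a single AltiVec register.  */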
2aa4498c
AH
4615
4616/* Change register usage conditional on target flags. */
4617void
4618rs6000_conditional_register_usage (void)
4619{
4620 int i;
4621
4622 /* Set MQ register fixed (already call_used) if not POWER
4623 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4624 be allocated. */
4625 if (! TARGET_POWER)
4626 fixed_regs[64] = 1;
4627
7c9ac5c0 4628 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4629 if (TARGET_64BIT)
4630 fixed_regs[13] = call_used_regs[13]
4631 = call_really_used_regs[13] = 1;
4632
4633 /* Conditionally disable FPRs. */
4634 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4635 for (i = 32; i < 64; i++)
4636 fixed_regs[i] = call_used_regs[i]
c4ad648e 4637 = call_really_used_regs[i] = 1;
2aa4498c 4638
7c9ac5c0
PH
4639 /* The TOC register is not killed across calls in a way that is
4640 visible to the compiler. */
4641 if (DEFAULT_ABI == ABI_AIX)
4642 call_really_used_regs[2] = 0;
4643
2aa4498c
AH
4644 if (DEFAULT_ABI == ABI_V4
4645 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4646 && flag_pic == 2)
4647 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4648
4649 if (DEFAULT_ABI == ABI_V4
4650 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4651 && flag_pic == 1)
4652 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4653 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4654 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4655
4656 if (DEFAULT_ABI == ABI_DARWIN
4657 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4658 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4659 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4660 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4661
b4db40bf
JJ
4662 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4663 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4664 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4665
2aa4498c
AH
4666 if (TARGET_SPE)
4667 {
4668 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4669 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4670 registers in prologues and epilogues. We no longer use r14
4671 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4672 pool for link-compatibility with older versions of GCC. Once
4673 "old" code has died out, we can return r14 to the allocation
4674 pool. */
4675 fixed_regs[14]
4676 = call_used_regs[14]
4677 = call_really_used_regs[14] = 1;
2aa4498c
AH
4678 }
4679
0db747be 4680 if (!TARGET_ALTIVEC)
2aa4498c
AH
4681 {
4682 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4683 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4684 call_really_used_regs[VRSAVE_REGNO] = 1;
4685 }
4686
0db747be
DE
4687 if (TARGET_ALTIVEC)
4688 global_regs[VSCR_REGNO] = 1;
4689
2aa4498c 4690 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4691 {
4692 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4693 call_used_regs[i] = call_really_used_regs[i] = 1;
4694
4695 /* AIX reserves VR20:31 in non-extended ABI mode. */
4696 if (TARGET_XCOFF)
4697 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4698 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4699 }
2aa4498c 4700}
fb4d4348 4701\f
a4f6c312
SS
4702/* Try to output insns to set TARGET equal to the constant C if it can
4703 be done in less than N insns. Do all computations in MODE.
4704 Returns the place where the output has been placed if it can be
4705 done and the insns have been emitted. If it would take more than N
 4706 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4707
4708rtx
f676971a 4709rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4710 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4711{
af8cb5c5 4712 rtx result, insn, set;
4713 HOST_WIDE_INT c0, c1;
4714
37409796 4715 switch (mode)
2bfcf297 4716 {
4717 case QImode:
4718 case HImode:
2bfcf297 4719 if (dest == NULL)
c4ad648e 4720 dest = gen_reg_rtx (mode);
4721 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4722 return dest;
bb8df8a6 4723
37409796 4724 case SImode:
b3a13419 4725 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4726
d448860e 4727 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
4728 GEN_INT (INTVAL (source)
4729 & (~ (HOST_WIDE_INT) 0xffff))));
4730 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4731 gen_rtx_IOR (SImode, copy_rtx (result),
4732 GEN_INT (INTVAL (source) & 0xffff))));
4733 result = dest;
4734 break;
4735
4736 case DImode:
4737 switch (GET_CODE (source))
af8cb5c5 4738 {
37409796 4739 case CONST_INT:
4740 c0 = INTVAL (source);
4741 c1 = -(c0 < 0);
37409796 4742 break;
bb8df8a6 4743
37409796 4744 case CONST_DOUBLE:
2bfcf297 4745#if HOST_BITS_PER_WIDE_INT >= 64
4746 c0 = CONST_DOUBLE_LOW (source);
4747 c1 = -(c0 < 0);
2bfcf297 4748#else
4749 c0 = CONST_DOUBLE_LOW (source);
4750 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4751#endif
4752 break;
4753
4754 default:
4755 gcc_unreachable ();
af8cb5c5 4756 }
4757
4758 result = rs6000_emit_set_long_const (dest, c0, c1);
4759 break;
4760
4761 default:
4762 gcc_unreachable ();
2bfcf297 4763 }
2bfcf297 4764
4765 insn = get_last_insn ();
4766 set = single_set (insn);
4767 if (! CONSTANT_P (SET_SRC (set)))
4768 set_unique_reg_note (insn, REG_EQUAL, source);
4769
4770 return result;
4771}
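
/* Illustrative sketch of the SImode path above, assuming a constant such
   as 0x12345678: the value is split into its high and low halves, giving
   roughly

     (set (reg T) (const_int 0x12340000))                  ; lis  T,0x1234
     (set (reg DEST) (ior:SI (reg T) (const_int 0x5678)))  ; ori  DEST,T,0x5678

   with a REG_EQUAL note recording the full constant on the final insn.  */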
4772
4773/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 4774   fall back to a straightforward decomposition.  We do this to avoid
4775 exponential run times encountered when looking for longer sequences
4776 with rs6000_emit_set_const. */
4777static rtx
a2369ed3 4778rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
4779{
4780 if (!TARGET_POWERPC64)
4781 {
4782 rtx operand1, operand2;
4783
4784 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4785 DImode);
d448860e 4786 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
4787 DImode);
4788 emit_move_insn (operand1, GEN_INT (c1));
4789 emit_move_insn (operand2, GEN_INT (c2));
4790 }
4791 else
4792 {
bc06712d 4793 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4794
bc06712d 4795 ud1 = c1 & 0xffff;
f921c9c9 4796 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4797#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4798 c2 = c1 >> 32;
2bfcf297 4799#endif
bc06712d 4800 ud3 = c2 & 0xffff;
f921c9c9 4801 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4802
f676971a 4803 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4804 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4805 {
bc06712d 4806 if (ud1 & 0x8000)
b78d48dd 4807 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4808 else
4809 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4810 }
2bfcf297 4811
f676971a 4812 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4813 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4814 {
bc06712d 4815 if (ud2 & 0x8000)
f676971a 4816 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4817 - 0x80000000));
252b88f7 4818 else
4819 emit_move_insn (dest, GEN_INT (ud2 << 16));
4820 if (ud1 != 0)
4821 emit_move_insn (copy_rtx (dest),
4822 gen_rtx_IOR (DImode, copy_rtx (dest),
4823 GEN_INT (ud1)));
252b88f7 4824 }
f676971a 4825 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4826 || (ud4 == 0 && ! (ud3 & 0x8000)))
4827 {
4828 if (ud3 & 0x8000)
f676971a 4829 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4830 - 0x80000000));
4831 else
4832 emit_move_insn (dest, GEN_INT (ud3 << 16));
4833
4834 if (ud2 != 0)
4835 emit_move_insn (copy_rtx (dest),
4836 gen_rtx_IOR (DImode, copy_rtx (dest),
4837 GEN_INT (ud2)));
4838 emit_move_insn (copy_rtx (dest),
4839 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4840 GEN_INT (16)));
bc06712d 4841 if (ud1 != 0)
4842 emit_move_insn (copy_rtx (dest),
4843 gen_rtx_IOR (DImode, copy_rtx (dest),
4844 GEN_INT (ud1)));
bc06712d 4845 }
f676971a 4846 else
4847 {
4848 if (ud4 & 0x8000)
f676971a 4849 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4850 - 0x80000000));
4851 else
4852 emit_move_insn (dest, GEN_INT (ud4 << 16));
4853
4854 if (ud3 != 0)
4855 emit_move_insn (copy_rtx (dest),
4856 gen_rtx_IOR (DImode, copy_rtx (dest),
4857 GEN_INT (ud3)));
2bfcf297 4858
4859 emit_move_insn (copy_rtx (dest),
4860 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4861 GEN_INT (32)));
bc06712d 4862 if (ud2 != 0)
4863 emit_move_insn (copy_rtx (dest),
4864 gen_rtx_IOR (DImode, copy_rtx (dest),
4865 GEN_INT (ud2 << 16)));
bc06712d 4866 if (ud1 != 0)
4867 emit_move_insn (copy_rtx (dest),
4868 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
4869 }
4870 }
4871 return dest;
4872}
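
/* Worked example, added for illustration: for c = 0x0000001234567890 the
   decomposition above gives ud4 = 0x0000, ud3 = 0x0012, ud2 = 0x3456 and
   ud1 = 0x7890.  Since ud4 == 0 and bit 15 of ud3 is clear, the third
   branch applies and the emitted sequence is approximately

     lis   DEST,0x12           ; DEST = ud3 << 16
     ori   DEST,DEST,0x3456    ; OR in ud2
     sldi  DEST,DEST,16        ; make room for the low halfword
     ori   DEST,DEST,0x7890    ; OR in ud1

   so no 64-bit constant ever needs more than five instructions here.  */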
4873
76d2b81d 4874/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4875 in cases where it won't work (TImode, TFmode, TDmode). */
4876
4877static void
4878rs6000_eliminate_indexed_memrefs (rtx operands[2])
4879{
4880 if (GET_CODE (operands[0]) == MEM
4881 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4882 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
4883 && ! reload_in_progress)
4884 operands[0]
4885 = replace_equiv_address (operands[0],
4886 copy_addr_to_reg (XEXP (operands[0], 0)));
4887
4888 if (GET_CODE (operands[1]) == MEM
4889 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4890 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
4891 && ! reload_in_progress)
4892 operands[1]
4893 = replace_equiv_address (operands[1],
4894 copy_addr_to_reg (XEXP (operands[1], 0)));
4895}
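
/* For illustration: a TImode operand like
   (mem:TI (plus:DI (reg 3) (reg 4))) has its address forced into a fresh
   base register here, because the multi-word move handling for TImode,
   TFmode and TDmode cannot cope with [reg+reg] addressing.  */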
4896
4897/* Emit a move from SOURCE to DEST in mode MODE. */
4898void
a2369ed3 4899rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4900{
4901 rtx operands[2];
4902 operands[0] = dest;
4903 operands[1] = source;
f676971a 4904
4905 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4906 if (GET_CODE (operands[1]) == CONST_DOUBLE
4907 && ! FLOAT_MODE_P (mode)
4908 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4909 {
4910 /* FIXME. This should never happen. */
4911 /* Since it seems that it does, do the safe thing and convert
4912 to a CONST_INT. */
2496c7bd 4913 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4914 }
4915 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4916 || FLOAT_MODE_P (mode)
4917 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4918 || CONST_DOUBLE_LOW (operands[1]) < 0)
4919 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4920 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4921
4922 /* Check if GCC is setting up a block move that will end up using FP
4923 registers as temporaries. We must make sure this is acceptable. */
4924 if (GET_CODE (operands[0]) == MEM
4925 && GET_CODE (operands[1]) == MEM
4926 && mode == DImode
4927 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4928 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4929 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4930 ? 32 : MEM_ALIGN (operands[0])))
4931 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4932 ? 32
4933 : MEM_ALIGN (operands[1]))))
4934 && ! MEM_VOLATILE_P (operands [0])
4935 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4936 {
4937 emit_move_insn (adjust_address (operands[0], SImode, 0),
4938 adjust_address (operands[1], SImode, 0));
4939 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4940 adjust_address (copy_rtx (operands[1]), SImode, 4));
4941 return;
4942 }
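  /* Illustrative note: without the split above, a DImode copy between two
     MEMs that are only 32-bit aligned could be expanded through a
     floating-point temporary and hit a slow unaligned access; the two
     SImode moves keep the copy in integer registers instead.  */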
630d42a0 4943
b3a13419 4944 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4945 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4946 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4947
a3170dc6 4948 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 4949 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 4950 && GET_CODE (operands[0]) == MEM)
fb4d4348 4951 {
4952 int regnum;
4953
4954 if (reload_in_progress || reload_completed)
4955 regnum = true_regnum (operands[1]);
4956 else if (GET_CODE (operands[1]) == REG)
4957 regnum = REGNO (operands[1]);
4958 else
4959 regnum = -1;
f676971a 4960
4961 /* If operands[1] is a register, on POWER it may have
4962 double-precision data in it, so truncate it to single
4963 precision. */
4964 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4965 {
4966 rtx newreg;
b3a13419 4967 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4968 : gen_reg_rtx (mode));
4969 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4970 operands[1] = newreg;
4971 }
4972 }
4973
4974 /* Recognize the case where operand[1] is a reference to thread-local
4975 data and load its address to a register. */
84f52ebd 4976 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4977 {
4978 enum tls_model model;
4979 rtx tmp = operands[1];
4980 rtx addend = NULL;
4981
4982 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4983 {
4984 addend = XEXP (XEXP (tmp, 0), 1);
4985 tmp = XEXP (XEXP (tmp, 0), 0);
4986 }
4987
4988 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4989 model = SYMBOL_REF_TLS_MODEL (tmp);
4990 gcc_assert (model != 0);
4991
4992 tmp = rs6000_legitimize_tls_address (tmp, model);
4993 if (addend)
4994 {
4995 tmp = gen_rtx_PLUS (mode, tmp, addend);
4996 tmp = force_operand (tmp, operands[0]);
4997 }
4998 operands[1] = tmp;
4999 }
5000
5001 /* Handle the case where reload calls us with an invalid address. */
5002 if (reload_in_progress && mode == Pmode
69ef87e2 5003 && (! general_operand (operands[1], mode)
5004 || ! nonimmediate_operand (operands[0], mode)))
5005 goto emit_set;
5006
5007 /* 128-bit constant floating-point values on Darwin should really be
5008 loaded as two parts. */
8521c414 5009 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
5010 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5011 {
5012 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5013 know how to get a DFmode SUBREG of a TFmode. */
5014 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5015 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5016 simplify_gen_subreg (imode, operands[1], mode, 0),
5017 imode);
5018 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5019 GET_MODE_SIZE (imode)),
5020 simplify_gen_subreg (imode, operands[1], mode,
5021 GET_MODE_SIZE (imode)),
5022 imode);
5023 return;
5024 }
5025
5026 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5027 cfun->machine->sdmode_stack_slot =
5028 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5029
5030 if (reload_in_progress
5031 && mode == SDmode
5032 && MEM_P (operands[0])
5033 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5034 && REG_P (operands[1]))
5035 {
5036 if (FP_REGNO_P (REGNO (operands[1])))
5037 {
5038 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5039 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5040 emit_insn (gen_movsd_store (mem, operands[1]));
5041 }
5042 else if (INT_REGNO_P (REGNO (operands[1])))
5043 {
5044 rtx mem = adjust_address_nv (operands[0], mode, 4);
5045 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5046 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5047 }
5048 else
5049 gcc_unreachable();
5050 return;
5051 }
5052 if (reload_in_progress
5053 && mode == SDmode
5054 && REG_P (operands[0])
5055 && MEM_P (operands[1])
5056 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5057 {
5058 if (FP_REGNO_P (REGNO (operands[0])))
5059 {
5060 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5061 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5062 emit_insn (gen_movsd_load (operands[0], mem));
5063 }
5064 else if (INT_REGNO_P (REGNO (operands[0])))
5065 {
5066 rtx mem = adjust_address_nv (operands[1], mode, 4);
5067 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5068 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5069 }
5070 else
5071 gcc_unreachable();
5072 return;
5073 }
5074
fb4d4348
GK
5075 /* FIXME: In the long term, this switch statement should go away
5076 and be replaced by a sequence of tests based on things like
5077 mode == Pmode. */
5078 switch (mode)
5079 {
5080 case HImode:
5081 case QImode:
5082 if (CONSTANT_P (operands[1])
5083 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5084 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5085 break;
5086
06f4e019 5087 case TFmode:
7393f7f8 5088 case TDmode:
76d2b81d
DJ
5089 rs6000_eliminate_indexed_memrefs (operands);
5090 /* fall through */
5091
fb4d4348 5092 case DFmode:
7393f7f8 5093 case DDmode:
fb4d4348 5094 case SFmode:
e41b2a33 5095 case SDmode:
f676971a 5096 if (CONSTANT_P (operands[1])
fb4d4348 5097 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5098 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5099 break;
f676971a 5100
0ac081f6
AH
5101 case V16QImode:
5102 case V8HImode:
5103 case V4SFmode:
5104 case V4SImode:
a3170dc6
AH
5105 case V4HImode:
5106 case V2SFmode:
5107 case V2SImode:
00a892b8 5108 case V1DImode:
69ef87e2 5109 if (CONSTANT_P (operands[1])
d744e06e 5110 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5111 operands[1] = force_const_mem (mode, operands[1]);
5112 break;
f676971a 5113
fb4d4348 5114 case SImode:
a9098fd0 5115 case DImode:
fb4d4348
GK
 5116	      /* Use the default pattern for the address of ELF small data.  */
5117 if (TARGET_ELF
a9098fd0 5118 && mode == Pmode
f607bc57 5119 && DEFAULT_ABI == ABI_V4
f676971a 5120 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5121 || GET_CODE (operands[1]) == CONST)
5122 && small_data_operand (operands[1], mode))
fb4d4348
GK
5123 {
5124 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5125 return;
5126 }
5127
f607bc57 5128 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5129 && mode == Pmode && mode == SImode
5130 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5131 {
5132 emit_insn (gen_movsi_got (operands[0], operands[1]));
5133 return;
5134 }
5135
ee890fe2 5136 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5137 && TARGET_NO_TOC
5138 && ! flag_pic
a9098fd0 5139 && mode == Pmode
fb4d4348
GK
5140 && CONSTANT_P (operands[1])
5141 && GET_CODE (operands[1]) != HIGH
5142 && GET_CODE (operands[1]) != CONST_INT)
5143 {
b3a13419
ILT
5144 rtx target = (!can_create_pseudo_p ()
5145 ? operands[0]
5146 : gen_reg_rtx (mode));
fb4d4348
GK
5147
5148 /* If this is a function address on -mcall-aixdesc,
5149 convert it to the address of the descriptor. */
5150 if (DEFAULT_ABI == ABI_AIX
5151 && GET_CODE (operands[1]) == SYMBOL_REF
5152 && XSTR (operands[1], 0)[0] == '.')
5153 {
5154 const char *name = XSTR (operands[1], 0);
5155 rtx new_ref;
5156 while (*name == '.')
5157 name++;
5158 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5159 CONSTANT_POOL_ADDRESS_P (new_ref)
5160 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5161 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5162 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5163 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5164 operands[1] = new_ref;
5165 }
7509c759 5166
ee890fe2
SS
5167 if (DEFAULT_ABI == ABI_DARWIN)
5168 {
ab82a49f
AP
5169#if TARGET_MACHO
5170 if (MACHO_DYNAMIC_NO_PIC_P)
5171 {
5172 /* Take care of any required data indirection. */
5173 operands[1] = rs6000_machopic_legitimize_pic_address (
5174 operands[1], mode, operands[0]);
5175 if (operands[0] != operands[1])
5176 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5177 operands[0], operands[1]));
ab82a49f
AP
5178 return;
5179 }
5180#endif
b8a55285
AP
5181 emit_insn (gen_macho_high (target, operands[1]));
5182 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5183 return;
5184 }
5185
fb4d4348
GK
5186 emit_insn (gen_elf_high (target, operands[1]));
5187 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5188 return;
5189 }
5190
a9098fd0
GK
5191 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5192 and we have put it in the TOC, we just need to make a TOC-relative
5193 reference to it. */
5194 if (TARGET_TOC
5195 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5196 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5197 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5198 get_pool_mode (operands[1])))
fb4d4348 5199 {
a9098fd0 5200 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5201 }
a9098fd0
GK
5202 else if (mode == Pmode
5203 && CONSTANT_P (operands[1])
38886f37
AO
5204 && ((GET_CODE (operands[1]) != CONST_INT
5205 && ! easy_fp_constant (operands[1], mode))
5206 || (GET_CODE (operands[1]) == CONST_INT
5207 && num_insns_constant (operands[1], mode) > 2)
5208 || (GET_CODE (operands[0]) == REG
5209 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5210 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5211 && ! legitimate_constant_pool_address_p (operands[1])
5212 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5213 {
5214 /* Emit a USE operation so that the constant isn't deleted if
5215 expensive optimizations are turned on because nobody
5216 references it. This should only be done for operands that
5217 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5218 This should not be done for operands that contain LABEL_REFs.
5219 For now, we just handle the obvious case. */
5220 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5221 emit_use (operands[1]);
fb4d4348 5222
c859cda6 5223#if TARGET_MACHO
ee890fe2 5224 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5225 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5226 {
ee890fe2
SS
5227 operands[1] =
5228 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5229 operands[0]);
5230 if (operands[0] != operands[1])
5231 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5232 return;
5233 }
c859cda6 5234#endif
ee890fe2 5235
fb4d4348
GK
5236 /* If we are to limit the number of things we put in the TOC and
5237 this is a symbol plus a constant we can add in one insn,
5238 just put the symbol in the TOC and add the constant. Don't do
5239 this if reload is in progress. */
5240 if (GET_CODE (operands[1]) == CONST
5241 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5242 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5243 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5244 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5245 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5246 && ! side_effects_p (operands[0]))
5247 {
a4f6c312
SS
5248 rtx sym =
5249 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5250 rtx other = XEXP (XEXP (operands[1], 0), 1);
5251
a9098fd0
GK
5252 sym = force_reg (mode, sym);
5253 if (mode == SImode)
5254 emit_insn (gen_addsi3 (operands[0], sym, other));
5255 else
5256 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5257 return;
5258 }
5259
a9098fd0 5260 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5261
f676971a 5262 if (TARGET_TOC
0cdc04e8 5263 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5264 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5265 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5266 get_pool_constant (XEXP (operands[1], 0)),
5267 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5268 {
ba4828e0 5269 operands[1]
542a8afa 5270 = gen_const_mem (mode,
c4ad648e 5271 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5272 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5273 }
fb4d4348
GK
5274 }
5275 break;
a9098fd0 5276
fb4d4348 5277 case TImode:
76d2b81d
DJ
5278 rs6000_eliminate_indexed_memrefs (operands);
5279
27dc0551
DE
5280 if (TARGET_POWER)
5281 {
5282 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5283 gen_rtvec (2,
5284 gen_rtx_SET (VOIDmode,
5285 operands[0], operands[1]),
5286 gen_rtx_CLOBBER (VOIDmode,
5287 gen_rtx_SCRATCH (SImode)))));
5288 return;
5289 }
fb4d4348
GK
5290 break;
5291
5292 default:
37409796 5293 gcc_unreachable ();
fb4d4348
GK
5294 }
5295
a9098fd0
GK
5296 /* Above, we may have called force_const_mem which may have returned
5297 an invalid address. If we can, fix this up; otherwise, reload will
5298 have to deal with it. */
8f4e6caf
RH
5299 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5300 operands[1] = validize_mem (operands[1]);
a9098fd0 5301
8f4e6caf 5302 emit_set:
fb4d4348
GK
5303 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5304}
4697a36c 5305\f
2858f73a
GK
5306/* Nonzero if we can use a floating-point register to pass this arg. */
5307#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5308 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5309 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5310 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5311
5312/* Nonzero if we can use an AltiVec register to pass this arg. */
5313#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5314 (ALTIVEC_VECTOR_MODE (MODE) \
5315 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5316 && TARGET_ALTIVEC_ABI \
83953138 5317 && (NAMED))
2858f73a 5318
c6e8c921
GK
5319/* Return a nonzero value to say to return the function value in
5320 memory, just as large structures are always returned. TYPE will be
5321 the data type of the value, and FNTYPE will be the type of the
5322 function doing the returning, or @code{NULL} for libcalls.
5323
5324 The AIX ABI for the RS/6000 specifies that all structures are
5325 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5326 specifies that structures <= 8 bytes are returned in r3/r4, but a
5327 draft put them in memory, and GCC used to implement the draft
df01da37 5328 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5329 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5330 compatibility can change DRAFT_V4_STRUCT_RET to override the
5331 default, and -m switches get the final word. See
5332 rs6000_override_options for more details.
5333
5334 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5335 long double support is enabled. These values are returned in memory.
5336
5337 int_size_in_bytes returns -1 for variable size objects, which go in
5338 memory always. The cast to unsigned makes -1 > 8. */
5339
5340static bool
586de218 5341rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5342{
594a51fe
SS
5343 /* In the darwin64 abi, try to use registers for larger structs
5344 if possible. */
0b5383eb 5345 if (rs6000_darwin64_abi
594a51fe 5346 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5347 && int_size_in_bytes (type) > 0)
5348 {
5349 CUMULATIVE_ARGS valcum;
5350 rtx valret;
5351
5352 valcum.words = 0;
5353 valcum.fregno = FP_ARG_MIN_REG;
5354 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5355 /* Do a trial code generation as if this were going to be passed
5356 as an argument; if any part goes in memory, we return NULL. */
5357 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5358 if (valret)
5359 return false;
5360 /* Otherwise fall through to more conventional ABI rules. */
5361 }
594a51fe 5362
c6e8c921 5363 if (AGGREGATE_TYPE_P (type)
df01da37 5364 && (aix_struct_return
c6e8c921
GK
5365 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5366 return true;
b693336b 5367
bada2eb8
DE
5368 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5369 modes only exist for GCC vector types if -maltivec. */
5370 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5371 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5372 return false;
5373
b693336b
PB
5374 /* Return synthetic vectors in memory. */
5375 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5376 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5377 {
5378 static bool warned_for_return_big_vectors = false;
5379 if (!warned_for_return_big_vectors)
5380 {
d4ee4d25 5381 warning (0, "GCC vector returned by reference: "
b693336b
PB
5382 "non-standard ABI extension with no compatibility guarantee");
5383 warned_for_return_big_vectors = true;
5384 }
5385 return true;
5386 }
5387
602ea4d3 5388 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5389 return true;
ad630bef 5390
c6e8c921
GK
5391 return false;
5392}
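
/* Examples, for illustration: with aix_struct_return set even a 4-byte
   struct is returned in memory, while under the SVR4 rules a struct of up
   to 8 bytes comes back in r3/r4 and a 12-byte struct (greater than 8)
   goes to memory.  */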
5393
4697a36c
MM
5394/* Initialize a variable CUM of type CUMULATIVE_ARGS
5395 for a call to a function whose data type is FNTYPE.
5396 For a library call, FNTYPE is 0.
5397
 5398   For incoming args we set the number of prototyped arguments high
1c20ae99 5399   enough that we never return a PARALLEL.  */
4697a36c
MM
5400
5401void
f676971a 5402init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5403 rtx libname ATTRIBUTE_UNUSED, int incoming,
5404 int libcall, int n_named_args)
4697a36c
MM
5405{
5406 static CUMULATIVE_ARGS zero_cumulative;
5407
5408 *cum = zero_cumulative;
5409 cum->words = 0;
5410 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5411 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5412 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5413 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5414 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5415 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5416 cum->stdarg = fntype
5417 && (TYPE_ARG_TYPES (fntype) != 0
5418 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5419 != void_type_node));
4697a36c 5420
0f6937fe
AM
5421 cum->nargs_prototype = 0;
5422 if (incoming || cum->prototype)
5423 cum->nargs_prototype = n_named_args;
4697a36c 5424
a5c76ee6 5425 /* Check for a longcall attribute. */
3eb4e360
AM
5426 if ((!fntype && rs6000_default_long_calls)
5427 || (fntype
5428 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5429 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5430 cum->call_cookie |= CALL_LONG;
6a4cee5f 5431
4697a36c
MM
5432 if (TARGET_DEBUG_ARG)
5433 {
5434 fprintf (stderr, "\ninit_cumulative_args:");
5435 if (fntype)
5436 {
5437 tree ret_type = TREE_TYPE (fntype);
5438 fprintf (stderr, " ret code = %s,",
5439 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5440 }
5441
6a4cee5f
MM
5442 if (cum->call_cookie & CALL_LONG)
5443 fprintf (stderr, " longcall,");
5444
4697a36c
MM
5445 fprintf (stderr, " proto = %d, nargs = %d\n",
5446 cum->prototype, cum->nargs_prototype);
5447 }
f676971a 5448
c4ad648e
AM
5449 if (fntype
5450 && !TARGET_ALTIVEC
5451 && TARGET_ALTIVEC_ABI
5452 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5453 {
c85ce869 5454 error ("cannot return value in vector register because"
c4ad648e 5455 " altivec instructions are disabled, use -maltivec"
c85ce869 5456 " to enable them");
c4ad648e 5457 }
4697a36c
MM
5458}
5459\f
fe984136
RH
5460/* Return true if TYPE must be passed on the stack and not in registers. */
5461
5462static bool
586de218 5463rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5464{
5465 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5466 return must_pass_in_stack_var_size (mode, type);
5467 else
5468 return must_pass_in_stack_var_size_or_pad (mode, type);
5469}
5470
c229cba9
DE
5471/* If defined, a C expression which determines whether, and in which
5472 direction, to pad out an argument with extra space. The value
5473 should be of type `enum direction': either `upward' to pad above
5474 the argument, `downward' to pad below, or `none' to inhibit
5475 padding.
5476
5477 For the AIX ABI structs are always stored left shifted in their
5478 argument slot. */
5479
9ebbca7d 5480enum direction
586de218 5481function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5482{
6e985040
AM
5483#ifndef AGGREGATE_PADDING_FIXED
5484#define AGGREGATE_PADDING_FIXED 0
5485#endif
5486#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5487#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5488#endif
5489
5490 if (!AGGREGATE_PADDING_FIXED)
5491 {
5492 /* GCC used to pass structures of the same size as integer types as
5493 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5494	 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5495 passed padded downward, except that -mstrict-align further
5496 muddied the water in that multi-component structures of 2 and 4
5497 bytes in size were passed padded upward.
5498
5499 The following arranges for best compatibility with previous
5500 versions of gcc, but removes the -mstrict-align dependency. */
5501 if (BYTES_BIG_ENDIAN)
5502 {
5503 HOST_WIDE_INT size = 0;
5504
5505 if (mode == BLKmode)
5506 {
5507 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5508 size = int_size_in_bytes (type);
5509 }
5510 else
5511 size = GET_MODE_SIZE (mode);
5512
5513 if (size == 1 || size == 2 || size == 4)
5514 return downward;
5515 }
5516 return upward;
5517 }
5518
5519 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5520 {
5521 if (type != 0 && AGGREGATE_TYPE_P (type))
5522 return upward;
5523 }
c229cba9 5524
d3704c46
KH
5525 /* Fall back to the default. */
5526 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5527}
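
/* Worked example, assuming the defaults above: on a big-endian target a
   2-byte struct is padded downward, i.e. treated like a short and placed
   in the least-significant bytes of its word, whereas a 3-byte struct
   falls through to upward padding and is stored left-shifted, in the
   most-significant bytes of its slot.  */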
5528
b6c9286a 5529/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5530 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5531 PARM_BOUNDARY is used for all arguments.
f676971a 5532
84e9ad15
AM
5533 V.4 wants long longs and doubles to be double word aligned. Just
5534 testing the mode size is a boneheaded way to do this as it means
5535 that other types such as complex int are also double word aligned.
5536 However, we're stuck with this because changing the ABI might break
5537 existing library interfaces.
5538
b693336b
PB
5539 Doubleword align SPE vectors.
5540 Quadword align Altivec vectors.
5541 Quadword align large synthetic vector types. */
b6c9286a
MM
5542
5543int
b693336b 5544function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5545{
84e9ad15
AM
5546 if (DEFAULT_ABI == ABI_V4
5547 && (GET_MODE_SIZE (mode) == 8
5548 || (TARGET_HARD_FLOAT
5549 && TARGET_FPRS
7393f7f8 5550 && (mode == TFmode || mode == TDmode))))
4ed78545 5551 return 64;
ad630bef
DE
5552 else if (SPE_VECTOR_MODE (mode)
5553 || (type && TREE_CODE (type) == VECTOR_TYPE
5554 && int_size_in_bytes (type) >= 8
5555 && int_size_in_bytes (type) < 16))
e1f83b4d 5556 return 64;
ad630bef
DE
5557 else if (ALTIVEC_VECTOR_MODE (mode)
5558 || (type && TREE_CODE (type) == VECTOR_TYPE
5559 && int_size_in_bytes (type) >= 16))
0ac081f6 5560 return 128;
0b5383eb
DJ
5561 else if (rs6000_darwin64_abi && mode == BLKmode
5562 && type && TYPE_ALIGN (type) > 64)
5563 return 128;
9ebbca7d 5564 else
b6c9286a 5565 return PARM_BOUNDARY;
b6c9286a 5566}
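
/* Purely as an illustration of the cases above: under the V.4 ABI a
   DImode or DFmode argument reports a 64-bit boundary, an AltiVec
   V4SFmode argument reports 128 bits, and a plain SImode argument falls
   back to PARM_BOUNDARY.  */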
c53bdcf5 5567
294bd182
AM
5568/* For a function parm of MODE and TYPE, return the starting word in
5569 the parameter area. NWORDS of the parameter area are already used. */
5570
5571static unsigned int
5572rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5573{
5574 unsigned int align;
5575 unsigned int parm_offset;
5576
5577 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5578 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5579 return nwords + (-(parm_offset + nwords) & align);
5580}
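
/* Worked example, assuming a 32-bit SVR4 target where PARM_BOUNDARY is 32
   and the parameter area starts two words into the frame (parm_offset 2):
   a DFmode argument arriving with nwords == 3 gets align == 1, so it
   starts at word 3 + (-(2 + 3) & 1) == 4, i.e. at an absolute offset of
   6 words, which is doubleword aligned.  */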
5581
c53bdcf5
AM
5582/* Compute the size (in words) of a function argument. */
5583
5584static unsigned long
5585rs6000_arg_size (enum machine_mode mode, tree type)
5586{
5587 unsigned long size;
5588
5589 if (mode != BLKmode)
5590 size = GET_MODE_SIZE (mode);
5591 else
5592 size = int_size_in_bytes (type);
5593
5594 if (TARGET_32BIT)
5595 return (size + 3) >> 2;
5596 else
5597 return (size + 7) >> 3;
5598}
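
/* E.g., purely as an illustration: a BLKmode struct of 10 bytes occupies
   (10 + 3) >> 2 == 3 argument words when TARGET_32BIT and
   (10 + 7) >> 3 == 2 words otherwise.  */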
b6c9286a 5599\f
0b5383eb 5600/* Use this to flush pending int fields. */
594a51fe
SS
5601
5602static void
0b5383eb
DJ
5603rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5604 HOST_WIDE_INT bitpos)
594a51fe 5605{
0b5383eb
DJ
5606 unsigned int startbit, endbit;
5607 int intregs, intoffset;
5608 enum machine_mode mode;
594a51fe 5609
0b5383eb
DJ
5610 if (cum->intoffset == -1)
5611 return;
594a51fe 5612
0b5383eb
DJ
5613 intoffset = cum->intoffset;
5614 cum->intoffset = -1;
5615
5616 if (intoffset % BITS_PER_WORD != 0)
5617 {
5618 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5619 MODE_INT, 0);
5620 if (mode == BLKmode)
594a51fe 5621 {
0b5383eb
DJ
5622 /* We couldn't find an appropriate mode, which happens,
5623 e.g., in packed structs when there are 3 bytes to load.
5624 Back intoffset back to the beginning of the word in this
5625 case. */
5626 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5627 }
594a51fe 5628 }
0b5383eb
DJ
5629
5630 startbit = intoffset & -BITS_PER_WORD;
5631 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5632 intregs = (endbit - startbit) / BITS_PER_WORD;
5633 cum->words += intregs;
5634}
5635
5636/* The darwin64 ABI calls for us to recurse down through structs,
5637 looking for elements passed in registers. Unfortunately, we have
5638 to track int register count here also because of misalignments
5639 in powerpc alignment mode. */
5640
5641static void
5642rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5643 tree type,
5644 HOST_WIDE_INT startbitpos)
5645{
5646 tree f;
5647
5648 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5649 if (TREE_CODE (f) == FIELD_DECL)
5650 {
5651 HOST_WIDE_INT bitpos = startbitpos;
5652 tree ftype = TREE_TYPE (f);
70fb00df
AP
5653 enum machine_mode mode;
5654 if (ftype == error_mark_node)
5655 continue;
5656 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5657
5658 if (DECL_SIZE (f) != 0
5659 && host_integerp (bit_position (f), 1))
5660 bitpos += int_bit_position (f);
5661
5662 /* ??? FIXME: else assume zero offset. */
5663
5664 if (TREE_CODE (ftype) == RECORD_TYPE)
5665 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5666 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5667 {
5668 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5669 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5670 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5671 }
5672 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5673 {
5674 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5675 cum->vregno++;
5676 cum->words += 2;
5677 }
5678 else if (cum->intoffset == -1)
5679 cum->intoffset = bitpos;
5680 }
594a51fe
SS
5681}
5682
4697a36c
MM
5683/* Update the data in CUM to advance over an argument
5684 of mode MODE and data type TYPE.
b2d04ecf
AM
5685 (TYPE is null for libcalls where that information may not be available.)
5686
5687 Note that for args passed by reference, function_arg will be called
5688 with MODE and TYPE set to that of the pointer to the arg, not the arg
5689 itself. */
4697a36c
MM
5690
5691void
f676971a 5692function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5693 tree type, int named, int depth)
4697a36c 5694{
0b5383eb
DJ
5695 int size;
5696
594a51fe
SS
5697 /* Only tick off an argument if we're not recursing. */
5698 if (depth == 0)
5699 cum->nargs_prototype--;
4697a36c 5700
ad630bef
DE
5701 if (TARGET_ALTIVEC_ABI
5702 && (ALTIVEC_VECTOR_MODE (mode)
5703 || (type && TREE_CODE (type) == VECTOR_TYPE
5704 && int_size_in_bytes (type) == 16)))
0ac081f6 5705 {
4ed78545
AM
5706 bool stack = false;
5707
2858f73a 5708 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5709 {
6d0ef01e
HP
5710 cum->vregno++;
5711 if (!TARGET_ALTIVEC)
c85ce869 5712 error ("cannot pass argument in vector register because"
6d0ef01e 5713 " altivec instructions are disabled, use -maltivec"
c85ce869 5714 " to enable them");
4ed78545
AM
5715
5716 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5717 even if it is going to be passed in a vector register.
4ed78545
AM
5718 Darwin does the same for variable-argument functions. */
5719 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5720 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5721 stack = true;
6d0ef01e 5722 }
4ed78545
AM
5723 else
5724 stack = true;
5725
5726 if (stack)
c4ad648e 5727 {
a594a19c 5728 int align;
f676971a 5729
2858f73a
GK
5730 /* Vector parameters must be 16-byte aligned. This places
5731 them at 2 mod 4 in terms of words in 32-bit mode, since
5732 the parameter save area starts at offset 24 from the
5733 stack. In 64-bit mode, they just have to start on an
5734 even word, since the parameter save area is 16-byte
5735 aligned. Space for GPRs is reserved even if the argument
5736 will be passed in memory. */
5737 if (TARGET_32BIT)
4ed78545 5738 align = (2 - cum->words) & 3;
2858f73a
GK
5739 else
5740 align = cum->words & 1;
c53bdcf5 5741 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5742
a594a19c
GK
5743 if (TARGET_DEBUG_ARG)
5744 {
f676971a 5745 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5746 cum->words, align);
5747 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5748 cum->nargs_prototype, cum->prototype,
2858f73a 5749 GET_MODE_NAME (mode));
a594a19c
GK
5750 }
5751 }
0ac081f6 5752 }
a4b0320c 5753 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5754 && !cum->stdarg
5755 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5756 cum->sysv_gregno++;
594a51fe
SS
5757
5758 else if (rs6000_darwin64_abi
5759 && mode == BLKmode
0b5383eb
DJ
5760 && TREE_CODE (type) == RECORD_TYPE
5761 && (size = int_size_in_bytes (type)) > 0)
5762 {
5763 /* Variable sized types have size == -1 and are
5764 treated as if consisting entirely of ints.
5765 Pad to 16 byte boundary if needed. */
5766 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5767 && (cum->words % 2) != 0)
5768 cum->words++;
5769 /* For varargs, we can just go up by the size of the struct. */
5770 if (!named)
5771 cum->words += (size + 7) / 8;
5772 else
5773 {
5774 /* It is tempting to say int register count just goes up by
5775 sizeof(type)/8, but this is wrong in a case such as
5776 { int; double; int; } [powerpc alignment]. We have to
5777 grovel through the fields for these too. */
5778 cum->intoffset = 0;
5779 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5780 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5781 size * BITS_PER_UNIT);
5782 }
5783 }
f607bc57 5784 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5785 {
a3170dc6 5786 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
5787 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5788 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5789 || (mode == TFmode && !TARGET_IEEEQUAD)
5790 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 5791 {
2d83f070
JJ
5792 /* _Decimal128 must use an even/odd register pair. This assumes
5793 that the register number is odd when fregno is odd. */
5794 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5795 cum->fregno++;
5796
5797 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5798 <= FP_ARG_V4_MAX_REG)
602ea4d3 5799 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5800 else
5801 {
602ea4d3 5802 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5803 if (mode == DFmode || mode == TFmode
5804 || mode == DDmode || mode == TDmode)
c4ad648e 5805 cum->words += cum->words & 1;
c53bdcf5 5806 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5807 }
4697a36c 5808 }
4cc833b7
RH
5809 else
5810 {
b2d04ecf 5811 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5812 int gregno = cum->sysv_gregno;
5813
4ed78545
AM
5814 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5815 (r7,r8) or (r9,r10). As does any other 2 word item such
5816 as complex int due to a historical mistake. */
5817 if (n_words == 2)
5818 gregno += (1 - gregno) & 1;
4cc833b7 5819
4ed78545 5820 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5821 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5822 {
4ed78545
AM
5823 /* Long long and SPE vectors are aligned on the stack.
5824 So are other 2 word items such as complex int due to
5825 a historical mistake. */
4cc833b7
RH
5826 if (n_words == 2)
5827 cum->words += cum->words & 1;
5828 cum->words += n_words;
5829 }
4697a36c 5830
4cc833b7
RH
5831 /* Note: continuing to accumulate gregno past when we've started
5832 spilling to the stack indicates the fact that we've started
5833 spilling to the stack to expand_builtin_saveregs. */
5834 cum->sysv_gregno = gregno + n_words;
5835 }
4697a36c 5836
4cc833b7
RH
5837 if (TARGET_DEBUG_ARG)
5838 {
5839 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5840 cum->words, cum->fregno);
5841 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5842 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5843 fprintf (stderr, "mode = %4s, named = %d\n",
5844 GET_MODE_NAME (mode), named);
5845 }
4697a36c
MM
5846 }
5847 else
4cc833b7 5848 {
b2d04ecf 5849 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5850 int start_words = cum->words;
5851 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5852
294bd182 5853 cum->words = align_words + n_words;
4697a36c 5854
ebb109ad 5855 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 5856 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5857 {
5858 /* _Decimal128 must be passed in an even/odd float register pair.
5859 This assumes that the register number is odd when fregno is
5860 odd. */
5861 if (mode == TDmode && (cum->fregno % 2) == 1)
5862 cum->fregno++;
5863 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5864 }
4cc833b7
RH
5865
5866 if (TARGET_DEBUG_ARG)
5867 {
5868 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5869 cum->words, cum->fregno);
5870 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5871 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5872 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5873 named, align_words - start_words, depth);
4cc833b7
RH
5874 }
5875 }
4697a36c 5876}
a6c9bed4 5877
f82f556d
AH
5878static rtx
5879spe_build_register_parallel (enum machine_mode mode, int gregno)
5880{
17caeff2 5881 rtx r1, r3, r5, r7;
f82f556d 5882
37409796 5883 switch (mode)
f82f556d 5884 {
37409796 5885 case DFmode:
54b695e7
AH
5886 r1 = gen_rtx_REG (DImode, gregno);
5887 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5888 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5889
5890 case DCmode:
17caeff2 5891 case TFmode:
54b695e7
AH
5892 r1 = gen_rtx_REG (DImode, gregno);
5893 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5894 r3 = gen_rtx_REG (DImode, gregno + 2);
5895 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5896 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5897
17caeff2
JM
5898 case TCmode:
5899 r1 = gen_rtx_REG (DImode, gregno);
5900 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5901 r3 = gen_rtx_REG (DImode, gregno + 2);
5902 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5903 r5 = gen_rtx_REG (DImode, gregno + 4);
5904 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5905 r7 = gen_rtx_REG (DImode, gregno + 6);
5906 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5907 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5908
37409796
NS
5909 default:
5910 gcc_unreachable ();
f82f556d 5911 }
f82f556d 5912}
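
/* Sketch, for illustration: a DCmode argument starting at gregno == 5 is
   built as a PARALLEL pairing (reg:DI 5) at byte offset 0 with
   (reg:DI 7) at byte offset 8; the skipped registers are the second
   halves of the GPR pairs the 32-bit ABI reserves for each 64-bit part.  */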
b78d48dd 5913
f82f556d 5914/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5915static rtx
f676971a 5916rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5917 tree type)
a6c9bed4 5918{
f82f556d
AH
5919 int gregno = cum->sysv_gregno;
5920
5921 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5922 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5923 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5924 || mode == DCmode || mode == TCmode))
f82f556d 5925 {
b5870bee
AH
5926 int n_words = rs6000_arg_size (mode, type);
5927
f82f556d 5928 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5929 if (mode == DFmode)
b5870bee 5930 gregno += (1 - gregno) & 1;
f82f556d 5931
b5870bee
AH
5932 /* Multi-reg args are not split between registers and stack. */
5933 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5934 return NULL_RTX;
5935
5936 return spe_build_register_parallel (mode, gregno);
5937 }
a6c9bed4
AH
5938 if (cum->stdarg)
5939 {
c53bdcf5 5940 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5941
5942 /* SPE vectors are put in odd registers. */
5943 if (n_words == 2 && (gregno & 1) == 0)
5944 gregno += 1;
5945
5946 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5947 {
5948 rtx r1, r2;
5949 enum machine_mode m = SImode;
5950
5951 r1 = gen_rtx_REG (m, gregno);
5952 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5953 r2 = gen_rtx_REG (m, gregno + 1);
5954 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5955 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5956 }
5957 else
b78d48dd 5958 return NULL_RTX;
a6c9bed4
AH
5959 }
5960 else
5961 {
f82f556d
AH
5962 if (gregno <= GP_ARG_MAX_REG)
5963 return gen_rtx_REG (mode, gregno);
a6c9bed4 5964 else
b78d48dd 5965 return NULL_RTX;
a6c9bed4
AH
5966 }
5967}
5968
0b5383eb
DJ
5969/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5970 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5971
0b5383eb 5972static void
bb8df8a6 5973rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5974 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5975{
0b5383eb
DJ
5976 enum machine_mode mode;
5977 unsigned int regno;
5978 unsigned int startbit, endbit;
5979 int this_regno, intregs, intoffset;
5980 rtx reg;
594a51fe 5981
0b5383eb
DJ
5982 if (cum->intoffset == -1)
5983 return;
5984
5985 intoffset = cum->intoffset;
5986 cum->intoffset = -1;
5987
5988 /* If this is the trailing part of a word, try to only load that
5989 much into the register. Otherwise load the whole register. Note
 5990     that in the latter case we may pick up unwanted bits.  It's not a
 5991     problem at the moment, but we may wish to revisit this.  */
5992
5993 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5994 {
0b5383eb
DJ
5995 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5996 MODE_INT, 0);
5997 if (mode == BLKmode)
5998 {
5999 /* We couldn't find an appropriate mode, which happens,
6000 e.g., in packed structs when there are 3 bytes to load.
6001 Back intoffset back to the beginning of the word in this
6002 case. */
6003 intoffset = intoffset & -BITS_PER_WORD;
6004 mode = word_mode;
6005 }
6006 }
6007 else
6008 mode = word_mode;
6009
6010 startbit = intoffset & -BITS_PER_WORD;
6011 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
6012 intregs = (endbit - startbit) / BITS_PER_WORD;
6013 this_regno = cum->words + intoffset / BITS_PER_WORD;
6014
6015 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6016 cum->use_stack = 1;
bb8df8a6 6017
0b5383eb
DJ
6018 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6019 if (intregs <= 0)
6020 return;
6021
6022 intoffset /= BITS_PER_UNIT;
6023 do
6024 {
6025 regno = GP_ARG_MIN_REG + this_regno;
6026 reg = gen_rtx_REG (mode, regno);
6027 rvec[(*k)++] =
6028 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6029
6030 this_regno += 1;
6031 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6032 mode = word_mode;
6033 intregs -= 1;
6034 }
6035 while (intregs > 0);
6036}
6037
6038/* Recursive workhorse for the following. */
6039
6040static void
586de218 6041rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6042 HOST_WIDE_INT startbitpos, rtx rvec[],
6043 int *k)
6044{
6045 tree f;
6046
6047 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6048 if (TREE_CODE (f) == FIELD_DECL)
6049 {
6050 HOST_WIDE_INT bitpos = startbitpos;
6051 tree ftype = TREE_TYPE (f);
70fb00df
AP
6052 enum machine_mode mode;
6053 if (ftype == error_mark_node)
6054 continue;
6055 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6056
6057 if (DECL_SIZE (f) != 0
6058 && host_integerp (bit_position (f), 1))
6059 bitpos += int_bit_position (f);
6060
6061 /* ??? FIXME: else assume zero offset. */
6062
6063 if (TREE_CODE (ftype) == RECORD_TYPE)
6064 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6065 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6066 {
0b5383eb
DJ
6067#if 0
6068 switch (mode)
594a51fe 6069 {
0b5383eb
DJ
6070 case SCmode: mode = SFmode; break;
6071 case DCmode: mode = DFmode; break;
6072 case TCmode: mode = TFmode; break;
6073 default: break;
594a51fe 6074 }
0b5383eb
DJ
6075#endif
6076 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6077 rvec[(*k)++]
bb8df8a6 6078 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6079 gen_rtx_REG (mode, cum->fregno++),
6080 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6081 if (mode == TFmode || mode == TDmode)
0b5383eb 6082 cum->fregno++;
594a51fe 6083 }
0b5383eb
DJ
6084 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6085 {
6086 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6087 rvec[(*k)++]
bb8df8a6
EC
6088 = gen_rtx_EXPR_LIST (VOIDmode,
6089 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6090 GEN_INT (bitpos / BITS_PER_UNIT));
6091 }
6092 else if (cum->intoffset == -1)
6093 cum->intoffset = bitpos;
6094 }
6095}
594a51fe 6096
0b5383eb
DJ
6097/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6098 the register(s) to be used for each field and subfield of a struct
6099 being passed by value, along with the offset of where the
6100 register's value may be found in the block. FP fields go in FP
6101 register, vector fields go in vector registers, and everything
bb8df8a6 6102 else goes in int registers, packed as in memory.
8ff40a74 6103
0b5383eb
DJ
6104 This code is also used for function return values. RETVAL indicates
6105 whether this is the case.
8ff40a74 6106
a4d05547 6107 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6108 calling convention. */
594a51fe 6109
0b5383eb 6110static rtx
586de218 6111rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6112 int named, bool retval)
6113{
6114 rtx rvec[FIRST_PSEUDO_REGISTER];
6115 int k = 1, kbase = 1;
6116 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6117 /* This is a copy; modifications are not visible to our caller. */
6118 CUMULATIVE_ARGS copy_cum = *orig_cum;
6119 CUMULATIVE_ARGS *cum = &copy_cum;
6120
6121 /* Pad to 16 byte boundary if needed. */
6122 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6123 && (cum->words % 2) != 0)
6124 cum->words++;
6125
6126 cum->intoffset = 0;
6127 cum->use_stack = 0;
6128 cum->named = named;
6129
6130 /* Put entries into rvec[] for individual FP and vector fields, and
6131 for the chunks of memory that go in int regs. Note we start at
6132 element 1; 0 is reserved for an indication of using memory, and
6133 may or may not be filled in below. */
6134 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6135 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6136
6137 /* If any part of the struct went on the stack put all of it there.
6138 This hack is because the generic code for
6139 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6140 parts of the struct are not at the beginning. */
6141 if (cum->use_stack)
6142 {
6143 if (retval)
6144 return NULL_RTX; /* doesn't go in registers at all */
6145 kbase = 0;
6146 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6147 }
6148 if (k > 1 || cum->use_stack)
6149 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6150 else
6151 return NULL_RTX;
6152}
6153
b78d48dd
FJ
6154/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6155
6156static rtx
ec6376ab 6157rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6158{
ec6376ab
AM
6159 int n_units;
6160 int i, k;
6161 rtx rvec[GP_ARG_NUM_REG + 1];
6162
6163 if (align_words >= GP_ARG_NUM_REG)
6164 return NULL_RTX;
6165
6166 n_units = rs6000_arg_size (mode, type);
6167
6168 /* Optimize the simple case where the arg fits in one gpr, except in
6169 the case of BLKmode due to assign_parms assuming that registers are
6170 BITS_PER_WORD wide. */
6171 if (n_units == 0
6172 || (n_units == 1 && mode != BLKmode))
6173 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6174
6175 k = 0;
6176 if (align_words + n_units > GP_ARG_NUM_REG)
6177 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6178 using a magic NULL_RTX component.
79773478
AM
6179 This is not strictly correct. Only some of the arg belongs in
6180 memory, not all of it. However, the normal scheme using
6181 function_arg_partial_nregs can result in unusual subregs, eg.
6182 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6183 store the whole arg to memory is often more efficient than code
6184 to store pieces, and we know that space is available in the right
6185 place for the whole arg. */
ec6376ab
AM
6186 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6187
6188 i = 0;
6189 do
36a454e1 6190 {
ec6376ab
AM
6191 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6192 rtx off = GEN_INT (i++ * 4);
6193 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6194 }
ec6376ab
AM
6195 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6196
6197 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6198}
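
/* Illustrative result: a DFmode argument with align_words == 7 only
   partially fits in the 8 parameter GPRs, so the PARALLEL built above
   holds the magic NULL_RTX element plus (reg:SI 10) at offset 0; the
   first half of the value goes in the last GPR and the rest in memory.  */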
6199
4697a36c
MM
6200/* Determine where to put an argument to a function.
6201 Value is zero to push the argument on the stack,
6202 or a hard register in which to store the argument.
6203
6204 MODE is the argument's machine mode.
6205 TYPE is the data type of the argument (as a tree).
6206 This is null for libcalls where that information may
6207 not be available.
6208 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6209 the preceding args and about the function being called. It is
6210 not modified in this routine.
4697a36c
MM
6211 NAMED is nonzero if this argument is a named parameter
6212 (otherwise it is an extra parameter matching an ellipsis).
6213
6214 On RS/6000 the first eight words of non-FP are normally in registers
6215 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6216 Under V.4, the first 8 FP args are in registers.
6217
6218 If this is floating-point and no prototype is specified, we use
6219 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6220 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6221 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6222 doesn't support PARALLEL anyway.
6223
6224 Note that for args passed by reference, function_arg will be called
6225 with MODE and TYPE set to that of the pointer to the arg, not the arg
6226 itself. */
4697a36c 6227
9390387d 6228rtx
f676971a 6229function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6230 tree type, int named)
4697a36c 6231{
4cc833b7 6232 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6233
a4f6c312
SS
6234 /* Return a marker to indicate whether CR1 needs to set or clear the
6235 bit that V.4 uses to say fp args were passed in registers.
6236 Assume that we don't need the marker for software floating point,
6237 or compiler generated library calls. */
4697a36c
MM
6238 if (mode == VOIDmode)
6239 {
f607bc57 6240 if (abi == ABI_V4
b9599e46 6241 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6242 && (cum->stdarg
6243 || (cum->nargs_prototype < 0
6244 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6245 {
a3170dc6
AH
6246 /* For the SPE, we need to crxor CR6 always. */
6247 if (TARGET_SPE_ABI)
6248 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6249 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6250 return GEN_INT (cum->call_cookie
6251 | ((cum->fregno == FP_ARG_MIN_REG)
6252 ? CALL_V4_SET_FP_ARGS
6253 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6254 }
4697a36c 6255
7509c759 6256 return GEN_INT (cum->call_cookie);
4697a36c
MM
6257 }
6258
0b5383eb
DJ
6259 if (rs6000_darwin64_abi && mode == BLKmode
6260 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6261 {
0b5383eb 6262 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
6263 if (rslt != NULL_RTX)
6264 return rslt;
6265 /* Else fall through to usual handling. */
6266 }
6267
2858f73a 6268 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
6269 if (TARGET_64BIT && ! cum->prototype)
6270 {
6271 /* Vector parameters get passed in a vector register
6272 and also in GPRs or memory, in the absence of a prototype. */
6273 int align_words;
6274 rtx slot;
6275 align_words = (cum->words + 1) & ~1;
6276
6277 if (align_words >= GP_ARG_NUM_REG)
6278 {
6279 slot = NULL_RTX;
6280 }
6281 else
6282 {
6283 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6284 }
6285 return gen_rtx_PARALLEL (mode,
6286 gen_rtvec (2,
6287 gen_rtx_EXPR_LIST (VOIDmode,
6288 slot, const0_rtx),
6289 gen_rtx_EXPR_LIST (VOIDmode,
6290 gen_rtx_REG (mode, cum->vregno),
6291 const0_rtx)));
6292 }
6293 else
6294 return gen_rtx_REG (mode, cum->vregno);
6295 else if (TARGET_ALTIVEC_ABI
6296 && (ALTIVEC_VECTOR_MODE (mode)
6297 || (type && TREE_CODE (type) == VECTOR_TYPE
6298 && int_size_in_bytes (type) == 16)))
0ac081f6 6299 {
2858f73a 6300 if (named || abi == ABI_V4)
a594a19c 6301 return NULL_RTX;
0ac081f6 6302 else
6303 {
6304 /* Vector parameters to varargs functions under AIX or Darwin
6305 get passed in memory and possibly also in GPRs. */
6306 int align, align_words, n_words;
6307 enum machine_mode part_mode;
6308
6309 /* Vector parameters must be 16-byte aligned. This places them at
6310 2 mod 4 in terms of words in 32-bit mode, since the parameter
6311 save area starts at offset 24 from the stack. In 64-bit mode,
6312 they just have to start on an even word, since the parameter
6313 save area is 16-byte aligned. */
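	  /* Worked example (illustrative): in 32-bit mode with
	     cum->words == 3, align = (2 - 3) & 3 = 3, so align_words = 6
	     and the vector sits at byte 24 + 4*6 = 48 from the stack
	     pointer, which is 16-byte aligned as required.  */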
6314 if (TARGET_32BIT)
4ed78545 6315 align = (2 - cum->words) & 3;
6316 else
6317 align = cum->words & 1;
6318 align_words = cum->words + align;
6319
6320 /* Out of registers? Memory, then. */
6321 if (align_words >= GP_ARG_NUM_REG)
6322 return NULL_RTX;
6323
6324 if (TARGET_32BIT && TARGET_POWERPC64)
6325 return rs6000_mixed_function_arg (mode, type, align_words);
6326
6327 /* The vector value goes in GPRs. Only the part of the
6328 value in GPRs is reported here. */
6329 part_mode = mode;
6330 n_words = rs6000_arg_size (mode, type);
6331 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6332 /* Fortunately, there are only two possibilities, the value
6333 is either wholly in GPRs or half in GPRs and half not. */
6334 part_mode = DImode;
6335
6336 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6337 }
0ac081f6 6338 }
6339 else if (TARGET_SPE_ABI && TARGET_SPE
6340 && (SPE_VECTOR_MODE (mode)
18f63bfa 6341 || (TARGET_E500_DOUBLE && (mode == DFmode
6342 || mode == DCmode
6343 || mode == TFmode
6344 || mode == TCmode))))
a6c9bed4 6345 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6346
f607bc57 6347 else if (abi == ABI_V4)
4697a36c 6348 {
a3170dc6 6349 if (TARGET_HARD_FLOAT && TARGET_FPRS
6350 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6351 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
6352 || (mode == TFmode && !TARGET_IEEEQUAD)
6353 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6354 {
6355 /* _Decimal128 must use an even/odd register pair. This assumes
6356 that the register number is odd when fregno is odd. */
6357 if (mode == TDmode && (cum->fregno % 2) == 1)
6358 cum->fregno++;
6359
6360 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6361 <= FP_ARG_V4_MAX_REG)
6362 return gen_rtx_REG (mode, cum->fregno);
6363 else
b78d48dd 6364 return NULL_RTX;
6365 }
6366 else
6367 {
b2d04ecf 6368 int n_words = rs6000_arg_size (mode, type);
6369 int gregno = cum->sysv_gregno;
6370
6371 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6372 (r7,r8) or (r9,r10). As does any other 2 word item such
6373 as complex int due to a historical mistake. */
6374 if (n_words == 2)
6375 gregno += (1 - gregno) & 1;
4cc833b7 6376
4ed78545 6377 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6378 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6379 return NULL_RTX;
6380
6381 if (TARGET_32BIT && TARGET_POWERPC64)
6382 return rs6000_mixed_function_arg (mode, type,
6383 gregno - GP_ARG_MIN_REG);
6384 return gen_rtx_REG (mode, gregno);
4cc833b7 6385 }
4697a36c 6386 }
6387 else
6388 {
294bd182 6389 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6390
6391 /* _Decimal128 must be passed in an even/odd float register pair.
6392 This assumes that the register number is odd when fregno is odd. */
6393 if (mode == TDmode && (cum->fregno % 2) == 1)
6394 cum->fregno++;
6395
2858f73a 6396 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6397 {
6398 rtx rvec[GP_ARG_NUM_REG + 1];
6399 rtx r;
6400 int k;
6401 bool needs_psave;
6402 enum machine_mode fmode = mode;
6403 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6404
6405 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6406 {
6407 /* Currently, we only ever need one reg here because complex
6408 doubles are split. */
6409 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6410 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6411
6412 /* Long double or _Decimal128 split over regs and memory. */
6413 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6414 }
6415
6416 /* Do we also need to pass this arg in the parameter save
6417 area? */
6418 needs_psave = (type
6419 && (cum->nargs_prototype <= 0
6420 || (DEFAULT_ABI == ABI_AIX
de17c25f 6421 && TARGET_XL_COMPAT
6422 && align_words >= GP_ARG_NUM_REG)));
6423
6424 if (!needs_psave && mode == fmode)
ec6376ab 6425 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6426
ec6376ab 6427 k = 0;
6428 if (needs_psave)
6429 {
ec6376ab 6430 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6431 This piece must come first, before the fprs. */
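	  /* For instance (illustrative only): an unprototyped double whose
	     GPR slot is the sixth argument word (align_words == 5) in
	     64-bit mode yields roughly

		(parallel:DF [(expr_list (reg:DF r8) (const_int 0))
			      (expr_list (reg:DF fN) (const_int 0))])

	     where fN is the next free FP argument register, i.e. the value
	     is made available both in a GPR and in an FPR.  */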
6432 if (align_words < GP_ARG_NUM_REG)
6433 {
6434 unsigned long n_words = rs6000_arg_size (mode, type);
6435
6436 if (align_words + n_words > GP_ARG_NUM_REG
6437 || (TARGET_32BIT && TARGET_POWERPC64))
6438 {
6439 /* If this is partially on the stack, then we only
6440 include the portion actually in registers here. */
6441 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6442 rtx off;
6443 int i = 0;
6444 if (align_words + n_words > GP_ARG_NUM_REG)
6445 /* Not all of the arg fits in gprs. Say that it
6446 goes in memory too, using a magic NULL_RTX
6447 component. Also see comment in
6448 rs6000_mixed_function_arg for why the normal
6449 function_arg_partial_nregs scheme doesn't work
6450 in this case. */
6451 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6452 const0_rtx);
6453 do
6454 {
6455 r = gen_rtx_REG (rmode,
6456 GP_ARG_MIN_REG + align_words);
2e6c9641 6457 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
6458 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6459 }
6460 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6461 }
6462 else
6463 {
6464 /* The whole arg fits in gprs. */
6465 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6466 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6467 }
c53bdcf5 6468 }
6469 else
6470 /* It's entirely in memory. */
6471 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6472 }
6473
6474 /* Describe where this piece goes in the fprs. */
6475 r = gen_rtx_REG (fmode, cum->fregno);
6476 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6477
6478 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
6479 }
6480 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6481 {
6482 if (TARGET_32BIT && TARGET_POWERPC64)
6483 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6484
6485 if (mode == BLKmode)
6486 mode = Pmode;
6487
6488 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6489 }
6490 else
6491 return NULL_RTX;
4697a36c 6492 }
6493}
6494\f
ec6376ab 6495/* For an arg passed partly in registers and partly in memory, this is
6496 the number of bytes passed in registers. For args passed entirely in
6497 registers or entirely in memory, zero. When an arg is described by a
6498 PARALLEL, perhaps using more than one register type, this function
6499 returns the number of bytes used by the first element of the PARALLEL. */
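/* Example (illustrative, assuming the AIX ABI): in 32-bit mode a 16-byte
   aggregate whose first word falls in r9 (align_words == 6) occupies r9,
   r10 and two stack words, so the function below returns
   (8 - 6) * 4 = 8 bytes.  */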
4697a36c 6500
78a52f11
RH
6501static int
6502rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6503 tree type, bool named)
4697a36c 6504{
c53bdcf5 6505 int ret = 0;
ec6376ab 6506 int align_words;
c53bdcf5 6507
f607bc57 6508 if (DEFAULT_ABI == ABI_V4)
4697a36c 6509 return 0;
4697a36c 6510
c53bdcf5
AM
6511 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6512 && cum->nargs_prototype >= 0)
6513 return 0;
6514
6515 /* In this complicated case we just disable the partial_nregs code. */
6516 if (rs6000_darwin64_abi && mode == BLKmode
6517 && TREE_CODE (type) == RECORD_TYPE
6518 && int_size_in_bytes (type) > 0)
6519 return 0;
6520
294bd182 6521 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6522
79773478
AM
6523 if (USE_FP_FOR_ARG_P (cum, mode, type))
6524 {
fb63c729
AM
6525 /* If we are passing this arg in the fixed parameter save area
6526 (gprs or memory) as well as fprs, then this function should
6527 return the number of partial bytes passed in the parameter
6528 save area rather than partial bytes passed in fprs. */
6529 if (type
6530 && (cum->nargs_prototype <= 0
6531 || (DEFAULT_ABI == ABI_AIX
6532 && TARGET_XL_COMPAT
6533 && align_words >= GP_ARG_NUM_REG)))
6534 return 0;
6535 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6536 > FP_ARG_MAX_REG + 1)
ac7e839c 6537 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6538 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6539 return 0;
6540 }
6541
ec6376ab
AM
6542 if (align_words < GP_ARG_NUM_REG
6543 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6544 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6545
c53bdcf5 6546 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6547 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6548
c53bdcf5 6549 return ret;
4697a36c
MM
6550}
6551\f
6552/* A C expression that indicates when an argument must be passed by
6553 reference. If nonzero for an argument, a copy of that argument is
6554 made in memory and a pointer to the argument is passed instead of
6555 the argument itself. The pointer is passed in whatever way is
6556 appropriate for passing a pointer to that type.
6557
6558 Under V.4, aggregates and long double are passed by reference.
6559
6560 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6561 reference unless the AltiVec vector extension ABI is in force.
6562
6563 As an extension to all ABIs, variable sized types are passed by
6564 reference. */
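/* A hedged illustration of the rules above: given

     struct big { int x[8]; };
     void f (struct big b);

   the V.4 ABI makes the caller copy B to memory and pass its address,
   whereas AIX passes the aggregate itself in GPRs and/or the parameter
   save area.  */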
4697a36c 6565
8cd5a4e0 6566static bool
f676971a 6567rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6568 enum machine_mode mode, const_tree type,
bada2eb8 6569 bool named ATTRIBUTE_UNUSED)
4697a36c 6570{
602ea4d3 6571 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6572 {
6573 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6574 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6575 return 1;
6576 }
6577
6578 if (!type)
6579 return 0;
4697a36c 6580
bada2eb8
DE
6581 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6582 {
6583 if (TARGET_DEBUG_ARG)
6584 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6585 return 1;
6586 }
6587
6588 if (int_size_in_bytes (type) < 0)
6589 {
6590 if (TARGET_DEBUG_ARG)
6591 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6592 return 1;
6593 }
6594
6595 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6596 modes only exist for GCC vector types if -maltivec. */
6597 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6598 {
6599 if (TARGET_DEBUG_ARG)
6600 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6601 return 1;
6602 }
b693336b
PB
6603
6604 /* Pass synthetic vectors in memory. */
bada2eb8 6605 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6606 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6607 {
6608 static bool warned_for_pass_big_vectors = false;
6609 if (TARGET_DEBUG_ARG)
6610 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6611 if (!warned_for_pass_big_vectors)
6612 {
d4ee4d25 6613 warning (0, "GCC vector passed by reference: "
b693336b
PB
6614 "non-standard ABI extension with no compatibility guarantee");
6615 warned_for_pass_big_vectors = true;
6616 }
6617 return 1;
6618 }
6619
b2d04ecf 6620 return 0;
4697a36c 6621}
5985c7a6
FJ
6622
6623static void
2d9db8eb 6624rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6625{
6626 int i;
6627 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6628
6629 if (nregs == 0)
6630 return;
6631
c4ad648e 6632 for (i = 0; i < nregs; i++)
5985c7a6 6633 {
9390387d 6634 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6635 if (reload_completed)
c4ad648e
AM
6636 {
6637 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6638 tem = NULL_RTX;
6639 else
6640 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6641 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6642 }
5985c7a6
FJ
6643 else
6644 tem = replace_equiv_address (tem, XEXP (tem, 0));
6645
37409796 6646 gcc_assert (tem);
5985c7a6
FJ
6647
6648 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6649 }
6650}
4697a36c
MM
6651\f
6652/* Perform any actions needed for a function that is receiving a
f676971a 6653 variable number of arguments.
4697a36c
MM
6654
6655 CUM is as above.
6656
6657 MODE and TYPE are the mode and type of the current parameter.
6658
6659 PRETEND_SIZE is a variable that should be set to the amount of stack
6660 that must be pushed by the prolog to pretend that our caller pushed
6661 it.
6662
6663 Normally, this macro will push all remaining incoming registers on the
6664 stack and set PRETEND_SIZE to the length of the registers pushed. */
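/* Illustrative note (assumes the V.4 ABI): for a varargs function such as

     int sum (int n, ...);

   only N arrives in r3, so the code below dumps up to r4-r10 and, with
   hard float enabled, f1-f8 into a register save block in the prologue;
   va_arg later fetches the anonymous arguments from that block or from
   the overflow area on the stack.  */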
6665
c6e8c921 6666static void
f676971a 6667setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6668 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6669 int no_rtl)
4697a36c 6670{
4cc833b7
RH
6671 CUMULATIVE_ARGS next_cum;
6672 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6673 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6674 int first_reg_offset;
6675 alias_set_type set;
4697a36c 6676
f31bf321 6677 /* Skip the last named argument. */
d34c5b80 6678 next_cum = *cum;
594a51fe 6679 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6680
f607bc57 6681 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6682 {
5b667039
JJ
6683 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6684
60e2d0ca 6685 if (! no_rtl)
5b667039
JJ
6686 {
6687 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6688 HOST_WIDE_INT offset = 0;
6689
6690 /* Try to optimize the size of the varargs save area.
6691 The ABI requires that ap.reg_save_area is doubleword
6692 aligned, but we don't need to allocate space for all
6693 the bytes, only those in which we will actually save
6694 anything. */
6695 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6696 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6697 if (TARGET_HARD_FLOAT && TARGET_FPRS
6698 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6699 && cfun->va_list_fpr_size)
6700 {
6701 if (gpr_reg_num)
6702 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6703 * UNITS_PER_FP_WORD;
6704 if (cfun->va_list_fpr_size
6705 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6706 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6707 else
6708 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6709 * UNITS_PER_FP_WORD;
6710 }
6711 if (gpr_reg_num)
6712 {
6713 offset = -((first_reg_offset * reg_size) & ~7);
6714 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6715 {
6716 gpr_reg_num = cfun->va_list_gpr_size;
6717 if (reg_size == 4 && (first_reg_offset & 1))
6718 gpr_reg_num++;
6719 }
6720 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6721 }
6722 else if (fpr_size)
6723 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6724 * UNITS_PER_FP_WORD
6725 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6726
5b667039
JJ
6727 if (gpr_size + fpr_size)
6728 {
6729 rtx reg_save_area
6730 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6731 gcc_assert (GET_CODE (reg_save_area) == MEM);
6732 reg_save_area = XEXP (reg_save_area, 0);
6733 if (GET_CODE (reg_save_area) == PLUS)
6734 {
6735 gcc_assert (XEXP (reg_save_area, 0)
6736 == virtual_stack_vars_rtx);
6737 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6738 offset += INTVAL (XEXP (reg_save_area, 1));
6739 }
6740 else
6741 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6742 }
6743
6744 cfun->machine->varargs_save_offset = offset;
6745 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6746 }
4697a36c 6747 }
60e2d0ca 6748 else
4697a36c 6749 {
d34c5b80 6750 first_reg_offset = next_cum.words;
4cc833b7 6751 save_area = virtual_incoming_args_rtx;
4697a36c 6752
fe984136 6753 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6754 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6755 }
4697a36c 6756
dfafc897 6757 set = get_varargs_alias_set ();
9d30f3c1
JJ
6758 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6759 && cfun->va_list_gpr_size)
4cc833b7 6760 {
9d30f3c1
JJ
6761 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6762
6763 if (va_list_gpr_counter_field)
6764 {
6765 /* V4 va_list_gpr_size counts number of registers needed. */
6766 if (nregs > cfun->va_list_gpr_size)
6767 nregs = cfun->va_list_gpr_size;
6768 }
6769 else
6770 {
6771 /* char * va_list instead counts number of bytes needed. */
6772 if (nregs > cfun->va_list_gpr_size / reg_size)
6773 nregs = cfun->va_list_gpr_size / reg_size;
6774 }
6775
dfafc897 6776 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6777 plus_constant (save_area,
6778 first_reg_offset * reg_size));
6779 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6780 set_mem_alias_set (mem, set);
8ac61af7 6781 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6782
f676971a 6783 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6784 nregs);
4697a36c
MM
6785 }
6786
4697a36c 6787 /* Save FP registers if needed. */
f607bc57 6788 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6789 && TARGET_HARD_FLOAT && TARGET_FPRS
6790 && ! no_rtl
9d30f3c1
JJ
6791 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6792 && cfun->va_list_fpr_size)
4697a36c 6793 {
9d30f3c1 6794 int fregno = next_cum.fregno, nregs;
9ebbca7d 6795 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6796 rtx lab = gen_label_rtx ();
5b667039
JJ
6797 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6798 * UNITS_PER_FP_WORD);
4697a36c 6799
c4ad648e
AM
6800 emit_jump_insn
6801 (gen_rtx_SET (VOIDmode,
6802 pc_rtx,
6803 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6804 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6805 const0_rtx),
39403d82 6806 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6807 pc_rtx)));
6808
9d30f3c1
JJ
6809 for (nregs = 0;
6810 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6811 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6812 {
696e45ba
ME
6813 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6814 ? DFmode : SFmode,
6815 plus_constant (save_area, off));
6816 MEM_NOTRAP_P (mem) = 1;
6817 set_mem_alias_set (mem, set);
6818 set_mem_align (mem, GET_MODE_ALIGNMENT (
6819 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6820 ? DFmode : SFmode));
6821 emit_move_insn (mem, gen_rtx_REG (
6822 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6823 ? DFmode : SFmode, fregno));
4697a36c 6824 }
4cc833b7
RH
6825
6826 emit_label (lab);
4697a36c 6827 }
4697a36c 6828}
4697a36c 6829
dfafc897 6830/* Create the va_list data type. */
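/* For reference (illustrative; mirrors the fields laid out below), the V.4
   va_list built here corresponds to

     typedef struct __va_list_tag {
       unsigned char gpr;            (GP argument registers consumed)
       unsigned char fpr;            (FP argument registers consumed)
       unsigned short reserved;      (named padding, see below)
       void *overflow_arg_area;      (arguments passed on the stack)
       void *reg_save_area;          (prologue register save block)
     } va_list[1];

   while AIX and Darwin simply use a plain 'char *'.  */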
2c4974b7 6831
c35d187f
RH
6832static tree
6833rs6000_build_builtin_va_list (void)
dfafc897 6834{
64c2816f 6835 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6836
9ebbca7d
GK
6837 /* For AIX, prefer 'char *' because that's what the system
6838 header files like. */
f607bc57 6839 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6840 return build_pointer_type (char_type_node);
dfafc897 6841
f1e639b1 6842 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6843 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6844
f676971a 6845 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6846 unsigned_char_type_node);
f676971a 6847 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6848 unsigned_char_type_node);
6849 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6850 every user file. */
6851 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6852 short_unsigned_type_node);
dfafc897
FS
6853 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6854 ptr_type_node);
6855 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6856 ptr_type_node);
6857
9d30f3c1
JJ
6858 va_list_gpr_counter_field = f_gpr;
6859 va_list_fpr_counter_field = f_fpr;
6860
dfafc897
FS
6861 DECL_FIELD_CONTEXT (f_gpr) = record;
6862 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6863 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6864 DECL_FIELD_CONTEXT (f_ovf) = record;
6865 DECL_FIELD_CONTEXT (f_sav) = record;
6866
bab45a51
FS
6867 TREE_CHAIN (record) = type_decl;
6868 TYPE_NAME (record) = type_decl;
dfafc897
FS
6869 TYPE_FIELDS (record) = f_gpr;
6870 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6871 TREE_CHAIN (f_fpr) = f_res;
6872 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6873 TREE_CHAIN (f_ovf) = f_sav;
6874
6875 layout_type (record);
6876
6877 /* The correct type is an array type of one element. */
6878 return build_array_type (record, build_index_type (size_zero_node));
6879}
6880
6881/* Implement va_start. */
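/* Illustrative summary (not from the original sources): after va_start in

     void logv (const char *fmt, ...)

   under V.4 the tag ends up with gpr = 1 (only r3 was used by FMT),
   fpr = 0, overflow_arg_area pointing at the incoming stack arguments and
   reg_save_area pointing at the block saved by setup_incoming_varargs.  */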
6882
d7bd8aeb 6883static void
a2369ed3 6884rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6885{
dfafc897 6886 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6887 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6888 tree gpr, fpr, ovf, sav, t;
2c4974b7 6889
dfafc897 6890 /* Only SVR4 needs something special. */
f607bc57 6891 if (DEFAULT_ABI != ABI_V4)
dfafc897 6892 {
e5faf155 6893 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6894 return;
6895 }
6896
973a648b 6897 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6898 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6899 f_res = TREE_CHAIN (f_fpr);
6900 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6901 f_sav = TREE_CHAIN (f_ovf);
6902
872a65b5 6903 valist = build_va_arg_indirect_ref (valist);
47a25a46 6904 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6905 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6906 f_fpr, NULL_TREE);
6907 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6908 f_ovf, NULL_TREE);
6909 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6910 f_sav, NULL_TREE);
dfafc897
FS
6911
6912 /* Count number of gp and fp argument registers used. */
38173d38
JH
6913 words = crtl->args.info.words;
6914 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6915 GP_ARG_NUM_REG);
38173d38 6916 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6917 FP_ARG_NUM_REG);
dfafc897
FS
6918
6919 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6920 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6921 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6922 words, n_gpr, n_fpr);
dfafc897 6923
6924 if (cfun->va_list_gpr_size)
6925 {
726a989a 6926 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6927 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6928 TREE_SIDE_EFFECTS (t) = 1;
6929 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6930 }
58c8adc1 6931
9d30f3c1
JJ
6932 if (cfun->va_list_fpr_size)
6933 {
726a989a 6934 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6935 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6936 TREE_SIDE_EFFECTS (t) = 1;
6937 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6938 }
dfafc897
FS
6939
6940 /* Find the overflow area. */
6941 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6942 if (words != 0)
5be014d5
AP
6943 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6944 size_int (words * UNITS_PER_WORD));
726a989a 6945 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6946 TREE_SIDE_EFFECTS (t) = 1;
6947 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6948
9d30f3c1
JJ
6949 /* If there were no va_arg invocations, don't set up the register
6950 save area. */
6951 if (!cfun->va_list_gpr_size
6952 && !cfun->va_list_fpr_size
6953 && n_gpr < GP_ARG_NUM_REG
6954 && n_fpr < FP_ARG_V4_MAX_REG)
6955 return;
6956
dfafc897
FS
6957 /* Find the register save area. */
6958 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6959 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6960 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6961 size_int (cfun->machine->varargs_save_offset));
726a989a 6962 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6963 TREE_SIDE_EFFECTS (t) = 1;
6964 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6965}
6966
6967/* Implement va_arg. */
6968
23a60a04 6969tree
6970rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6971 gimple_seq *post_p)
cd3ce9b4 6972{
cd3ce9b4
JM
6973 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6974 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6975 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6976 tree lab_false, lab_over, addr;
6977 int align;
6978 tree ptrtype = build_pointer_type (type);
7393f7f8 6979 int regalign = 0;
726a989a 6980 gimple stmt;
cd3ce9b4 6981
08b0dc1b
RH
6982 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6983 {
6984 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6985 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6986 }
6987
cd3ce9b4
JM
6988 if (DEFAULT_ABI != ABI_V4)
6989 {
08b0dc1b 6990 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6991 {
6992 tree elem_type = TREE_TYPE (type);
6993 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6994 int elem_size = GET_MODE_SIZE (elem_mode);
6995
6996 if (elem_size < UNITS_PER_WORD)
6997 {
23a60a04 6998 tree real_part, imag_part;
726a989a 6999 gimple_seq post = NULL;
cd3ce9b4 7000
7001 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7002 &post);
7003 /* Copy the value into a temporary, lest the formal temporary
7004 be reused out from under us. */
7005 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 7006 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 7007
23a60a04
JM
7008 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7009 post_p);
cd3ce9b4 7010
47a25a46 7011 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
7012 }
7013 }
7014
23a60a04 7015 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
7016 }
7017
7018 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7019 f_fpr = TREE_CHAIN (f_gpr);
7020 f_res = TREE_CHAIN (f_fpr);
7021 f_ovf = TREE_CHAIN (f_res);
7022 f_sav = TREE_CHAIN (f_ovf);
7023
872a65b5 7024 valist = build_va_arg_indirect_ref (valist);
47a25a46 7025 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7026 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7027 f_fpr, NULL_TREE);
7028 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7029 f_ovf, NULL_TREE);
7030 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7031 f_sav, NULL_TREE);
cd3ce9b4
JM
7032
7033 size = int_size_in_bytes (type);
7034 rsize = (size + 3) / 4;
7035 align = 1;
7036
08b0dc1b 7037 if (TARGET_HARD_FLOAT && TARGET_FPRS
7038 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7039 || (TARGET_DOUBLE_FLOAT
7040 && (TYPE_MODE (type) == DFmode
7041 || TYPE_MODE (type) == TFmode
7042 || TYPE_MODE (type) == SDmode
7043 || TYPE_MODE (type) == DDmode
7044 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7045 {
7046 /* FP args go in FP registers, if present. */
cd3ce9b4 7047 reg = fpr;
602ea4d3 7048 n_reg = (size + 7) / 8;
696e45ba
ME
7049 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7050 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7051 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7052 align = 8;
7053 }
7054 else
7055 {
7056 /* Otherwise into GP registers. */
cd3ce9b4
JM
7057 reg = gpr;
7058 n_reg = rsize;
7059 sav_ofs = 0;
7060 sav_scale = 4;
7061 if (n_reg == 2)
7062 align = 8;
7063 }
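  /* Worked example (illustrative, assuming hard float with doubles
     enabled): fetching a 'double' uses reg = fpr, n_reg = 1, sav_ofs = 32
     and sav_scale = 8, so while fpr < 8 the value is loaded from
     reg_save_area + 32 + fpr * 8 and fpr is post-incremented; once the
     eight FP argument registers are exhausted it comes from the overflow
     area instead.  */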
7064
7065 /* Pull the value out of the saved registers.... */
7066
7067 lab_over = NULL;
7068 addr = create_tmp_var (ptr_type_node, "addr");
7069 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7070
7071 /* AltiVec vectors never go in registers when -mabi=altivec. */
7072 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7073 align = 16;
7074 else
7075 {
7076 lab_false = create_artificial_label ();
7077 lab_over = create_artificial_label ();
7078
7079 /* Long long and SPE vectors are aligned in the registers.
7080 As are any other 2 gpr item such as complex int due to a
7081 historical mistake. */
7082 u = reg;
602ea4d3 7083 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7084 {
7393f7f8 7085 regalign = 1;
726a989a 7086 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7087 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7088 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7089 unshare_expr (reg), u);
cd3ce9b4 7090 }
7393f7f8
BE
7091 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7092 reg number is 0 for f1, so we want to make it odd. */
7093 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7094 {
726a989a 7095 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7096 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7097 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7098 }
cd3ce9b4 7099
95674810 7100 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7101 t = build2 (GE_EXPR, boolean_type_node, u, t);
7102 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7103 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7104 gimplify_and_add (t, pre_p);
7105
7106 t = sav;
7107 if (sav_ofs)
5be014d5 7108 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7109
726a989a 7110 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7111 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7112 u = fold_convert (sizetype, u);
7113 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7114 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7115
e41b2a33
PB
7116 /* _Decimal32 varargs are located in the second word of the 64-bit
7117 FP register for 32-bit binaries. */
4f011e1e
JM
7118 if (!TARGET_POWERPC64
7119 && TARGET_HARD_FLOAT && TARGET_FPRS
7120 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7121 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7122
726a989a 7123 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7124
726a989a 7125 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7126
726a989a
RB
7127 stmt = gimple_build_label (lab_false);
7128 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7129
7393f7f8 7130 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7131 {
7132 /* Ensure that we don't find any more args in regs.
7393f7f8 7133 Alignment has been taken care of for the special cases. */
726a989a 7134 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7135 }
7136 }
7137
7138 /* ... otherwise out of the overflow area. */
7139
7140 /* Care for on-stack alignment if needed. */
7141 t = ovf;
7142 if (align != 1)
7143 {
5be014d5
AP
7144 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7145 t = fold_convert (sizetype, t);
4a90aeeb 7146 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7147 size_int (-align));
7148 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7149 }
7150 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7151
726a989a 7152 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7153
5be014d5 7154 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7155 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7156
7157 if (lab_over)
7158 {
726a989a
RB
7159 stmt = gimple_build_label (lab_over);
7160 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7161 }
7162
0cfbc62b
JM
7163 if (STRICT_ALIGNMENT
7164 && (TYPE_ALIGN (type)
7165 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7166 {
7167 /* The value (of type complex double, for example) may not be
7168 aligned in memory in the saved registers, so copy via a
7169 temporary. (This is the same code as used for SPARC.) */
7170 tree tmp = create_tmp_var (type, "va_arg_tmp");
7171 tree dest_addr = build_fold_addr_expr (tmp);
7172
5039610b
SL
7173 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7174 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7175
7176 gimplify_and_add (copy, pre_p);
7177 addr = dest_addr;
7178 }
7179
08b0dc1b 7180 addr = fold_convert (ptrtype, addr);
872a65b5 7181 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7182}
7183
0ac081f6
AH
7184/* Builtins. */
7185
58646b77
PB
7186static void
7187def_builtin (int mask, const char *name, tree type, int code)
7188{
96038623 7189 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7190 {
7191 if (rs6000_builtin_decls[code])
7192 abort ();
7193
7194 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7195 add_builtin_function (name, type, code, BUILT_IN_MD,
7196 NULL, NULL_TREE);
58646b77
PB
7197 }
7198}
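/* A hedged usage sketch: the description tables that follow are walked
   later in this file and each entry is registered through something like

     def_builtin (d->mask, d->name, <the matching function type>, d->code);

   where MASK selects the -m options that must be enabled, the insn code
   (CODE_FOR_nothing for the overloaded "vec_*" forms) drives expansion,
   and the enum value indexes rs6000_builtin_decls.  */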
0ac081f6 7199
24408032
AH
7200/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7201
2212663f 7202static const struct builtin_description bdesc_3arg[] =
24408032
AH
7203{
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7207 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7210 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7211 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7212 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7213 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7214 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7215 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7216 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7217 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7218 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7219 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7220 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7221 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7222 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7223 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7224 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7225 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7226 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7227
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7243
7244 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7245 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7246 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7247 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7248 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7249 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7250 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7251 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7252 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7253};
2212663f 7254
95385cbb
AH
7255/* DST operations: void foo (void *, const int, const char). */
7256
7257static const struct builtin_description bdesc_dst[] =
7258{
7259 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7260 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7261 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7262 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7263
7264 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7265 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7266 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7267 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7268};
7269
2212663f 7270/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7271
a3170dc6 7272static struct builtin_description bdesc_2arg[] =
0ac081f6 7273{
f18c054f
DB
7274 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7275 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7276 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7277 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7278 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7279 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7280 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7281 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7282 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7283 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7285 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7286 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7290 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7291 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7292 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7293 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7304 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7305 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7306 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7307 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7308 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7309 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7310 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7311 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7312 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7313 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7314 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7315 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7316 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7317 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7318 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7319 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7320 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7321 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7322 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7323 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7324 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7325 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7326 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7327 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7328 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7329 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7330 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7331 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7332 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7335 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7336 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7337 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7338 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7339 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7340 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7341 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7342 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7343 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7345 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7347 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7348 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7349 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7350 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7351 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7352 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7353 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7354 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7355 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7356 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7357 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7358 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7359 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7360 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7361 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7362 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7363 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7364 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7365 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7366 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7367 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7368 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7369 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7370 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7371 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7372 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7373 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7374 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7375 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7377 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7378 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7379 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7380 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7381 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7382 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7383 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7384 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7385
58646b77
PB
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7482 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7483 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7484 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7485 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7486 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7487 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7501 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7502 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7503 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7504 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7505 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7506 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7507 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7508 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7509 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7510 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7511 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7512 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7513
96038623
DE
7514 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7515 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7516 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7517 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7518 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7519 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7520 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7521 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7522 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7523 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7524
a3170dc6
AH
7525 /* Place-holder. Leave as first SPE builtin. */
7526 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7527 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7528 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7529 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7530 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7531 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7532 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7533 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7534 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7535 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7536 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7537 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7538 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7539 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7540 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7541 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7542 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7543 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7544 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7545 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7546 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7547 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7548 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7549 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7550 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7551 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7552 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7553 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7554 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7555 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7556 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7557 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7558 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7559 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7560 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7561 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7562 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7563 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7564 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7565 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7566 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7567 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7568 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7569 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7570 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7571 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7572 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7573 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7574 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7575 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7576 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7577 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7578 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7579 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7580 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7581 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7582 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7583 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7584 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7585 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7586 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7587 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7588 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7589 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7590 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7591 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7592 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7593 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7594 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7595 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7596 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7597 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7598 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7599 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7600 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7601 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7602 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7603 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7604 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7605 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7606 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7607 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7608 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7609 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7610 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7611 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7612 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7613 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7614 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7615 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7616 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7617 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7618 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7619 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7620 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7621 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7622 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7623 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7624 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7625 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7626 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7627 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7628 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7629 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7630 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7631 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7632 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7633 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7634 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7635
7636 /* SPE binary operations expecting a 5-bit unsigned literal. */
7637 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7638
7639 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7640 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7641 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7642 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7643 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7644 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7645 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7646 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7647 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7648 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7649 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7650 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7651 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7652 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7653 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7654 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7655 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7656 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7657 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7658 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7659 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7660 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7661 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7662 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7663 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7664 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7665
7666 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7667 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7668};
7669
7670/* AltiVec predicates. */
7671
7672struct builtin_description_predicates
7673{
7674 const unsigned int mask;
7675 const enum insn_code icode;
7676 const char *opcode;
7677 const char *const name;
7678 const enum rs6000_builtins code;
7679};
7680
7681static const struct builtin_description_predicates bdesc_altivec_preds[] =
7682{
7683 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7684 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7685 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7686 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7687 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7688 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7689 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7690 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7691 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7692 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7693 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7694 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7695 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7696
7697 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7698 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7699 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7700};
24408032 7701
a3170dc6
AH
7702/* SPE predicates. */
7703static struct builtin_description bdesc_spe_predicates[] =
7704{
7705 /* Place-holder. Leave as first. */
7706 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7707 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7708 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7709 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7710 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7711 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7712 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7713 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7714 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7715 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7716 /* Place-holder. Leave as last. */
7717 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7718};
7719
7720/* SPE evsel predicates. */
7721static struct builtin_description bdesc_spe_evsel[] =
7722{
7723 /* Place-holder. Leave as first. */
7724 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7725 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7726 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7727 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7728 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7729 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7730 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7731 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7732 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7733 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7734 /* Place-holder. Leave as last. */
7735 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7736};
7737
96038623
DE
7738/* PAIRED predicates. */
7739static const struct builtin_description bdesc_paired_preds[] =
7740{
7741 /* Place-holder. Leave as first. */
7742 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7743 /* Place-holder. Leave as last. */
7744 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7745};
7746
b6d08ca1 7747/* ABS* operations. */
100c4561
AH
7748
7749static const struct builtin_description bdesc_abs[] =
7750{
7751 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7752 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7753 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7754 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7755 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7756 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7757 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7758};
7759
617e0e1d
DB
7760/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7761 foo (VECa). */
24408032 7762
a3170dc6 7763static struct builtin_description bdesc_1arg[] =
2212663f 7764{
617e0e1d
DB
7765 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7766 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7767 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7768 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7769 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7770 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7771 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7772 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7773 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7774 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7775 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7776 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7777 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7778 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7779 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7780 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7781 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7782
58646b77
PB
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7802
a3170dc6
AH
7803 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7804 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7805 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7806 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7807 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7808 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7809 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7810 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7811 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7812 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7813 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7814 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7815 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7816 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7817 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7818 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7819 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7820 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7821 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7822 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7823 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7824 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7825 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7826 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7827 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7828 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7829 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7830 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7831 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7832 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7833
7834 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7835 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7836
7837 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7838 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7839 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7840 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7841 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7842};
7843
7844static rtx
5039610b 7845rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7846{
7847 rtx pat;
5039610b 7848 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7849 rtx op0 = expand_normal (arg0);
2212663f
DB
7850 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7851 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7852
0559cc77
DE
7853 if (icode == CODE_FOR_nothing)
7854 /* Builtin not supported on this processor. */
7855 return 0;
7856
20e26713
AH
7857 /* If we got invalid arguments, bail out before generating bad rtl. */
7858 if (arg0 == error_mark_node)
9a171fcd 7859 return const0_rtx;
20e26713 7860
0559cc77
DE
7861 if (icode == CODE_FOR_altivec_vspltisb
7862 || icode == CODE_FOR_altivec_vspltish
7863 || icode == CODE_FOR_altivec_vspltisw
7864 || icode == CODE_FOR_spe_evsplatfi
7865 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7866 {
7867 /* Only allow 5-bit *signed* literals. */
b44140e7 7868 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7869 || INTVAL (op0) > 15
7870 || INTVAL (op0) < -16)
b44140e7
AH
7871 {
7872 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7873 return const0_rtx;
b44140e7 7874 }
b44140e7
AH
7875 }
7876
c62f2db5 7877 if (target == 0
2212663f
DB
7878 || GET_MODE (target) != tmode
7879 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7880 target = gen_reg_rtx (tmode);
7881
7882 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7883 op0 = copy_to_mode_reg (mode0, op0);
7884
7885 pat = GEN_FCN (icode) (target, op0);
7886 if (! pat)
7887 return 0;
7888 emit_insn (pat);
0ac081f6 7889
2212663f
DB
7890 return target;
7891}
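/* A minimal user-level sketch of the 5-bit signed literal check above,
   assuming the altivec.h intrinsic vec_splat_s8, which typically expands
   to __builtin_altivec_vspltisb and is therefore routed through
   rs6000_expand_unop_builtin with CODE_FOR_altivec_vspltisb:

       #include <altivec.h>

       vector signed char
       splat_five (void)
       {
         return vec_splat_s8 (5);     // literal within -16..15: accepted
       }

   A literal outside -16..15 instead takes the error path above and reports
   "argument 1 must be a 5-bit signed literal".  */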
ae4b4a02 7892
100c4561 7893static rtx
5039610b 7894altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7895{
7896 rtx pat, scratch1, scratch2;
5039610b 7897 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7898 rtx op0 = expand_normal (arg0);
100c4561
AH
7899 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7900 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7901
7902 /* If we have invalid arguments, bail out before generating bad rtl. */
7903 if (arg0 == error_mark_node)
9a171fcd 7904 return const0_rtx;
100c4561
AH
7905
7906 if (target == 0
7907 || GET_MODE (target) != tmode
7908 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7909 target = gen_reg_rtx (tmode);
7910
7911 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7912 op0 = copy_to_mode_reg (mode0, op0);
7913
7914 scratch1 = gen_reg_rtx (mode0);
7915 scratch2 = gen_reg_rtx (mode0);
7916
7917 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7918 if (! pat)
7919 return 0;
7920 emit_insn (pat);
7921
7922 return target;
7923}
7924
0ac081f6 7925static rtx
5039610b 7926rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7927{
7928 rtx pat;
5039610b
SL
7929 tree arg0 = CALL_EXPR_ARG (exp, 0);
7930 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7931 rtx op0 = expand_normal (arg0);
7932 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7933 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7934 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7935 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7936
0559cc77
DE
7937 if (icode == CODE_FOR_nothing)
7938 /* Builtin not supported on this processor. */
7939 return 0;
7940
20e26713
AH
7941 /* If we got invalid arguments, bail out before generating bad rtl. */
7942 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7943 return const0_rtx;
20e26713 7944
0559cc77
DE
7945 if (icode == CODE_FOR_altivec_vcfux
7946 || icode == CODE_FOR_altivec_vcfsx
7947 || icode == CODE_FOR_altivec_vctsxs
7948 || icode == CODE_FOR_altivec_vctuxs
7949 || icode == CODE_FOR_altivec_vspltb
7950 || icode == CODE_FOR_altivec_vsplth
7951 || icode == CODE_FOR_altivec_vspltw
7952 || icode == CODE_FOR_spe_evaddiw
7953 || icode == CODE_FOR_spe_evldd
7954 || icode == CODE_FOR_spe_evldh
7955 || icode == CODE_FOR_spe_evldw
7956 || icode == CODE_FOR_spe_evlhhesplat
7957 || icode == CODE_FOR_spe_evlhhossplat
7958 || icode == CODE_FOR_spe_evlhhousplat
7959 || icode == CODE_FOR_spe_evlwhe
7960 || icode == CODE_FOR_spe_evlwhos
7961 || icode == CODE_FOR_spe_evlwhou
7962 || icode == CODE_FOR_spe_evlwhsplat
7963 || icode == CODE_FOR_spe_evlwwsplat
7964 || icode == CODE_FOR_spe_evrlwi
7965 || icode == CODE_FOR_spe_evslwi
7966 || icode == CODE_FOR_spe_evsrwis
f5119d10 7967 || icode == CODE_FOR_spe_evsubifw
0559cc77 7968 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7969 {
7970 /* Only allow 5-bit unsigned literals. */
8bb418a3 7971 STRIP_NOPS (arg1);
b44140e7
AH
7972 if (TREE_CODE (arg1) != INTEGER_CST
7973 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7974 {
7975 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7976 return const0_rtx;
b44140e7 7977 }
b44140e7
AH
7978 }
7979
c62f2db5 7980 if (target == 0
0ac081f6
AH
7981 || GET_MODE (target) != tmode
7982 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7983 target = gen_reg_rtx (tmode);
7984
7985 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7986 op0 = copy_to_mode_reg (mode0, op0);
7987 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7988 op1 = copy_to_mode_reg (mode1, op1);
7989
7990 pat = GEN_FCN (icode) (target, op0, op1);
7991 if (! pat)
7992 return 0;
7993 emit_insn (pat);
7994
7995 return target;
7996}
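/* A user-level sketch of the 5-bit unsigned literal check above, assuming
   the altivec.h intrinsic vec_ctf, whose signed-int form typically expands
   to __builtin_altivec_vcfsx and so reaches this function with
   CODE_FOR_altivec_vcfsx:

       #include <altivec.h>

       vector float
       fixed_point_to_float (vector signed int v)
       {
         return vec_ctf (v, 3);       // scale literal within 0..31: accepted
       }

   A scale argument outside 0..31 takes the error path above and reports
   "argument 2 must be a 5-bit unsigned literal".  */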
6525c0e7 7997
ae4b4a02 7998static rtx
f676971a 7999altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 8000 tree exp, rtx target)
ae4b4a02
AH
8001{
8002 rtx pat, scratch;
5039610b
SL
8003 tree cr6_form = CALL_EXPR_ARG (exp, 0);
8004 tree arg0 = CALL_EXPR_ARG (exp, 1);
8005 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8006 rtx op0 = expand_normal (arg0);
8007 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
8008 enum machine_mode tmode = SImode;
8009 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8010 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8011 int cr6_form_int;
8012
8013 if (TREE_CODE (cr6_form) != INTEGER_CST)
8014 {
8015 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 8016 return const0_rtx;
ae4b4a02
AH
8017 }
8018 else
8019 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8020
37409796 8021 gcc_assert (mode0 == mode1);
ae4b4a02
AH
8022
8023 /* If we have invalid arguments, bail out before generating bad rtl. */
8024 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8025 return const0_rtx;
ae4b4a02
AH
8026
8027 if (target == 0
8028 || GET_MODE (target) != tmode
8029 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8030 target = gen_reg_rtx (tmode);
8031
8032 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8033 op0 = copy_to_mode_reg (mode0, op0);
8034 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8035 op1 = copy_to_mode_reg (mode1, op1);
8036
8037 scratch = gen_reg_rtx (mode0);
8038
8039 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8040 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
8041 if (! pat)
8042 return 0;
8043 emit_insn (pat);
8044
8045 /* The vec_any* and vec_all* predicates use the same opcodes for two
8046 different operations, but the bits in CR6 will be different
8047 depending on what information we want. So we have to play tricks
8048 with CR6 to get the right bits out.
8049
8050 If you think this is disgusting, look at the specs for the
8051 AltiVec predicates. */
8052
c4ad648e
AM
8053 switch (cr6_form_int)
8054 {
8055 case 0:
8056 emit_insn (gen_cr6_test_for_zero (target));
8057 break;
8058 case 1:
8059 emit_insn (gen_cr6_test_for_zero_reverse (target));
8060 break;
8061 case 2:
8062 emit_insn (gen_cr6_test_for_lt (target));
8063 break;
8064 case 3:
8065 emit_insn (gen_cr6_test_for_lt_reverse (target));
8066 break;
8067 default:
8068 error ("argument 1 of __builtin_altivec_predicate is out of range");
8069 break;
8070 }
ae4b4a02
AH
8071
8072 return target;
8073}
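/* A short user-level sketch of the CR6 trick described above, assuming the
   altivec.h predicates vec_all_eq and vec_any_eq.  For vector signed int
   both typically funnel into the same "*vcmpequw." entry from
   bdesc_altivec_preds; only the CR6 test emitted afterwards (the
   gen_cr6_test_for_* calls above) differs:

       #include <altivec.h>

       int
       compare_both_ways (vector signed int a, vector signed int b)
       {
         int all = vec_all_eq (a, b);   // nonzero iff every element is equal
         int any = vec_any_eq (a, b);   // nonzero iff at least one element is equal
         return (all << 1) | any;
       }
   */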
8074
96038623
DE
8075static rtx
8076paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8077{
8078 rtx pat, addr;
8079 tree arg0 = CALL_EXPR_ARG (exp, 0);
8080 tree arg1 = CALL_EXPR_ARG (exp, 1);
8081 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8082 enum machine_mode mode0 = Pmode;
8083 enum machine_mode mode1 = Pmode;
8084 rtx op0 = expand_normal (arg0);
8085 rtx op1 = expand_normal (arg1);
8086
8087 if (icode == CODE_FOR_nothing)
8088 /* Builtin not supported on this processor. */
8089 return 0;
8090
8091 /* If we got invalid arguments, bail out before generating bad rtl. */
8092 if (arg0 == error_mark_node || arg1 == error_mark_node)
8093 return const0_rtx;
8094
8095 if (target == 0
8096 || GET_MODE (target) != tmode
8097 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8098 target = gen_reg_rtx (tmode);
8099
8100 op1 = copy_to_mode_reg (mode1, op1);
8101
8102 if (op0 == const0_rtx)
8103 {
8104 addr = gen_rtx_MEM (tmode, op1);
8105 }
8106 else
8107 {
8108 op0 = copy_to_mode_reg (mode0, op0);
8109 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8110 }
8111
8112 pat = GEN_FCN (icode) (target, addr);
8113
8114 if (! pat)
8115 return 0;
8116 emit_insn (pat);
8117
8118 return target;
8119}
8120
b4a62fa0 8121static rtx
0b61703c 8122altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
b4a62fa0
SB
8123{
8124 rtx pat, addr;
5039610b
SL
8125 tree arg0 = CALL_EXPR_ARG (exp, 0);
8126 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
8127 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8128 enum machine_mode mode0 = Pmode;
8129 enum machine_mode mode1 = Pmode;
84217346
MD
8130 rtx op0 = expand_normal (arg0);
8131 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
8132
8133 if (icode == CODE_FOR_nothing)
8134 /* Builtin not supported on this processor. */
8135 return 0;
8136
8137 /* If we got invalid arguments, bail out before generating bad rtl. */
8138 if (arg0 == error_mark_node || arg1 == error_mark_node)
8139 return const0_rtx;
8140
8141 if (target == 0
8142 || GET_MODE (target) != tmode
8143 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8144 target = gen_reg_rtx (tmode);
8145
f676971a 8146 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
8147
8148 if (op0 == const0_rtx)
8149 {
0b61703c 8150 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
b4a62fa0
SB
8151 }
8152 else
8153 {
8154 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8155 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
b4a62fa0
SB
8156 }
8157
8158 pat = GEN_FCN (icode) (target, addr);
8159
8160 if (! pat)
8161 return 0;
8162 emit_insn (pat);
8163
8164 return target;
8165}
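/* A user-level sketch of the address formation above, assuming the
   altivec.h intrinsic vec_ld, which typically expands to
   __builtin_altivec_lvx and is expanded through this function:

       #include <altivec.h>

       vector float
       load_two_quadwords (const float *p)
       {
         vector float lo = vec_ld (0, p);    // offset 0: op0 == const0_rtx, plain (mem op1)
         vector float hi = vec_ld (16, p);   // nonzero offset: (mem (plus op0 op1))
         return vec_add (lo, hi);
       }
   */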
8166
61bea3b0 8167static rtx
5039610b 8168spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8169{
5039610b
SL
8170 tree arg0 = CALL_EXPR_ARG (exp, 0);
8171 tree arg1 = CALL_EXPR_ARG (exp, 1);
8172 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8173 rtx op0 = expand_normal (arg0);
8174 rtx op1 = expand_normal (arg1);
8175 rtx op2 = expand_normal (arg2);
61bea3b0
AH
8176 rtx pat;
8177 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8178 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8179 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8180
8181 /* Invalid arguments. Bail out before generating bad rtl. */
8182 if (arg0 == error_mark_node
8183 || arg1 == error_mark_node
8184 || arg2 == error_mark_node)
8185 return const0_rtx;
8186
8187 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8188 op0 = copy_to_mode_reg (mode2, op0);
8189 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8190 op1 = copy_to_mode_reg (mode0, op1);
8191 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8192 op2 = copy_to_mode_reg (mode1, op2);
8193
8194 pat = GEN_FCN (icode) (op1, op2, op0);
8195 if (pat)
8196 emit_insn (pat);
8197 return NULL_RTX;
8198}
8199
96038623
DE
8200static rtx
8201paired_expand_stv_builtin (enum insn_code icode, tree exp)
8202{
8203 tree arg0 = CALL_EXPR_ARG (exp, 0);
8204 tree arg1 = CALL_EXPR_ARG (exp, 1);
8205 tree arg2 = CALL_EXPR_ARG (exp, 2);
8206 rtx op0 = expand_normal (arg0);
8207 rtx op1 = expand_normal (arg1);
8208 rtx op2 = expand_normal (arg2);
8209 rtx pat, addr;
8210 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8211 enum machine_mode mode1 = Pmode;
8212 enum machine_mode mode2 = Pmode;
8213
8214 /* Invalid arguments. Bail out before generating bad rtl. */
8215 if (arg0 == error_mark_node
8216 || arg1 == error_mark_node
8217 || arg2 == error_mark_node)
8218 return const0_rtx;
8219
8220 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8221 op0 = copy_to_mode_reg (tmode, op0);
8222
8223 op2 = copy_to_mode_reg (mode2, op2);
8224
8225 if (op1 == const0_rtx)
8226 {
8227 addr = gen_rtx_MEM (tmode, op2);
8228 }
8229 else
8230 {
8231 op1 = copy_to_mode_reg (mode1, op1);
8232 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8233 }
8234
8235 pat = GEN_FCN (icode) (addr, op0);
8236 if (pat)
8237 emit_insn (pat);
8238 return NULL_RTX;
8239}
8240
6525c0e7 8241static rtx
5039610b 8242altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8243{
5039610b
SL
8244 tree arg0 = CALL_EXPR_ARG (exp, 0);
8245 tree arg1 = CALL_EXPR_ARG (exp, 1);
8246 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8247 rtx op0 = expand_normal (arg0);
8248 rtx op1 = expand_normal (arg1);
8249 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8250 rtx pat, addr;
8251 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8252 enum machine_mode mode1 = Pmode;
8253 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8254
8255 /* Invalid arguments. Bail out before generating bad rtl. */
8256 if (arg0 == error_mark_node
8257 || arg1 == error_mark_node
8258 || arg2 == error_mark_node)
9a171fcd 8259 return const0_rtx;
6525c0e7 8260
b4a62fa0
SB
8261 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8262 op0 = copy_to_mode_reg (tmode, op0);
8263
f676971a 8264 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8265
8266 if (op1 == const0_rtx)
8267 {
8268 addr = gen_rtx_MEM (tmode, op2);
8269 }
8270 else
8271 {
8272 op1 = copy_to_mode_reg (mode1, op1);
8273 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8274 }
6525c0e7 8275
b4a62fa0 8276 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8277 if (pat)
8278 emit_insn (pat);
8279 return NULL_RTX;
8280}
8281
2212663f 8282static rtx
5039610b 8283rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8284{
8285 rtx pat;
5039610b
SL
8286 tree arg0 = CALL_EXPR_ARG (exp, 0);
8287 tree arg1 = CALL_EXPR_ARG (exp, 1);
8288 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8289 rtx op0 = expand_normal (arg0);
8290 rtx op1 = expand_normal (arg1);
8291 rtx op2 = expand_normal (arg2);
2212663f
DB
8292 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8293 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8294 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8295 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8296
774b5662
DE
8297 if (icode == CODE_FOR_nothing)
8298 /* Builtin not supported on this processor. */
8299 return 0;
8300
20e26713
AH
8301 /* If we got invalid arguments, bail out before generating bad rtl. */
8302 if (arg0 == error_mark_node
8303 || arg1 == error_mark_node
8304 || arg2 == error_mark_node)
9a171fcd 8305 return const0_rtx;
20e26713 8306
aba5fb01
NS
8307 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8308 || icode == CODE_FOR_altivec_vsldoi_v4si
8309 || icode == CODE_FOR_altivec_vsldoi_v8hi
8310 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8311 {
8312 /* Only allow 4-bit unsigned literals. */
8bb418a3 8313 STRIP_NOPS (arg2);
b44140e7
AH
8314 if (TREE_CODE (arg2) != INTEGER_CST
8315 || TREE_INT_CST_LOW (arg2) & ~0xf)
8316 {
8317 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8318 return const0_rtx;
b44140e7 8319 }
b44140e7
AH
8320 }
8321
c62f2db5 8322 if (target == 0
2212663f
DB
8323 || GET_MODE (target) != tmode
8324 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8325 target = gen_reg_rtx (tmode);
8326
8327 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8328 op0 = copy_to_mode_reg (mode0, op0);
8329 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8330 op1 = copy_to_mode_reg (mode1, op1);
8331 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8332 op2 = copy_to_mode_reg (mode2, op2);
8333
49e39588
RE
8334 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8335 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8336 else
8337 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8338 if (! pat)
8339 return 0;
8340 emit_insn (pat);
8341
8342 return target;
8343}
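/* A user-level sketch of the 4-bit unsigned literal check above, assuming
   the altivec.h intrinsic vec_sld, which typically expands to one of the
   altivec_vsldoi_* patterns handled here:

       #include <altivec.h>

       vector signed int
       shift_left_four_octets (vector signed int a, vector signed int b)
       {
         return vec_sld (a, b, 4);    // shift literal within 0..15: accepted
       }

   A shift argument outside 0..15 takes the error path above and reports
   "argument 3 must be a 4-bit unsigned literal".  */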
92898235 8344
3a9b8c7e 8345/* Expand the lvx builtins. */
0ac081f6 8346static rtx
a2369ed3 8347altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8348{
5039610b 8349 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8350 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8351 tree arg0;
8352 enum machine_mode tmode, mode0;
7c3abc73 8353 rtx pat, op0;
3a9b8c7e 8354 enum insn_code icode;
92898235 8355
0ac081f6
AH
8356 switch (fcode)
8357 {
f18c054f 8358 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8359 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8360 break;
f18c054f 8361 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8362 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8363 break;
8364 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8365 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8366 break;
8367 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8368 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8369 break;
8370 default:
8371 *expandedp = false;
8372 return NULL_RTX;
8373 }
0ac081f6 8374
3a9b8c7e 8375 *expandedp = true;
f18c054f 8376
5039610b 8377 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8378 op0 = expand_normal (arg0);
3a9b8c7e
AH
8379 tmode = insn_data[icode].operand[0].mode;
8380 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8381
3a9b8c7e
AH
8382 if (target == 0
8383 || GET_MODE (target) != tmode
8384 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8385 target = gen_reg_rtx (tmode);
24408032 8386
3a9b8c7e
AH
8387 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8388 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8389
3a9b8c7e
AH
8390 pat = GEN_FCN (icode) (target, op0);
8391 if (! pat)
8392 return 0;
8393 emit_insn (pat);
8394 return target;
8395}
f18c054f 8396
3a9b8c7e
AH
8397/* Expand the stvx builtins. */
8398static rtx
f676971a 8399altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8400 bool *expandedp)
3a9b8c7e 8401{
5039610b 8402 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8403 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8404 tree arg0, arg1;
8405 enum machine_mode mode0, mode1;
7c3abc73 8406 rtx pat, op0, op1;
3a9b8c7e 8407 enum insn_code icode;
f18c054f 8408
3a9b8c7e
AH
8409 switch (fcode)
8410 {
8411 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8412 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8413 break;
8414 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8415 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8416 break;
8417 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8418 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8419 break;
8420 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8421 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8422 break;
8423 default:
8424 *expandedp = false;
8425 return NULL_RTX;
8426 }
24408032 8427
5039610b
SL
8428 arg0 = CALL_EXPR_ARG (exp, 0);
8429 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8430 op0 = expand_normal (arg0);
8431 op1 = expand_normal (arg1);
3a9b8c7e
AH
8432 mode0 = insn_data[icode].operand[0].mode;
8433 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8434
3a9b8c7e
AH
8435 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8436 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8437 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8438 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8439
3a9b8c7e
AH
8440 pat = GEN_FCN (icode) (op0, op1);
8441 if (pat)
8442 emit_insn (pat);
f18c054f 8443
3a9b8c7e
AH
8444 *expandedp = true;
8445 return NULL_RTX;
8446}
f18c054f 8447
3a9b8c7e
AH
8448/* Expand the dst builtins. */
8449static rtx
f676971a 8450altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8451 bool *expandedp)
3a9b8c7e 8452{
5039610b 8453 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8454 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8455 tree arg0, arg1, arg2;
8456 enum machine_mode mode0, mode1, mode2;
7c3abc73 8457 rtx pat, op0, op1, op2;
586de218 8458 const struct builtin_description *d;
a3170dc6 8459 size_t i;
f18c054f 8460
3a9b8c7e 8461 *expandedp = false;
f18c054f 8462
3a9b8c7e 8463 /* Handle DST variants. */
586de218 8464 d = bdesc_dst;
3a9b8c7e
AH
8465 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8466 if (d->code == fcode)
8467 {
5039610b
SL
8468 arg0 = CALL_EXPR_ARG (exp, 0);
8469 arg1 = CALL_EXPR_ARG (exp, 1);
8470 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8471 op0 = expand_normal (arg0);
8472 op1 = expand_normal (arg1);
8473 op2 = expand_normal (arg2);
3a9b8c7e
AH
8474 mode0 = insn_data[d->icode].operand[0].mode;
8475 mode1 = insn_data[d->icode].operand[1].mode;
8476 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8477
3a9b8c7e
AH
8478 /* Invalid arguments, bail out before generating bad rtl. */
8479 if (arg0 == error_mark_node
8480 || arg1 == error_mark_node
8481 || arg2 == error_mark_node)
8482 return const0_rtx;
f18c054f 8483
86e7df90 8484 *expandedp = true;
8bb418a3 8485 STRIP_NOPS (arg2);
3a9b8c7e
AH
8486 if (TREE_CODE (arg2) != INTEGER_CST
8487 || TREE_INT_CST_LOW (arg2) & ~0x3)
8488 {
9e637a26 8489 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8490 return const0_rtx;
8491 }
f18c054f 8492
3a9b8c7e 8493 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8494 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8495 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8496 op1 = copy_to_mode_reg (mode1, op1);
24408032 8497
3a9b8c7e
AH
8498 pat = GEN_FCN (d->icode) (op0, op1, op2);
8499 if (pat != 0)
8500 emit_insn (pat);
f18c054f 8501
3a9b8c7e
AH
8502 return NULL_RTX;
8503 }
f18c054f 8504
3a9b8c7e
AH
8505 return NULL_RTX;
8506}
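/* A user-level sketch of the 2-bit literal check above, assuming the
   altivec.h intrinsic vec_dst, whose last argument selects one of the four
   data-stream channels.  The stream control word is only copied into a
   register here, so its encoding is not validated by this function:

       #include <altivec.h>

       void
       prefetch_stream (const float *p, int control)
       {
         vec_dst (p, control, 2);     // channel literal within 0..3: accepted
       }

   A channel argument outside 0..3 reports
   "argument to %qs must be a 2-bit unsigned literal" with the builtin name.  */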
24408032 8507
7a4eca66
DE
8508/* Expand vec_init builtin. */
8509static rtx
5039610b 8510altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8511{
8512 enum machine_mode tmode = TYPE_MODE (type);
8513 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8514 int i, n_elt = GET_MODE_NUNITS (tmode);
8515 rtvec v = rtvec_alloc (n_elt);
8516
8517 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8518 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8519
5039610b 8520 for (i = 0; i < n_elt; ++i)
7a4eca66 8521 {
5039610b 8522 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8523 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8524 }
8525
7a4eca66
DE
8526 if (!target || !register_operand (target, tmode))
8527 target = gen_reg_rtx (tmode);
8528
8529 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8530 return target;
8531}
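/* A sketch of the contract enforced above: the vec_init builtin receives
   exactly one scalar argument per vector element (n_elt ==
   call_expr_nargs) and the result is assembled by
   rs6000_expand_vector_init.  Assuming a builtin spelled
   __builtin_vec_init_v4si (that spelling is an assumption made here for
   illustration):

       __vector signed int
       build_v4si (int a, int b, int c, int d)
       {
         return __builtin_vec_init_v4si (a, b, c, d);   // four scalars for V4SImode
       }
   */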
8532
8533/* Return the integer constant in ARG. Constrain it to be in the range
8534 of the subparts of VEC_TYPE; issue an error if not. */
8535
8536static int
8537get_element_number (tree vec_type, tree arg)
8538{
8539 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8540
8541 if (!host_integerp (arg, 1)
8542 || (elt = tree_low_cst (arg, 1), elt > max))
8543 {
8544 error ("selector must be an integer constant in the range 0..%wi", max);
8545 return 0;
8546 }
8547
8548 return elt;
8549}
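/* A sketch of the range check above for a V4SImode vector, where
   TYPE_VECTOR_SUBPARTS is 4 and the selector must therefore be a constant
   in 0..3.  The builtin spelling __builtin_vec_ext_v4si is an assumption
   used for illustration:

       int
       third_lane (__vector signed int v)
       {
         return __builtin_vec_ext_v4si (v, 2);   // selector 2 is within 0..3
       }

   A non-constant or out-of-range selector reports
   "selector must be an integer constant in the range 0..3".  */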
8550
8551/* Expand vec_set builtin. */
8552static rtx
5039610b 8553altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8554{
8555 enum machine_mode tmode, mode1;
8556 tree arg0, arg1, arg2;
8557 int elt;
8558 rtx op0, op1;
8559
5039610b
SL
8560 arg0 = CALL_EXPR_ARG (exp, 0);
8561 arg1 = CALL_EXPR_ARG (exp, 1);
8562 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8563
8564 tmode = TYPE_MODE (TREE_TYPE (arg0));
8565 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8566 gcc_assert (VECTOR_MODE_P (tmode));
8567
8568 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8569 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8570 elt = get_element_number (TREE_TYPE (arg0), arg2);
8571
8572 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8573 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8574
8575 op0 = force_reg (tmode, op0);
8576 op1 = force_reg (mode1, op1);
8577
8578 rs6000_expand_vector_set (op0, op1, elt);
8579
8580 return op0;
8581}
8582
8583/* Expand vec_ext builtin. */
8584static rtx
5039610b 8585altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8586{
8587 enum machine_mode tmode, mode0;
8588 tree arg0, arg1;
8589 int elt;
8590 rtx op0;
8591
5039610b
SL
8592 arg0 = CALL_EXPR_ARG (exp, 0);
8593 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8594
84217346 8595 op0 = expand_normal (arg0);
7a4eca66
DE
8596 elt = get_element_number (TREE_TYPE (arg0), arg1);
8597
8598 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8599 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8600 gcc_assert (VECTOR_MODE_P (mode0));
8601
8602 op0 = force_reg (mode0, op0);
8603
8604 if (optimize || !target || !register_operand (target, tmode))
8605 target = gen_reg_rtx (tmode);
8606
8607 rs6000_expand_vector_extract (target, op0, elt);
8608
8609 return target;
8610}
8611
3a9b8c7e
AH
8612/* Expand the builtin in EXP and store the result in TARGET. Store
8613 true in *EXPANDEDP if we found a builtin to expand. */
8614static rtx
a2369ed3 8615altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8616{
586de218
KG
8617 const struct builtin_description *d;
8618 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8619 size_t i;
8620 enum insn_code icode;
5039610b 8621 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8622 tree arg0;
8623 rtx op0, pat;
8624 enum machine_mode tmode, mode0;
3a9b8c7e 8625 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8626
58646b77
PB
8627 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8628 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8629 {
8630 *expandedp = true;
ea40ba9c 8631 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8632 return const0_rtx;
8633 }
8634
3a9b8c7e
AH
8635 target = altivec_expand_ld_builtin (exp, target, expandedp);
8636 if (*expandedp)
8637 return target;
0ac081f6 8638
3a9b8c7e
AH
8639 target = altivec_expand_st_builtin (exp, target, expandedp);
8640 if (*expandedp)
8641 return target;
8642
8643 target = altivec_expand_dst_builtin (exp, target, expandedp);
8644 if (*expandedp)
8645 return target;
8646
8647 *expandedp = true;
95385cbb 8648
3a9b8c7e
AH
8649 switch (fcode)
8650 {
6525c0e7 8651 case ALTIVEC_BUILTIN_STVX:
5039610b 8652 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8653 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8654 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8655 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8656 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8657 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8658 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8659 case ALTIVEC_BUILTIN_STVXL:
5039610b 8660 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8661
0b61703c
AP
8662 case ALTIVEC_BUILTIN_STVLX:
8663 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8664 case ALTIVEC_BUILTIN_STVLXL:
8665 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8666 case ALTIVEC_BUILTIN_STVRX:
8667 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8668 case ALTIVEC_BUILTIN_STVRXL:
8669 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8670
95385cbb
AH
8671 case ALTIVEC_BUILTIN_MFVSCR:
8672 icode = CODE_FOR_altivec_mfvscr;
8673 tmode = insn_data[icode].operand[0].mode;
8674
8675 if (target == 0
8676 || GET_MODE (target) != tmode
8677 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8678 target = gen_reg_rtx (tmode);
f676971a 8679
95385cbb 8680 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8681 if (! pat)
8682 return 0;
8683 emit_insn (pat);
95385cbb
AH
8684 return target;
8685
8686 case ALTIVEC_BUILTIN_MTVSCR:
8687 icode = CODE_FOR_altivec_mtvscr;
5039610b 8688 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8689 op0 = expand_normal (arg0);
95385cbb
AH
8690 mode0 = insn_data[icode].operand[0].mode;
8691
8692 /* If we got invalid arguments, bail out before generating bad rtl. */
8693 if (arg0 == error_mark_node)
9a171fcd 8694 return const0_rtx;
95385cbb
AH
8695
8696 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8697 op0 = copy_to_mode_reg (mode0, op0);
8698
8699 pat = GEN_FCN (icode) (op0);
8700 if (pat)
8701 emit_insn (pat);
8702 return NULL_RTX;
3a9b8c7e 8703
95385cbb
AH
8704 case ALTIVEC_BUILTIN_DSSALL:
8705 emit_insn (gen_altivec_dssall ());
8706 return NULL_RTX;
8707
8708 case ALTIVEC_BUILTIN_DSS:
8709 icode = CODE_FOR_altivec_dss;
5039610b 8710 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8711 STRIP_NOPS (arg0);
84217346 8712 op0 = expand_normal (arg0);
95385cbb
AH
8713 mode0 = insn_data[icode].operand[0].mode;
8714
8716 /* If we got invalid arguments, bail out before generating bad rtl. */
8716 if (arg0 == error_mark_node)
9a171fcd 8717 return const0_rtx;
95385cbb 8718
b44140e7
AH
8719 if (TREE_CODE (arg0) != INTEGER_CST
8720 || TREE_INT_CST_LOW (arg0) & ~0x3)
8721 {
8722 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8723 return const0_rtx;
b44140e7
AH
8724 }
8725
95385cbb
AH
8726 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8727 op0 = copy_to_mode_reg (mode0, op0);
8728
8729 emit_insn (gen_altivec_dss (op0));
0ac081f6 8730 return NULL_RTX;
7a4eca66
DE
8731
8732 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8733 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8734 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8735 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8736 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8737
8738 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8739 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8740 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8741 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8742 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8743
8744 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8745 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8746 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8747 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8748 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8749
8750 default:
8751 break;
8752 /* Fall through to the table-driven expanders below. */
0ac081f6 8753 }
24408032 8754
100c4561 8755 /* Expand abs* operations. */
586de218 8756 d = bdesc_abs;
ca7558fc 8757 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8758 if (d->code == fcode)
5039610b 8759 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8760
ae4b4a02 8761 /* Expand the AltiVec predicates. */
586de218 8762 dp = bdesc_altivec_preds;
ca7558fc 8763 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8764 if (dp->code == fcode)
c4ad648e 8765 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8766 exp, target);
ae4b4a02 8767
6525c0e7
AH
8768 /* LV* are funky. They are initialized differently from the other builtins, so expand them individually here. */
8769 switch (fcode)
8770 {
8771 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8772 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 8773 exp, target, false);
6525c0e7 8774 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8775 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 8776 exp, target, false);
6525c0e7 8777 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8778 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 8779 exp, target, false);
6525c0e7 8780 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8781 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 8782 exp, target, false);
6525c0e7 8783 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8784 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 8785 exp, target, false);
6525c0e7 8786 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8787 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 8788 exp, target, false);
6525c0e7 8789 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8790 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
8791 exp, target, false);
8792 case ALTIVEC_BUILTIN_LVLX:
8793 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8794 exp, target, true);
8795 case ALTIVEC_BUILTIN_LVLXL:
8796 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8797 exp, target, true);
8798 case ALTIVEC_BUILTIN_LVRX:
8799 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8800 exp, target, true);
8801 case ALTIVEC_BUILTIN_LVRXL:
8802 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8803 exp, target, true);
6525c0e7
AH
8804 default:
8805 break;
8806 /* Fall through; no builtin was expanded. */
8807 }
95385cbb 8808
92898235 8809 *expandedp = false;
0ac081f6
AH
8810 return NULL_RTX;
8811}
8812
96038623
DE
8813/* Expand the builtin in EXP and store the result in TARGET. Store
8814 true in *EXPANDEDP if we found a builtin to expand. */
8815static rtx
8816paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8817{
8818 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8819 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8820 const struct builtin_description *d;
96038623
DE
8821 size_t i;
8822
8823 *expandedp = true;
8824
8825 switch (fcode)
8826 {
8827 case PAIRED_BUILTIN_STX:
8828 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8829 case PAIRED_BUILTIN_LX:
8830 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8831 default:
8832 break;
8833 /* Fall through to the paired predicate handling below. */
8834 }
8835
8836 /* Expand the paired predicates. */
23a651fc 8837 d = bdesc_paired_preds;
96038623
DE
8838 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8839 if (d->code == fcode)
8840 return paired_expand_predicate_builtin (d->icode, exp, target);
8841
8842 *expandedp = false;
8843 return NULL_RTX;
8844}
8845
a3170dc6
AH
8846/* Binops that need to be initialized manually, but can be expanded
8847 automagically by rs6000_expand_binop_builtin. */
8848static struct builtin_description bdesc_2arg_spe[] =
8849{
8850 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8851 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8852 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8853 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8854 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8855 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8856 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8857 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8858 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8859 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8860 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8861 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8862 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8863 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8864 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8865 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8866 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8867 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8868 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8869 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8870 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8871 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8872};
8873
8874/* Expand the builtin in EXP and store the result in TARGET. Store
8875 true in *EXPANDEDP if we found a builtin to expand.
8876
8877 This expands the SPE builtins that are not simple unary and binary
8878 operations. */
8879static rtx
a2369ed3 8880spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8881{
5039610b 8882 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8883 tree arg1, arg0;
8884 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8885 enum insn_code icode;
8886 enum machine_mode tmode, mode0;
8887 rtx pat, op0;
8888 struct builtin_description *d;
8889 size_t i;
8890
8891 *expandedp = true;
8892
8893 /* Syntax check for a 5-bit unsigned immediate. */
8894 switch (fcode)
8895 {
8896 case SPE_BUILTIN_EVSTDD:
8897 case SPE_BUILTIN_EVSTDH:
8898 case SPE_BUILTIN_EVSTDW:
8899 case SPE_BUILTIN_EVSTWHE:
8900 case SPE_BUILTIN_EVSTWHO:
8901 case SPE_BUILTIN_EVSTWWE:
8902 case SPE_BUILTIN_EVSTWWO:
5039610b 8903 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8904 if (TREE_CODE (arg1) != INTEGER_CST
8905 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8906 {
8907 error ("argument 2 must be a 5-bit unsigned literal");
8908 return const0_rtx;
8909 }
8910 break;
8911 default:
8912 break;
8913 }
8914
00332c9f
AH
8915 /* The evsplat*i instructions are not quite generic. */
8916 switch (fcode)
8917 {
8918 case SPE_BUILTIN_EVSPLATFI:
8919 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8920 exp, target);
00332c9f
AH
8921 case SPE_BUILTIN_EVSPLATI:
8922 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8923 exp, target);
00332c9f
AH
8924 default:
8925 break;
8926 }
8927
a3170dc6
AH
8928 d = (struct builtin_description *) bdesc_2arg_spe;
8929 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8930 if (d->code == fcode)
5039610b 8931 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8932
8933 d = (struct builtin_description *) bdesc_spe_predicates;
8934 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8935 if (d->code == fcode)
5039610b 8936 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8937
8938 d = (struct builtin_description *) bdesc_spe_evsel;
8939 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8940 if (d->code == fcode)
5039610b 8941 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8942
8943 switch (fcode)
8944 {
8945 case SPE_BUILTIN_EVSTDDX:
5039610b 8946 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8947 case SPE_BUILTIN_EVSTDHX:
5039610b 8948 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8949 case SPE_BUILTIN_EVSTDWX:
5039610b 8950 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8951 case SPE_BUILTIN_EVSTWHEX:
5039610b 8952 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8953 case SPE_BUILTIN_EVSTWHOX:
5039610b 8954 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8955 case SPE_BUILTIN_EVSTWWEX:
5039610b 8956 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8957 case SPE_BUILTIN_EVSTWWOX:
5039610b 8958 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8959 case SPE_BUILTIN_EVSTDD:
5039610b 8960 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8961 case SPE_BUILTIN_EVSTDH:
5039610b 8962 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8963 case SPE_BUILTIN_EVSTDW:
5039610b 8964 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8965 case SPE_BUILTIN_EVSTWHE:
5039610b 8966 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8967 case SPE_BUILTIN_EVSTWHO:
5039610b 8968 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8969 case SPE_BUILTIN_EVSTWWE:
5039610b 8970 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8971 case SPE_BUILTIN_EVSTWWO:
5039610b 8972 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8973 case SPE_BUILTIN_MFSPEFSCR:
8974 icode = CODE_FOR_spe_mfspefscr;
8975 tmode = insn_data[icode].operand[0].mode;
8976
8977 if (target == 0
8978 || GET_MODE (target) != tmode
8979 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8980 target = gen_reg_rtx (tmode);
f676971a 8981
a3170dc6
AH
8982 pat = GEN_FCN (icode) (target);
8983 if (! pat)
8984 return 0;
8985 emit_insn (pat);
8986 return target;
8987 case SPE_BUILTIN_MTSPEFSCR:
8988 icode = CODE_FOR_spe_mtspefscr;
5039610b 8989 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8990 op0 = expand_normal (arg0);
a3170dc6
AH
8991 mode0 = insn_data[icode].operand[0].mode;
8992
8993 if (arg0 == error_mark_node)
8994 return const0_rtx;
8995
8996 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8997 op0 = copy_to_mode_reg (mode0, op0);
8998
8999 pat = GEN_FCN (icode) (op0);
9000 if (pat)
9001 emit_insn (pat);
9002 return NULL_RTX;
9003 default:
9004 break;
9005 }
9006
9007 *expandedp = false;
9008 return NULL_RTX;
9009}
9010
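/* Expand one of the paired-single predicate builtins.  The first call
   argument selects which bit of the CCFP comparison result becomes the
   integer return value: 0 = LT, 1 = GT, 2 = EQ, 3 = UN (see the switch
   on form_int below).  */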
96038623
DE
9011static rtx
9012paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9013{
9014 rtx pat, scratch, tmp;
9015 tree form = CALL_EXPR_ARG (exp, 0);
9016 tree arg0 = CALL_EXPR_ARG (exp, 1);
9017 tree arg1 = CALL_EXPR_ARG (exp, 2);
9018 rtx op0 = expand_normal (arg0);
9019 rtx op1 = expand_normal (arg1);
9020 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9021 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9022 int form_int;
9023 enum rtx_code code;
9024
9025 if (TREE_CODE (form) != INTEGER_CST)
9026 {
9027 error ("argument 1 of __builtin_paired_predicate must be a constant");
9028 return const0_rtx;
9029 }
9030 else
9031 form_int = TREE_INT_CST_LOW (form);
9032
9033 gcc_assert (mode0 == mode1);
9034
9035 if (arg0 == error_mark_node || arg1 == error_mark_node)
9036 return const0_rtx;
9037
9038 if (target == 0
9039 || GET_MODE (target) != SImode
9040 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9041 target = gen_reg_rtx (SImode);
9042 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9043 op0 = copy_to_mode_reg (mode0, op0);
9044 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9045 op1 = copy_to_mode_reg (mode1, op1);
9046
9047 scratch = gen_reg_rtx (CCFPmode);
9048
9049 pat = GEN_FCN (icode) (scratch, op0, op1);
9050 if (!pat)
9051 return const0_rtx;
9052
9053 emit_insn (pat);
9054
9055 switch (form_int)
9056 {
9057 /* LT bit. */
9058 case 0:
9059 code = LT;
9060 break;
9061 /* GT bit. */
9062 case 1:
9063 code = GT;
9064 break;
9065 /* EQ bit. */
9066 case 2:
9067 code = EQ;
9068 break;
9069 /* UN bit. */
9070 case 3:
9071 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9072 return target;
9073 default:
9074 error ("argument 1 of __builtin_paired_predicate is out of range");
9075 return const0_rtx;
9076 }
9077
9078 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9079 emit_move_insn (target, tmp);
9080 return target;
9081}
9082
a3170dc6 9083static rtx
5039610b 9084spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9085{
9086 rtx pat, scratch, tmp;
5039610b
SL
9087 tree form = CALL_EXPR_ARG (exp, 0);
9088 tree arg0 = CALL_EXPR_ARG (exp, 1);
9089 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9090 rtx op0 = expand_normal (arg0);
9091 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9092 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9093 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9094 int form_int;
9095 enum rtx_code code;
9096
9097 if (TREE_CODE (form) != INTEGER_CST)
9098 {
9099 error ("argument 1 of __builtin_spe_predicate must be a constant");
9100 return const0_rtx;
9101 }
9102 else
9103 form_int = TREE_INT_CST_LOW (form);
9104
37409796 9105 gcc_assert (mode0 == mode1);
a3170dc6
AH
9106
9107 if (arg0 == error_mark_node || arg1 == error_mark_node)
9108 return const0_rtx;
9109
9110 if (target == 0
9111 || GET_MODE (target) != SImode
9112 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9113 target = gen_reg_rtx (SImode);
9114
9115 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9116 op0 = copy_to_mode_reg (mode0, op0);
9117 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9118 op1 = copy_to_mode_reg (mode1, op1);
9119
9120 scratch = gen_reg_rtx (CCmode);
9121
9122 pat = GEN_FCN (icode) (scratch, op0, op1);
9123 if (! pat)
9124 return const0_rtx;
9125 emit_insn (pat);
9126
9127 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9128 _lower_. We use one compare, but look in different bits of the
9129 CR for each variant.
9130
9131 There are 2 elements in each SPE simd type (upper/lower). The CR
9132 bits are set as follows:
9133
9134 BIT 0 | BIT 1 | BIT 2 | BIT 3
9135 U | L | (U | L) | (U & L)
9136
9137 So, for an "all" relationship, BIT 3 would be set.
9138 For an "any" relationship, BIT 2 would be set. Etc.
9139
9140 Following traditional nomenclature, these bits map to:
9141
9142 BIT 0 | BIT 1 | BIT 2 | BIT 3
9143 LT | GT | EQ | OV
9144
9145 Later, we will generate rtl to look in the OV/EQ/LT/GT bit for the all/any/upper/lower variants, respectively.
9146 */
9147
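/* A hedged usage sketch (builtin name and wrapper usage assumed, shown
   only to illustrate how the FORM argument selects the CR bit):

     all_gt = __builtin_spe_evcmpgts (0, a, b);   form 0, "all",  OV bit
     any_gt = __builtin_spe_evcmpgts (1, a, b);   form 1, "any",  EQ bit

   Both calls emit a single compare; only the CR bit that is copied
   into the integer result differs.  */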
9148 switch (form_int)
9149 {
9150 /* All variant. OV bit. */
9151 case 0:
9152 /* We need to get to the OV bit, which is the ORDERED bit. We
9153 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9154 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9155 So let's just use another pattern. */
9156 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9157 return target;
9158 /* Any variant. EQ bit. */
9159 case 1:
9160 code = EQ;
9161 break;
9162 /* Upper variant. LT bit. */
9163 case 2:
9164 code = LT;
9165 break;
9166 /* Lower variant. GT bit. */
9167 case 3:
9168 code = GT;
9169 break;
9170 default:
9171 error ("argument 1 of __builtin_spe_predicate is out of range");
9172 return const0_rtx;
9173 }
9174
9175 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9176 emit_move_insn (target, tmp);
9177
9178 return target;
9179}
9180
9181/* The evsel builtins look like this:
9182
9183 e = __builtin_spe_evsel_OP (a, b, c, d);
9184
9185 and work like this:
9186
9187 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9188 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9189*/
9190
9191static rtx
5039610b 9192spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9193{
9194 rtx pat, scratch;
5039610b
SL
9195 tree arg0 = CALL_EXPR_ARG (exp, 0);
9196 tree arg1 = CALL_EXPR_ARG (exp, 1);
9197 tree arg2 = CALL_EXPR_ARG (exp, 2);
9198 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9199 rtx op0 = expand_normal (arg0);
9200 rtx op1 = expand_normal (arg1);
9201 rtx op2 = expand_normal (arg2);
9202 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9203 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9204 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9205
37409796 9206 gcc_assert (mode0 == mode1);
a3170dc6
AH
9207
9208 if (arg0 == error_mark_node || arg1 == error_mark_node
9209 || arg2 == error_mark_node || arg3 == error_mark_node)
9210 return const0_rtx;
9211
9212 if (target == 0
9213 || GET_MODE (target) != mode0
9214 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9215 target = gen_reg_rtx (mode0);
9216
9217 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9218 op0 = copy_to_mode_reg (mode0, op0);
9219 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9220 op1 = copy_to_mode_reg (mode0, op1);
9221 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9222 op2 = copy_to_mode_reg (mode0, op2);
9223 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9224 op3 = copy_to_mode_reg (mode0, op3);
9225
9226 /* Generate the compare. */
9227 scratch = gen_reg_rtx (CCmode);
9228 pat = GEN_FCN (icode) (scratch, op0, op1);
9229 if (! pat)
9230 return const0_rtx;
9231 emit_insn (pat);
9232
9233 if (mode0 == V2SImode)
9234 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9235 else
9236 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9237
9238 return target;
9239}
9240
0ac081f6
AH
9241/* Expand an expression EXP that calls a built-in function,
9242 with result going to TARGET if that's convenient
9243 (and in mode MODE if that's convenient).
9244 SUBTARGET may be used as the target for computing one of EXP's operands.
9245 IGNORE is nonzero if the value is to be ignored. */
9246
9247static rtx
a2369ed3 9248rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9249 enum machine_mode mode ATTRIBUTE_UNUSED,
9250 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9251{
5039610b 9252 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9253 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9254 const struct builtin_description *d;
92898235
AH
9255 size_t i;
9256 rtx ret;
9257 bool success;
f676971a 9258
9c78b944
DE
9259 if (fcode == RS6000_BUILTIN_RECIP)
9260 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9261
9262 if (fcode == RS6000_BUILTIN_RECIPF)
9263 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9264
9265 if (fcode == RS6000_BUILTIN_RSQRTF)
9266 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9267
7ccf35ed
DN
9268 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9269 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9270 {
9271 int icode = (int) CODE_FOR_altivec_lvsr;
9272 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9273 enum machine_mode mode = insn_data[icode].operand[1].mode;
9274 tree arg;
9275 rtx op, addr, pat;
9276
37409796 9277 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9278
5039610b 9279 arg = CALL_EXPR_ARG (exp, 0);
37409796 9280 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9281 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9282 addr = memory_address (mode, op);
9283 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9284 op = addr;
9285 else
9286 {
9287 /* For the load case, we need to negate the address. */
9288 op = gen_reg_rtx (GET_MODE (addr));
9289 emit_insn (gen_rtx_SET (VOIDmode, op,
9290 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9291 }
7ccf35ed
DN
9292 op = gen_rtx_MEM (mode, op);
9293
9294 if (target == 0
9295 || GET_MODE (target) != tmode
9296 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9297 target = gen_reg_rtx (tmode);
9298
9299 /*pat = gen_altivec_lvsr (target, op);*/
9300 pat = GEN_FCN (icode) (target, op);
9301 if (!pat)
9302 return 0;
9303 emit_insn (pat);
9304
9305 return target;
9306 }
5039610b
SL
9307
9308 /* FIXME: There's got to be a nicer way to handle this case than
9309 constructing a new CALL_EXPR. */
f57d17f1 9310 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9311 || fcode == ALTIVEC_BUILTIN_VCFSX
9312 || fcode == ALTIVEC_BUILTIN_VCTUXS
9313 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9314 {
5039610b
SL
9315 if (call_expr_nargs (exp) == 1)
9316 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9317 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9318 }
7ccf35ed 9319
0ac081f6 9320 if (TARGET_ALTIVEC)
92898235
AH
9321 {
9322 ret = altivec_expand_builtin (exp, target, &success);
9323
a3170dc6
AH
9324 if (success)
9325 return ret;
9326 }
9327 if (TARGET_SPE)
9328 {
9329 ret = spe_expand_builtin (exp, target, &success);
9330
92898235
AH
9331 if (success)
9332 return ret;
9333 }
96038623
DE
9334 if (TARGET_PAIRED_FLOAT)
9335 {
9336 ret = paired_expand_builtin (exp, target, &success);
9337
9338 if (success)
9339 return ret;
9340 }
92898235 9341
96038623 9342 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9343
37409796
NS
9344 /* Handle simple unary operations. */
9345 d = (struct builtin_description *) bdesc_1arg;
9346 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9347 if (d->code == fcode)
5039610b 9348 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9349
37409796
NS
9350 /* Handle simple binary operations. */
9351 d = (struct builtin_description *) bdesc_2arg;
9352 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9353 if (d->code == fcode)
5039610b 9354 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9355
37409796 9356 /* Handle simple ternary operations. */
586de218 9357 d = bdesc_3arg;
37409796
NS
9358 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9359 if (d->code == fcode)
5039610b 9360 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9361
37409796 9362 gcc_unreachable ();
0ac081f6
AH
9363}
9364
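/* Build a vector type whose element type is a fresh copy of NODE that
   is its own main variant and canonical type.  The result is therefore
   a distinct type rather than a variant of a vector built directly
   over NODE; the opaque SPE/paired vector types created below appear
   to rely on that.  */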
7c62e993
PB
9365static tree
9366build_opaque_vector_type (tree node, int nunits)
9367{
9368 node = copy_node (node);
9369 TYPE_MAIN_VARIANT (node) = node;
0627bfb4 9370 TYPE_CANONICAL (node) = node;
7c62e993
PB
9371 return build_vector_type (node, nunits);
9372}
9373
0ac081f6 9374static void
863d938c 9375rs6000_init_builtins (void)
0ac081f6 9376{
5afaa917
NS
9377 tree tdecl;
9378
4a5eab38
PB
9379 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9380 V2SF_type_node = build_vector_type (float_type_node, 2);
9381 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9382 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9383 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9384 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9385 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9386
9387 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9388 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9389 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9390
7c62e993
PB
9391 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9392 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9393 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9394 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9395
8bb418a3
ZL
9396 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9397 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9398 'vector unsigned short'. */
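/* A hedged illustration of why the distinction matters: in C++,

     void f (vector bool char);
     void f (vector unsigned char);

   must be two different overloads, which only works if the bool types
   are distinct type nodes rather than aliases of the unsigned ones.  */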
9399
8dd16ecc
NS
9400 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9401 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9402 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9403 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9404
58646b77
PB
9405 long_integer_type_internal_node = long_integer_type_node;
9406 long_unsigned_type_internal_node = long_unsigned_type_node;
9407 intQI_type_internal_node = intQI_type_node;
9408 uintQI_type_internal_node = unsigned_intQI_type_node;
9409 intHI_type_internal_node = intHI_type_node;
9410 uintHI_type_internal_node = unsigned_intHI_type_node;
9411 intSI_type_internal_node = intSI_type_node;
9412 uintSI_type_internal_node = unsigned_intSI_type_node;
9413 float_type_internal_node = float_type_node;
9414 void_type_internal_node = void_type_node;
9415
5afaa917
NS
9416 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool char"),
9417 bool_char_type_node);
9418 TYPE_NAME (bool_char_type_node) = tdecl;
9419 (*lang_hooks.decls.pushdecl) (tdecl);
9420 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool short"),
9421 bool_short_type_node);
9422 TYPE_NAME (bool_short_type_node) = tdecl;
9423 (*lang_hooks.decls.pushdecl) (tdecl);
9424 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool int"),
9425 bool_int_type_node);
9426 TYPE_NAME (bool_int_type_node) = tdecl;
9427 (*lang_hooks.decls.pushdecl) (tdecl);
9428 tdecl = build_decl (TYPE_DECL, get_identifier ("__pixel"),
9429 pixel_type_node);
9430 TYPE_NAME (pixel_type_node) = tdecl;
9431 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9432
4a5eab38
PB
9433 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9434 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9435 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9436 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3 9437
5afaa917
NS
9438 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned char"),
9439 unsigned_V16QI_type_node);
9440 TYPE_NAME (unsigned_V16QI_type_node) = tdecl;
9441 (*lang_hooks.decls.pushdecl) (tdecl);
9442 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed char"),
9443 V16QI_type_node);
9444 TYPE_NAME (V16QI_type_node) = tdecl;
9445 (*lang_hooks.decls.pushdecl) (tdecl);
9446 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool char"),
9447 bool_V16QI_type_node);
9448 TYPE_NAME (bool_V16QI_type_node) = tdecl;
9449 (*lang_hooks.decls.pushdecl) (tdecl);
9450
9451 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned short"),
9452 unsigned_V8HI_type_node);
9453 TYPE_NAME (unsigned_V8HI_type_node) = tdecl;
9454 (*lang_hooks.decls.pushdecl) (tdecl);
9455 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed short"),
9456 V8HI_type_node);
9457 TYPE_NAME (V8HI_type_node) = tdecl;
9458 (*lang_hooks.decls.pushdecl) (tdecl);
9459 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool short"),
9460 bool_V8HI_type_node);
9461 TYPE_NAME (bool_V8HI_type_node) = tdecl;
9462 (*lang_hooks.decls.pushdecl) (tdecl);
9463
9464 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned int"),
9465 unsigned_V4SI_type_node);
9466 TYPE_NAME (unsigned_V4SI_type_node) = tdecl;
9467 (*lang_hooks.decls.pushdecl) (tdecl);
9468 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed int"),
9469 V4SI_type_node);
9470 TYPE_NAME (V4SI_type_node) = tdecl;
9471 (*lang_hooks.decls.pushdecl) (tdecl);
9472 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool int"),
9473 bool_V4SI_type_node);
9474 TYPE_NAME (bool_V4SI_type_node) = tdecl;
9475 (*lang_hooks.decls.pushdecl) (tdecl);
9476
9477 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector float"),
9478 V4SF_type_node);
9479 TYPE_NAME (V4SF_type_node) = tdecl;
9480 (*lang_hooks.decls.pushdecl) (tdecl);
9481 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __pixel"),
9482 pixel_V8HI_type_node);
9483 TYPE_NAME (pixel_V8HI_type_node) = tdecl;
9484 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9485
96038623
DE
9486 if (TARGET_PAIRED_FLOAT)
9487 paired_init_builtins ();
a3170dc6 9488 if (TARGET_SPE)
3fdaa45a 9489 spe_init_builtins ();
0ac081f6
AH
9490 if (TARGET_ALTIVEC)
9491 altivec_init_builtins ();
96038623 9492 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9493 rs6000_common_init_builtins ();
9c78b944
DE
9494 if (TARGET_PPC_GFXOPT)
9495 {
9496 tree ftype = build_function_type_list (float_type_node,
9497 float_type_node,
9498 float_type_node,
9499 NULL_TREE);
9500 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9501 RS6000_BUILTIN_RECIPF);
9502
9503 ftype = build_function_type_list (float_type_node,
9504 float_type_node,
9505 NULL_TREE);
9506 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9507 RS6000_BUILTIN_RSQRTF);
9508 }
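/* A hedged usage sketch (semantics inferred from the function types and
   insn names above, not from separate documentation):

     float q = __builtin_recipdivf (x, y);   approximates x / y
     float r = __builtin_rsqrtf (x);         approximates 1.0f / sqrtf (x)  */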
9509 if (TARGET_POPCNTB)
9510 {
9511 tree ftype = build_function_type_list (double_type_node,
9512 double_type_node,
9513 double_type_node,
9514 NULL_TREE);
9515 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9516 RS6000_BUILTIN_RECIP);
9517
9518 }
69ca3549
DE
9519
9520#if TARGET_XCOFF
9521 /* AIX libm provides clog as __clog. */
9522 if (built_in_decls [BUILT_IN_CLOG])
9523 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9524#endif
fb220235
FXC
9525
9526#ifdef SUBTARGET_INIT_BUILTINS
9527 SUBTARGET_INIT_BUILTINS;
9528#endif
0ac081f6
AH
9529}
9530
a3170dc6
AH
9531/* Search through a set of builtins and enable the mask bits.
9532 DESC is an array of builtins.
b6d08ca1 9533 SIZE is the total number of builtins.
a3170dc6
AH
9534 START is the builtin enum at which to start.
9535 END is the builtin enum at which to end. */
0ac081f6 9536static void
a2369ed3 9537enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9538 enum rs6000_builtins start,
a2369ed3 9539 enum rs6000_builtins end)
a3170dc6
AH
9540{
9541 int i;
9542
9543 for (i = 0; i < size; ++i)
9544 if (desc[i].code == start)
9545 break;
9546
9547 if (i == size)
9548 return;
9549
9550 for (; i < size; ++i)
9551 {
9552 /* Flip all the bits on. */
9553 desc[i].mask = target_flags;
9554 if (desc[i].code == end)
9555 break;
9556 }
9557}
9558
9559static void
863d938c 9560spe_init_builtins (void)
0ac081f6 9561{
a3170dc6
AH
9562 tree endlink = void_list_node;
9563 tree puint_type_node = build_pointer_type (unsigned_type_node);
9564 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9565 struct builtin_description *d;
0ac081f6
AH
9566 size_t i;
9567
a3170dc6
AH
9568 tree v2si_ftype_4_v2si
9569 = build_function_type
3fdaa45a
AH
9570 (opaque_V2SI_type_node,
9571 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9572 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9573 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9574 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9575 endlink)))));
9576
9577 tree v2sf_ftype_4_v2sf
9578 = build_function_type
3fdaa45a
AH
9579 (opaque_V2SF_type_node,
9580 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9581 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9582 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9583 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9584 endlink)))));
9585
9586 tree int_ftype_int_v2si_v2si
9587 = build_function_type
9588 (integer_type_node,
9589 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9590 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9591 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9592 endlink))));
9593
9594 tree int_ftype_int_v2sf_v2sf
9595 = build_function_type
9596 (integer_type_node,
9597 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9598 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9599 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9600 endlink))));
9601
9602 tree void_ftype_v2si_puint_int
9603 = build_function_type (void_type_node,
3fdaa45a 9604 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9605 tree_cons (NULL_TREE, puint_type_node,
9606 tree_cons (NULL_TREE,
9607 integer_type_node,
9608 endlink))));
9609
9610 tree void_ftype_v2si_puint_char
9611 = build_function_type (void_type_node,
3fdaa45a 9612 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9613 tree_cons (NULL_TREE, puint_type_node,
9614 tree_cons (NULL_TREE,
9615 char_type_node,
9616 endlink))));
9617
9618 tree void_ftype_v2si_pv2si_int
9619 = build_function_type (void_type_node,
3fdaa45a 9620 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9621 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9622 tree_cons (NULL_TREE,
9623 integer_type_node,
9624 endlink))));
9625
9626 tree void_ftype_v2si_pv2si_char
9627 = build_function_type (void_type_node,
3fdaa45a 9628 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9629 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9630 tree_cons (NULL_TREE,
9631 char_type_node,
9632 endlink))));
9633
9634 tree void_ftype_int
9635 = build_function_type (void_type_node,
9636 tree_cons (NULL_TREE, integer_type_node, endlink));
9637
9638 tree int_ftype_void
36e8d515 9639 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9640
9641 tree v2si_ftype_pv2si_int
3fdaa45a 9642 = build_function_type (opaque_V2SI_type_node,
6035d635 9643 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9644 tree_cons (NULL_TREE, integer_type_node,
9645 endlink)));
9646
9647 tree v2si_ftype_puint_int
3fdaa45a 9648 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9649 tree_cons (NULL_TREE, puint_type_node,
9650 tree_cons (NULL_TREE, integer_type_node,
9651 endlink)));
9652
9653 tree v2si_ftype_pushort_int
3fdaa45a 9654 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9655 tree_cons (NULL_TREE, pushort_type_node,
9656 tree_cons (NULL_TREE, integer_type_node,
9657 endlink)));
9658
00332c9f
AH
9659 tree v2si_ftype_signed_char
9660 = build_function_type (opaque_V2SI_type_node,
9661 tree_cons (NULL_TREE, signed_char_type_node,
9662 endlink));
9663
a3170dc6
AH
9664 /* The initialization of the simple binary and unary builtins is
9665 done in rs6000_common_init_builtins, but we have to enable the
9666 mask bits here manually because we have run out of `target_flags'
9667 bits. We really need to redesign this mask business. */
9668
9669 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9670 ARRAY_SIZE (bdesc_2arg),
9671 SPE_BUILTIN_EVADDW,
9672 SPE_BUILTIN_EVXOR);
9673 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9674 ARRAY_SIZE (bdesc_1arg),
9675 SPE_BUILTIN_EVABS,
9676 SPE_BUILTIN_EVSUBFUSIAAW);
9677 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9678 ARRAY_SIZE (bdesc_spe_predicates),
9679 SPE_BUILTIN_EVCMPEQ,
9680 SPE_BUILTIN_EVFSTSTLT);
9681 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9682 ARRAY_SIZE (bdesc_spe_evsel),
9683 SPE_BUILTIN_EVSEL_CMPGTS,
9684 SPE_BUILTIN_EVSEL_FSTSTEQ);
9685
36252949
AH
9686 (*lang_hooks.decls.pushdecl)
9687 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9688 opaque_V2SI_type_node));
9689
a3170dc6 9690 /* Initialize irregular SPE builtins. */
f676971a 9691
a3170dc6
AH
9692 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9693 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9694 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9695 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9696 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9697 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9698 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9699 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9700 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9701 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9702 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9703 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9704 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9705 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9706 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9707 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9708 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9709 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9710
9711 /* Loads. */
9712 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9713 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9714 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9715 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9716 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9717 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9718 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9719 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9720 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9721 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9722 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9723 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9724 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9725 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9726 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9727 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9728 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9729 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9730 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9731 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9732 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9733 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9734
9735 /* Predicates. */
9736 d = (struct builtin_description *) bdesc_spe_predicates;
9737 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9738 {
9739 tree type;
9740
9741 switch (insn_data[d->icode].operand[1].mode)
9742 {
9743 case V2SImode:
9744 type = int_ftype_int_v2si_v2si;
9745 break;
9746 case V2SFmode:
9747 type = int_ftype_int_v2sf_v2sf;
9748 break;
9749 default:
37409796 9750 gcc_unreachable ();
a3170dc6
AH
9751 }
9752
9753 def_builtin (d->mask, d->name, type, d->code);
9754 }
9755
9756 /* Evsel predicates. */
9757 d = (struct builtin_description *) bdesc_spe_evsel;
9758 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9759 {
9760 tree type;
9761
9762 switch (insn_data[d->icode].operand[1].mode)
9763 {
9764 case V2SImode:
9765 type = v2si_ftype_4_v2si;
9766 break;
9767 case V2SFmode:
9768 type = v2sf_ftype_4_v2sf;
9769 break;
9770 default:
37409796 9771 gcc_unreachable ();
a3170dc6
AH
9772 }
9773
9774 def_builtin (d->mask, d->name, type, d->code);
9775 }
9776}
9777
96038623
DE
9778static void
9779paired_init_builtins (void)
9780{
23a651fc 9781 const struct builtin_description *d;
96038623
DE
9782 size_t i;
9783 tree endlink = void_list_node;
9784
9785 tree int_ftype_int_v2sf_v2sf
9786 = build_function_type
9787 (integer_type_node,
9788 tree_cons (NULL_TREE, integer_type_node,
9789 tree_cons (NULL_TREE, V2SF_type_node,
9790 tree_cons (NULL_TREE, V2SF_type_node,
9791 endlink))));
9792 tree pcfloat_type_node =
9793 build_pointer_type (build_qualified_type
9794 (float_type_node, TYPE_QUAL_CONST));
9795
9796 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9797 long_integer_type_node,
9798 pcfloat_type_node,
9799 NULL_TREE);
9800 tree void_ftype_v2sf_long_pcfloat =
9801 build_function_type_list (void_type_node,
9802 V2SF_type_node,
9803 long_integer_type_node,
9804 pcfloat_type_node,
9805 NULL_TREE);
9806
9807
9808 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9809 PAIRED_BUILTIN_LX);
9810
9811
9812 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9813 PAIRED_BUILTIN_STX);
9814
9815 /* Predicates. */
23a651fc 9816 d = bdesc_paired_preds;
96038623
DE
9817 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9818 {
9819 tree type;
9820
9821 switch (insn_data[d->icode].operand[1].mode)
9822 {
9823 case V2SFmode:
9824 type = int_ftype_int_v2sf_v2sf;
9825 break;
9826 default:
9827 gcc_unreachable ();
9828 }
9829
9830 def_builtin (d->mask, d->name, type, d->code);
9831 }
9832}
9833
a3170dc6 9834static void
863d938c 9835altivec_init_builtins (void)
a3170dc6 9836{
586de218
KG
9837 const struct builtin_description *d;
9838 const struct builtin_description_predicates *dp;
a3170dc6 9839 size_t i;
7a4eca66
DE
9840 tree ftype;
9841
a3170dc6
AH
9842 tree pfloat_type_node = build_pointer_type (float_type_node);
9843 tree pint_type_node = build_pointer_type (integer_type_node);
9844 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9845 tree pchar_type_node = build_pointer_type (char_type_node);
9846
9847 tree pvoid_type_node = build_pointer_type (void_type_node);
9848
0dbc3651
ZW
9849 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9850 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9851 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9852 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9853
9854 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9855
58646b77
PB
9856 tree int_ftype_opaque
9857 = build_function_type_list (integer_type_node,
9858 opaque_V4SI_type_node, NULL_TREE);
266b4890
AP
9859 tree opaque_ftype_opaque
9860 = build_function_type (integer_type_node,
9861 NULL_TREE);
58646b77
PB
9862 tree opaque_ftype_opaque_int
9863 = build_function_type_list (opaque_V4SI_type_node,
9864 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9865 tree opaque_ftype_opaque_opaque_int
9866 = build_function_type_list (opaque_V4SI_type_node,
9867 opaque_V4SI_type_node, opaque_V4SI_type_node,
9868 integer_type_node, NULL_TREE);
9869 tree int_ftype_int_opaque_opaque
9870 = build_function_type_list (integer_type_node,
9871 integer_type_node, opaque_V4SI_type_node,
9872 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9873 tree int_ftype_int_v4si_v4si
9874 = build_function_type_list (integer_type_node,
9875 integer_type_node, V4SI_type_node,
9876 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9877 tree v4sf_ftype_pcfloat
9878 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9879 tree void_ftype_pfloat_v4sf
b4de2f7d 9880 = build_function_type_list (void_type_node,
a3170dc6 9881 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9882 tree v4si_ftype_pcint
9883 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9884 tree void_ftype_pint_v4si
b4de2f7d
AH
9885 = build_function_type_list (void_type_node,
9886 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9887 tree v8hi_ftype_pcshort
9888 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9889 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9890 = build_function_type_list (void_type_node,
9891 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9892 tree v16qi_ftype_pcchar
9893 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9894 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9895 = build_function_type_list (void_type_node,
9896 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9897 tree void_ftype_v4si
b4de2f7d 9898 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9899 tree v8hi_ftype_void
9900 = build_function_type (V8HI_type_node, void_list_node);
9901 tree void_ftype_void
9902 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9903 tree void_ftype_int
9904 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9905
58646b77
PB
9906 tree opaque_ftype_long_pcvoid
9907 = build_function_type_list (opaque_V4SI_type_node,
9908 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9909 tree v16qi_ftype_long_pcvoid
a3170dc6 9910 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9911 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9912 tree v8hi_ftype_long_pcvoid
a3170dc6 9913 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9914 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9915 tree v4si_ftype_long_pcvoid
a3170dc6 9916 = build_function_type_list (V4SI_type_node,
b4a62fa0 9917 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9918
58646b77
PB
9919 tree void_ftype_opaque_long_pvoid
9920 = build_function_type_list (void_type_node,
9921 opaque_V4SI_type_node, long_integer_type_node,
9922 pvoid_type_node, NULL_TREE);
b4a62fa0 9923 tree void_ftype_v4si_long_pvoid
b4de2f7d 9924 = build_function_type_list (void_type_node,
b4a62fa0 9925 V4SI_type_node, long_integer_type_node,
b4de2f7d 9926 pvoid_type_node, NULL_TREE);
b4a62fa0 9927 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9928 = build_function_type_list (void_type_node,
b4a62fa0 9929 V16QI_type_node, long_integer_type_node,
b4de2f7d 9930 pvoid_type_node, NULL_TREE);
b4a62fa0 9931 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9932 = build_function_type_list (void_type_node,
b4a62fa0 9933 V8HI_type_node, long_integer_type_node,
b4de2f7d 9934 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9935 tree int_ftype_int_v8hi_v8hi
9936 = build_function_type_list (integer_type_node,
9937 integer_type_node, V8HI_type_node,
9938 V8HI_type_node, NULL_TREE);
9939 tree int_ftype_int_v16qi_v16qi
9940 = build_function_type_list (integer_type_node,
9941 integer_type_node, V16QI_type_node,
9942 V16QI_type_node, NULL_TREE);
9943 tree int_ftype_int_v4sf_v4sf
9944 = build_function_type_list (integer_type_node,
9945 integer_type_node, V4SF_type_node,
9946 V4SF_type_node, NULL_TREE);
9947 tree v4si_ftype_v4si
9948 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9949 tree v8hi_ftype_v8hi
9950 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9951 tree v16qi_ftype_v16qi
9952 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9953 tree v4sf_ftype_v4sf
9954 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9955 tree void_ftype_pcvoid_int_int
a3170dc6 9956 = build_function_type_list (void_type_node,
0dbc3651 9957 pcvoid_type_node, integer_type_node,
8bb418a3 9958 integer_type_node, NULL_TREE);
8bb418a3 9959
0dbc3651
ZW
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9961 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9963 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9965 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9967 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9969 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9971 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9973 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9974 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9975 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9976 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9977 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9978 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9979 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9980 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9981 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9982 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9983 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9984 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9985 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9986 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9987 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9988 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9989 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9990 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9991 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9992 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9993 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9994 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9995 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9996 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9997 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9998 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9999 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
10000 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
10001 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
10002 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
10003 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
10004 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
10005 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
10006
0b61703c
AP
10007 if (rs6000_cpu == PROCESSOR_CELL)
10008 {
10009 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
10010 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
10011 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
10012 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
10013
10014 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
10015 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
10016 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
10017 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
10018
10019 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
10020 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10021 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10022 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10023
10024 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10025 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10026 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10027 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10028 }
58646b77 10029 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
266b4890
AP
10030 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10031 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
58646b77
PB
10032
10033 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10034 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
266b4890
AP
10035 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10036 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
58646b77
PB
10037 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10038 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10039 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10040 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10041 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10042 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10043 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10044 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 10045
a3170dc6 10046 /* Add the DST variants. */
586de218 10047 d = bdesc_dst;
a3170dc6 10048 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 10049 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
10050
10051 /* Initialize the predicates. */
586de218 10052 dp = bdesc_altivec_preds;
a3170dc6
AH
10053 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10054 {
10055 enum machine_mode mode1;
10056 tree type;
58646b77
PB
10057 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10058 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10059
58646b77
PB
10060 if (is_overloaded)
10061 mode1 = VOIDmode;
10062 else
10063 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
10064
10065 switch (mode1)
10066 {
58646b77
PB
10067 case VOIDmode:
10068 type = int_ftype_int_opaque_opaque;
10069 break;
a3170dc6
AH
10070 case V4SImode:
10071 type = int_ftype_int_v4si_v4si;
10072 break;
10073 case V8HImode:
10074 type = int_ftype_int_v8hi_v8hi;
10075 break;
10076 case V16QImode:
10077 type = int_ftype_int_v16qi_v16qi;
10078 break;
10079 case V4SFmode:
10080 type = int_ftype_int_v4sf_v4sf;
10081 break;
10082 default:
37409796 10083 gcc_unreachable ();
a3170dc6 10084 }
f676971a 10085
a3170dc6
AH
10086 def_builtin (dp->mask, dp->name, type, dp->code);
10087 }
10088
10089 /* Initialize the abs* operators. */
586de218 10090 d = bdesc_abs;
a3170dc6
AH
10091 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10092 {
10093 enum machine_mode mode0;
10094 tree type;
10095
10096 mode0 = insn_data[d->icode].operand[0].mode;
10097
10098 switch (mode0)
10099 {
10100 case V4SImode:
10101 type = v4si_ftype_v4si;
10102 break;
10103 case V8HImode:
10104 type = v8hi_ftype_v8hi;
10105 break;
10106 case V16QImode:
10107 type = v16qi_ftype_v16qi;
10108 break;
10109 case V4SFmode:
10110 type = v4sf_ftype_v4sf;
10111 break;
10112 default:
37409796 10113 gcc_unreachable ();
a3170dc6 10114 }
f676971a 10115
a3170dc6
AH
10116 def_builtin (d->mask, d->name, type, d->code);
10117 }
7ccf35ed 10118
13c62176
DN
10119 if (TARGET_ALTIVEC)
10120 {
10121 tree decl;
10122
10123 /* Initialize target builtin that implements
10124 targetm.vectorize.builtin_mask_for_load. */
10125
c79efc4d
RÁE
10126 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10127 v16qi_ftype_long_pcvoid,
10128 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
10129 BUILT_IN_MD, NULL, NULL_TREE);
10130 TREE_READONLY (decl) = 1;
13c62176
DN
10131 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10132 altivec_builtin_mask_for_load = decl;
13c62176 10133 }
7a4eca66
DE
10134
10135 /* Access to the vec_init patterns. */
10136 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10137 integer_type_node, integer_type_node,
10138 integer_type_node, NULL_TREE);
10139 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10140 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10141
10142 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10143 short_integer_type_node,
10144 short_integer_type_node,
10145 short_integer_type_node,
10146 short_integer_type_node,
10147 short_integer_type_node,
10148 short_integer_type_node,
10149 short_integer_type_node, NULL_TREE);
10150 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10151 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10152
10153 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10154 char_type_node, char_type_node,
10155 char_type_node, char_type_node,
10156 char_type_node, char_type_node,
10157 char_type_node, char_type_node,
10158 char_type_node, char_type_node,
10159 char_type_node, char_type_node,
10160 char_type_node, char_type_node,
10161 char_type_node, NULL_TREE);
10162 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10163 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10164
10165 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10166 float_type_node, float_type_node,
10167 float_type_node, NULL_TREE);
10168 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10169 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10170
10171 /* Access to the vec_set patterns. */
10172 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10173 intSI_type_node,
10174 integer_type_node, NULL_TREE);
10175 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10176 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10177
10178 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10179 intHI_type_node,
10180 integer_type_node, NULL_TREE);
10181 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10182 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10183
10184 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
10185 intQI_type_node,
10186 integer_type_node, NULL_TREE);
10187 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10188 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10189
10190 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10191 float_type_node,
10192 integer_type_node, NULL_TREE);
10193 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10194 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10195
10196 /* Access to the vec_extract patterns. */
10197 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10198 integer_type_node, NULL_TREE);
10199 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10200 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10201
10202 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10203 integer_type_node, NULL_TREE);
10204 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10205 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10206
10207 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10208 integer_type_node, NULL_TREE);
10209 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10210 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10211
10212 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10213 integer_type_node, NULL_TREE);
10214 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10215 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
10216}
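
/* Editorial usage sketch -- not part of the original source.  It shows how
   the vec_init and vec_ext builtins registered just above surface in user
   code compiled with -maltivec; the builtin names and signatures are taken
   from the def_builtin calls above, while the function and variable names
   are illustrative only.  Kept under #if 0 so it stays out of the back end
   proper.  */
#if 0
int
example_init_and_extract (int a, int b, int c, int d)
{
  /* Build a V4SI value from four ints (vec_init pattern).  */
  __vector signed int v = __builtin_vec_init_v4si (a, b, c, d);

  /* Read element 2 back out (vec_ext pattern).  */
  return __builtin_vec_ext_v4si (v, 2);
}
#endif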
10217
10218static void
863d938c 10219rs6000_common_init_builtins (void)
a3170dc6 10220{
586de218 10221 const struct builtin_description *d;
a3170dc6
AH
10222 size_t i;
10223
96038623
DE
10224 tree v2sf_ftype_v2sf_v2sf_v2sf
10225 = build_function_type_list (V2SF_type_node,
10226 V2SF_type_node, V2SF_type_node,
10227 V2SF_type_node, NULL_TREE);
10228
a3170dc6
AH
10229 tree v4sf_ftype_v4sf_v4sf_v16qi
10230 = build_function_type_list (V4SF_type_node,
10231 V4SF_type_node, V4SF_type_node,
10232 V16QI_type_node, NULL_TREE);
10233 tree v4si_ftype_v4si_v4si_v16qi
10234 = build_function_type_list (V4SI_type_node,
10235 V4SI_type_node, V4SI_type_node,
10236 V16QI_type_node, NULL_TREE);
10237 tree v8hi_ftype_v8hi_v8hi_v16qi
10238 = build_function_type_list (V8HI_type_node,
10239 V8HI_type_node, V8HI_type_node,
10240 V16QI_type_node, NULL_TREE);
10241 tree v16qi_ftype_v16qi_v16qi_v16qi
10242 = build_function_type_list (V16QI_type_node,
10243 V16QI_type_node, V16QI_type_node,
10244 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10245 tree v4si_ftype_int
10246 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10247 tree v8hi_ftype_int
10248 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10249 tree v16qi_ftype_int
10250 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10251 tree v8hi_ftype_v16qi
10252 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10253 tree v4sf_ftype_v4sf
10254 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10255
10256 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10257 = build_function_type_list (opaque_V2SI_type_node,
10258 opaque_V2SI_type_node,
10259 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10260
96038623 10261 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10262 = build_function_type_list (opaque_V2SF_type_node,
10263 opaque_V2SF_type_node,
10264 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10265
96038623
DE
10266 tree v2sf_ftype_v2sf_v2sf
10267 = build_function_type_list (V2SF_type_node,
10268 V2SF_type_node,
10269 V2SF_type_node, NULL_TREE);
10270
10271
a3170dc6 10272 tree v2si_ftype_int_int
2abe3e28 10273 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10274 integer_type_node, integer_type_node,
10275 NULL_TREE);
10276
58646b77
PB
10277 tree opaque_ftype_opaque
10278 = build_function_type_list (opaque_V4SI_type_node,
10279 opaque_V4SI_type_node, NULL_TREE);
10280
a3170dc6 10281 tree v2si_ftype_v2si
2abe3e28
AH
10282 = build_function_type_list (opaque_V2SI_type_node,
10283 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10284
96038623 10285 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10286 = build_function_type_list (opaque_V2SF_type_node,
10287 opaque_V2SF_type_node, NULL_TREE);
f676971a 10288
96038623
DE
10289 tree v2sf_ftype_v2sf
10290 = build_function_type_list (V2SF_type_node,
10291 V2SF_type_node, NULL_TREE);
10292
a3170dc6 10293 tree v2sf_ftype_v2si
2abe3e28
AH
10294 = build_function_type_list (opaque_V2SF_type_node,
10295 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10296
10297 tree v2si_ftype_v2sf
2abe3e28
AH
10298 = build_function_type_list (opaque_V2SI_type_node,
10299 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10300
10301 tree v2si_ftype_v2si_char
2abe3e28
AH
10302 = build_function_type_list (opaque_V2SI_type_node,
10303 opaque_V2SI_type_node,
10304 char_type_node, NULL_TREE);
a3170dc6
AH
10305
10306 tree v2si_ftype_int_char
2abe3e28 10307 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10308 integer_type_node, char_type_node, NULL_TREE);
10309
10310 tree v2si_ftype_char
2abe3e28
AH
10311 = build_function_type_list (opaque_V2SI_type_node,
10312 char_type_node, NULL_TREE);
a3170dc6
AH
10313
10314 tree int_ftype_int_int
10315 = build_function_type_list (integer_type_node,
10316 integer_type_node, integer_type_node,
10317 NULL_TREE);
95385cbb 10318
58646b77
PB
10319 tree opaque_ftype_opaque_opaque
10320 = build_function_type_list (opaque_V4SI_type_node,
10321 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10322 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10323 = build_function_type_list (V4SI_type_node,
10324 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10325 tree v4sf_ftype_v4si_int
b4de2f7d 10326 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10327 V4SI_type_node, integer_type_node, NULL_TREE);
10328 tree v4si_ftype_v4sf_int
b4de2f7d 10329 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10330 V4SF_type_node, integer_type_node, NULL_TREE);
10331 tree v4si_ftype_v4si_int
b4de2f7d 10332 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10333 V4SI_type_node, integer_type_node, NULL_TREE);
10334 tree v8hi_ftype_v8hi_int
b4de2f7d 10335 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10336 V8HI_type_node, integer_type_node, NULL_TREE);
10337 tree v16qi_ftype_v16qi_int
b4de2f7d 10338 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10339 V16QI_type_node, integer_type_node, NULL_TREE);
10340 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10341 = build_function_type_list (V16QI_type_node,
10342 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10343 integer_type_node, NULL_TREE);
10344 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10345 = build_function_type_list (V8HI_type_node,
10346 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10347 integer_type_node, NULL_TREE);
10348 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10349 = build_function_type_list (V4SI_type_node,
10350 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10351 integer_type_node, NULL_TREE);
10352 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10353 = build_function_type_list (V4SF_type_node,
10354 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10355 integer_type_node, NULL_TREE);
0ac081f6 10356 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10357 = build_function_type_list (V4SF_type_node,
10358 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10359 tree opaque_ftype_opaque_opaque_opaque
10360 = build_function_type_list (opaque_V4SI_type_node,
10361 opaque_V4SI_type_node, opaque_V4SI_type_node,
10362 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10363 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10364 = build_function_type_list (V4SF_type_node,
10365 V4SF_type_node, V4SF_type_node,
10366 V4SI_type_node, NULL_TREE);
2212663f 10367 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10368 = build_function_type_list (V4SF_type_node,
10369 V4SF_type_node, V4SF_type_node,
10370 V4SF_type_node, NULL_TREE);
f676971a 10371 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10372 = build_function_type_list (V4SI_type_node,
10373 V4SI_type_node, V4SI_type_node,
10374 V4SI_type_node, NULL_TREE);
0ac081f6 10375 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10376 = build_function_type_list (V8HI_type_node,
10377 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10378 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10379 = build_function_type_list (V8HI_type_node,
10380 V8HI_type_node, V8HI_type_node,
10381 V8HI_type_node, NULL_TREE);
c4ad648e 10382 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10383 = build_function_type_list (V4SI_type_node,
10384 V8HI_type_node, V8HI_type_node,
10385 V4SI_type_node, NULL_TREE);
c4ad648e 10386 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10387 = build_function_type_list (V4SI_type_node,
10388 V16QI_type_node, V16QI_type_node,
10389 V4SI_type_node, NULL_TREE);
0ac081f6 10390 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10391 = build_function_type_list (V16QI_type_node,
10392 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10393 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10394 = build_function_type_list (V4SI_type_node,
10395 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10396 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10397 = build_function_type_list (V8HI_type_node,
10398 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10399 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10400 = build_function_type_list (V4SI_type_node,
10401 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10402 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10403 = build_function_type_list (V8HI_type_node,
10404 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10405 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10406 = build_function_type_list (V16QI_type_node,
10407 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10408 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10409 = build_function_type_list (V4SI_type_node,
10410 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10411 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10412 = build_function_type_list (V4SI_type_node,
10413 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10414 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10415 = build_function_type_list (V4SI_type_node,
10416 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10417 tree v4si_ftype_v8hi
10418 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10419 tree int_ftype_v4si_v4si
10420 = build_function_type_list (integer_type_node,
10421 V4SI_type_node, V4SI_type_node, NULL_TREE);
10422 tree int_ftype_v4sf_v4sf
10423 = build_function_type_list (integer_type_node,
10424 V4SF_type_node, V4SF_type_node, NULL_TREE);
10425 tree int_ftype_v16qi_v16qi
10426 = build_function_type_list (integer_type_node,
10427 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10428 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10429 = build_function_type_list (integer_type_node,
10430 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10431
6f317ef3 10432 /* Add the simple ternary operators. */
586de218 10433 d = bdesc_3arg;
ca7558fc 10434 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10435 {
2212663f
DB
10436 enum machine_mode mode0, mode1, mode2, mode3;
10437 tree type;
58646b77
PB
10438 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10439 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10440
58646b77
PB
10441 if (is_overloaded)
10442 {
10443 mode0 = VOIDmode;
10444 mode1 = VOIDmode;
10445 mode2 = VOIDmode;
10446 mode3 = VOIDmode;
10447 }
10448 else
10449 {
10450 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10451 continue;
f676971a 10452
58646b77
PB
10453 mode0 = insn_data[d->icode].operand[0].mode;
10454 mode1 = insn_data[d->icode].operand[1].mode;
10455 mode2 = insn_data[d->icode].operand[2].mode;
10456 mode3 = insn_data[d->icode].operand[3].mode;
10457 }
bb8df8a6 10458
2212663f
DB
10459 /* When all four are of the same mode. */
10460 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10461 {
10462 switch (mode0)
10463 {
58646b77
PB
10464 case VOIDmode:
10465 type = opaque_ftype_opaque_opaque_opaque;
10466 break;
617e0e1d
DB
10467 case V4SImode:
10468 type = v4si_ftype_v4si_v4si_v4si;
10469 break;
2212663f
DB
10470 case V4SFmode:
10471 type = v4sf_ftype_v4sf_v4sf_v4sf;
10472 break;
10473 case V8HImode:
10474 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10475 break;
2212663f
DB
10476 case V16QImode:
10477 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10478 break;
96038623
DE
10479 case V2SFmode:
10480 type = v2sf_ftype_v2sf_v2sf_v2sf;
10481 break;
2212663f 10482 default:
37409796 10483 gcc_unreachable ();
2212663f
DB
10484 }
10485 }
10486 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10487 {
2212663f
DB
10488 switch (mode0)
10489 {
10490 case V4SImode:
10491 type = v4si_ftype_v4si_v4si_v16qi;
10492 break;
10493 case V4SFmode:
10494 type = v4sf_ftype_v4sf_v4sf_v16qi;
10495 break;
10496 case V8HImode:
10497 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10498 break;
2212663f
DB
10499 case V16QImode:
10500 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10501 break;
2212663f 10502 default:
37409796 10503 gcc_unreachable ();
2212663f
DB
10504 }
10505 }
f676971a 10506 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10507 && mode3 == V4SImode)
24408032 10508 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10509 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10510 && mode3 == V4SImode)
24408032 10511 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10512 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10513 && mode3 == V4SImode)
24408032
AH
10514 type = v4sf_ftype_v4sf_v4sf_v4si;
10515
a7b376ee 10516 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10517 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10518 && mode3 == QImode)
b9e4e5d1 10519 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10520
a7b376ee 10521 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10522 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10523 && mode3 == QImode)
b9e4e5d1 10524 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10525
a7b376ee 10526 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10527 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10528 && mode3 == QImode)
b9e4e5d1 10529 type = v4si_ftype_v4si_v4si_int;
24408032 10530
a7b376ee 10531 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10532 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10533 && mode3 == QImode)
b9e4e5d1 10534 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10535
2212663f 10536 else
37409796 10537 gcc_unreachable ();
2212663f
DB
10538
10539 def_builtin (d->mask, d->name, type, d->code);
10540 }
10541
0ac081f6 10542 /* Add the simple binary operators. */
00b960c7 10543 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10544 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10545 {
10546 enum machine_mode mode0, mode1, mode2;
10547 tree type;
58646b77
PB
10548 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10549 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10550
58646b77
PB
10551 if (is_overloaded)
10552 {
10553 mode0 = VOIDmode;
10554 mode1 = VOIDmode;
10555 mode2 = VOIDmode;
10556 }
10557 else
bb8df8a6 10558 {
58646b77
PB
10559 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10560 continue;
f676971a 10561
58646b77
PB
10562 mode0 = insn_data[d->icode].operand[0].mode;
10563 mode1 = insn_data[d->icode].operand[1].mode;
10564 mode2 = insn_data[d->icode].operand[2].mode;
10565 }
0ac081f6
AH
10566
10567 /* When all three operands are of the same mode. */
10568 if (mode0 == mode1 && mode1 == mode2)
10569 {
10570 switch (mode0)
10571 {
58646b77
PB
10572 case VOIDmode:
10573 type = opaque_ftype_opaque_opaque;
10574 break;
0ac081f6
AH
10575 case V4SFmode:
10576 type = v4sf_ftype_v4sf_v4sf;
10577 break;
10578 case V4SImode:
10579 type = v4si_ftype_v4si_v4si;
10580 break;
10581 case V16QImode:
10582 type = v16qi_ftype_v16qi_v16qi;
10583 break;
10584 case V8HImode:
10585 type = v8hi_ftype_v8hi_v8hi;
10586 break;
a3170dc6
AH
10587 case V2SImode:
10588 type = v2si_ftype_v2si_v2si;
10589 break;
96038623
DE
10590 case V2SFmode:
10591 if (TARGET_PAIRED_FLOAT)
10592 type = v2sf_ftype_v2sf_v2sf;
10593 else
10594 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10595 break;
10596 case SImode:
10597 type = int_ftype_int_int;
10598 break;
0ac081f6 10599 default:
37409796 10600 gcc_unreachable ();
0ac081f6
AH
10601 }
10602 }
10603
10604 /* A few other combos we really don't want to do manually. */
10605
10606 /* vint, vfloat, vfloat. */
10607 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10608 type = v4si_ftype_v4sf_v4sf;
10609
10610 /* vshort, vchar, vchar. */
10611 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10612 type = v8hi_ftype_v16qi_v16qi;
10613
10614 /* vint, vshort, vshort. */
10615 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10616 type = v4si_ftype_v8hi_v8hi;
10617
10618 /* vshort, vint, vint. */
10619 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10620 type = v8hi_ftype_v4si_v4si;
10621
10622 /* vchar, vshort, vshort. */
10623 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10624 type = v16qi_ftype_v8hi_v8hi;
10625
10626 /* vint, vchar, vint. */
10627 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10628 type = v4si_ftype_v16qi_v4si;
10629
fa066a23
AH
10630 /* vint, vchar, vchar. */
10631 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10632 type = v4si_ftype_v16qi_v16qi;
10633
0ac081f6
AH
10634 /* vint, vshort, vint. */
10635 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10636 type = v4si_ftype_v8hi_v4si;
f676971a 10637
a7b376ee 10638 /* vint, vint, 5-bit literal. */
2212663f 10639 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10640 type = v4si_ftype_v4si_int;
f676971a 10641
a7b376ee 10642 /* vshort, vshort, 5-bit literal. */
2212663f 10643 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10644 type = v8hi_ftype_v8hi_int;
f676971a 10645
a7b376ee 10646 /* vchar, vchar, 5-bit literal. */
2212663f 10647 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10648 type = v16qi_ftype_v16qi_int;
0ac081f6 10649
a7b376ee 10650 /* vfloat, vint, 5-bit literal. */
617e0e1d 10651 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10652 type = v4sf_ftype_v4si_int;
f676971a 10653
a7b376ee 10654 /* vint, vfloat, 5-bit literal. */
617e0e1d 10655 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10656 type = v4si_ftype_v4sf_int;
617e0e1d 10657
a3170dc6
AH
10658 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10659 type = v2si_ftype_int_int;
10660
10661 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10662 type = v2si_ftype_v2si_char;
10663
10664 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10665 type = v2si_ftype_int_char;
10666
37409796 10667 else
0ac081f6 10668 {
37409796
NS
10669 /* int, x, x. */
10670 gcc_assert (mode0 == SImode);
0ac081f6
AH
10671 switch (mode1)
10672 {
10673 case V4SImode:
10674 type = int_ftype_v4si_v4si;
10675 break;
10676 case V4SFmode:
10677 type = int_ftype_v4sf_v4sf;
10678 break;
10679 case V16QImode:
10680 type = int_ftype_v16qi_v16qi;
10681 break;
10682 case V8HImode:
10683 type = int_ftype_v8hi_v8hi;
10684 break;
10685 default:
37409796 10686 gcc_unreachable ();
0ac081f6
AH
10687 }
10688 }
10689
2212663f
DB
10690 def_builtin (d->mask, d->name, type, d->code);
10691 }
24408032 10692
2212663f
DB
10693 /* Add the simple unary operators. */
10694 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10695 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10696 {
10697 enum machine_mode mode0, mode1;
10698 tree type;
58646b77
PB
10699 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10700 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10701
10702 if (is_overloaded)
10703 {
10704 mode0 = VOIDmode;
10705 mode1 = VOIDmode;
10706 }
10707 else
10708 {
10709 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10710 continue;
bb8df8a6 10711
58646b77
PB
10712 mode0 = insn_data[d->icode].operand[0].mode;
10713 mode1 = insn_data[d->icode].operand[1].mode;
10714 }
2212663f
DB
10715
10716 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10717 type = v4si_ftype_int;
2212663f 10718 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10719 type = v8hi_ftype_int;
2212663f 10720 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10721 type = v16qi_ftype_int;
58646b77
PB
10722 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10723 type = opaque_ftype_opaque;
617e0e1d
DB
10724 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10725 type = v4sf_ftype_v4sf;
20e26713
AH
10726 else if (mode0 == V8HImode && mode1 == V16QImode)
10727 type = v8hi_ftype_v16qi;
10728 else if (mode0 == V4SImode && mode1 == V8HImode)
10729 type = v4si_ftype_v8hi;
a3170dc6
AH
10730 else if (mode0 == V2SImode && mode1 == V2SImode)
10731 type = v2si_ftype_v2si;
10732 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10733 {
10734 if (TARGET_PAIRED_FLOAT)
10735 type = v2sf_ftype_v2sf;
10736 else
10737 type = v2sf_ftype_v2sf_spe;
10738 }
a3170dc6
AH
10739 else if (mode0 == V2SFmode && mode1 == V2SImode)
10740 type = v2sf_ftype_v2si;
10741 else if (mode0 == V2SImode && mode1 == V2SFmode)
10742 type = v2si_ftype_v2sf;
10743 else if (mode0 == V2SImode && mode1 == QImode)
10744 type = v2si_ftype_char;
2212663f 10745 else
37409796 10746 gcc_unreachable ();
2212663f 10747
0ac081f6
AH
10748 def_builtin (d->mask, d->name, type, d->code);
10749 }
10750}
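
/* Editorial note -- not part of the original source.  The loops above pick
   a function type purely from the insn operand modes.  For instance, a
   bdesc_2arg entry whose insn has V4SFmode in all three operands (such as
   the AltiVec vaddfp add) is registered with v4sf_ftype_v4sf_v4sf, so with
   -maltivec it can be called as below.  The wrapper name is illustrative
   and the block is under #if 0 so it is not compiled into the back end.  */
#if 0
__vector float
example_vaddfp (__vector float a, __vector float b)
{
  return __builtin_altivec_vaddfp (a, b);	/* two V4SF in, one V4SF out */
}
#endif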
10751
c15c90bb
ZW
10752static void
10753rs6000_init_libfuncs (void)
10754{
602ea4d3
JJ
10755 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10756 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10757 {
602ea4d3
JJ
10758 /* AIX library routines for float->int conversion. */
10759 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10760 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10761 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10762 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10763 }
c15c90bb 10764
602ea4d3 10765 if (!TARGET_IEEEQUAD)
98c41d98 10766 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10767 if (!TARGET_XL_COMPAT)
10768 {
10769 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10770 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10771 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10772 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10773
17caeff2 10774 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10775 {
10776 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10777 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10778 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10779 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10780 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10781 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10782 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10783
10784 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10785 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10786 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10787 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10788 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10789 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10790 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10791 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10792 }
b26941b4
JM
10793
10794 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10795 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10796 }
10797 else
10798 {
10799 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10800 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10801 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10802 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10803 }
c9034561 10804 else
c15c90bb 10805 {
c9034561 10806 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10807
10808 set_optab_libfunc (add_optab, TFmode, "_q_add");
10809 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10810 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10811 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10812 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10813 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10814 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10815
c9034561
ZW
10816 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10817 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10818 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10819 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10820 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10821 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10822
85363ca0
ZW
10823 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10824 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10825 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10826 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10827 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10828 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10829 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10830 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10831 }
10832}
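
/* Editorial example -- not part of the original source.  With the 128-bit
   IBM long double format (TFmode) the addition below is lowered to a call
   to __gcc_qadd; with -mxl-compat it becomes _xlqadd, and on 32-bit SVR4
   targets _q_add is used instead, exactly as registered above.  Kept under
   #if 0 since it is target-level code, not back-end code.  */
#if 0
long double
example_long_double_add (long double a, long double b)
{
  return a + b;
}
#endif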
fba73eb1
DE
10833
10834\f
10835/* Expand a block clear operation, and return 1 if successful. Return 0
10836 if we should let the compiler generate normal code.
10837
10838 operands[0] is the destination
10839 operands[1] is the length
57e84f18 10840 operands[3] is the alignment */
fba73eb1
DE
10841
10842int
10843expand_block_clear (rtx operands[])
10844{
10845 rtx orig_dest = operands[0];
10846 rtx bytes_rtx = operands[1];
57e84f18 10847 rtx align_rtx = operands[3];
5514620a
GK
10848 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10849 HOST_WIDE_INT align;
10850 HOST_WIDE_INT bytes;
fba73eb1
DE
10851 int offset;
10852 int clear_bytes;
5514620a 10853 int clear_step;
fba73eb1
DE
10854
10855 /* If this is not a fixed size clear, just call memset. */
10856 if (! constp)
10857 return 0;
10858
37409796
NS
10859 /* This must be a fixed size alignment. */
10860 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10861 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10862
10863 /* Anything to clear? */
10864 bytes = INTVAL (bytes_rtx);
10865 if (bytes <= 0)
10866 return 1;
10867
5514620a
GK
10868 /* Use the builtin memset after a point, to avoid huge code bloat.
10869 When optimize_size, avoid any significant code bloat; calling
10870 memset is about 4 instructions, so allow for one instruction to
10871 load zero and three to do clearing. */
10872 if (TARGET_ALTIVEC && align >= 128)
10873 clear_step = 16;
10874 else if (TARGET_POWERPC64 && align >= 32)
10875 clear_step = 8;
21d818ff
NF
10876 else if (TARGET_SPE && align >= 64)
10877 clear_step = 8;
5514620a
GK
10878 else
10879 clear_step = 4;
fba73eb1 10880
5514620a
GK
10881 if (optimize_size && bytes > 3 * clear_step)
10882 return 0;
10883 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10884 return 0;
10885
10886 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10887 {
fba73eb1
DE
10888 enum machine_mode mode = BLKmode;
10889 rtx dest;
f676971a 10890
5514620a
GK
10891 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10892 {
10893 clear_bytes = 16;
10894 mode = V4SImode;
10895 }
21d818ff
NF
10896 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10897 {
10898 clear_bytes = 8;
10899 mode = V2SImode;
10900 }
5514620a 10901 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10902 /* 64-bit loads and stores require word-aligned
10903 displacements. */
10904 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10905 {
10906 clear_bytes = 8;
10907 mode = DImode;
fba73eb1 10908 }
5514620a 10909 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10910 { /* move 4 bytes */
10911 clear_bytes = 4;
10912 mode = SImode;
fba73eb1 10913 }
ec53fc93 10914 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10915 { /* move 2 bytes */
10916 clear_bytes = 2;
10917 mode = HImode;
fba73eb1
DE
10918 }
10919 else /* move 1 byte at a time */
10920 {
10921 clear_bytes = 1;
10922 mode = QImode;
fba73eb1 10923 }
f676971a 10924
fba73eb1 10925 dest = adjust_address (orig_dest, mode, offset);
f676971a 10926
5514620a 10927 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10928 }
10929
10930 return 1;
10931}
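
/* Editorial sketch -- not part of the original source.  It restates the
   chunk selection above in plain C for the simplest configuration (32-bit,
   no AltiVec/SPE, STRICT_ALIGNMENT), where clear_step is 4: clearing 22
   bytes at 32-bit alignment becomes five SImode stores plus one HImode
   store, while anything above 8 * clear_step bytes (3 * clear_step when
   optimizing for size) is left to the normal memset path.  The helper name
   is illustrative; the block is under #if 0.  */
#if 0
static int
example_clear_chunks (int bytes, int align_bits, int *chunks, int max)
{
  int n = 0;
  while (bytes > 0 && n < max)
    {
      int step;
      if (bytes >= 4 && align_bits >= 32)
	step = 4;			/* SImode store */
      else if (bytes >= 2 && align_bits >= 16)
	step = 2;			/* HImode store */
      else
	step = 1;			/* QImode store */
      chunks[n++] = step;
      bytes -= step;
    }
  return n;				/* number of stores emitted */
}
#endif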
10932
35aff10b 10933\f
7e69e155
MM
10934/* Expand a block move operation, and return 1 if successful. Return 0
10935 if we should let the compiler generate normal code.
10936
10937 operands[0] is the destination
10938 operands[1] is the source
10939 operands[2] is the length
10940 operands[3] is the alignment */
10941
3933e0e1
MM
10942#define MAX_MOVE_REG 4
10943
7e69e155 10944int
a2369ed3 10945expand_block_move (rtx operands[])
7e69e155 10946{
b6c9286a
MM
10947 rtx orig_dest = operands[0];
10948 rtx orig_src = operands[1];
7e69e155 10949 rtx bytes_rtx = operands[2];
7e69e155 10950 rtx align_rtx = operands[3];
3933e0e1 10951 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10952 int align;
3933e0e1
MM
10953 int bytes;
10954 int offset;
7e69e155 10955 int move_bytes;
cabfd258
GK
10956 rtx stores[MAX_MOVE_REG];
10957 int num_reg = 0;
7e69e155 10958
3933e0e1 10959 /* If this is not a fixed size move, just call memcpy. */
cc0d9ba8 10960 if (! constp)
3933e0e1
MM
10961 return 0;
10962
37409796
NS
10963 /* This must be a fixed size alignment. */
10964 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10965 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10966
7e69e155 10967 /* Anything to move? */
3933e0e1
MM
10968 bytes = INTVAL (bytes_rtx);
10969 if (bytes <= 0)
7e69e155
MM
10970 return 1;
10971
ea9982a8 10972 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10973 reg_parm_stack_space. */
ea9982a8 10974 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10975 return 0;
10976
cabfd258 10977 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10978 {
cabfd258 10979 union {
70128ad9 10980 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10981 rtx (*mov) (rtx, rtx);
cabfd258
GK
10982 } gen_func;
10983 enum machine_mode mode = BLKmode;
10984 rtx src, dest;
f676971a 10985
5514620a
GK
10986 /* Altivec first, since it will be faster than a string move
10987 when it applies, and usually not significantly larger. */
10988 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10989 {
10990 move_bytes = 16;
10991 mode = V4SImode;
10992 gen_func.mov = gen_movv4si;
10993 }
21d818ff
NF
10994 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10995 {
10996 move_bytes = 8;
10997 mode = V2SImode;
10998 gen_func.mov = gen_movv2si;
10999 }
5514620a 11000 else if (TARGET_STRING
cabfd258
GK
11001 && bytes > 24 /* move up to 32 bytes at a time */
11002 && ! fixed_regs[5]
11003 && ! fixed_regs[6]
11004 && ! fixed_regs[7]
11005 && ! fixed_regs[8]
11006 && ! fixed_regs[9]
11007 && ! fixed_regs[10]
11008 && ! fixed_regs[11]
11009 && ! fixed_regs[12])
7e69e155 11010 {
cabfd258 11011 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 11012 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
11013 }
11014 else if (TARGET_STRING
11015 && bytes > 16 /* move up to 24 bytes at a time */
11016 && ! fixed_regs[5]
11017 && ! fixed_regs[6]
11018 && ! fixed_regs[7]
11019 && ! fixed_regs[8]
11020 && ! fixed_regs[9]
11021 && ! fixed_regs[10])
11022 {
11023 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 11024 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
11025 }
11026 else if (TARGET_STRING
11027 && bytes > 8 /* move up to 16 bytes at a time */
11028 && ! fixed_regs[5]
11029 && ! fixed_regs[6]
11030 && ! fixed_regs[7]
11031 && ! fixed_regs[8])
11032 {
11033 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 11034 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
11035 }
11036 else if (bytes >= 8 && TARGET_POWERPC64
11037 /* 64-bit loads and stores require word-aligned
11038 displacements. */
fba73eb1 11039 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
11040 {
11041 move_bytes = 8;
11042 mode = DImode;
11043 gen_func.mov = gen_movdi;
11044 }
11045 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11046 { /* move up to 8 bytes at a time */
11047 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 11048 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 11049 }
cd7d9ca4 11050 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
11051 { /* move 4 bytes */
11052 move_bytes = 4;
11053 mode = SImode;
11054 gen_func.mov = gen_movsi;
11055 }
ec53fc93 11056 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
11057 { /* move 2 bytes */
11058 move_bytes = 2;
11059 mode = HImode;
11060 gen_func.mov = gen_movhi;
11061 }
11062 else if (TARGET_STRING && bytes > 1)
11063 { /* move up to 4 bytes at a time */
11064 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11065 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
11066 }
11067 else /* move 1 byte at a time */
11068 {
11069 move_bytes = 1;
11070 mode = QImode;
11071 gen_func.mov = gen_movqi;
11072 }
f676971a 11073
cabfd258
GK
11074 src = adjust_address (orig_src, mode, offset);
11075 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11076
11077 if (mode != BLKmode)
cabfd258
GK
11078 {
11079 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11080
cabfd258
GK
11081 emit_insn ((*gen_func.mov) (tmp_reg, src));
11082 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11083 }
3933e0e1 11084
cabfd258
GK
11085 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11086 {
11087 int i;
11088 for (i = 0; i < num_reg; i++)
11089 emit_insn (stores[i]);
11090 num_reg = 0;
11091 }
35aff10b 11092
cabfd258 11093 if (mode == BLKmode)
7e69e155 11094 {
70128ad9 11095 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11096 patterns require zero offset. */
11097 if (!REG_P (XEXP (src, 0)))
b6c9286a 11098 {
cabfd258
GK
11099 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11100 src = replace_equiv_address (src, src_reg);
b6c9286a 11101 }
cabfd258 11102 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11103
cabfd258 11104 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11105 {
cabfd258
GK
11106 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11107 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11108 }
cabfd258 11109 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11110
70128ad9 11111 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11112 GEN_INT (move_bytes & 31),
11113 align_rtx));
7e69e155 11114 }
7e69e155
MM
11115 }
11116
11117 return 1;
11118}
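
/* Editorial example -- not part of the original source.  A fixed-size copy
   like the one below is expanded here: with -mstring and registers r5-r12
   free it goes through gen_movmemsi_8reg in a single 32-byte chunk,
   otherwise it is split into register-sized loads and stores (DImode pairs
   on a 64-bit target when the alignment allows).  Copies larger than 32
   bytes (64 in 64-bit mode) are left to the normal memcpy path.  The
   function name is illustrative; the block is under #if 0.  */
#if 0
void
example_fixed_copy (char *dst, const char *src)
{
  __builtin_memcpy (dst, src, 32);	/* constant length and alignment */
}
#endif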
11119
d62294f5 11120\f
9caa3eb2
DE
11121/* Return a string to perform a load_multiple operation.
11122 operands[0] is the vector.
11123 operands[1] is the source address.
11124 operands[2] is the first destination register. */
11125
11126const char *
a2369ed3 11127rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11128{
11129 /* We have to handle the case where the pseudo used to contain the address
11130 is assigned to one of the output registers. */
11131 int i, j;
11132 int words = XVECLEN (operands[0], 0);
11133 rtx xop[10];
11134
11135 if (XVECLEN (operands[0], 0) == 1)
11136 return "{l|lwz} %2,0(%1)";
11137
11138 for (i = 0; i < words; i++)
11139 if (refers_to_regno_p (REGNO (operands[2]) + i,
11140 REGNO (operands[2]) + i + 1, operands[1], 0))
11141 {
11142 if (i == words-1)
11143 {
11144 xop[0] = GEN_INT (4 * (words-1));
11145 xop[1] = operands[1];
11146 xop[2] = operands[2];
11147 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11148 return "";
11149 }
11150 else if (i == 0)
11151 {
11152 xop[0] = GEN_INT (4 * (words-1));
11153 xop[1] = operands[1];
11154 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11155 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11156 return "";
11157 }
11158 else
11159 {
11160 for (j = 0; j < words; j++)
11161 if (j != i)
11162 {
11163 xop[0] = GEN_INT (j * 4);
11164 xop[1] = operands[1];
11165 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11166 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11167 }
11168 xop[0] = GEN_INT (i * 4);
11169 xop[1] = operands[1];
11170 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11171 return "";
11172 }
11173 }
11174
11175 return "{lsi|lswi} %2,%1,%N0";
11176}
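
/* Editorial worked example -- not part of the original source.  For a
   three-word load_multiple into r28..r30 whose address register is also
   r28 (the i == 0 case above), the template expands, in PowerPC mnemonics
   and with illustrative register numbers, to roughly:

	addi r28,r28,4		; step past the first word
	lswi r29,r28,8		; load words 1 and 2 into r29,r30
	lwz  r28,-4(r28)	; finally load word 0 into r28 itself

   The {old|new} braces in the templates select between POWER ("cal",
   "lsi", "l") and PowerPC ("addi", "lswi", "lwz") mnemonics.  When there
   is no overlap, the plain "{lsi|lswi} %2,%1,%N0" form is emitted.  */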
11177
9878760c 11178\f
a4f6c312
SS
11179/* A validation routine: say whether CODE, a condition code, and MODE
11180 match. The other alternatives either don't make sense or should
11181 never be generated. */
39a10a29 11182
48d72335 11183void
a2369ed3 11184validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11185{
37409796
NS
11186 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11187 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11188 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11189
11190 /* These don't make sense. */
37409796
NS
11191 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11192 || mode != CCUNSmode);
39a10a29 11193
37409796
NS
11194 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11195 || mode == CCUNSmode);
39a10a29 11196
37409796
NS
11197 gcc_assert (mode == CCFPmode
11198 || (code != ORDERED && code != UNORDERED
11199 && code != UNEQ && code != LTGT
11200 && code != UNGT && code != UNLT
11201 && code != UNGE && code != UNLE));
f676971a
EC
11202
11203 /* These should never be generated except for
bc9ec0e0 11204 flag_finite_math_only. */
37409796
NS
11205 gcc_assert (mode != CCFPmode
11206 || flag_finite_math_only
11207 || (code != LE && code != GE
11208 && code != UNEQ && code != LTGT
11209 && code != UNGT && code != UNLT));
39a10a29
GK
11210
11211 /* These are invalid; the information is not there. */
37409796 11212 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11213}
11214
9878760c
RK
11215\f
11216/* Return 1 if ANDOP is a mask that has no bits on that are not in the
11217 mask required to convert the result of a rotate insn into a shift
b1765bde 11218 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
11219
11220int
a2369ed3 11221includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11222{
e2c953b6
DE
11223 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11224
11225 shift_mask <<= INTVAL (shiftop);
9878760c 11226
b1765bde 11227 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11228}
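
/* Editorial sketch -- not part of the original source.  The test above is
   simply "ANDOP must not have any bits set below bit SHIFTOP", since those
   bits would have been cleared by a real shift left anyway.  Restated with
   plain 32-bit arithmetic: for SHIFTOP == 4 the disallowed bits are 0xf,
   so ANDOP == 0xfff0 is accepted while ANDOP == 0xfff8 is not.  The helper
   name is illustrative; the block is under #if 0.  */
#if 0
static int
example_includes_lshift (int shiftop, unsigned int andop)
{
  unsigned int shift_mask = ~0u << shiftop;	/* e.g. 0xfffffff0 for 4 */
  return (andop & ~shift_mask) == 0;		/* no bits below the shift */
}
#endif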
11229
11230/* Similar, but for right shift. */
11231
11232int
a2369ed3 11233includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11234{
a7653a2c 11235 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11236
11237 shift_mask >>= INTVAL (shiftop);
11238
b1765bde 11239 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11240}
11241
c5059423
AM
11242/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11243 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11244 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
11245
11246int
a2369ed3 11247includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11248{
c5059423
AM
11249 if (GET_CODE (andop) == CONST_INT)
11250 {
02071907 11251 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11252
c5059423 11253 c = INTVAL (andop);
02071907 11254 if (c == 0 || c == ~0)
c5059423 11255 return 0;
e2c953b6 11256
02071907 11257 shift_mask = ~0;
c5059423
AM
11258 shift_mask <<= INTVAL (shiftop);
11259
b6d08ca1 11260 /* Find the least significant one bit. */
c5059423
AM
11261 lsb = c & -c;
11262
11263 /* It must coincide with the LSB of the shift mask. */
11264 if (-lsb != shift_mask)
11265 return 0;
e2c953b6 11266
c5059423
AM
11267 /* Invert to look for the next transition (if any). */
11268 c = ~c;
11269
11270 /* Remove the low group of ones (originally low group of zeros). */
11271 c &= -lsb;
11272
11273 /* Again find the lsb, and check we have all 1's above. */
11274 lsb = c & -c;
11275 return c == -lsb;
11276 }
11277 else if (GET_CODE (andop) == CONST_DOUBLE
11278 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11279 {
02071907
AM
11280 HOST_WIDE_INT low, high, lsb;
11281 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11282
11283 low = CONST_DOUBLE_LOW (andop);
11284 if (HOST_BITS_PER_WIDE_INT < 64)
11285 high = CONST_DOUBLE_HIGH (andop);
11286
11287 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11288 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11289 return 0;
11290
11291 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11292 {
02071907 11293 shift_mask_high = ~0;
c5059423
AM
11294 if (INTVAL (shiftop) > 32)
11295 shift_mask_high <<= INTVAL (shiftop) - 32;
11296
11297 lsb = high & -high;
11298
11299 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11300 return 0;
11301
11302 high = ~high;
11303 high &= -lsb;
11304
11305 lsb = high & -high;
11306 return high == -lsb;
11307 }
11308
02071907 11309 shift_mask_low = ~0;
c5059423
AM
11310 shift_mask_low <<= INTVAL (shiftop);
11311
11312 lsb = low & -low;
11313
11314 if (-lsb != shift_mask_low)
11315 return 0;
11316
11317 if (HOST_BITS_PER_WIDE_INT < 64)
11318 high = ~high;
11319 low = ~low;
11320 low &= -lsb;
11321
11322 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11323 {
11324 lsb = high & -high;
11325 return high == -lsb;
11326 }
11327
11328 lsb = low & -low;
11329 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11330 }
11331 else
11332 return 0;
11333}
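
/* Editorial sketch -- not part of the original source.  The CONST_INT case
   above accepts exactly the masks that are a single contiguous run of 1s
   starting at bit SHIFTOP (and not the all-ones mask): for example 0xff00
   with SHIFTOP == 8 is accepted, while 0xf0f0 with SHIFTOP == 4 is rejected
   because its 1s are not contiguous.  A plain-C restatement of that
   condition, assuming a 64-bit HOST_WIDE_INT; the helper name is
   illustrative and the block is under #if 0.  */
#if 0
static int
example_rldic_lshift_mask_ok (int shiftop, unsigned long long mask)
{
  unsigned long long lsb, run;

  lsb = mask & -mask;				/* lowest set bit */
  if (mask == 0 || mask == ~0ULL
      || lsb != (1ULL << shiftop))		/* run must start at bit SHIFTOP */
    return 0;

  run = mask >> shiftop;			/* bring the run down to bit 0 */
  return ((run + 1) & run) == 0;		/* contiguous 1s, nothing above */
}
#endif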
e2c953b6 11334
c5059423
AM
11335/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11336 to perform a left shift. It must have SHIFTOP or more least
c1207243 11337 significant 0's, with the remainder of the word 1's. */
e2c953b6 11338
c5059423 11339int
a2369ed3 11340includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11341{
e2c953b6 11342 if (GET_CODE (andop) == CONST_INT)
c5059423 11343 {
02071907 11344 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11345
02071907 11346 shift_mask = ~0;
c5059423
AM
11347 shift_mask <<= INTVAL (shiftop);
11348 c = INTVAL (andop);
11349
c1207243 11350 /* Find the least significant one bit. */
c5059423
AM
11351 lsb = c & -c;
11352
11353 /* It must be covered by the shift mask.
a4f6c312 11354 This test also rejects c == 0. */
c5059423
AM
11355 if ((lsb & shift_mask) == 0)
11356 return 0;
11357
11358 /* Check we have all 1's above the transition, and reject all 1's. */
11359 return c == -lsb && lsb != 1;
11360 }
11361 else if (GET_CODE (andop) == CONST_DOUBLE
11362 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11363 {
02071907 11364 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11365
11366 low = CONST_DOUBLE_LOW (andop);
11367
11368 if (HOST_BITS_PER_WIDE_INT < 64)
11369 {
02071907 11370 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11371
11372 high = CONST_DOUBLE_HIGH (andop);
11373
11374 if (low == 0)
11375 {
02071907 11376 shift_mask_high = ~0;
c5059423
AM
11377 if (INTVAL (shiftop) > 32)
11378 shift_mask_high <<= INTVAL (shiftop) - 32;
11379
11380 lsb = high & -high;
11381
11382 if ((lsb & shift_mask_high) == 0)
11383 return 0;
11384
11385 return high == -lsb;
11386 }
11387 if (high != ~0)
11388 return 0;
11389 }
11390
02071907 11391 shift_mask_low = ~0;
c5059423
AM
11392 shift_mask_low <<= INTVAL (shiftop);
11393
11394 lsb = low & -low;
11395
11396 if ((lsb & shift_mask_low) == 0)
11397 return 0;
11398
11399 return low == -lsb && lsb != 1;
11400 }
e2c953b6 11401 else
c5059423 11402 return 0;
9878760c 11403}
35068b43 11404
11ac38b2
DE
11405/* Return 1 if the operands will generate valid arguments to the rlwimi
11406instruction for an insert with right shift in 64-bit mode. The mask may
11407not start on the first bit or stop on the last bit because the wrap-around
11408effects of the instruction do not correspond to the semantics of the RTL insn. */
11409
11410int
11411insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11412{
429ec7dc
DE
11413 if (INTVAL (startop) > 32
11414 && INTVAL (startop) < 64
11415 && INTVAL (sizeop) > 1
11416 && INTVAL (sizeop) + INTVAL (startop) < 64
11417 && INTVAL (shiftop) > 0
11418 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11419 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11420 return 1;
11421
11422 return 0;
11423}
11424
35068b43 11425/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11426 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11427
11428int
a2369ed3 11429registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11430{
11431 /* We might have been passed a SUBREG. */
f676971a 11432 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11433 return 0;
f676971a 11434
90f81f99
AP
11435 /* We might have been passed non floating point registers. */
11436 if (!FP_REGNO_P (REGNO (reg1))
11437 || !FP_REGNO_P (REGNO (reg2)))
11438 return 0;
35068b43
RK
11439
11440 return (REGNO (reg1) == REGNO (reg2) - 1);
11441}
11442
a4f6c312
SS
11443/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11444 addr1 and addr2 must be in consecutive memory locations
11445 (addr2 == addr1 + 8). */
35068b43
RK
11446
11447int
90f81f99 11448mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11449{
90f81f99 11450 rtx addr1, addr2;
bb8df8a6
EC
11451 unsigned int reg1, reg2;
11452 int offset1, offset2;
35068b43 11453
90f81f99
AP
11454 /* The mems cannot be volatile. */
11455 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11456 return 0;
f676971a 11457
90f81f99
AP
11458 addr1 = XEXP (mem1, 0);
11459 addr2 = XEXP (mem2, 0);
11460
35068b43
RK
11461 /* Extract an offset (if used) from the first addr. */
11462 if (GET_CODE (addr1) == PLUS)
11463 {
11464 /* If not a REG, return zero. */
11465 if (GET_CODE (XEXP (addr1, 0)) != REG)
11466 return 0;
11467 else
11468 {
c4ad648e 11469 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11470 /* The offset must be constant! */
11471 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11472 return 0;
11473 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11474 }
11475 }
11476 else if (GET_CODE (addr1) != REG)
11477 return 0;
11478 else
11479 {
11480 reg1 = REGNO (addr1);
11481 /* This was a simple (mem (reg)) expression. Offset is 0. */
11482 offset1 = 0;
11483 }
11484
bb8df8a6
EC
11485 /* And now for the second addr. */
11486 if (GET_CODE (addr2) == PLUS)
11487 {
11488 /* If not a REG, return zero. */
11489 if (GET_CODE (XEXP (addr2, 0)) != REG)
11490 return 0;
11491 else
11492 {
11493 reg2 = REGNO (XEXP (addr2, 0));
11494 /* The offset must be constant. */
11495 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11496 return 0;
11497 offset2 = INTVAL (XEXP (addr2, 1));
11498 }
11499 }
11500 else if (GET_CODE (addr2) != REG)
35068b43 11501 return 0;
bb8df8a6
EC
11502 else
11503 {
11504 reg2 = REGNO (addr2);
11505 /* This was a simple (mem (reg)) expression. Offset is 0. */
11506 offset2 = 0;
11507 }
35068b43 11508
bb8df8a6
EC
11509 /* Both of these must have the same base register. */
11510 if (reg1 != reg2)
35068b43
RK
11511 return 0;
11512
11513 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11514 if (offset2 != offset1 + 8)
35068b43
RK
11515 return 0;
11516
11517 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11518 instructions. */
11519 return 1;
11520}
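/* For instance (a made-up pair, purely for illustration):
   (mem:DF (plus (reg 3) (const_int 16))) and
   (mem:DF (plus (reg 3) (const_int 24))) share base register 3 and their
   offsets differ by exactly 8, so the pair is accepted.  The same pair
   with an offset of 32 in the second mem, or with different base
   registers, is rejected.  */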
9878760c 11521\f
e41b2a33
PB
11522
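/* Return the memory rtx to copy a MODE value through when a secondary
   memory location is needed.  Most modes get a fresh stack slot; SDmode
   reuses the per-function slot set up by rs6000_alloc_sdmode_stack_slot
   below, with register elimination applied to it the first time it is
   handed out.  */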
11523rtx
11524rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11525{
11526 static bool eliminated = false;
11527 if (mode != SDmode)
11528 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11529 else
11530 {
11531 rtx mem = cfun->machine->sdmode_stack_slot;
11532 gcc_assert (mem != NULL_RTX);
11533
11534 if (!eliminated)
11535 {
11536 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11537 cfun->machine->sdmode_stack_slot = mem;
11538 eliminated = true;
11539 }
11540 return mem;
11541 }
11542}
11543
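/* Tree walker callback for rs6000_alloc_sdmode_stack_slot.  Return the
   first node whose type has SDmode, so the caller knows this function
   references decimal-float SDmode values and needs the stack slot.  */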
11544static tree
11545rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11546{
11547 /* Don't walk into types. */
11548 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11549 {
11550 *walk_subtrees = 0;
11551 return NULL_TREE;
11552 }
11553
11554 switch (TREE_CODE (*tp))
11555 {
11556 case VAR_DECL:
11557 case PARM_DECL:
11558 case FIELD_DECL:
11559 case RESULT_DECL:
11560 case REAL_CST:
fdf4f148 11561 case INDIRECT_REF:
a0f39282
JJ
11562 case ALIGN_INDIRECT_REF:
11563 case MISALIGNED_INDIRECT_REF:
fdf4f148 11564 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11565 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11566 return *tp;
11567 break;
11568 default:
11569 break;
11570 }
11571
11572 return NULL_TREE;
11573}
11574
11575
11576/* Allocate a 64-bit stack slot to be used for copying SDmode
11577 values through if this function has any SDmode references. */
11578
11579static void
11580rs6000_alloc_sdmode_stack_slot (void)
11581{
11582 tree t;
11583 basic_block bb;
726a989a 11584 gimple_stmt_iterator gsi;
e41b2a33
PB
11585
11586 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11587
11588 FOR_EACH_BB (bb)
726a989a 11589 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11590 {
726a989a 11591 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11592 if (ret)
11593 {
11594 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11595 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11596 SDmode, 0);
11597 return;
11598 }
11599 }
11600
11601 /* Check for any SDmode parameters of the function. */
11602 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11603 {
11604 if (TREE_TYPE (t) == error_mark_node)
11605 continue;
11606
11607 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11608 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11609 {
11610 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11611 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11612 SDmode, 0);
11613 return;
11614 }
11615 }
11616}
11617
11618static void
11619rs6000_instantiate_decls (void)
11620{
11621 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11622 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11623}
11624
9878760c 11625/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11626 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11627 NO_REGS is returned. */
11628
11629enum reg_class
0a2aaacc 11630rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11631 enum machine_mode mode ATTRIBUTE_UNUSED,
11632 rtx in)
9878760c 11633{
5accd822 11634 int regno;
9878760c 11635
ab82a49f
AP
11636 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11637#if TARGET_MACHO
c4ad648e 11638 && MACHOPIC_INDIRECT
ab82a49f 11639#endif
c4ad648e 11640 ))
46fad5b7
DJ
11641 {
11642 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11643 other than BASE_REGS for TARGET_ELF. So indicate that a
11644 register from BASE_REGS is needed as an intermediate
11645 register.
f676971a 11646
46fad5b7
DJ
11647 On Darwin, pic addresses require a load from memory, which
11648 needs a base register. */
0a2aaacc 11649 if (rclass != BASE_REGS
c4ad648e
AM
11650 && (GET_CODE (in) == SYMBOL_REF
11651 || GET_CODE (in) == HIGH
11652 || GET_CODE (in) == LABEL_REF
11653 || GET_CODE (in) == CONST))
11654 return BASE_REGS;
46fad5b7 11655 }
e7b7998a 11656
5accd822
DE
11657 if (GET_CODE (in) == REG)
11658 {
11659 regno = REGNO (in);
11660 if (regno >= FIRST_PSEUDO_REGISTER)
11661 {
11662 regno = true_regnum (in);
11663 if (regno >= FIRST_PSEUDO_REGISTER)
11664 regno = -1;
11665 }
11666 }
11667 else if (GET_CODE (in) == SUBREG)
11668 {
11669 regno = true_regnum (in);
11670 if (regno >= FIRST_PSEUDO_REGISTER)
11671 regno = -1;
11672 }
11673 else
11674 regno = -1;
11675
9878760c
RK
11676 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11677 into anything. */
0a2aaacc 11678 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11679 || (regno >= 0 && INT_REGNO_P (regno)))
11680 return NO_REGS;
11681
11682 /* Constants, memory, and FP registers can go into FP registers. */
11683 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11684 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11685 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11686
0ac081f6
AH
 11687 /* Memory and AltiVec registers can go into AltiVec registers. */
11688 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11689 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11690 return NO_REGS;
11691
9878760c 11692 /* We can copy among the CR registers. */
0a2aaacc 11693 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11694 && regno >= 0 && CR_REGNO_P (regno))
11695 return NO_REGS;
11696
11697 /* Otherwise, we need GENERAL_REGS. */
11698 return GENERAL_REGS;
11699}
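/* Two illustrative cases (hypothetical operands, for exposition only):
   copying a SYMBOL_REF into FLOAT_REGS under TARGET_ELF returns BASE_REGS,
   because the symbolic address must first be formed in a base register;
   copying one GPR into GENERAL_REGS returns NO_REGS, since no scratch
   register is needed at all.  */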
11700\f
11701/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11702 know this is a valid comparison.
9878760c
RK
11703
11704 SCC_P is 1 if this is for an scc. That means that %D will have been
11705 used instead of %C, so the bits will be in different places.
11706
b4ac57ab 11707 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11708
11709int
a2369ed3 11710ccr_bit (rtx op, int scc_p)
9878760c
RK
11711{
11712 enum rtx_code code = GET_CODE (op);
11713 enum machine_mode cc_mode;
11714 int cc_regnum;
11715 int base_bit;
9ebbca7d 11716 rtx reg;
9878760c 11717
ec8e098d 11718 if (!COMPARISON_P (op))
9878760c
RK
11719 return -1;
11720
9ebbca7d
GK
11721 reg = XEXP (op, 0);
11722
37409796 11723 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11724
11725 cc_mode = GET_MODE (reg);
11726 cc_regnum = REGNO (reg);
11727 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11728
39a10a29 11729 validate_condition_mode (code, cc_mode);
c5defebb 11730
b7053a3f
GK
11731 /* When generating a sCOND operation, only positive conditions are
11732 allowed. */
37409796
NS
11733 gcc_assert (!scc_p
11734 || code == EQ || code == GT || code == LT || code == UNORDERED
11735 || code == GTU || code == LTU);
f676971a 11736
9878760c
RK
11737 switch (code)
11738 {
11739 case NE:
11740 return scc_p ? base_bit + 3 : base_bit + 2;
11741 case EQ:
11742 return base_bit + 2;
1c882ea4 11743 case GT: case GTU: case UNLE:
9878760c 11744 return base_bit + 1;
1c882ea4 11745 case LT: case LTU: case UNGE:
9878760c 11746 return base_bit;
1c882ea4
GK
11747 case ORDERED: case UNORDERED:
11748 return base_bit + 3;
9878760c
RK
11749
11750 case GE: case GEU:
39a10a29 11751 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11752 unordered position. So test that bit. For integer, this is ! LT
11753 unless this is an scc insn. */
39a10a29 11754 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11755
11756 case LE: case LEU:
39a10a29 11757 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11758
9878760c 11759 default:
37409796 11760 gcc_unreachable ();
9878760c
RK
11761 }
11762}
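/* A short worked example (values illustrative only): for a GT comparison
   whose condition-code register is CR field 1, base_bit is 4 * 1 = 4.
   With scc_p == 0 the GT case returns base_bit + 1 = 5; with scc_p == 1 a
   GE comparison in the same field returns base_bit + 3 = 7, the
   "unordered" position that the earlier cror will have filled in.  */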
1ff7789b 11763\f
8d30c4ee 11764/* Return the GOT register. */
1ff7789b 11765
9390387d 11766rtx
a2369ed3 11767rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11768{
a4f6c312
SS
11769 /* The second flow pass currently (June 1999) can't update
11770 regs_ever_live without disturbing other parts of the compiler, so
11771 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11772 if (!can_create_pseudo_p ()
11773 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11774 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11775
e3b5732b 11776 crtl->uses_pic_offset_table = 1;
3cb999d8 11777
1ff7789b
MM
11778 return pic_offset_table_rtx;
11779}
a7df97e6 11780\f
e2500fed
GK
11781/* Function to init struct machine_function.
11782 This will be called, via a pointer variable,
11783 from push_function_context. */
a7df97e6 11784
e2500fed 11785static struct machine_function *
863d938c 11786rs6000_init_machine_status (void)
a7df97e6 11787{
5ead67f6 11788 return GGC_CNEW (machine_function);
a7df97e6 11789}
9878760c 11790\f
0ba1b2ff
AM
11791/* These macros test for integers and extract the low-order bits. */
11792#define INT_P(X) \
11793((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11794 && GET_MODE (X) == VOIDmode)
11795
11796#define INT_LOWPART(X) \
11797 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11798
11799int
a2369ed3 11800extract_MB (rtx op)
0ba1b2ff
AM
11801{
11802 int i;
11803 unsigned long val = INT_LOWPART (op);
11804
11805 /* If the high bit is zero, the value is the first 1 bit we find
11806 from the left. */
11807 if ((val & 0x80000000) == 0)
11808 {
37409796 11809 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11810
11811 i = 1;
11812 while (((val <<= 1) & 0x80000000) == 0)
11813 ++i;
11814 return i;
11815 }
11816
11817 /* If the high bit is set and the low bit is not, or the mask is all
11818 1's, the value is zero. */
11819 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11820 return 0;
11821
11822 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11823 from the right. */
11824 i = 31;
11825 while (((val >>= 1) & 1) != 0)
11826 --i;
11827
11828 return i;
11829}
11830
11831int
a2369ed3 11832extract_ME (rtx op)
0ba1b2ff
AM
11833{
11834 int i;
11835 unsigned long val = INT_LOWPART (op);
11836
11837 /* If the low bit is zero, the value is the first 1 bit we find from
11838 the right. */
11839 if ((val & 1) == 0)
11840 {
37409796 11841 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11842
11843 i = 30;
11844 while (((val >>= 1) & 1) == 0)
11845 --i;
11846
11847 return i;
11848 }
11849
11850 /* If the low bit is set and the high bit is not, or the mask is all
11851 1's, the value is 31. */
11852 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11853 return 31;
11854
11855 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11856 from the left. */
11857 i = 0;
11858 while (((val <<= 1) & 0x80000000) != 0)
11859 ++i;
11860
11861 return i;
11862}
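/* Two worked examples of the MB/ME extraction above, using the rlwinm
   numbering in which bit 0 is the most-significant bit.  The sanity-check
   sketch below is illustrative only: the helper name is invented here and
   the block is deliberately left uncompiled.  */
#if 0
static void
rs6000_mask_extraction_example (void)
{
  /* Invented example helper; values chosen only to illustrate the numbering.  */

  /* 0x0000fff0 selects bits 16..27, so MB is 16 and ME is 27,
     matching an rlwinm with SH = 0, MB = 16, ME = 27.  */
  gcc_assert (extract_MB (GEN_INT (0x0000fff0)) == 16);
  gcc_assert (extract_ME (GEN_INT (0x0000fff0)) == 27);

  /* 0xff0000ff is a wrap-around mask running from bit 24 through bit 7,
     so MB is 24 and ME is 7.  */
  gcc_assert (extract_MB (GEN_INT (0xff0000ff)) == 24);
  gcc_assert (extract_ME (GEN_INT (0xff0000ff)) == 7);
}
#endif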
11863
c4501e62
JJ
11864/* Locate some local-dynamic symbol still in use by this function
11865 so that we can print its name in some tls_ld pattern. */
11866
11867static const char *
863d938c 11868rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11869{
11870 rtx insn;
11871
11872 if (cfun->machine->some_ld_name)
11873 return cfun->machine->some_ld_name;
11874
11875 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11876 if (INSN_P (insn)
11877 && for_each_rtx (&PATTERN (insn),
11878 rs6000_get_some_local_dynamic_name_1, 0))
11879 return cfun->machine->some_ld_name;
11880
37409796 11881 gcc_unreachable ();
c4501e62
JJ
11882}
11883
11884/* Helper function for rs6000_get_some_local_dynamic_name. */
11885
11886static int
a2369ed3 11887rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11888{
11889 rtx x = *px;
11890
11891 if (GET_CODE (x) == SYMBOL_REF)
11892 {
11893 const char *str = XSTR (x, 0);
11894 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11895 {
11896 cfun->machine->some_ld_name = str;
11897 return 1;
11898 }
11899 }
11900
11901 return 0;
11902}
11903
85b776df
AM
11904/* Write out a function code label. */
11905
11906void
11907rs6000_output_function_entry (FILE *file, const char *fname)
11908{
11909 if (fname[0] != '.')
11910 {
11911 switch (DEFAULT_ABI)
11912 {
11913 default:
37409796 11914 gcc_unreachable ();
85b776df
AM
11915
11916 case ABI_AIX:
11917 if (DOT_SYMBOLS)
11918 putc ('.', file);
11919 else
11920 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11921 break;
11922
11923 case ABI_V4:
11924 case ABI_DARWIN:
11925 break;
11926 }
11927 }
11928 if (TARGET_AIX)
11929 RS6000_OUTPUT_BASENAME (file, fname);
11930 else
11931 assemble_name (file, fname);
11932}
11933
9878760c
RK
11934/* Print an operand. Recognize special options, documented below. */
11935
38c1f2d7 11936#if TARGET_ELF
d9407988 11937#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11938#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11939#else
11940#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11941#define SMALL_DATA_REG 0
ba5e43aa
MM
11942#endif
11943
9878760c 11944void
a2369ed3 11945print_operand (FILE *file, rtx x, int code)
9878760c
RK
11946{
11947 int i;
a260abc9 11948 HOST_WIDE_INT val;
0ba1b2ff 11949 unsigned HOST_WIDE_INT uval;
9878760c
RK
11950
11951 switch (code)
11952 {
a8b3aeda 11953 case '.':
a85d226b
RK
11954 /* Write out an instruction after the call which may be replaced
11955 with glue code by the loader. This depends on the AIX version. */
11956 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11957 return;
11958
81eace42
GK
11959 /* %a is output_address. */
11960
9854d9ed
RK
11961 case 'A':
11962 /* If X is a constant integer whose low-order 5 bits are zero,
11963 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11964 in the AIX assembler where "sri" with a zero shift count
20e26713 11965 writes a trash instruction. */
9854d9ed 11966 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11967 putc ('l', file);
9854d9ed 11968 else
76229ac8 11969 putc ('r', file);
9854d9ed
RK
11970 return;
11971
11972 case 'b':
e2c953b6
DE
11973 /* If constant, low-order 16 bits of constant, unsigned.
11974 Otherwise, write normally. */
11975 if (INT_P (x))
11976 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11977 else
11978 print_operand (file, x, 0);
cad12a8d
RK
11979 return;
11980
a260abc9
DE
11981 case 'B':
11982 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11983 for 64-bit mask direction. */
9390387d 11984 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11985 return;
a260abc9 11986
81eace42
GK
11987 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11988 output_operand. */
11989
423c1189
AH
11990 case 'c':
11991 /* X is a CR register. Print the number of the GT bit of the CR. */
11992 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11993 output_operand_lossage ("invalid %%c value");
11994 else
11995 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11996 return;
11997
11998 case 'D':
cef6b86c 11999 /* Like 'J' but get to the GT bit only. */
37409796 12000 gcc_assert (GET_CODE (x) == REG);
423c1189 12001
cef6b86c
EB
12002 /* Bit 1 is GT bit. */
12003 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 12004
cef6b86c
EB
12005 /* Add one for shift count in rlinm for scc. */
12006 fprintf (file, "%d", i + 1);
423c1189
AH
12007 return;
12008
9854d9ed 12009 case 'E':
39a10a29 12010 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
12011 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12012 output_operand_lossage ("invalid %%E value");
78fbdbf7 12013 else
39a10a29 12014 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 12015 return;
9854d9ed
RK
12016
12017 case 'f':
12018 /* X is a CR register. Print the shift count needed to move it
12019 to the high-order four bits. */
12020 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12021 output_operand_lossage ("invalid %%f value");
12022 else
9ebbca7d 12023 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12024 return;
12025
12026 case 'F':
12027 /* Similar, but print the count for the rotate in the opposite
12028 direction. */
12029 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12030 output_operand_lossage ("invalid %%F value");
12031 else
9ebbca7d 12032 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12033 return;
12034
12035 case 'G':
12036 /* X is a constant integer. If it is negative, print "m",
43aa4e05 12037 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
12038 if (GET_CODE (x) != CONST_INT)
12039 output_operand_lossage ("invalid %%G value");
12040 else if (INTVAL (x) >= 0)
76229ac8 12041 putc ('z', file);
9854d9ed 12042 else
76229ac8 12043 putc ('m', file);
9854d9ed 12044 return;
e2c953b6 12045
9878760c 12046 case 'h':
a4f6c312
SS
12047 /* If constant, output low-order five bits. Otherwise, write
12048 normally. */
9878760c 12049 if (INT_P (x))
5f59ecb7 12050 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12051 else
12052 print_operand (file, x, 0);
12053 return;
12054
64305719 12055 case 'H':
a4f6c312
SS
12056 /* If constant, output low-order six bits. Otherwise, write
12057 normally. */
64305719 12058 if (INT_P (x))
5f59ecb7 12059 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12060 else
12061 print_operand (file, x, 0);
12062 return;
12063
9854d9ed
RK
12064 case 'I':
12065 /* Print `i' if this is a constant, else nothing. */
9878760c 12066 if (INT_P (x))
76229ac8 12067 putc ('i', file);
9878760c
RK
12068 return;
12069
9854d9ed
RK
12070 case 'j':
12071 /* Write the bit number in CCR for jump. */
12072 i = ccr_bit (x, 0);
12073 if (i == -1)
12074 output_operand_lossage ("invalid %%j code");
9878760c 12075 else
9854d9ed 12076 fprintf (file, "%d", i);
9878760c
RK
12077 return;
12078
9854d9ed
RK
12079 case 'J':
12080 /* Similar, but add one for shift count in rlinm for scc and pass
12081 scc flag to `ccr_bit'. */
12082 i = ccr_bit (x, 1);
12083 if (i == -1)
12084 output_operand_lossage ("invalid %%J code");
12085 else
a0466a68
RK
12086 /* If we want bit 31, write a shift count of zero, not 32. */
12087 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12088 return;
12089
9854d9ed
RK
12090 case 'k':
12091 /* X must be a constant. Write the 1's complement of the
12092 constant. */
9878760c 12093 if (! INT_P (x))
9854d9ed 12094 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12095 else
12096 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12097 return;
12098
81eace42 12099 case 'K':
9ebbca7d
GK
12100 /* X must be a symbolic constant on ELF. Write an
12101 expression suitable for an 'addi' that adds in the low 16
12102 bits of the MEM. */
12103 if (GET_CODE (x) != CONST)
12104 {
12105 print_operand_address (file, x);
12106 fputs ("@l", file);
12107 }
12108 else
12109 {
12110 if (GET_CODE (XEXP (x, 0)) != PLUS
12111 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12112 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12113 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12114 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12115 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12116 fputs ("@l", file);
ed8d2920
MM
12117 /* For GNU as, there must be a non-alphanumeric character
12118 between 'l' and the number. The '-' is added by
12119 print_operand() already. */
12120 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12121 fputs ("+", file);
9ebbca7d
GK
12122 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12123 }
81eace42
GK
12124 return;
12125
12126 /* %l is output_asm_label. */
9ebbca7d 12127
9854d9ed
RK
12128 case 'L':
12129 /* Write second word of DImode or DFmode reference. Works on register
12130 or non-indexed memory only. */
12131 if (GET_CODE (x) == REG)
fb5c67a7 12132 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12133 else if (GET_CODE (x) == MEM)
12134 {
12135 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12136 we have already done it, we can just use an offset of one word. */
9854d9ed
RK
12137 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12138 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12139 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12140 UNITS_PER_WORD));
6fb5fa3c
DB
12141 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12142 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12143 UNITS_PER_WORD));
9854d9ed 12144 else
d7624dc0
RK
12145 output_address (XEXP (adjust_address_nv (x, SImode,
12146 UNITS_PER_WORD),
12147 0));
ed8908e7 12148
ba5e43aa 12149 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12150 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12151 reg_names[SMALL_DATA_REG]);
9854d9ed 12152 }
9878760c 12153 return;
f676971a 12154
9878760c
RK
12155 case 'm':
12156 /* MB value for a mask operand. */
b1765bde 12157 if (! mask_operand (x, SImode))
9878760c
RK
12158 output_operand_lossage ("invalid %%m value");
12159
0ba1b2ff 12160 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12161 return;
12162
12163 case 'M':
12164 /* ME value for a mask operand. */
b1765bde 12165 if (! mask_operand (x, SImode))
a260abc9 12166 output_operand_lossage ("invalid %%M value");
9878760c 12167
0ba1b2ff 12168 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12169 return;
12170
81eace42
GK
12171 /* %n outputs the negative of its operand. */
12172
9878760c
RK
12173 case 'N':
12174 /* Write the number of elements in the vector times 4. */
12175 if (GET_CODE (x) != PARALLEL)
12176 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12177 else
12178 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12179 return;
12180
12181 case 'O':
12182 /* Similar, but subtract 1 first. */
12183 if (GET_CODE (x) != PARALLEL)
1427100a 12184 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12185 else
12186 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12187 return;
12188
9854d9ed
RK
12189 case 'p':
12190 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12191 if (! INT_P (x)
2bfcf297 12192 || INT_LOWPART (x) < 0
9854d9ed
RK
12193 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12194 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12195 else
12196 fprintf (file, "%d", i);
9854d9ed
RK
12197 return;
12198
9878760c
RK
12199 case 'P':
12200 /* The operand must be an indirect memory reference. The result
8bb418a3 12201 is the register name. */
9878760c
RK
12202 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12203 || REGNO (XEXP (x, 0)) >= 32)
12204 output_operand_lossage ("invalid %%P value");
e2c953b6 12205 else
fb5c67a7 12206 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12207 return;
12208
dfbdccdb
GK
12209 case 'q':
12210 /* This outputs the logical code corresponding to a boolean
12211 expression. The expression may have one or both operands
39a10a29 12212 negated (if one, only the first one). For condition register
c4ad648e
AM
12213 logical operations, it will also treat the negated
12214 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12215 {
63bc1d05 12216 const char *const *t = 0;
dfbdccdb
GK
12217 const char *s;
12218 enum rtx_code code = GET_CODE (x);
12219 static const char * const tbl[3][3] = {
12220 { "and", "andc", "nor" },
12221 { "or", "orc", "nand" },
12222 { "xor", "eqv", "xor" } };
12223
12224 if (code == AND)
12225 t = tbl[0];
12226 else if (code == IOR)
12227 t = tbl[1];
12228 else if (code == XOR)
12229 t = tbl[2];
12230 else
12231 output_operand_lossage ("invalid %%q value");
12232
12233 if (GET_CODE (XEXP (x, 0)) != NOT)
12234 s = t[0];
12235 else
12236 {
12237 if (GET_CODE (XEXP (x, 1)) == NOT)
12238 s = t[2];
12239 else
12240 s = t[1];
12241 }
f676971a 12242
dfbdccdb
GK
12243 fputs (s, file);
12244 }
12245 return;
12246
2c4a9cff
DE
12247 case 'Q':
12248 if (TARGET_MFCRF)
3b6ce0af 12249 fputc (',', file);
5efb1046 12250 /* FALLTHRU */
2c4a9cff
DE
12251 else
12252 return;
12253
9854d9ed
RK
12254 case 'R':
12255 /* X is a CR register. Print the mask for `mtcrf'. */
12256 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12257 output_operand_lossage ("invalid %%R value");
12258 else
9ebbca7d 12259 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12260 return;
9854d9ed
RK
12261
12262 case 's':
12263 /* Low 5 bits of 32 - value */
12264 if (! INT_P (x))
12265 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12266 else
12267 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12268 return;
9854d9ed 12269
a260abc9 12270 case 'S':
0ba1b2ff 12271 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12272 CONST_INT 32-bit mask is considered sign-extended so any
12273 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12274 if (! mask64_operand (x, DImode))
a260abc9
DE
12275 output_operand_lossage ("invalid %%S value");
12276
0ba1b2ff 12277 uval = INT_LOWPART (x);
a260abc9 12278
0ba1b2ff 12279 if (uval & 1) /* Clear Left */
a260abc9 12280 {
f099d360
GK
12281#if HOST_BITS_PER_WIDE_INT > 64
12282 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12283#endif
0ba1b2ff 12284 i = 64;
a260abc9 12285 }
0ba1b2ff 12286 else /* Clear Right */
a260abc9 12287 {
0ba1b2ff 12288 uval = ~uval;
f099d360
GK
12289#if HOST_BITS_PER_WIDE_INT > 64
12290 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12291#endif
0ba1b2ff 12292 i = 63;
a260abc9 12293 }
0ba1b2ff
AM
12294 while (uval != 0)
12295 --i, uval >>= 1;
37409796 12296 gcc_assert (i >= 0);
0ba1b2ff
AM
12297 fprintf (file, "%d", i);
12298 return;
a260abc9 12299
a3170dc6
AH
12300 case 't':
12301 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12302 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12303
12304 /* Bit 3 is OV bit. */
12305 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12306
12307 /* If we want bit 31, write a shift count of zero, not 32. */
12308 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12309 return;
12310
cccf3bdc
DE
12311 case 'T':
12312 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12313 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12314 && REGNO (x) != CTR_REGNO))
cccf3bdc 12315 output_operand_lossage ("invalid %%T value");
1de43f85 12316 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12317 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12318 else
12319 fputs ("ctr", file);
12320 return;
12321
9854d9ed 12322 case 'u':
802a0058 12323 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12324 if (! INT_P (x))
12325 output_operand_lossage ("invalid %%u value");
e2c953b6 12326 else
f676971a 12327 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12328 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12329 return;
12330
802a0058
MM
12331 case 'v':
12332 /* High-order 16 bits of constant for use in signed operand. */
12333 if (! INT_P (x))
12334 output_operand_lossage ("invalid %%v value");
e2c953b6 12335 else
134c32f6
DE
12336 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12337 (INT_LOWPART (x) >> 16) & 0xffff);
12338 return;
802a0058 12339
9854d9ed
RK
12340 case 'U':
12341 /* Print `u' if this has an auto-increment or auto-decrement. */
12342 if (GET_CODE (x) == MEM
12343 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12344 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12345 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12346 putc ('u', file);
9854d9ed 12347 return;
9878760c 12348
e0cd0770
JC
12349 case 'V':
12350 /* Print the trap code for this operand. */
12351 switch (GET_CODE (x))
12352 {
12353 case EQ:
12354 fputs ("eq", file); /* 4 */
12355 break;
12356 case NE:
12357 fputs ("ne", file); /* 24 */
12358 break;
12359 case LT:
12360 fputs ("lt", file); /* 16 */
12361 break;
12362 case LE:
12363 fputs ("le", file); /* 20 */
12364 break;
12365 case GT:
12366 fputs ("gt", file); /* 8 */
12367 break;
12368 case GE:
12369 fputs ("ge", file); /* 12 */
12370 break;
12371 case LTU:
12372 fputs ("llt", file); /* 2 */
12373 break;
12374 case LEU:
12375 fputs ("lle", file); /* 6 */
12376 break;
12377 case GTU:
12378 fputs ("lgt", file); /* 1 */
12379 break;
12380 case GEU:
12381 fputs ("lge", file); /* 5 */
12382 break;
12383 default:
37409796 12384 gcc_unreachable ();
e0cd0770
JC
12385 }
12386 break;
12387
9854d9ed
RK
12388 case 'w':
12389 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12390 normally. */
12391 if (INT_P (x))
f676971a 12392 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12393 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12394 else
12395 print_operand (file, x, 0);
9878760c
RK
12396 return;
12397
9854d9ed 12398 case 'W':
e2c953b6 12399 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12400 val = (GET_CODE (x) == CONST_INT
12401 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12402
12403 if (val < 0)
12404 i = -1;
9854d9ed 12405 else
e2c953b6
DE
12406 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12407 if ((val <<= 1) < 0)
12408 break;
12409
12410#if HOST_BITS_PER_WIDE_INT == 32
12411 if (GET_CODE (x) == CONST_INT && i >= 0)
12412 i += 32; /* zero-extend high-part was all 0's */
12413 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12414 {
12415 val = CONST_DOUBLE_LOW (x);
12416
37409796
NS
12417 gcc_assert (val);
12418 if (val < 0)
e2c953b6
DE
12419 --i;
12420 else
12421 for ( ; i < 64; i++)
12422 if ((val <<= 1) < 0)
12423 break;
12424 }
12425#endif
12426
12427 fprintf (file, "%d", i + 1);
9854d9ed 12428 return;
9878760c 12429
9854d9ed
RK
12430 case 'X':
12431 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12432 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12433 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12434 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12435 putc ('x', file);
9854d9ed 12436 return;
9878760c 12437
9854d9ed
RK
12438 case 'Y':
12439 /* Like 'L', for third word of TImode */
12440 if (GET_CODE (x) == REG)
fb5c67a7 12441 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12442 else if (GET_CODE (x) == MEM)
9878760c 12443 {
9854d9ed
RK
12444 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12445 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12446 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12447 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12448 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12449 else
d7624dc0 12450 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12451 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12452 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12453 reg_names[SMALL_DATA_REG]);
9878760c
RK
12454 }
12455 return;
f676971a 12456
9878760c 12457 case 'z':
b4ac57ab
RS
12458 /* X is a SYMBOL_REF. Write out the name preceded by a
12459 period and without any trailing data in brackets. Used for function
4d30c363
MM
12460 names. If we are configured for System V (or the embedded ABI) on
12461 the PowerPC, do not emit the period, since those systems do not use
12462 TOCs and the like. */
37409796 12463 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12464
c4ad648e
AM
12465 /* Mark the decl as referenced so that cgraph will output the
12466 function. */
9bf6462a 12467 if (SYMBOL_REF_DECL (x))
c4ad648e 12468 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12469
85b776df 12470 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12471 if (TARGET_MACHO)
12472 {
12473 const char *name = XSTR (x, 0);
a031e781 12474#if TARGET_MACHO
3b48085e 12475 if (MACHOPIC_INDIRECT
11abc112
MM
12476 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12477 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12478#endif
12479 assemble_name (file, name);
12480 }
85b776df 12481 else if (!DOT_SYMBOLS)
9739c90c 12482 assemble_name (file, XSTR (x, 0));
85b776df
AM
12483 else
12484 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12485 return;
12486
9854d9ed
RK
12487 case 'Z':
12488 /* Like 'L', for last word of TImode. */
12489 if (GET_CODE (x) == REG)
fb5c67a7 12490 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12491 else if (GET_CODE (x) == MEM)
12492 {
12493 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12494 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12495 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12496 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12497 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12498 else
d7624dc0 12499 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12500 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12501 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12502 reg_names[SMALL_DATA_REG]);
9854d9ed 12503 }
5c23c401 12504 return;
0ac081f6 12505
a3170dc6 12506 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12507 case 'y':
12508 {
12509 rtx tmp;
12510
37409796 12511 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12512
12513 tmp = XEXP (x, 0);
12514
90d3ff1c 12515 /* Ugly hack because %y is overloaded. */
8ef65e3d 12516 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12517 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12518 || GET_MODE (x) == TFmode
12519 || GET_MODE (x) == TImode))
a3170dc6
AH
12520 {
12521 /* Handle [reg]. */
12522 if (GET_CODE (tmp) == REG)
12523 {
12524 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12525 break;
12526 }
12527 /* Handle [reg+UIMM]. */
12528 else if (GET_CODE (tmp) == PLUS &&
12529 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12530 {
12531 int x;
12532
37409796 12533 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12534
12535 x = INTVAL (XEXP (tmp, 1));
12536 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12537 break;
12538 }
12539
12540 /* Fall through. Must be [reg+reg]. */
12541 }
850e8d3d
DN
12542 if (TARGET_ALTIVEC
12543 && GET_CODE (tmp) == AND
12544 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12545 && INTVAL (XEXP (tmp, 1)) == -16)
12546 tmp = XEXP (tmp, 0);
0ac081f6 12547 if (GET_CODE (tmp) == REG)
c62f2db5 12548 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12549 else
0ac081f6 12550 {
cb8cc791
AP
 12551 if (GET_CODE (tmp) != PLUS
12552 || !REG_P (XEXP (tmp, 0))
12553 || !REG_P (XEXP (tmp, 1)))
12554 {
12555 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12556 break;
12557 }
bb8df8a6 12558
0ac081f6
AH
12559 if (REGNO (XEXP (tmp, 0)) == 0)
12560 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12561 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12562 else
12563 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12564 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12565 }
0ac081f6
AH
12566 break;
12567 }
f676971a 12568
9878760c
RK
12569 case 0:
12570 if (GET_CODE (x) == REG)
12571 fprintf (file, "%s", reg_names[REGNO (x)]);
12572 else if (GET_CODE (x) == MEM)
12573 {
12574 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12575 know the width from the mode. */
12576 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12577 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12578 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12579 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12580 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12581 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12582 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12583 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12584 else
a54d04b7 12585 output_address (XEXP (x, 0));
9878760c
RK
12586 }
12587 else
a54d04b7 12588 output_addr_const (file, x);
a85d226b 12589 return;
9878760c 12590
c4501e62
JJ
12591 case '&':
12592 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12593 return;
12594
9878760c
RK
12595 default:
12596 output_operand_lossage ("invalid %%xn code");
12597 }
12598}
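/* A few illustrative uses of the codes above (the operand values are made
   up for the example): with x = (const_int 0x12345678), "%b" prints 22136
   (the low 16 bits, unsigned), "%u" prints 0x1234 (the high 16 bits), and
   "%h" prints 24 (the low five bits); "%A" prints 'l' rather than 'r'
   only when the low-order five bits of the constant are zero.  */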
12599\f
12600/* Print the address of an operand. */
12601
12602void
a2369ed3 12603print_operand_address (FILE *file, rtx x)
9878760c
RK
12604{
12605 if (GET_CODE (x) == REG)
4697a36c 12606 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12607 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12608 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12609 {
12610 output_addr_const (file, x);
ba5e43aa 12611 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12612 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12613 reg_names[SMALL_DATA_REG]);
37409796
NS
12614 else
12615 gcc_assert (!TARGET_TOC);
9878760c
RK
12616 }
12617 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12618 {
9024f4b8 12619 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12620 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12621 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12622 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12623 else
4697a36c
MM
12624 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12625 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12626 }
12627 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12628 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12629 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12630#if TARGET_ELF
12631 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12632 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12633 {
12634 output_addr_const (file, XEXP (x, 1));
12635 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12636 }
c859cda6
DJ
12637#endif
12638#if TARGET_MACHO
12639 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12640 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12641 {
12642 fprintf (file, "lo16(");
12643 output_addr_const (file, XEXP (x, 1));
12644 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12645 }
3cb999d8 12646#endif
4d588c14 12647 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12648 {
2e4316da 12649 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
12650 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12651 }
9878760c 12652 else
37409796 12653 gcc_unreachable ();
9878760c
RK
12654}
12655\f
2e4316da
RS
12656/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12657
12658bool
12659rs6000_output_addr_const_extra (FILE *file, rtx x)
12660{
12661 if (GET_CODE (x) == UNSPEC)
12662 switch (XINT (x, 1))
12663 {
12664 case UNSPEC_TOCREL:
12665 x = XVECEXP (x, 0, 0);
12666 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12667 output_addr_const (file, x);
12668 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12669 {
12670 putc ('-', file);
12671 assemble_name (file, toc_label_name);
12672 }
12673 else if (TARGET_ELF)
12674 fputs ("@toc", file);
12675 return true;
08a6a74b
RS
12676
12677#if TARGET_MACHO
12678 case UNSPEC_MACHOPIC_OFFSET:
12679 output_addr_const (file, XVECEXP (x, 0, 0));
12680 putc ('-', file);
12681 machopic_output_function_base_name (file);
12682 return true;
12683#endif
2e4316da
RS
12684 }
12685 return false;
12686}
12687\f
88cad84b 12688/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12689 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12690 is defined. It also needs to handle DI-mode objects on 64-bit
12691 targets. */
12692
12693static bool
a2369ed3 12694rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12695{
f4f4921e 12696#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12697 /* Special handling for SI values. */
84dcde01 12698 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12699 {
301d03af 12700 static int recurse = 0;
f676971a 12701
301d03af
RS
12702 /* For -mrelocatable, we mark all addresses that need to be fixed up
12703 in the .fixup section. */
12704 if (TARGET_RELOCATABLE
d6b5193b
RS
12705 && in_section != toc_section
12706 && in_section != text_section
4325ca90 12707 && !unlikely_text_section_p (in_section)
301d03af
RS
12708 && !recurse
12709 && GET_CODE (x) != CONST_INT
12710 && GET_CODE (x) != CONST_DOUBLE
12711 && CONSTANT_P (x))
12712 {
12713 char buf[256];
12714
12715 recurse = 1;
12716 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12717 fixuplabelno++;
12718 ASM_OUTPUT_LABEL (asm_out_file, buf);
12719 fprintf (asm_out_file, "\t.long\t(");
12720 output_addr_const (asm_out_file, x);
12721 fprintf (asm_out_file, ")@fixup\n");
12722 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12723 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12724 fprintf (asm_out_file, "\t.long\t");
12725 assemble_name (asm_out_file, buf);
12726 fprintf (asm_out_file, "\n\t.previous\n");
12727 recurse = 0;
12728 return true;
12729 }
12730 /* Remove initial .'s to turn a -mcall-aixdesc function
12731 address into the address of the descriptor, not the function
12732 itself. */
12733 else if (GET_CODE (x) == SYMBOL_REF
12734 && XSTR (x, 0)[0] == '.'
12735 && DEFAULT_ABI == ABI_AIX)
12736 {
12737 const char *name = XSTR (x, 0);
12738 while (*name == '.')
12739 name++;
12740
12741 fprintf (asm_out_file, "\t.long\t%s\n", name);
12742 return true;
12743 }
12744 }
f4f4921e 12745#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12746 return default_assemble_integer (x, size, aligned_p);
12747}
93638d7a
AM
12748
12749#ifdef HAVE_GAS_HIDDEN
12750/* Emit an assembler directive to set symbol visibility for DECL to
12751 VISIBILITY_TYPE. */
12752
5add3202 12753static void
a2369ed3 12754rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12755{
93638d7a
AM
12756 /* Functions need to have their entry point symbol visibility set as
12757 well as their descriptor symbol visibility. */
85b776df
AM
12758 if (DEFAULT_ABI == ABI_AIX
12759 && DOT_SYMBOLS
12760 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12761 {
25fdb4dc 12762 static const char * const visibility_types[] = {
c4ad648e 12763 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12764 };
12765
12766 const char *name, *type;
93638d7a
AM
12767
12768 name = ((* targetm.strip_name_encoding)
12769 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12770 type = visibility_types[vis];
93638d7a 12771
25fdb4dc
RH
12772 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12773 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12774 }
25fdb4dc
RH
12775 else
12776 default_assemble_visibility (decl, vis);
93638d7a
AM
12777}
12778#endif
301d03af 12779\f
39a10a29 12780enum rtx_code
a2369ed3 12781rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12782{
 12783 /* Reversal of FP compares requires care -- an ordered compare
 12784 becomes an unordered compare and vice versa. */
f676971a 12785 if (mode == CCFPmode
bc9ec0e0
GK
12786 && (!flag_finite_math_only
12787 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12788 || code == UNEQ || code == LTGT))
bab6226b 12789 return reverse_condition_maybe_unordered (code);
39a10a29 12790 else
bab6226b 12791 return reverse_condition (code);
39a10a29
GK
12792}
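/* For example (illustrative): reversing LT while MODE is CCFPmode yields
   UNGE rather than GE, because the reversed branch must also be taken
   when the operands compare unordered; in CCmode, or in CCFPmode under
   flag_finite_math_only, the plain reversal LT -> GE is used instead.  */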
12793
39a10a29
GK
12794/* Generate a compare for CODE. Return a brand-new rtx that
12795 represents the result of the compare. */
a4f6c312 12796
39a10a29 12797static rtx
a2369ed3 12798rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12799{
12800 enum machine_mode comp_mode;
12801 rtx compare_result;
12802
12803 if (rs6000_compare_fp_p)
12804 comp_mode = CCFPmode;
12805 else if (code == GTU || code == LTU
c4ad648e 12806 || code == GEU || code == LEU)
39a10a29 12807 comp_mode = CCUNSmode;
60934f9c
NS
12808 else if ((code == EQ || code == NE)
12809 && GET_CODE (rs6000_compare_op0) == SUBREG
12810 && GET_CODE (rs6000_compare_op1) == SUBREG
12811 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12812 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
 12813 /* These are unsigned values; perhaps there will be a later
12814 ordering compare that can be shared with this one.
12815 Unfortunately we cannot detect the signedness of the operands
12816 for non-subregs. */
12817 comp_mode = CCUNSmode;
39a10a29
GK
12818 else
12819 comp_mode = CCmode;
12820
12821 /* First, the compare. */
12822 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12823
cef6b86c 12824 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12825 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12826 && rs6000_compare_fp_p)
a3170dc6 12827 {
64022b5d 12828 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12829 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12830
12831 if (op_mode == VOIDmode)
12832 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12833
cef6b86c
EB
12834 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12835 This explains the following mess. */
423c1189 12836
a3170dc6
AH
12837 switch (code)
12838 {
423c1189 12839 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12840 switch (op_mode)
12841 {
12842 case SFmode:
1cdc0d8f 12843 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12844 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12845 rs6000_compare_op1)
12846 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12847 rs6000_compare_op1);
12848 break;
12849
12850 case DFmode:
1cdc0d8f 12851 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12852 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12853 rs6000_compare_op1)
12854 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12855 rs6000_compare_op1);
12856 break;
12857
17caeff2 12858 case TFmode:
1cdc0d8f 12859 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12860 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12861 rs6000_compare_op1)
12862 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12863 rs6000_compare_op1);
12864 break;
12865
37409796
NS
12866 default:
12867 gcc_unreachable ();
12868 }
a3170dc6 12869 break;
bb8df8a6 12870
423c1189 12871 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12872 switch (op_mode)
12873 {
12874 case SFmode:
1cdc0d8f 12875 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12876 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12877 rs6000_compare_op1)
12878 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12879 rs6000_compare_op1);
12880 break;
bb8df8a6 12881
37409796 12882 case DFmode:
1cdc0d8f 12883 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12884 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12885 rs6000_compare_op1)
12886 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12887 rs6000_compare_op1);
12888 break;
12889
17caeff2 12890 case TFmode:
1cdc0d8f 12891 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12892 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12893 rs6000_compare_op1)
12894 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12895 rs6000_compare_op1);
12896 break;
12897
37409796
NS
12898 default:
12899 gcc_unreachable ();
12900 }
a3170dc6 12901 break;
bb8df8a6 12902
423c1189 12903 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12904 switch (op_mode)
12905 {
12906 case SFmode:
1cdc0d8f 12907 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12908 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12909 rs6000_compare_op1)
12910 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12911 rs6000_compare_op1);
12912 break;
bb8df8a6 12913
37409796 12914 case DFmode:
1cdc0d8f 12915 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12916 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12917 rs6000_compare_op1)
12918 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12919 rs6000_compare_op1);
12920 break;
12921
17caeff2 12922 case TFmode:
1cdc0d8f 12923 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12924 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12925 rs6000_compare_op1)
12926 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12927 rs6000_compare_op1);
12928 break;
12929
37409796
NS
12930 default:
12931 gcc_unreachable ();
12932 }
a3170dc6 12933 break;
4d4cbc0e 12934 default:
37409796 12935 gcc_unreachable ();
a3170dc6
AH
12936 }
12937
12938 /* Synthesize LE and GE from LT/GT || EQ. */
12939 if (code == LE || code == GE || code == LEU || code == GEU)
12940 {
a3170dc6
AH
12941 emit_insn (cmp);
12942
12943 switch (code)
12944 {
12945 case LE: code = LT; break;
12946 case GE: code = GT; break;
12947 case LEU: code = LT; break;
12948 case GEU: code = GT; break;
37409796 12949 default: gcc_unreachable ();
a3170dc6
AH
12950 }
12951
a3170dc6
AH
12952 compare_result2 = gen_reg_rtx (CCFPmode);
12953
12954 /* Do the EQ. */
37409796
NS
12955 switch (op_mode)
12956 {
12957 case SFmode:
1cdc0d8f 12958 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12959 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12960 rs6000_compare_op1)
12961 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12962 rs6000_compare_op1);
12963 break;
12964
12965 case DFmode:
1cdc0d8f 12966 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12967 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12968 rs6000_compare_op1)
12969 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12970 rs6000_compare_op1);
12971 break;
12972
17caeff2 12973 case TFmode:
1cdc0d8f 12974 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12975 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12976 rs6000_compare_op1)
12977 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12978 rs6000_compare_op1);
12979 break;
12980
37409796
NS
12981 default:
12982 gcc_unreachable ();
12983 }
a3170dc6
AH
12984 emit_insn (cmp);
12985
a3170dc6 12986 /* OR them together. */
64022b5d
AH
12987 or_result = gen_reg_rtx (CCFPmode);
12988 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12989 compare_result2);
a3170dc6
AH
12990 compare_result = or_result;
12991 code = EQ;
12992 }
12993 else
12994 {
a3170dc6 12995 if (code == NE || code == LTGT)
a3170dc6 12996 code = NE;
423c1189
AH
12997 else
12998 code = EQ;
a3170dc6
AH
12999 }
13000
13001 emit_insn (cmp);
13002 }
13003 else
de17c25f
DE
13004 {
13005 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
13006 CLOBBERs to match cmptf_internal2 pattern. */
13007 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
13008 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 13009 && !TARGET_IEEEQUAD
de17c25f
DE
13010 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
13011 emit_insn (gen_rtx_PARALLEL (VOIDmode,
13012 gen_rtvec (9,
13013 gen_rtx_SET (VOIDmode,
13014 compare_result,
13015 gen_rtx_COMPARE (comp_mode,
13016 rs6000_compare_op0,
13017 rs6000_compare_op1)),
13018 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13019 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13020 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13021 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13022 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13023 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13024 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13025 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
13026 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
13027 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
13028 {
13029 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
13030 comp_mode = CCEQmode;
13031 compare_result = gen_reg_rtx (CCEQmode);
13032 if (TARGET_64BIT)
13033 emit_insn (gen_stack_protect_testdi (compare_result,
13034 rs6000_compare_op0, op1));
13035 else
13036 emit_insn (gen_stack_protect_testsi (compare_result,
13037 rs6000_compare_op0, op1));
13038 }
de17c25f
DE
13039 else
13040 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
13041 gen_rtx_COMPARE (comp_mode,
13042 rs6000_compare_op0,
13043 rs6000_compare_op1)));
13044 }
f676971a 13045
ca5adc63 13046 /* Some kinds of FP comparisons need an OR operation;
e7108df9 13047 under flag_finite_math_only we don't bother. */
39a10a29 13048 if (rs6000_compare_fp_p
e7108df9 13049 && !flag_finite_math_only
8ef65e3d 13050 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13051 && (code == LE || code == GE
13052 || code == UNEQ || code == LTGT
13053 || code == UNGT || code == UNLT))
13054 {
13055 enum rtx_code or1, or2;
13056 rtx or1_rtx, or2_rtx, compare2_rtx;
13057 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13058
39a10a29
GK
13059 switch (code)
13060 {
13061 case LE: or1 = LT; or2 = EQ; break;
13062 case GE: or1 = GT; or2 = EQ; break;
13063 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13064 case LTGT: or1 = LT; or2 = GT; break;
13065 case UNGT: or1 = UNORDERED; or2 = GT; break;
13066 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13067 default: gcc_unreachable ();
39a10a29
GK
13068 }
13069 validate_condition_mode (or1, comp_mode);
13070 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13071 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13072 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13073 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13074 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13075 const_true_rtx);
13076 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13077
13078 compare_result = or_result;
13079 code = EQ;
13080 }
13081
13082 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13083
1c563bed 13084 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13085}
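/* As a rough illustration of the CCEQ path above (hypothetical input):
   generating an FP "LE" compare first produces the CCFPmode compare, then
   ORs the LT and EQ outcomes into a fresh CCEQ register through a
   (compare (ior ...) const_true) pattern, and hands back an EQ test of
   that CCEQ register, which is what the branch or scc expander consumes.  */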
13086
13087
13088/* Emit the RTL for an sCOND pattern. */
13089
13090void
a2369ed3 13091rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
13092{
13093 rtx condition_rtx;
13094 enum machine_mode op_mode;
b7053a3f 13095 enum rtx_code cond_code;
39a10a29
GK
13096
13097 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
13098 cond_code = GET_CODE (condition_rtx);
13099
8ef65e3d 13100 if (rs6000_compare_fp_p
423c1189
AH
13101 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13102 {
13103 rtx t;
13104
13105 PUT_MODE (condition_rtx, SImode);
13106 t = XEXP (condition_rtx, 0);
13107
37409796 13108 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13109
13110 if (cond_code == NE)
64022b5d 13111 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13112
64022b5d 13113 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13114 return;
13115 }
13116
b7053a3f
GK
13117 if (cond_code == NE
13118 || cond_code == GE || cond_code == LE
13119 || cond_code == GEU || cond_code == LEU
13120 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13121 {
13122 rtx not_result = gen_reg_rtx (CCEQmode);
13123 rtx not_op, rev_cond_rtx;
13124 enum machine_mode cc_mode;
f676971a 13125
b7053a3f
GK
13126 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13127
1c563bed 13128 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13129 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13130 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13131 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13132 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13133 }
39a10a29
GK
13134
13135 op_mode = GET_MODE (rs6000_compare_op0);
13136 if (op_mode == VOIDmode)
13137 op_mode = GET_MODE (rs6000_compare_op1);
13138
13139 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
13140 {
13141 PUT_MODE (condition_rtx, DImode);
13142 convert_move (result, condition_rtx, 0);
13143 }
13144 else
13145 {
13146 PUT_MODE (condition_rtx, SImode);
13147 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13148 }
13149}
13150
39a10a29
GK
13151/* Emit a branch of kind CODE to location LOC. */
13152
13153void
a2369ed3 13154rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
13155{
13156 rtx condition_rtx, loc_ref;
13157
13158 condition_rtx = rs6000_generate_compare (code);
13159 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
13160 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13161 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13162 loc_ref, pc_rtx)));
13163}
13164
12a4e8c5
GK
13165/* Return the string to output a conditional branch to LABEL, which is
13166 the operand number of the label, or -1 if the branch is really a
f676971a 13167 conditional return.
12a4e8c5
GK
13168
13169 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13170 condition code register and its mode specifies what kind of
13171 comparison we made.
13172
a0ab749a 13173 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13174
13175 INSN is the insn. */
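/* Illustrative note (a hedged example, not from the original sources):
   for an EQ branch on CR0 with a "predict taken" hint and the label
   operand passed as "%l2", the template built below comes out roughly
   as "{beq|beq-} 0,%l2": the old and new mnemonic alternatives in
   braces, then the CR field, then the label.  The exact CR spelling
   depends on the reg_names in effect, hence the '%' escaping below.  */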
13176
13177char *
a2369ed3 13178output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13179{
13180 static char string[64];
13181 enum rtx_code code = GET_CODE (op);
13182 rtx cc_reg = XEXP (op, 0);
13183 enum machine_mode mode = GET_MODE (cc_reg);
13184 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13185 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13186 int really_reversed = reversed ^ need_longbranch;
13187 char *s = string;
13188 const char *ccode;
13189 const char *pred;
13190 rtx note;
13191
39a10a29
GK
13192 validate_condition_mode (code, mode);
13193
13194 /* Work out which way this really branches. We could use
13195 reverse_condition_maybe_unordered here always but this
13196 makes the resulting assembler clearer. */
12a4e8c5 13197 if (really_reversed)
de40e1df
DJ
13198 {
13199 /* Reversal of FP compares takes care -- an ordered compare
13200 becomes an unordered compare and vice versa. */
13201 if (mode == CCFPmode)
13202 code = reverse_condition_maybe_unordered (code);
13203 else
13204 code = reverse_condition (code);
13205 }
12a4e8c5 13206
8ef65e3d 13207 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13208 {
13209 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13210 to the GT bit. */
37409796
NS
13211 switch (code)
13212 {
13213 case EQ:
13214 /* Opposite of GT. */
13215 code = GT;
13216 break;
13217
13218 case NE:
13219 code = UNLE;
13220 break;
13221
13222 default:
13223 gcc_unreachable ();
13224 }
a3170dc6
AH
13225 }
13226
39a10a29 13227 switch (code)
12a4e8c5
GK
13228 {
13229 /* Not all of these are actually distinct opcodes, but
13230 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13231 case NE: case LTGT:
13232 ccode = "ne"; break;
13233 case EQ: case UNEQ:
13234 ccode = "eq"; break;
f676971a 13235 case GE: case GEU:
50a0b056 13236 ccode = "ge"; break;
f676971a 13237 case GT: case GTU: case UNGT:
50a0b056 13238 ccode = "gt"; break;
f676971a 13239 case LE: case LEU:
50a0b056 13240 ccode = "le"; break;
f676971a 13241 case LT: case LTU: case UNLT:
50a0b056 13242 ccode = "lt"; break;
12a4e8c5
GK
13243 case UNORDERED: ccode = "un"; break;
13244 case ORDERED: ccode = "nu"; break;
13245 case UNGE: ccode = "nl"; break;
13246 case UNLE: ccode = "ng"; break;
13247 default:
37409796 13248 gcc_unreachable ();
12a4e8c5 13249 }
f676971a
EC
13250
13251 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13252 The old mnemonics don't have a way to specify this information. */
f4857b9b 13253 pred = "";
12a4e8c5
GK
13254 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13255 if (note != NULL_RTX)
13256 {
13257 /* PROB is the difference from 50%. */
13258 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13259
13260 /* Only hint for highly probable/improbable branches on newer
13261 cpus as static prediction overrides processor dynamic
13262 prediction. For older cpus we may as well always hint, but
13263 assume not taken for branches that are very close to 50% as a
13264 mispredicted taken branch is more expensive than a
f676971a 13265 mispredicted not-taken branch. */
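/* Worked example (assuming REG_BR_PROB_BASE is 10000): the note value
   is the taken probability scaled by that base, so PROB is that value
   minus 5000.  The test abs (prob) > 4800 below hints only branches
   whose taken probability is under 2% or over 98%, while the inner
   test abs (prob) > 500 leaves the near-50% cases predicted "-".  */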
ec507f2d 13266 if (rs6000_always_hint
2c9e13f3
JH
13267 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13268 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13269 {
13270 if (abs (prob) > REG_BR_PROB_BASE / 20
13271 && ((prob > 0) ^ need_longbranch))
c4ad648e 13272 pred = "+";
f4857b9b
AM
13273 else
13274 pred = "-";
13275 }
12a4e8c5 13276 }
12a4e8c5
GK
13277
13278 if (label == NULL)
94a54f47 13279 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13280 else
94a54f47 13281 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13282
37c67319 13283 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13284 Assume they'd only be the first character.... */
37c67319
GK
13285 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13286 *s++ = '%';
94a54f47 13287 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13288
13289 if (label != NULL)
13290 {
13291 /* If the branch distance was too far, we may have to use an
13292 unconditional branch to go the distance. */
13293 if (need_longbranch)
44518ddd 13294 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13295 else
13296 s += sprintf (s, ",%s", label);
13297 }
13298
13299 return string;
13300}
50a0b056 13301
64022b5d 13302/* Return the string to flip the GT bit on a CR. */
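/* (Hedged illustration: the GT bit of CR field n is CR bit 4*n+1, so
   flipping CR1's GT bit in place comes out as "crnot 5,5".)  */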
423c1189 13303char *
64022b5d 13304output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13305{
13306 static char string[64];
13307 int a, b;
13308
37409796
NS
13309 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13310 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13311
64022b5d
AH
13312 /* GT bit. */
13313 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13314 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13315
13316 sprintf (string, "crnot %d,%d", a, b);
13317 return string;
13318}
13319
21213b4c
DP
 13320/* Return the insn index (an UNSPEC number) of the vector compare
 13321 instruction for the given CODE, DEST_MODE and OP_MODE.  Return
 13322 INSN_NOT_AVAILABLE if no valid insn is available. */
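/* A hedged illustration of the table below: an EQ compare of two
   V4SImode operands maps to UNSPEC_VCMPEQUW (presumably emitted as
   vcmpequw), while LE has no direct entry here and is synthesized by
   the caller instead.  */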
13323
13324static int
94ff898d 13325get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13326 enum machine_mode dest_mode,
13327 enum machine_mode op_mode)
13328{
13329 if (!TARGET_ALTIVEC)
13330 return INSN_NOT_AVAILABLE;
13331
13332 switch (code)
13333 {
13334 case EQ:
13335 if (dest_mode == V16QImode && op_mode == V16QImode)
13336 return UNSPEC_VCMPEQUB;
13337 if (dest_mode == V8HImode && op_mode == V8HImode)
13338 return UNSPEC_VCMPEQUH;
13339 if (dest_mode == V4SImode && op_mode == V4SImode)
13340 return UNSPEC_VCMPEQUW;
13341 if (dest_mode == V4SImode && op_mode == V4SFmode)
13342 return UNSPEC_VCMPEQFP;
13343 break;
13344 case GE:
13345 if (dest_mode == V4SImode && op_mode == V4SFmode)
13346 return UNSPEC_VCMPGEFP;
13347 case GT:
13348 if (dest_mode == V16QImode && op_mode == V16QImode)
13349 return UNSPEC_VCMPGTSB;
13350 if (dest_mode == V8HImode && op_mode == V8HImode)
13351 return UNSPEC_VCMPGTSH;
13352 if (dest_mode == V4SImode && op_mode == V4SImode)
13353 return UNSPEC_VCMPGTSW;
13354 if (dest_mode == V4SImode && op_mode == V4SFmode)
13355 return UNSPEC_VCMPGTFP;
13356 break;
13357 case GTU:
13358 if (dest_mode == V16QImode && op_mode == V16QImode)
13359 return UNSPEC_VCMPGTUB;
13360 if (dest_mode == V8HImode && op_mode == V8HImode)
13361 return UNSPEC_VCMPGTUH;
13362 if (dest_mode == V4SImode && op_mode == V4SImode)
13363 return UNSPEC_VCMPGTUW;
13364 break;
13365 default:
13366 break;
13367 }
13368 return INSN_NOT_AVAILABLE;
13369}
13370
13371/* Emit vector compare for operands OP0 and OP1 using code RCODE.
 13372 DMODE is the expected destination mode. This is a recursive function. */
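/* A sketch of the recursion used below (a summary, not new behaviour):
   codes with no direct AltiVec compare are rewritten in terms of ones
   that exist.  NE becomes the one's complement of EQ, GE/GEU/LE/LEU
   become the IOR of GT/GTU/LT/LTU with EQ, and LT/LTU are handled by
   swapping the operands of GT/GTU.  */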
13373
13374static rtx
13375rs6000_emit_vector_compare (enum rtx_code rcode,
13376 rtx op0, rtx op1,
13377 enum machine_mode dmode)
13378{
13379 int vec_cmp_insn;
13380 rtx mask;
13381 enum machine_mode dest_mode;
13382 enum machine_mode op_mode = GET_MODE (op1);
13383
37409796
NS
13384 gcc_assert (TARGET_ALTIVEC);
13385 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13386
 13387 /* Floating point vector compare instructions use a V4SImode destination.
 13388 Move the result to the appropriate mode later. */
13389 if (dmode == V4SFmode)
13390 dest_mode = V4SImode;
13391 else
13392 dest_mode = dmode;
13393
13394 mask = gen_reg_rtx (dest_mode);
13395 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13396
13397 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13398 {
13399 bool swap_operands = false;
13400 bool try_again = false;
13401 switch (rcode)
13402 {
13403 case LT:
13404 rcode = GT;
13405 swap_operands = true;
13406 try_again = true;
13407 break;
13408 case LTU:
13409 rcode = GTU;
13410 swap_operands = true;
13411 try_again = true;
13412 break;
13413 case NE:
370df7db
JC
13414 case UNLE:
13415 case UNLT:
13416 case UNGE:
13417 case UNGT:
13418 /* Invert condition and try again.
13419 e.g., A != B becomes ~(A==B). */
21213b4c 13420 {
370df7db 13421 enum rtx_code rev_code;
21213b4c 13422 enum insn_code nor_code;
d1123cde 13423 rtx eq_rtx;
370df7db
JC
13424
13425 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13426 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13427 dest_mode);
94ff898d 13428
166cdb08 13429 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13430 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13431 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13432
13433 if (dmode != dest_mode)
13434 {
13435 rtx temp = gen_reg_rtx (dest_mode);
13436 convert_move (temp, mask, 0);
13437 return temp;
13438 }
13439 return mask;
13440 }
13441 break;
13442 case GE:
13443 case GEU:
13444 case LE:
13445 case LEU:
13446 /* Try GT/GTU/LT/LTU OR EQ */
13447 {
13448 rtx c_rtx, eq_rtx;
13449 enum insn_code ior_code;
13450 enum rtx_code new_code;
13451
37409796
NS
13452 switch (rcode)
13453 {
13454 case GE:
13455 new_code = GT;
13456 break;
13457
13458 case GEU:
13459 new_code = GTU;
13460 break;
13461
13462 case LE:
13463 new_code = LT;
13464 break;
13465
13466 case LEU:
13467 new_code = LTU;
13468 break;
13469
13470 default:
13471 gcc_unreachable ();
13472 }
21213b4c
DP
13473
13474 c_rtx = rs6000_emit_vector_compare (new_code,
13475 op0, op1, dest_mode);
13476 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13477 dest_mode);
13478
166cdb08 13479 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13480 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13481 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13482 if (dmode != dest_mode)
13483 {
13484 rtx temp = gen_reg_rtx (dest_mode);
13485 convert_move (temp, mask, 0);
13486 return temp;
13487 }
13488 return mask;
13489 }
13490 break;
13491 default:
37409796 13492 gcc_unreachable ();
21213b4c
DP
13493 }
13494
13495 if (try_again)
13496 {
13497 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13498 /* You only get two chances. */
13499 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13500 }
13501
13502 if (swap_operands)
13503 {
13504 rtx tmp;
13505 tmp = op0;
13506 op0 = op1;
13507 op1 = tmp;
13508 }
13509 }
13510
915167f5
GK
13511 emit_insn (gen_rtx_SET (VOIDmode, mask,
13512 gen_rtx_UNSPEC (dest_mode,
13513 gen_rtvec (2, op0, op1),
13514 vec_cmp_insn)));
21213b4c
DP
13515 if (dmode != dest_mode)
13516 {
13517 rtx temp = gen_reg_rtx (dest_mode);
13518 convert_move (temp, mask, 0);
13519 return temp;
13520 }
13521 return mask;
13522}
13523
 13524/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
 13525 if no valid insn exists for the given mode. */
13526
13527static int
13528get_vsel_insn (enum machine_mode mode)
13529{
13530 switch (mode)
13531 {
13532 case V4SImode:
13533 return UNSPEC_VSEL4SI;
13534 break;
13535 case V4SFmode:
13536 return UNSPEC_VSEL4SF;
13537 break;
13538 case V8HImode:
13539 return UNSPEC_VSEL8HI;
13540 break;
13541 case V16QImode:
13542 return UNSPEC_VSEL16QI;
13543 break;
13544 default:
13545 return INSN_NOT_AVAILABLE;
13546 break;
13547 }
13548 return INSN_NOT_AVAILABLE;
13549}
13550
 13551/* Emit a vector select insn that sets DEST from
 13552 operands OP1, OP2 and MASK. */
13553
13554static void
13555rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13556{
13557 rtx t, temp;
13558 enum machine_mode dest_mode = GET_MODE (dest);
13559 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13560
13561 temp = gen_reg_rtx (dest_mode);
94ff898d 13562
bb8df8a6 13563 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13564 select op2. */
915167f5
GK
13565 t = gen_rtx_SET (VOIDmode, temp,
13566 gen_rtx_UNSPEC (dest_mode,
13567 gen_rtvec (3, op2, op1, mask),
13568 vsel_insn_index));
21213b4c
DP
13569 emit_insn (t);
13570 emit_move_insn (dest, temp);
13571 return;
13572}
13573
94ff898d 13574/* Emit vector conditional expression.
21213b4c
DP
13575 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13576 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
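/* Hedged example: for a GT comparison of two V4SFmode operands, the
   mask comes from the UNSPEC_VCMPGTFP compare pattern (presumably
   vcmpgtfp) and the final blend goes through the UNSPEC_VSEL4SF
   pattern (presumably vsel).  */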
13577
13578int
13579rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13580 rtx cond, rtx cc_op0, rtx cc_op1)
13581{
13582 enum machine_mode dest_mode = GET_MODE (dest);
13583 enum rtx_code rcode = GET_CODE (cond);
13584 rtx mask;
13585
13586 if (!TARGET_ALTIVEC)
13587 return 0;
13588
13589 /* Get the vector mask for the given relational operations. */
13590 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13591
13592 rs6000_emit_vector_select (dest, op1, op2, mask);
13593
13594 return 1;
13595}
13596
50a0b056
GK
 13597/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
 13598 operands of the last comparison is nonzero/true, FALSE_COND if it
13599 is zero/false. Return 0 if the hardware has no such operation. */
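/* Rough sketch of the fsel strategy below (an illustration, not a
   specification): the comparison is reduced to "x >= 0" by subtracting,
   negating or taking the absolute value of the operands, and the final
   IF_THEN_ELSE on GE is what the machine description is expected to
   match as a single fsel.  For example, LE against zero becomes GE on
   the negated operand.  */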
a4f6c312 13600
50a0b056 13601int
a2369ed3 13602rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13603{
13604 enum rtx_code code = GET_CODE (op);
13605 rtx op0 = rs6000_compare_op0;
13606 rtx op1 = rs6000_compare_op1;
13607 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13608 enum machine_mode compare_mode = GET_MODE (op0);
13609 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13610 rtx temp;
add2402e 13611 bool is_against_zero;
50a0b056 13612
a3c9585f 13613 /* These modes should always match. */
a3170dc6
AH
13614 if (GET_MODE (op1) != compare_mode
13615 /* In the isel case however, we can use a compare immediate, so
13616 op1 may be a small constant. */
13617 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13618 return 0;
178c3eff 13619 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13620 return 0;
178c3eff 13621 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13622 return 0;
13623
50a0b056 13624 /* First, work out if the hardware can do this at all, or
a3c9585f 13625 if it's too slow.... */
50a0b056 13626 if (! rs6000_compare_fp_p)
a3170dc6
AH
13627 {
13628 if (TARGET_ISEL)
13629 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13630 return 0;
13631 }
8ef65e3d 13632 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13633 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13634 return 0;
50a0b056 13635
add2402e 13636 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13637
add2402e
GK
13638 /* A floating-point subtract might overflow, underflow, or produce
13639 an inexact result, thus changing the floating-point flags, so it
13640 can't be generated if we care about that. It's safe if one side
13641 of the construct is zero, since then no subtract will be
13642 generated. */
ebb109ad 13643 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13644 && flag_trapping_math && ! is_against_zero)
13645 return 0;
13646
50a0b056
GK
13647 /* Eliminate half of the comparisons by switching operands, this
13648 makes the remaining code simpler. */
13649 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13650 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13651 {
13652 code = reverse_condition_maybe_unordered (code);
13653 temp = true_cond;
13654 true_cond = false_cond;
13655 false_cond = temp;
13656 }
13657
13658 /* UNEQ and LTGT take four instructions for a comparison with zero,
13659 it'll probably be faster to use a branch here too. */
bc9ec0e0 13660 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13661 return 0;
f676971a 13662
50a0b056
GK
13663 if (GET_CODE (op1) == CONST_DOUBLE)
13664 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13665
b6d08ca1 13666 /* We're going to try to implement comparisons by performing
50a0b056
GK
13667 a subtract, then comparing against zero. Unfortunately,
13668 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13669 know that the operand is finite and the comparison
50a0b056 13670 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13671 if (HONOR_INFINITIES (compare_mode)
50a0b056 13672 && code != GT && code != UNGE
045572c7 13673 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13674 /* Constructs of the form (a OP b ? a : b) are safe. */
13675 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13676 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13677 && ! rtx_equal_p (op1, true_cond))))
13678 return 0;
add2402e 13679
50a0b056
GK
13680 /* At this point we know we can use fsel. */
13681
13682 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13683 if (! is_against_zero)
13684 {
13685 temp = gen_reg_rtx (compare_mode);
13686 emit_insn (gen_rtx_SET (VOIDmode, temp,
13687 gen_rtx_MINUS (compare_mode, op0, op1)));
13688 op0 = temp;
13689 op1 = CONST0_RTX (compare_mode);
13690 }
50a0b056
GK
13691
13692 /* If we don't care about NaNs we can reduce some of the comparisons
13693 down to faster ones. */
bc9ec0e0 13694 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13695 switch (code)
13696 {
13697 case GT:
13698 code = LE;
13699 temp = true_cond;
13700 true_cond = false_cond;
13701 false_cond = temp;
13702 break;
13703 case UNGE:
13704 code = GE;
13705 break;
13706 case UNEQ:
13707 code = EQ;
13708 break;
13709 default:
13710 break;
13711 }
13712
13713 /* Now, reduce everything down to a GE. */
13714 switch (code)
13715 {
13716 case GE:
13717 break;
13718
13719 case LE:
3148ad6d
DJ
13720 temp = gen_reg_rtx (compare_mode);
13721 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13722 op0 = temp;
13723 break;
13724
13725 case ORDERED:
3148ad6d
DJ
13726 temp = gen_reg_rtx (compare_mode);
13727 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13728 op0 = temp;
13729 break;
13730
13731 case EQ:
3148ad6d 13732 temp = gen_reg_rtx (compare_mode);
f676971a 13733 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13734 gen_rtx_NEG (compare_mode,
13735 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13736 op0 = temp;
13737 break;
13738
13739 case UNGE:
bc9ec0e0 13740 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13741 temp = gen_reg_rtx (result_mode);
50a0b056 13742 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13743 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13744 gen_rtx_GE (VOIDmode,
13745 op0, op1),
13746 true_cond, false_cond)));
bc9ec0e0
GK
13747 false_cond = true_cond;
13748 true_cond = temp;
50a0b056 13749
3148ad6d
DJ
13750 temp = gen_reg_rtx (compare_mode);
13751 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13752 op0 = temp;
13753 break;
13754
13755 case GT:
bc9ec0e0 13756 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13757 temp = gen_reg_rtx (result_mode);
50a0b056 13758 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13759 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13760 gen_rtx_GE (VOIDmode,
13761 op0, op1),
13762 true_cond, false_cond)));
bc9ec0e0
GK
13763 true_cond = false_cond;
13764 false_cond = temp;
50a0b056 13765
3148ad6d
DJ
13766 temp = gen_reg_rtx (compare_mode);
13767 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13768 op0 = temp;
13769 break;
13770
13771 default:
37409796 13772 gcc_unreachable ();
50a0b056
GK
13773 }
13774
13775 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13776 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13777 gen_rtx_GE (VOIDmode,
13778 op0, op1),
13779 true_cond, false_cond)));
13780 return 1;
13781}
13782
a3170dc6
AH
13783/* Same as above, but for ints (isel). */
13784
13785static int
a2369ed3 13786rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13787{
13788 rtx condition_rtx, cr;
13789
13790 /* All isel implementations thus far are 32-bits. */
13791 if (GET_MODE (rs6000_compare_op0) != SImode)
13792 return 0;
13793
13794 /* We still have to do the compare, because isel doesn't do a
13795 compare, it just looks at the CRx bits set by a previous compare
13796 instruction. */
13797 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13798 cr = XEXP (condition_rtx, 0);
13799
13800 if (GET_MODE (cr) == CCmode)
13801 emit_insn (gen_isel_signed (dest, condition_rtx,
13802 true_cond, false_cond, cr));
13803 else
13804 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13805 true_cond, false_cond, cr));
13806
13807 return 1;
13808}
13809
13810const char *
a2369ed3 13811output_isel (rtx *operands)
a3170dc6
AH
13812{
13813 enum rtx_code code;
13814
13815 code = GET_CODE (operands[1]);
13816 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13817 {
13818 PUT_CODE (operands[1], reverse_condition (code));
13819 return "isel %0,%3,%2,%j1";
13820 }
13821 else
13822 return "isel %0,%2,%3,%j1";
13823}
13824
50a0b056 13825void
a2369ed3 13826rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13827{
13828 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13829 enum rtx_code c;
50a0b056 13830 rtx target;
5dc8d536
AH
13831
13832 if (code == SMAX || code == SMIN)
13833 c = GE;
13834 else
13835 c = GEU;
13836
50a0b056 13837 if (code == SMAX || code == UMAX)
f676971a 13838 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13839 op0, op1, mode, 0);
13840 else
f676971a 13841 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13842 op1, op0, mode, 0);
37409796 13843 gcc_assert (target);
50a0b056
GK
13844 if (target != dest)
13845 emit_move_insn (dest, target);
13846}
46c07df8 13847
915167f5
GK
13848/* Emit instructions to perform a load-reserved/store-conditional operation.
13849 The operation performed is an atomic
13850 (set M (CODE:MODE M OP))
13851 If not NULL, BEFORE is atomically set to M before the operation, and
13852 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13853 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13854 Either OP or M may be wrapped in a NOT operation. */
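/* Summary of the subword path below (a description, not new behaviour):
   QImode and HImode operands are widened to SImode, the value is
   shifted into its position within the containing aligned word, a mask
   restricts the operation to just those bits, and the BEFORE/AFTER
   results are shifted back down at the end.  */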
13855
13856void
13857rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13858 rtx m, rtx op, rtx before_param, rtx after_param,
13859 bool sync_p)
13860{
13861 enum machine_mode used_mode;
13862 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13863 rtx used_m;
13864 rtvec vec;
13865 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13866 rtx shift = NULL_RTX;
bb8df8a6 13867
915167f5 13868 if (sync_p)
a3cb4b82 13869 emit_insn (gen_lwsync ());
bb8df8a6 13870
915167f5
GK
13871 used_m = m;
13872
13873 /* If this is smaller than SImode, we'll have to use SImode with
13874 adjustments. */
13875 if (mode == QImode || mode == HImode)
13876 {
13877 rtx newop, oldop;
13878
13879 if (MEM_ALIGN (used_m) >= 32)
13880 {
13881 int ishift = 0;
13882 if (BYTES_BIG_ENDIAN)
13883 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13884
915167f5 13885 shift = GEN_INT (ishift);
c75c6d11 13886 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13887 }
13888 else
13889 {
13890 rtx addrSI, aligned_addr;
a9c9d3fa 13891 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13892
c75c6d11
JJ
13893 addrSI = gen_lowpart_common (SImode,
13894 force_reg (Pmode, XEXP (used_m, 0)));
13895 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13896 shift = gen_reg_rtx (SImode);
13897
13898 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13899 GEN_INT (shift_mask)));
13900 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13901
13902 aligned_addr = expand_binop (Pmode, and_optab,
13903 XEXP (used_m, 0),
13904 GEN_INT (-4), NULL_RTX,
13905 1, OPTAB_LIB_WIDEN);
13906 used_m = change_address (used_m, SImode, aligned_addr);
13907 set_mem_align (used_m, 32);
915167f5 13908 }
c75c6d11
JJ
13909 /* It's safe to keep the old alias set of USED_M, because
13910 the operation is atomic and only affects the original
13911 USED_M. */
f66f9865 13912 m = used_m;
915167f5
GK
13913
13914 if (GET_CODE (op) == NOT)
13915 {
13916 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13917 oldop = gen_rtx_NOT (SImode, oldop);
13918 }
13919 else
13920 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13921
915167f5
GK
13922 switch (code)
13923 {
13924 case IOR:
13925 case XOR:
13926 newop = expand_binop (SImode, and_optab,
13927 oldop, GEN_INT (imask), NULL_RTX,
13928 1, OPTAB_LIB_WIDEN);
13929 emit_insn (gen_ashlsi3 (newop, newop, shift));
13930 break;
13931
f66f9865
DE
13932 case NOT: /* NAND */
13933 newop = expand_binop (SImode, ior_optab,
13934 oldop, GEN_INT (~imask), NULL_RTX,
13935 1, OPTAB_LIB_WIDEN);
13936 emit_insn (gen_rotlsi3 (newop, newop, shift));
13937 break;
13938
915167f5
GK
13939 case AND:
13940 newop = expand_binop (SImode, ior_optab,
13941 oldop, GEN_INT (~imask), NULL_RTX,
13942 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13943 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13944 break;
13945
13946 case PLUS:
9f0076e5 13947 case MINUS:
915167f5
GK
13948 {
13949 rtx mask;
bb8df8a6 13950
915167f5
GK
13951 newop = expand_binop (SImode, and_optab,
13952 oldop, GEN_INT (imask), NULL_RTX,
13953 1, OPTAB_LIB_WIDEN);
13954 emit_insn (gen_ashlsi3 (newop, newop, shift));
13955
13956 mask = gen_reg_rtx (SImode);
13957 emit_move_insn (mask, GEN_INT (imask));
13958 emit_insn (gen_ashlsi3 (mask, mask, shift));
13959
9f0076e5
DE
13960 if (code == PLUS)
13961 newop = gen_rtx_PLUS (SImode, m, newop);
13962 else
13963 newop = gen_rtx_MINUS (SImode, m, newop);
13964 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13965 newop = gen_rtx_IOR (SImode, newop,
13966 gen_rtx_AND (SImode,
13967 gen_rtx_NOT (SImode, mask),
13968 m));
13969 break;
13970 }
13971
13972 default:
13973 gcc_unreachable ();
13974 }
13975
13976 op = newop;
13977 used_mode = SImode;
13978 before = gen_reg_rtx (used_mode);
13979 after = gen_reg_rtx (used_mode);
13980 }
13981 else
13982 {
13983 used_mode = mode;
13984 before = before_param;
13985 after = after_param;
13986
13987 if (before == NULL_RTX)
13988 before = gen_reg_rtx (used_mode);
13989 if (after == NULL_RTX)
13990 after = gen_reg_rtx (used_mode);
13991 }
bb8df8a6 13992
f66f9865 13993 if ((code == PLUS || code == MINUS)
9f0076e5 13994 && used_mode != mode)
915167f5
GK
13995 the_op = op; /* Computed above. */
13996 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13997 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
f66f9865
DE
13998 else if (code == NOT)
13999 the_op = gen_rtx_fmt_ee (IOR, used_mode,
14000 gen_rtx_NOT (used_mode, m),
14001 gen_rtx_NOT (used_mode, op));
915167f5
GK
14002 else
14003 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
14004
14005 set_after = gen_rtx_SET (VOIDmode, after, the_op);
14006 set_before = gen_rtx_SET (VOIDmode, before, used_m);
14007 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
14008 gen_rtx_UNSPEC (used_mode,
14009 gen_rtvec (1, the_op),
14010 UNSPEC_SYNC_OP));
915167f5
GK
14011 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
14012
9f0076e5 14013 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
14014 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
14015 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
14016 else
14017 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
14018 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
14019
14020 /* Shift and mask the return values properly. */
14021 if (used_mode != mode && before_param)
14022 {
14023 emit_insn (gen_lshrsi3 (before, before, shift));
14024 convert_move (before_param, before, 1);
14025 }
14026
14027 if (used_mode != mode && after_param)
14028 {
14029 emit_insn (gen_lshrsi3 (after, after, shift));
14030 convert_move (after_param, after, 1);
14031 }
14032
14033 /* The previous sequence will end with a branch that's dependent on
14034 the conditional store, so placing an isync will ensure that no
14035 other instructions (especially, no load or store instructions)
14036 can start before the atomic operation completes. */
14037 if (sync_p)
14038 emit_insn (gen_isync ());
14039}
14040
b52110d4
DE
14041/* A subroutine of the atomic operation splitters. Jump to LABEL if
14042 COND is true. Mark the jump as unlikely to be taken. */
14043
14044static void
14045emit_unlikely_jump (rtx cond, rtx label)
14046{
14047 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14048 rtx x;
14049
14050 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14051 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
14052 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
14053}
14054
14055/* A subroutine of the atomic operation splitters. Emit a load-locked
14056 instruction in MODE. */
14057
14058static void
14059emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14060{
14061 rtx (*fn) (rtx, rtx) = NULL;
14062 if (mode == SImode)
14063 fn = gen_load_locked_si;
14064 else if (mode == DImode)
14065 fn = gen_load_locked_di;
14066 emit_insn (fn (reg, mem));
14067}
14068
14069/* A subroutine of the atomic operation splitters. Emit a store-conditional
14070 instruction in MODE. */
14071
14072static void
14073emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14074{
14075 rtx (*fn) (rtx, rtx, rtx) = NULL;
14076 if (mode == SImode)
14077 fn = gen_store_conditional_si;
14078 else if (mode == DImode)
14079 fn = gen_store_conditional_di;
14080
9f0076e5 14081 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14082 if (PPC405_ERRATUM77)
14083 emit_insn (gen_memory_barrier ());
14084
14085 emit_insn (fn (res, mem, val));
14086}
14087
ea2c620c 14088/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14089 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14090 operand of the binary operator. BEFORE and AFTER are optional locations to
 14091 return the value of MEM either before or after the operation. SCRATCH is
14092 a scratch register. */
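/* Very roughly, the expansion below is the classic load-reserved loop;
   a sketch of the SImode case, not literal output:
        lwsync
     1: lwarx   scratch,0,mem
        <op>    scratch,scratch,val
        stwcx.  scratch,0,mem
        bne-    1b
        isync
   with the BEFORE/AFTER copies taken around the <op> step.  */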
14093
14094void
14095rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14096 rtx before, rtx after, rtx scratch)
14097{
14098 enum machine_mode mode = GET_MODE (mem);
14099 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14100
a3cb4b82 14101 emit_insn (gen_lwsync ());
9f0076e5
DE
14102
14103 label = gen_label_rtx ();
14104 emit_label (label);
14105 label = gen_rtx_LABEL_REF (VOIDmode, label);
14106
14107 if (before == NULL_RTX)
14108 before = scratch;
14109 emit_load_locked (mode, before, mem);
14110
14111 if (code == NOT)
f66f9865
DE
14112 x = gen_rtx_IOR (mode,
14113 gen_rtx_NOT (mode, before),
14114 gen_rtx_NOT (mode, val));
9f0076e5
DE
14115 else if (code == AND)
14116 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14117 else
14118 x = gen_rtx_fmt_ee (code, mode, before, val);
14119
14120 if (after != NULL_RTX)
14121 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14122 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14123
14124 emit_store_conditional (mode, cond, mem, scratch);
14125
14126 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14127 emit_unlikely_jump (x, label);
14128
14129 emit_insn (gen_isync ());
14130}
14131
b52110d4
DE
14132/* Expand an atomic compare and swap operation. MEM is the memory on which
14133 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14134 value to be stored. SCRATCH is a scratch GPR. */
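/* Roughly (a sketch, not literal output), the loop emitted below is:
        lwsync
     1: lwarx   retval,0,mem
        cmpw    retval,oldval
        bne-    2f
        stwcx.  newval,0,mem
        bne-    1b
        isync
     2:
   where the conditional store actually goes through SCRATCH.  */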
14135
14136void
14137rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14138 rtx scratch)
14139{
14140 enum machine_mode mode = GET_MODE (mem);
14141 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14142
a3cb4b82 14143 emit_insn (gen_lwsync ());
b52110d4
DE
14144
14145 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14146 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14147 emit_label (XEXP (label1, 0));
14148
14149 emit_load_locked (mode, retval, mem);
14150
14151 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14152 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14153
14154 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14155 emit_unlikely_jump (x, label2);
14156
14157 emit_move_insn (scratch, newval);
14158 emit_store_conditional (mode, cond, mem, scratch);
14159
14160 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14161 emit_unlikely_jump (x, label1);
14162
14163 emit_insn (gen_isync ());
14164 emit_label (XEXP (label2, 0));
14165}
14166
14167/* Expand an atomic test and set operation. MEM is the memory on which
 14168 to operate. VAL is the value to be stored. SCRATCH is a scratch GPR. */
14169
14170void
14171rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14172{
14173 enum machine_mode mode = GET_MODE (mem);
14174 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14175
b52110d4
DE
14176 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14177 emit_label (XEXP (label, 0));
14178
14179 emit_load_locked (mode, retval, mem);
14180 emit_move_insn (scratch, val);
14181 emit_store_conditional (mode, cond, mem, scratch);
14182
14183 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14184 emit_unlikely_jump (x, label);
14185
14186 emit_insn (gen_isync ());
14187}
14188
9fc75b97
DE
14189void
14190rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14191{
14192 enum machine_mode mode = GET_MODE (mem);
14193 rtx addrSI, align, wdst, shift, mask;
14194 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14195 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14196
14197 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14198 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14199 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14200 shift = gen_reg_rtx (SImode);
14201 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14202 GEN_INT (shift_mask)));
14203 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
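/* Worked example of the shift just computed (assuming the usual
   big-endian subword layout): for a QImode access at byte offset 1
   within its aligned word, (addr << 3) & 0x18 is 8, and xor-ing with
   0x18 gives 16, so the byte ends up in bits 16..23 of the SImode
   word.  */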
14204
14205 /* Shift and mask old value into position within word. */
14206 oldval = convert_modes (SImode, mode, oldval, 1);
14207 oldval = expand_binop (SImode, and_optab,
14208 oldval, GEN_INT (imask), NULL_RTX,
14209 1, OPTAB_LIB_WIDEN);
14210 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14211
14212 /* Shift and mask new value into position within word. */
14213 newval = convert_modes (SImode, mode, newval, 1);
14214 newval = expand_binop (SImode, and_optab,
14215 newval, GEN_INT (imask), NULL_RTX,
14216 1, OPTAB_LIB_WIDEN);
14217 emit_insn (gen_ashlsi3 (newval, newval, shift));
14218
14219 /* Mask for insertion. */
14220 mask = gen_reg_rtx (SImode);
14221 emit_move_insn (mask, GEN_INT (imask));
14222 emit_insn (gen_ashlsi3 (mask, mask, shift));
14223
14224 /* Address of aligned word containing subword. */
14225 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14226 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14227 mem = change_address (mem, SImode, align);
14228 set_mem_align (mem, 32);
14229 MEM_VOLATILE_P (mem) = 1;
14230
14231 wdst = gen_reg_rtx (SImode);
14232 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14233 oldval, newval, mem));
14234
2725b75c
JJ
14235 /* Shift the result back. */
14236 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14237
9fc75b97
DE
14238 emit_move_insn (dst, gen_lowpart (mode, wdst));
14239}
14240
14241void
14242rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14243 rtx oldval, rtx newval, rtx mem,
14244 rtx scratch)
14245{
14246 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14247
a3cb4b82 14248 emit_insn (gen_lwsync ());
9fc75b97
DE
14249 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14250 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14251 emit_label (XEXP (label1, 0));
14252
14253 emit_load_locked (SImode, scratch, mem);
14254
14255 /* Mask subword within loaded value for comparison with oldval.
 14256 Use UNSPEC_AND to avoid a clobber. */
14257 emit_insn (gen_rtx_SET (SImode, dest,
14258 gen_rtx_UNSPEC (SImode,
14259 gen_rtvec (2, scratch, mask),
14260 UNSPEC_AND)));
14261
14262 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14263 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14264
14265 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14266 emit_unlikely_jump (x, label2);
14267
14268 /* Clear subword within loaded value for insertion of new value. */
14269 emit_insn (gen_rtx_SET (SImode, scratch,
14270 gen_rtx_AND (SImode,
14271 gen_rtx_NOT (SImode, mask), scratch)));
14272 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14273 emit_store_conditional (SImode, cond, mem, scratch);
14274
14275 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14276 emit_unlikely_jump (x, label1);
14277
14278 emit_insn (gen_isync ());
14279 emit_label (XEXP (label2, 0));
14280}
14281
14282
b52110d4 14283 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14284 multi-register moves. It will emit at most one instruction for
14285 each register that is accessed; that is, it won't emit li/lis pairs
14286 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14287 register. */
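/* For example (an illustration of the code below, not a change in
   behaviour): a 16-byte value held in GPRs on a 32-bit target is moved
   as four word_mode sets, and when the source and destination register
   ranges overlap destructively the subwords are emitted in reverse
   order.  */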
46c07df8 14288
46c07df8 14289void
a9baceb1 14290rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14291{
a9baceb1
GK
14292 /* The register number of the first register being moved. */
14293 int reg;
14294 /* The mode that is to be moved. */
14295 enum machine_mode mode;
14296 /* The mode that the move is being done in, and its size. */
14297 enum machine_mode reg_mode;
14298 int reg_mode_size;
14299 /* The number of registers that will be moved. */
14300 int nregs;
14301
14302 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14303 mode = GET_MODE (dst);
c8b622ff 14304 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14305 if (FP_REGNO_P (reg))
696e45ba
ME
14306 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14307 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14308 else if (ALTIVEC_REGNO_P (reg))
14309 reg_mode = V16QImode;
4f011e1e
JM
14310 else if (TARGET_E500_DOUBLE && mode == TFmode)
14311 reg_mode = DFmode;
a9baceb1
GK
14312 else
14313 reg_mode = word_mode;
14314 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14315
37409796 14316 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14317
a9baceb1
GK
14318 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14319 {
14320 /* Move register range backwards, if we might have destructive
14321 overlap. */
14322 int i;
14323 for (i = nregs - 1; i >= 0; i--)
f676971a 14324 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14325 simplify_gen_subreg (reg_mode, dst, mode,
14326 i * reg_mode_size),
14327 simplify_gen_subreg (reg_mode, src, mode,
14328 i * reg_mode_size)));
14329 }
46c07df8
HP
14330 else
14331 {
a9baceb1
GK
14332 int i;
14333 int j = -1;
14334 bool used_update = false;
46c07df8 14335
c1e55850 14336 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14337 {
14338 rtx breg;
3a1f863f 14339
a9baceb1
GK
14340 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14341 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14342 {
14343 rtx delta_rtx;
a9baceb1 14344 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14345 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14346 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14347 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14348 emit_insn (TARGET_32BIT
14349 ? gen_addsi3 (breg, breg, delta_rtx)
14350 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14351 src = replace_equiv_address (src, breg);
3a1f863f 14352 }
d04b6e6e 14353 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14354 {
13e2e16e 14355 rtx basereg;
c1e55850
GK
14356 basereg = gen_rtx_REG (Pmode, reg);
14357 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14358 src = replace_equiv_address (src, basereg);
c1e55850 14359 }
3a1f863f 14360
0423421f
AM
14361 breg = XEXP (src, 0);
14362 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14363 breg = XEXP (breg, 0);
14364
14365 /* If the base register we are using to address memory is
14366 also a destination reg, then change that register last. */
14367 if (REG_P (breg)
14368 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14369 && REGNO (breg) < REGNO (dst) + nregs)
14370 j = REGNO (breg) - REGNO (dst);
c4ad648e 14371 }
46c07df8 14372
a9baceb1 14373 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14374 {
14375 rtx breg;
14376
a9baceb1
GK
14377 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14378 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14379 {
14380 rtx delta_rtx;
a9baceb1 14381 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14382 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14383 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14384 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14385
14386 /* We have to update the breg before doing the store.
14387 Use store with update, if available. */
14388
14389 if (TARGET_UPDATE)
14390 {
a9baceb1 14391 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14392 emit_insn (TARGET_32BIT
14393 ? (TARGET_POWERPC64
14394 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14395 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14396 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14397 used_update = true;
3a1f863f
DE
14398 }
14399 else
a9baceb1
GK
14400 emit_insn (TARGET_32BIT
14401 ? gen_addsi3 (breg, breg, delta_rtx)
14402 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14403 dst = replace_equiv_address (dst, breg);
3a1f863f 14404 }
37409796 14405 else
d04b6e6e 14406 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14407 }
14408
46c07df8 14409 for (i = 0; i < nregs; i++)
f676971a 14410 {
3a1f863f
DE
14411 /* Calculate index to next subword. */
14412 ++j;
f676971a 14413 if (j == nregs)
3a1f863f 14414 j = 0;
46c07df8 14415
112cdef5 14416 /* If the compiler already emitted the move of the first word by
a9baceb1 14417 a store with update, there is no need to do anything. */
3a1f863f 14418 if (j == 0 && used_update)
a9baceb1 14419 continue;
f676971a 14420
a9baceb1
GK
14421 emit_insn (gen_rtx_SET (VOIDmode,
14422 simplify_gen_subreg (reg_mode, dst, mode,
14423 j * reg_mode_size),
14424 simplify_gen_subreg (reg_mode, src, mode,
14425 j * reg_mode_size)));
3a1f863f 14426 }
46c07df8
HP
14427 }
14428}
14429
12a4e8c5 14430\f
a4f6c312
SS
14431/* This page contains routines that are used to determine what the
14432 function prologue and epilogue code will do and write them out. */
9878760c 14433
a4f6c312
SS
14434/* Return the first fixed-point register that is required to be
14435 saved. 32 if none. */
9878760c
RK
14436
14437int
863d938c 14438first_reg_to_save (void)
9878760c
RK
14439{
14440 int first_reg;
14441
14442 /* Find lowest numbered live register. */
14443 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14444 if (df_regs_ever_live_p (first_reg)
a38d360d 14445 && (! call_used_regs[first_reg]
1db02437 14446 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14447 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14448 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14449 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14450 break;
14451
ee890fe2 14452#if TARGET_MACHO
93638d7a 14453 if (flag_pic
e3b5732b 14454 && crtl->uses_pic_offset_table
93638d7a 14455 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14456 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14457#endif
14458
9878760c
RK
14459 return first_reg;
14460}
14461
14462/* Similar, for FP regs. */
14463
14464int
863d938c 14465first_fp_reg_to_save (void)
9878760c
RK
14466{
14467 int first_reg;
14468
14469 /* Find lowest numbered live register. */
14470 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14471 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14472 break;
14473
14474 return first_reg;
14475}
00b960c7
AH
14476
14477/* Similar, for AltiVec regs. */
14478
14479static int
863d938c 14480first_altivec_reg_to_save (void)
00b960c7
AH
14481{
14482 int i;
14483
14484 /* Stack frame remains as is unless we are in AltiVec ABI. */
14485 if (! TARGET_ALTIVEC_ABI)
14486 return LAST_ALTIVEC_REGNO + 1;
14487
22fa69da 14488 /* On Darwin, the unwind routines are compiled without
982afe02 14489 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14490 altivec registers when necessary. */
e3b5732b 14491 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14492 && ! TARGET_ALTIVEC)
14493 return FIRST_ALTIVEC_REGNO + 20;
14494
00b960c7
AH
14495 /* Find lowest numbered live register. */
14496 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14497 if (df_regs_ever_live_p (i))
00b960c7
AH
14498 break;
14499
14500 return i;
14501}
14502
14503/* Return a 32-bit mask of the AltiVec registers we need to set in
14504 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14505 the 32-bit word is 0. */
14506
14507static unsigned int
863d938c 14508compute_vrsave_mask (void)
00b960c7
AH
14509{
14510 unsigned int i, mask = 0;
14511
22fa69da 14512 /* On Darwin, the unwind routines are compiled without
982afe02 14513 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14514 call-saved altivec registers when necessary. */
e3b5732b 14515 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14516 && ! TARGET_ALTIVEC)
14517 mask |= 0xFFF;
14518
00b960c7
AH
14519 /* First, find out if we use _any_ altivec registers. */
14520 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14521 if (df_regs_ever_live_p (i))
00b960c7
AH
14522 mask |= ALTIVEC_REG_BIT (i);
14523
14524 if (mask == 0)
14525 return mask;
14526
00b960c7
AH
14527 /* Next, remove the argument registers from the set. These must
14528 be in the VRSAVE mask set by the caller, so we don't need to add
14529 them in again. More importantly, the mask we compute here is
14530 used to generate CLOBBERs in the set_vrsave insn, and we do not
14531 wish the argument registers to die. */
38173d38 14532 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14533 mask &= ~ALTIVEC_REG_BIT (i);
14534
14535 /* Similarly, remove the return value from the set. */
14536 {
14537 bool yes = false;
14538 diddle_return_value (is_altivec_return_reg, &yes);
14539 if (yes)
14540 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14541 }
14542
14543 return mask;
14544}
14545
d62294f5 14546/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14547 size of prologues/epilogues by calling our own save/restore-the-world
14548 routines. */
d62294f5
FJ
14549
14550static void
f57fe068
AM
14551compute_save_world_info (rs6000_stack_t *info_ptr)
14552{
14553 info_ptr->world_save_p = 1;
14554 info_ptr->world_save_p
14555 = (WORLD_SAVE_P (info_ptr)
14556 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14557 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14558 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14559 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14560 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14561 && info_ptr->cr_save_p);
f676971a 14562
d62294f5
FJ
14563 /* This will not work in conjunction with sibcalls. Make sure there
14564 are none. (This check is expensive, but seldom executed.) */
f57fe068 14565 if (WORLD_SAVE_P (info_ptr))
f676971a 14566 {
d62294f5
FJ
14567 rtx insn;
14568 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14569 if ( GET_CODE (insn) == CALL_INSN
14570 && SIBLING_CALL_P (insn))
14571 {
14572 info_ptr->world_save_p = 0;
14573 break;
14574 }
d62294f5 14575 }
f676971a 14576
f57fe068 14577 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14578 {
14579 /* Even if we're not touching VRsave, make sure there's room on the
14580 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14581 will attempt to save it. */
d62294f5
FJ
14582 info_ptr->vrsave_size = 4;
14583
298ac1dd
AP
14584 /* If we are going to save the world, we need to save the link register too. */
14585 info_ptr->lr_save_p = 1;
14586
d62294f5
FJ
14587 /* "Save" the VRsave register too if we're saving the world. */
14588 if (info_ptr->vrsave_mask == 0)
c4ad648e 14589 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14590
14591 /* Because the Darwin register save/restore routines only handle
c4ad648e 14592 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14593 check. */
37409796
NS
14594 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14595 && (info_ptr->first_altivec_reg_save
14596 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14597 }
f676971a 14598 return;
d62294f5
FJ
14599}
14600
14601
00b960c7 14602static void
a2369ed3 14603is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14604{
14605 bool *yes = (bool *) xyes;
14606 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14607 *yes = true;
14608}
14609
4697a36c
MM
14610\f
14611/* Calculate the stack information for the current function. This is
14612 complicated by having two separate calling sequences, the AIX calling
14613 sequence and the V.4 calling sequence.
14614
592696dd 14615 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14616 32-bit 64-bit
4697a36c 14617 SP----> +---------------------------------------+
a260abc9 14618 | back chain to caller | 0 0
4697a36c 14619 +---------------------------------------+
a260abc9 14620 | saved CR | 4 8 (8-11)
4697a36c 14621 +---------------------------------------+
a260abc9 14622 | saved LR | 8 16
4697a36c 14623 +---------------------------------------+
a260abc9 14624 | reserved for compilers | 12 24
4697a36c 14625 +---------------------------------------+
a260abc9 14626 | reserved for binders | 16 32
4697a36c 14627 +---------------------------------------+
a260abc9 14628 | saved TOC pointer | 20 40
4697a36c 14629 +---------------------------------------+
a260abc9 14630 | Parameter save area (P) | 24 48
4697a36c 14631 +---------------------------------------+
a260abc9 14632 | Alloca space (A) | 24+P etc.
802a0058 14633 +---------------------------------------+
a7df97e6 14634 | Local variable space (L) | 24+P+A
4697a36c 14635 +---------------------------------------+
a7df97e6 14636 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14637 +---------------------------------------+
00b960c7
AH
14638 | Save area for AltiVec registers (W) | 24+P+A+L+X
14639 +---------------------------------------+
14640 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14641 +---------------------------------------+
14642 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14643 +---------------------------------------+
00b960c7
AH
14644 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14645 +---------------------------------------+
14646 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14647 +---------------------------------------+
14648 old SP->| back chain to caller's caller |
14649 +---------------------------------------+
14650
5376a30c
KR
14651 The required alignment for AIX configurations is two words (i.e., 8
14652 or 16 bytes).
14653
14654
4697a36c
MM
14655 V.4 stack frames look like:
14656
14657 SP----> +---------------------------------------+
14658 | back chain to caller | 0
14659 +---------------------------------------+
5eb387b8 14660 | caller's saved LR | 4
4697a36c
MM
14661 +---------------------------------------+
14662 | Parameter save area (P) | 8
14663 +---------------------------------------+
a7df97e6 14664 | Alloca space (A) | 8+P
f676971a 14665 +---------------------------------------+
a7df97e6 14666 | Varargs save area (V) | 8+P+A
f676971a 14667 +---------------------------------------+
a7df97e6 14668 | Local variable space (L) | 8+P+A+V
f676971a 14669 +---------------------------------------+
a7df97e6 14670 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14671 +---------------------------------------+
00b960c7
AH
14672 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14673 +---------------------------------------+
14674 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14675 +---------------------------------------+
14676 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14677 +---------------------------------------+
c4ad648e
AM
14678 | SPE: area for 64-bit GP registers |
14679 +---------------------------------------+
14680 | SPE alignment padding |
14681 +---------------------------------------+
00b960c7 14682 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14683 +---------------------------------------+
00b960c7 14684 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14685 +---------------------------------------+
00b960c7 14686 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14687 +---------------------------------------+
14688 old SP->| back chain to caller's caller |
14689 +---------------------------------------+
b6c9286a 14690
5376a30c
KR
14691 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14692 given. (But note below and in sysv4.h that we require only 8 and
14693 may round up the size of our stack frame anyways. The historical
14694 reason is early versions of powerpc-linux which didn't properly
14695 align the stack at program startup. A happy side-effect is that
14696 -mno-eabi libraries can be used with -meabi programs.)
14697
50d440bc 14698 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14699 the stack alignment requirements may differ. If -mno-eabi is not
14700 given, the required stack alignment is 8 bytes; if -mno-eabi is
14701 given, the required alignment is 16 bytes. (But see V.4 comment
14702 above.) */
4697a36c 14703
61b2fbe7
MM
14704#ifndef ABI_STACK_BOUNDARY
14705#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14706#endif
14707
d1d0c603 14708static rs6000_stack_t *
863d938c 14709rs6000_stack_info (void)
4697a36c 14710{
022123e6 14711 static rs6000_stack_t info;
4697a36c 14712 rs6000_stack_t *info_ptr = &info;
327e5343 14713 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14714 int ehrd_size;
64045029 14715 int save_align;
8070c91a 14716 int first_gp;
44688022 14717 HOST_WIDE_INT non_fixed_size;
4697a36c 14718
022123e6 14719 memset (&info, 0, sizeof (info));
4697a36c 14720
c19de7aa
AH
14721 if (TARGET_SPE)
14722 {
14723 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14724 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14725 cfun->machine->insn_chain_scanned_p
14726 = spe_func_has_64bit_regs_p () + 1;
14727 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14728 }
14729
a4f6c312 14730 /* Select which calling sequence. */
178274da 14731 info_ptr->abi = DEFAULT_ABI;
9878760c 14732
a4f6c312 14733 /* Calculate which registers need to be saved & save area size. */
4697a36c 14734 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14735 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14736 even if it currently looks like we won't. Reload may need it to
14737 get at a constant; if so, it will have already created a constant
14738 pool entry for it. */
2bfcf297 14739 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14740 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14741 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14742 && crtl->uses_const_pool
1db02437 14743 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14744 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14745 else
8070c91a
DJ
14746 first_gp = info_ptr->first_gp_reg_save;
14747
14748 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14749
a3170dc6
AH
14750 /* For the SPE, we have an additional upper 32-bits on each GPR.
14751 Ideally we should save the entire 64-bits only when the upper
14752 half is used in SIMD instructions. Since we only record
14753 registers live (not the size they are used in), this proves
14754 difficult because we'd have to traverse the instruction chain at
14755 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 14756 so we opt to save all the GPRs in 64-bits whenever even one register
 14757 gets used in 64-bits. Otherwise, all the registers in the frame
 14758 get saved in 32-bits.
a3170dc6 14759
c19de7aa 14760 So... when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14761 traditional GP save area will be empty. */
c19de7aa 14762 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14763 info_ptr->gp_size = 0;
14764
4697a36c
MM
14765 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14766 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14767
00b960c7
AH
14768 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14769 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14770 - info_ptr->first_altivec_reg_save);
14771
592696dd 14772 /* Does this function call anything? */
71f123ca
FS
14773 info_ptr->calls_p = (! current_function_is_leaf
14774 || cfun->machine->ra_needs_full_frame);
b6c9286a 14775
a4f6c312 14776 /* Determine if we need to save the link register. */
022123e6 14777 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14778 && crtl->profile
022123e6 14779 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14780#ifdef TARGET_RELOCATABLE
14781 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14782#endif
14783 || (info_ptr->first_fp_reg_save != 64
14784 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14785 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14786 || info_ptr->calls_p
14787 || rs6000_ra_ever_killed ())
4697a36c
MM
14788 {
14789 info_ptr->lr_save_p = 1;
1de43f85 14790 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14791 }
14792
9ebbca7d 14793 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14794 if (df_regs_ever_live_p (CR2_REGNO)
14795 || df_regs_ever_live_p (CR3_REGNO)
14796 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14797 {
14798 info_ptr->cr_save_p = 1;
178274da 14799 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14800 info_ptr->cr_size = reg_size;
14801 }
14802
83720594
RH
14803 /* If the current function calls __builtin_eh_return, then we need
14804 to allocate stack space for registers that will hold data for
14805 the exception handler. */
e3b5732b 14806 if (crtl->calls_eh_return)
83720594
RH
14807 {
14808 unsigned int i;
14809 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14810 continue;
a3170dc6
AH
14811
14812 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14813 ehrd_size = i * (TARGET_SPE_ABI
14814 && info_ptr->spe_64bit_regs_used != 0
14815 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14816 }
14817 else
14818 ehrd_size = 0;
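  /* ehrd_size is the space needed for the EH_RETURN_DATA registers,
     UNITS_PER_SPE_WORD bytes each when the SPE ABI is using 64-bit GPRs,
     UNITS_PER_WORD bytes each otherwise.  */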
14819
592696dd 14820 /* Determine various sizes. */
4697a36c
MM
14821 info_ptr->reg_size = reg_size;
14822 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14823 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14824 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14825 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14826 if (FRAME_GROWS_DOWNWARD)
14827 info_ptr->vars_size
5b667039
JJ
14828 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14829 + info_ptr->parm_size,
7d5175e1 14830 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14831 - (info_ptr->fixed_size + info_ptr->vars_size
14832 + info_ptr->parm_size);
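  /* i.e. pad vars_size so that fixed_size + vars_size + parm_size is a
     multiple of ABI_STACK_BOUNDARY (in bytes) when the frame grows
     downward.  */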
00b960c7 14833
c19de7aa 14834 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14835 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14836 else
14837 info_ptr->spe_gp_size = 0;
14838
4d774ff8
HP
14839 if (TARGET_ALTIVEC_ABI)
14840 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14841 else
4d774ff8
HP
14842 info_ptr->vrsave_mask = 0;
14843
14844 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14845 info_ptr->vrsave_size = 4;
14846 else
14847 info_ptr->vrsave_size = 0;
b6c9286a 14848
d62294f5
FJ
14849 compute_save_world_info (info_ptr);
14850
592696dd 14851 /* Calculate the offsets. */
178274da 14852 switch (DEFAULT_ABI)
4697a36c 14853 {
b6c9286a 14854 case ABI_NONE:
24d304eb 14855 default:
37409796 14856 gcc_unreachable ();
b6c9286a
MM
14857
14858 case ABI_AIX:
ee890fe2 14859 case ABI_DARWIN:
b6c9286a
MM
14860 info_ptr->fp_save_offset = - info_ptr->fp_size;
14861 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14862
14863 if (TARGET_ALTIVEC_ABI)
14864 {
14865 info_ptr->vrsave_save_offset
14866 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14867
982afe02 14868 /* Align stack so vector save area is on a quadword boundary.
9278121c 14869 The padding goes above the vectors. */
00b960c7
AH
14870 if (info_ptr->altivec_size != 0)
14871 info_ptr->altivec_padding_size
9278121c 14872 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14873 else
14874 info_ptr->altivec_padding_size = 0;
14875
14876 info_ptr->altivec_save_offset
14877 = info_ptr->vrsave_save_offset
14878 - info_ptr->altivec_padding_size
14879 - info_ptr->altivec_size;
9278121c
GK
14880 gcc_assert (info_ptr->altivec_size == 0
14881 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14882
14883 /* Adjust for AltiVec case. */
14884 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14885 }
14886 else
14887 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14888 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14889 info_ptr->lr_save_offset = 2*reg_size;
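      /* i.e. in the AIX/Darwin layout CR is saved one word and LR two words
	 above the incoming stack pointer, in the link area the caller
	 reserves.  */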
24d304eb
RK
14890 break;
14891
14892 case ABI_V4:
b6c9286a
MM
14893 info_ptr->fp_save_offset = - info_ptr->fp_size;
14894 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14895 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14896
c19de7aa 14897 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14898 {
14899 /* Align the stack so the SPE GPR save area is on a
14900 double-word boundary. */
f78c3290 14901 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14902 info_ptr->spe_padding_size
14903 = 8 - (-info_ptr->cr_save_offset % 8);
14904 else
14905 info_ptr->spe_padding_size = 0;
14906
14907 info_ptr->spe_gp_save_offset
14908 = info_ptr->cr_save_offset
14909 - info_ptr->spe_padding_size
14910 - info_ptr->spe_gp_size;
14911
14912 /* Adjust for SPE case. */
022123e6 14913 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14914 }
a3170dc6 14915 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14916 {
14917 info_ptr->vrsave_save_offset
14918 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14919
14920 /* Align stack so vector save area is on a quadword boundary. */
14921 if (info_ptr->altivec_size != 0)
14922 info_ptr->altivec_padding_size
14923 = 16 - (-info_ptr->vrsave_save_offset % 16);
14924 else
14925 info_ptr->altivec_padding_size = 0;
14926
14927 info_ptr->altivec_save_offset
14928 = info_ptr->vrsave_save_offset
14929 - info_ptr->altivec_padding_size
14930 - info_ptr->altivec_size;
14931
14932 /* Adjust for AltiVec case. */
022123e6 14933 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14934 }
14935 else
022123e6
AM
14936 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14937 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14938 info_ptr->lr_save_offset = reg_size;
14939 break;
4697a36c
MM
14940 }
14941
64045029 14942 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14943 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14944 + info_ptr->gp_size
14945 + info_ptr->altivec_size
14946 + info_ptr->altivec_padding_size
a3170dc6
AH
14947 + info_ptr->spe_gp_size
14948 + info_ptr->spe_padding_size
00b960c7
AH
14949 + ehrd_size
14950 + info_ptr->cr_size
022123e6 14951 + info_ptr->vrsave_size,
64045029 14952 save_align);
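  /* save_size is every register save area plus its padding, rounded up
     to save_align (16 with the AltiVec ABI or on Darwin, 8 otherwise).  */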
00b960c7 14953
44688022 14954 non_fixed_size = (info_ptr->vars_size
ff381587 14955 + info_ptr->parm_size
5b667039 14956 + info_ptr->save_size);
ff381587 14957
44688022
AM
14958 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14959 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14960
14961 /* Determine if we need to allocate any stack frame:
14962
a4f6c312
SS
14963 For AIX we need to push the stack if a frame pointer is needed
14964 (because the stack might be dynamically adjusted), if we are
14965 debugging, if we make calls, or if the sum of fp_save, gp_save,
14966 and local variables is more than the space needed to save all
14967 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14968 + 18*8 = 288 (GPR13 reserved).
ff381587 14969
a4f6c312
SS
14970 For V.4 we don't have the stack cushion that AIX uses, but assume
14971 that the debugger can handle stackless frames. */
ff381587
MM
14972
14973 if (info_ptr->calls_p)
14974 info_ptr->push_p = 1;
14975
178274da 14976 else if (DEFAULT_ABI == ABI_V4)
44688022 14977 info_ptr->push_p = non_fixed_size != 0;
ff381587 14978
178274da
AM
14979 else if (frame_pointer_needed)
14980 info_ptr->push_p = 1;
14981
14982 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14983 info_ptr->push_p = 1;
14984
ff381587 14985 else
44688022 14986 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14987
a4f6c312 14988 /* Zero offsets if we're not saving those registers. */
8dda1a21 14989 if (info_ptr->fp_size == 0)
4697a36c
MM
14990 info_ptr->fp_save_offset = 0;
14991
8dda1a21 14992 if (info_ptr->gp_size == 0)
4697a36c
MM
14993 info_ptr->gp_save_offset = 0;
14994
00b960c7
AH
14995 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14996 info_ptr->altivec_save_offset = 0;
14997
14998 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14999 info_ptr->vrsave_save_offset = 0;
15000
c19de7aa
AH
15001 if (! TARGET_SPE_ABI
15002 || info_ptr->spe_64bit_regs_used == 0
15003 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
15004 info_ptr->spe_gp_save_offset = 0;
15005
c81fc13e 15006 if (! info_ptr->lr_save_p)
4697a36c
MM
15007 info_ptr->lr_save_offset = 0;
15008
c81fc13e 15009 if (! info_ptr->cr_save_p)
4697a36c
MM
15010 info_ptr->cr_save_offset = 0;
15011
15012 return info_ptr;
15013}
15014
c19de7aa
AH
15015/* Return true if the current function uses any GPRs in 64-bit SIMD
15016 mode. */
15017
15018static bool
863d938c 15019spe_func_has_64bit_regs_p (void)
c19de7aa
AH
15020{
15021 rtx insns, insn;
15022
15023 /* Functions that save and restore all the call-saved registers will
15024 need to save/restore the registers in 64-bits. */
e3b5732b
JH
15025 if (crtl->calls_eh_return
15026 || cfun->calls_setjmp
15027 || crtl->has_nonlocal_goto)
c19de7aa
AH
15028 return true;
15029
15030 insns = get_insns ();
15031
15032 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
15033 {
15034 if (INSN_P (insn))
15035 {
15036 rtx i;
15037
b5a5beb9
AH
15038 /* FIXME: This should be implemented with attributes...
15039
15040 (set_attr "spe64" "true") ... then,
15041 if (get_spe64(insn)) return true;
15042
15043 It's the only reliable way to do the stuff below. */
15044
c19de7aa 15045 i = PATTERN (insn);
f82f556d
AH
15046 if (GET_CODE (i) == SET)
15047 {
15048 enum machine_mode mode = GET_MODE (SET_SRC (i));
15049
15050 if (SPE_VECTOR_MODE (mode))
15051 return true;
4f011e1e 15052 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15053 return true;
15054 }
c19de7aa
AH
15055 }
15056 }
15057
15058 return false;
15059}
15060
d1d0c603 15061static void
a2369ed3 15062debug_stack_info (rs6000_stack_t *info)
9878760c 15063{
d330fd93 15064 const char *abi_string;
24d304eb 15065
c81fc13e 15066 if (! info)
4697a36c
MM
15067 info = rs6000_stack_info ();
15068
15069 fprintf (stderr, "\nStack information for function %s:\n",
15070 ((current_function_decl && DECL_NAME (current_function_decl))
15071 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15072 : "<unknown>"));
15073
24d304eb
RK
15074 switch (info->abi)
15075 {
b6c9286a
MM
15076 default: abi_string = "Unknown"; break;
15077 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15078 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15079 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15080 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15081 }
15082
15083 fprintf (stderr, "\tABI = %5s\n", abi_string);
15084
00b960c7
AH
15085 if (TARGET_ALTIVEC_ABI)
15086 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15087
a3170dc6
AH
15088 if (TARGET_SPE_ABI)
15089 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15090
4697a36c
MM
15091 if (info->first_gp_reg_save != 32)
15092 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15093
15094 if (info->first_fp_reg_save != 64)
15095 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15096
00b960c7
AH
15097 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15098 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15099 info->first_altivec_reg_save);
15100
4697a36c
MM
15101 if (info->lr_save_p)
15102 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15103
4697a36c
MM
15104 if (info->cr_save_p)
15105 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15106
00b960c7
AH
15107 if (info->vrsave_mask)
15108 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15109
4697a36c
MM
15110 if (info->push_p)
15111 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15112
15113 if (info->calls_p)
15114 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15115
4697a36c
MM
15116 if (info->gp_save_offset)
15117 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15118
15119 if (info->fp_save_offset)
15120 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15121
00b960c7
AH
15122 if (info->altivec_save_offset)
15123 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15124 info->altivec_save_offset);
15125
a3170dc6
AH
15126 if (info->spe_gp_save_offset)
15127 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15128 info->spe_gp_save_offset);
15129
00b960c7
AH
15130 if (info->vrsave_save_offset)
15131 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15132 info->vrsave_save_offset);
15133
4697a36c
MM
15134 if (info->lr_save_offset)
15135 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15136
15137 if (info->cr_save_offset)
15138 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15139
15140 if (info->varargs_save_offset)
15141 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15142
15143 if (info->total_size)
d1d0c603
JJ
15144 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15145 info->total_size);
4697a36c 15146
4697a36c 15147 if (info->vars_size)
d1d0c603
JJ
15148 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15149 info->vars_size);
4697a36c
MM
15150
15151 if (info->parm_size)
15152 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15153
15154 if (info->fixed_size)
15155 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15156
15157 if (info->gp_size)
15158 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15159
a3170dc6
AH
15160 if (info->spe_gp_size)
15161 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15162
4697a36c
MM
15163 if (info->fp_size)
15164 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15165
00b960c7
AH
15166 if (info->altivec_size)
15167 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15168
15169 if (info->vrsave_size)
15170 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15171
15172 if (info->altivec_padding_size)
15173 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15174 info->altivec_padding_size);
15175
a3170dc6
AH
15176 if (info->spe_padding_size)
15177 fprintf (stderr, "\tspe_padding_size = %5d\n",
15178 info->spe_padding_size);
15179
4697a36c
MM
15180 if (info->cr_size)
15181 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15182
15183 if (info->save_size)
15184 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15185
15186 if (info->reg_size != 4)
15187 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15188
15189 fprintf (stderr, "\n");
9878760c 15190}
71f123ca
FS
15191
15192rtx
a2369ed3 15193rs6000_return_addr (int count, rtx frame)
71f123ca 15194{
a4f6c312
SS
15195 /* Currently we don't optimize very well between the prologue and body
15196 code, and for PIC the generated code can be quite bad, so
15197 don't try to be too clever here. */
f1384257 15198 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15199 {
15200 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15201
15202 return
15203 gen_rtx_MEM
15204 (Pmode,
15205 memory_address
15206 (Pmode,
15207 plus_constant (copy_to_reg
15208 (gen_rtx_MEM (Pmode,
15209 memory_address (Pmode, frame))),
15210 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15211 }
15212
8c29550d 15213 cfun->machine->ra_need_lr = 1;
1de43f85 15214 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15215}
15216
5e1bf043
DJ
15217/* Say whether a function is a candidate for sibcall handling or not.
15218 We do not allow indirect calls to be optimized into sibling calls.
15219 Also, we can't do it if there are any vector parameters; there's
15220 nowhere to put the VRsave code so that it works; note that functions with
15221 vector parameters are required to have a prototype, so the argument
15222 type info must be available here. (The tail recursion case can work
15223 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15224static bool
a2369ed3 15225rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15226{
15227 tree type;
4977bab6 15228 if (decl)
5e1bf043
DJ
15229 {
15230 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15231 {
4977bab6 15232 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15233 type; type = TREE_CHAIN (type))
15234 {
c15b529f 15235 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15236 return false;
5e1bf043 15237 }
c4ad648e 15238 }
5e1bf043 15239 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15240 || ((*targetm.binds_local_p) (decl)
15241 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15242 {
4977bab6 15243 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15244
15245 if (!lookup_attribute ("longcall", attr_list)
15246 || lookup_attribute ("shortcall", attr_list))
4977bab6 15247 return true;
2bcc50d0 15248 }
5e1bf043 15249 }
4977bab6 15250 return false;
5e1bf043
DJ
15251}
15252
e7e64a25
AS
15253/* Return NULL if INSN is valid within a low-overhead loop;
15254 otherwise return a string explaining why doloop cannot be applied.
9419649c
DE
15255 PowerPC uses the COUNT register for branch on table instructions. */
15256
e7e64a25 15257static const char *
3101faab 15258rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15259{
15260 if (CALL_P (insn))
e7e64a25 15261 return "Function call in the loop.";
9419649c
DE
15262
15263 if (JUMP_P (insn)
15264 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15265 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15266 return "Computed branch in the loop.";
9419649c 15267
e7e64a25 15268 return NULL;
9419649c
DE
15269}
15270
71f123ca 15271static int
863d938c 15272rs6000_ra_ever_killed (void)
71f123ca
FS
15273{
15274 rtx top;
5e1bf043
DJ
15275 rtx reg;
15276 rtx insn;
71f123ca 15277
3c072c6b 15278 if (cfun->is_thunk)
71f123ca 15279 return 0;
eb0424da 15280
36f7e964
AH
15281 /* regs_ever_live has LR marked as used if any sibcalls are present,
15282 but this should not force saving and restoring in the
15283 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15284 clobbers LR, so that is inappropriate. */
36f7e964 15285
5e1bf043
DJ
15286 /* Also, the prologue can generate a store into LR that
15287 doesn't really count, like this:
36f7e964 15288
5e1bf043
DJ
15289 move LR->R0
15290 bcl to set PIC register
15291 move LR->R31
15292 move R0->LR
36f7e964
AH
15293
15294 When we're called from the epilogue, we need to avoid counting
15295 this as a store. */
f676971a 15296
71f123ca
FS
15297 push_topmost_sequence ();
15298 top = get_insns ();
15299 pop_topmost_sequence ();
1de43f85 15300 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15301
5e1bf043
DJ
15302 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15303 {
15304 if (INSN_P (insn))
15305 {
022123e6
AM
15306 if (CALL_P (insn))
15307 {
15308 if (!SIBLING_CALL_P (insn))
15309 return 1;
15310 }
1de43f85 15311 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15312 return 1;
36f7e964
AH
15313 else if (set_of (reg, insn) != NULL_RTX
15314 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15315 return 1;
15316 }
15317 }
15318 return 0;
71f123ca 15319}
4697a36c 15320\f
9ebbca7d 15321/* Emit instructions needed to load the TOC register.
c7ca610e 15322 This is only needed when TARGET_TOC and TARGET_MINIMAL_TOC are set and
9ebbca7d 15323 there is a constant pool, or for SVR4 -fpic. */
c7ca610e
RK
15324
15325void
a2369ed3 15326rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15327{
6fb5fa3c 15328 rtx dest;
1db02437 15329 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15330
7f970b70 15331 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15332 {
7f970b70 15333 char buf[30];
e65a3857 15334 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15335
15336 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15337 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15338 if (flag_pic == 2)
15339 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15340 else
15341 got = rs6000_got_sym ();
15342 tmp1 = tmp2 = dest;
15343 if (!fromprolog)
15344 {
15345 tmp1 = gen_reg_rtx (Pmode);
15346 tmp2 = gen_reg_rtx (Pmode);
15347 }
6fb5fa3c
DB
15348 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15349 emit_move_insn (tmp1,
1de43f85 15350 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15351 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15352 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15353 }
15354 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15355 {
6fb5fa3c 15356 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15357 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15358 }
15359 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15360 {
15361 char buf[30];
20b71b17
AM
15362 rtx temp0 = (fromprolog
15363 ? gen_rtx_REG (Pmode, 0)
15364 : gen_reg_rtx (Pmode));
20b71b17 15365
20b71b17
AM
15366 if (fromprolog)
15367 {
ccbca5e4 15368 rtx symF, symL;
38c1f2d7 15369
20b71b17
AM
15370 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15371 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15372
20b71b17
AM
15373 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15374 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15375
6fb5fa3c
DB
15376 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15377 emit_move_insn (dest,
1de43f85 15378 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15379 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15380 }
15381 else
20b71b17
AM
15382 {
15383 rtx tocsym;
20b71b17
AM
15384
15385 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15386 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15387 emit_move_insn (dest,
1de43f85 15388 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15389 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15390 }
6fb5fa3c 15391 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15392 }
20b71b17
AM
15393 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15394 {
15395 /* This is for AIX code running in non-PIC ELF32. */
15396 char buf[30];
15397 rtx realsym;
15398 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15399 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15400
6fb5fa3c
DB
15401 emit_insn (gen_elf_high (dest, realsym));
15402 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15403 }
37409796 15404 else
9ebbca7d 15405 {
37409796 15406 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15407
9ebbca7d 15408 if (TARGET_32BIT)
6fb5fa3c 15409 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15410 else
6fb5fa3c 15411 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15412 }
15413}
15414
d1d0c603
JJ
15415/* Emit instructions to restore the link register after determining where
15416 its value has been stored. */
15417
15418void
15419rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15420{
15421 rs6000_stack_t *info = rs6000_stack_info ();
15422 rtx operands[2];
15423
15424 operands[0] = source;
15425 operands[1] = scratch;
15426
15427 if (info->lr_save_p)
15428 {
15429 rtx frame_rtx = stack_pointer_rtx;
15430 HOST_WIDE_INT sp_offset = 0;
15431 rtx tmp;
15432
15433 if (frame_pointer_needed
e3b5732b 15434 || cfun->calls_alloca
d1d0c603
JJ
15435 || info->total_size > 32767)
15436 {
0be76840 15437 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15438 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15439 frame_rtx = operands[1];
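	  /* operands[1] now holds the back-chain word loaded from 0(sp),
	     which we use as the frame base when sp may have moved or the
	     frame is too large for a 16-bit displacement.  */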
15440 }
15441 else if (info->push_p)
15442 sp_offset = info->total_size;
15443
15444 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15445 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15446 emit_move_insn (tmp, operands[0]);
15447 }
15448 else
1de43f85 15449 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15450}
15451
4862826d 15452static GTY(()) alias_set_type set = -1;
f103e34d 15453
4862826d 15454alias_set_type
863d938c 15455get_TOC_alias_set (void)
9ebbca7d 15456{
f103e34d
GK
15457 if (set == -1)
15458 set = new_alias_set ();
15459 return set;
f676971a 15460}
9ebbca7d 15461
c1207243 15462/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15463 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15464 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15465#if TARGET_ELF
3c9eb5f4 15466static int
f676971a 15467uses_TOC (void)
9ebbca7d 15468{
c4501e62 15469 rtx insn;
38c1f2d7 15470
c4501e62
JJ
15471 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15472 if (INSN_P (insn))
15473 {
15474 rtx pat = PATTERN (insn);
15475 int i;
9ebbca7d 15476
f676971a 15477 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15478 for (i = 0; i < XVECLEN (pat, 0); i++)
15479 {
15480 rtx sub = XVECEXP (pat, 0, i);
15481 if (GET_CODE (sub) == USE)
15482 {
15483 sub = XEXP (sub, 0);
15484 if (GET_CODE (sub) == UNSPEC
15485 && XINT (sub, 1) == UNSPEC_TOC)
15486 return 1;
15487 }
15488 }
15489 }
15490 return 0;
9ebbca7d 15491}
c954844a 15492#endif
38c1f2d7 15493
9ebbca7d 15494rtx
f676971a 15495create_TOC_reference (rtx symbol)
9ebbca7d 15496{
b3a13419 15497 if (!can_create_pseudo_p ())
6fb5fa3c 15498 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15499 return gen_rtx_PLUS (Pmode,
a8a05998 15500 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15501 gen_rtx_CONST (Pmode,
2e4316da 15502 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15503}
38c1f2d7 15504
fc4767bb
JJ
15505/* If _Unwind_* has been called from within the same module,
15506 the TOC register is not guaranteed to be saved to 40(1) on function
15507 entry. Save it there in that case. */
c7ca610e 15508
9ebbca7d 15509void
863d938c 15510rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15511{
15512 rtx mem;
15513 rtx stack_top = gen_reg_rtx (Pmode);
15514 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15515 rtx opcode = gen_reg_rtx (SImode);
15516 rtx tocompare = gen_reg_rtx (SImode);
15517 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15518
8308679f 15519 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15520 emit_move_insn (stack_top, mem);
15521
8308679f
DE
15522 mem = gen_frame_mem (Pmode,
15523 gen_rtx_PLUS (Pmode, stack_top,
15524 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15525 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15526 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15527 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15528 : 0xE8410028, SImode));
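  /* 0x80410014 is "lwz r2,20(r1)" and 0xE8410028 is "ld r2,40(r1)" --
     the TOC-restore instruction a caller normally places after the call.  */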
9ebbca7d 15529
fc4767bb 15530 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15531 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15532 no_toc_save_needed);
9ebbca7d 15533
8308679f
DE
15534 mem = gen_frame_mem (Pmode,
15535 gen_rtx_PLUS (Pmode, stack_top,
15536 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15537 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15538 emit_label (no_toc_save_needed);
9ebbca7d 15539}
38c1f2d7 15540\f
0be76840
DE
15541/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15542 and the change to the stack pointer. */
ba4828e0 15543
9ebbca7d 15544static void
863d938c 15545rs6000_emit_stack_tie (void)
9ebbca7d 15546{
0be76840
DE
15547 rtx mem = gen_frame_mem (BLKmode,
15548 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15549
9ebbca7d
GK
15550 emit_insn (gen_stack_tie (mem));
15551}
38c1f2d7 15552
9ebbca7d
GK
15553/* Emit the correct code for allocating stack space, as insns.
15554 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15555 If COPY_R11, make sure a copy of the old frame is left in r11,
15556 in preference to r12 if COPY_R12.
9ebbca7d
GK
15557 The generated code may use hard register 0 as a temporary. */
15558
15559static void
f78c3290 15560rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15561{
9ebbca7d
GK
15562 rtx insn;
15563 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15564 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1 15565 rtx todec = gen_int_mode (-size, Pmode);
d4bacef0 15566 rtx par, set, mem;
61168ff1
RS
15567
15568 if (INTVAL (todec) != -size)
15569 {
d4ee4d25 15570 warning (0, "stack frame too large");
61168ff1
RS
15571 emit_insn (gen_trap ());
15572 return;
15573 }
a157febd 15574
e3b5732b 15575 if (crtl->limit_stack)
a157febd
GK
15576 {
15577 if (REG_P (stack_limit_rtx)
f676971a 15578 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15579 && REGNO (stack_limit_rtx) <= 31)
15580 {
5b71a4e7 15581 emit_insn (TARGET_32BIT
9ebbca7d
GK
15582 ? gen_addsi3 (tmp_reg,
15583 stack_limit_rtx,
15584 GEN_INT (size))
15585 : gen_adddi3 (tmp_reg,
15586 stack_limit_rtx,
15587 GEN_INT (size)));
5b71a4e7 15588
9ebbca7d
GK
15589 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15590 const0_rtx));
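	  /* tmp_reg = stack_limit + size, so trapping when sp < tmp_reg
	     catches the case where sp - size would drop below the limit.  */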
a157febd
GK
15591 }
15592 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15593 && TARGET_32BIT
f607bc57 15594 && DEFAULT_ABI == ABI_V4)
a157febd 15595 {
9ebbca7d 15596 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15597 gen_rtx_PLUS (Pmode,
15598 stack_limit_rtx,
9ebbca7d 15599 GEN_INT (size)));
5b71a4e7 15600
9ebbca7d
GK
15601 emit_insn (gen_elf_high (tmp_reg, toload));
15602 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15603 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15604 const0_rtx));
a157febd
GK
15605 }
15606 else
d4ee4d25 15607 warning (0, "stack limit expression is not supported");
a157febd
GK
15608 }
15609
d4bacef0 15610 if (copy_r12 || copy_r11)
f78c3290
NF
15611 emit_move_insn (copy_r11
15612 ? gen_rtx_REG (Pmode, 11)
15613 : gen_rtx_REG (Pmode, 12),
15614 stack_reg);
9ebbca7d 15615
d4bacef0 15616 if (size > 32767)
38c1f2d7 15617 {
d4bacef0
BE
15618 /* Need a note here so that try_split doesn't get confused. */
15619 if (get_last_insn () == NULL_RTX)
15620 emit_note (NOTE_INSN_DELETED);
15621 insn = emit_move_insn (tmp_reg, todec);
15622 try_split (PATTERN (insn), insn, 0);
15623 todec = tmp_reg;
9ebbca7d 15624 }
d4bacef0
BE
15625
15626 insn = emit_insn (TARGET_32BIT
15627 ? gen_movsi_update_stack (stack_reg, stack_reg,
15628 todec, stack_reg)
15629 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15630 todec, stack_reg));
15631 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15632 it now and set the alias set/attributes. The above gen_*_update
15633 calls will generate a PARALLEL with the MEM set being the first
15634 operation. */
15635 par = PATTERN (insn);
15636 gcc_assert (GET_CODE (par) == PARALLEL);
15637 set = XVECEXP (par, 0, 0);
15638 gcc_assert (GET_CODE (set) == SET);
15639 mem = SET_DEST (set);
15640 gcc_assert (MEM_P (mem));
15641 MEM_NOTRAP_P (mem) = 1;
15642 set_mem_alias_set (mem, get_frame_alias_set ());
f676971a 15643
9ebbca7d 15644 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15645 REG_NOTES (insn) =
9ebbca7d 15646 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15647 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15648 gen_rtx_PLUS (Pmode, stack_reg,
15649 GEN_INT (-size))),
15650 REG_NOTES (insn));
15651}
15652
a4f6c312
SS
15653/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15654 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15655 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15656 deduce these equivalences by itself so it wasn't necessary to hold
15657 its hand so much. */
9ebbca7d
GK
15658
15659static void
f676971a 15660rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15661 rtx reg2, rtx rreg)
9ebbca7d
GK
15662{
15663 rtx real, temp;
15664
e56c4463
JL
15665 /* copy_rtx will not make unique copies of registers, so we need to
15666 ensure we don't have unwanted sharing here. */
15667 if (reg == reg2)
15668 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15669
15670 if (reg == rreg)
15671 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15672
9ebbca7d
GK
15673 real = copy_rtx (PATTERN (insn));
15674
89e7058f
AH
15675 if (reg2 != NULL_RTX)
15676 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15677
15678 real = replace_rtx (real, reg,
9ebbca7d
GK
15679 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15680 STACK_POINTER_REGNUM),
15681 GEN_INT (val)));
f676971a 15682
9ebbca7d
GK
15683 /* We expect that 'real' is either a SET or a PARALLEL containing
15684 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15685 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15686
15687 if (GET_CODE (real) == SET)
15688 {
15689 rtx set = real;
f676971a 15690
9ebbca7d
GK
15691 temp = simplify_rtx (SET_SRC (set));
15692 if (temp)
15693 SET_SRC (set) = temp;
15694 temp = simplify_rtx (SET_DEST (set));
15695 if (temp)
15696 SET_DEST (set) = temp;
15697 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15698 {
9ebbca7d
GK
15699 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15700 if (temp)
15701 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15702 }
38c1f2d7 15703 }
37409796 15704 else
9ebbca7d
GK
15705 {
15706 int i;
37409796
NS
15707
15708 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15709 for (i = 0; i < XVECLEN (real, 0); i++)
15710 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15711 {
15712 rtx set = XVECEXP (real, 0, i);
f676971a 15713
9ebbca7d
GK
15714 temp = simplify_rtx (SET_SRC (set));
15715 if (temp)
15716 SET_SRC (set) = temp;
15717 temp = simplify_rtx (SET_DEST (set));
15718 if (temp)
15719 SET_DEST (set) = temp;
15720 if (GET_CODE (SET_DEST (set)) == MEM)
15721 {
15722 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15723 if (temp)
15724 XEXP (SET_DEST (set), 0) = temp;
15725 }
15726 RTX_FRAME_RELATED_P (set) = 1;
15727 }
15728 }
c19de7aa 15729
9ebbca7d
GK
15730 RTX_FRAME_RELATED_P (insn) = 1;
15731 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15732 real,
15733 REG_NOTES (insn));
38c1f2d7
MM
15734}
15735
00b960c7
AH
15736/* Returns an insn that has a vrsave set operation with the
15737 appropriate CLOBBERs. */
15738
15739static rtx
a2369ed3 15740generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15741{
15742 int nclobs, i;
15743 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15744 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15745
a004eb82
AH
15746 clobs[0]
15747 = gen_rtx_SET (VOIDmode,
15748 vrsave,
15749 gen_rtx_UNSPEC_VOLATILE (SImode,
15750 gen_rtvec (2, reg, vrsave),
3aca4bff 15751 UNSPECV_SET_VRSAVE));
00b960c7
AH
15752
15753 nclobs = 1;
15754
9aa86737
AH
15755 /* We need to clobber the registers in the mask so the scheduler
15756 does not move sets to VRSAVE before sets of AltiVec registers.
15757
15758 However, if the function receives nonlocal gotos, reload will set
15759 all call saved registers live. We will end up with:
15760
15761 (set (reg 999) (mem))
15762 (parallel [ (set (reg vrsave) (unspec blah))
15763 (clobber (reg 999))])
15764
15765 The clobber will cause the store into reg 999 to be dead, and
15766 flow will attempt to delete an epilogue insn. In this case, we
15767 need an unspec use/set of the register. */
00b960c7
AH
15768
15769 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15770 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15771 {
15772 if (!epiloguep || call_used_regs [i])
15773 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15774 gen_rtx_REG (V4SImode, i));
15775 else
15776 {
15777 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15778
15779 clobs[nclobs++]
a004eb82
AH
15780 = gen_rtx_SET (VOIDmode,
15781 reg,
15782 gen_rtx_UNSPEC (V4SImode,
15783 gen_rtvec (1, reg), 27));
9aa86737
AH
15784 }
15785 }
00b960c7
AH
15786
15787 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15788
15789 for (i = 0; i < nclobs; ++i)
15790 XVECEXP (insn, 0, i) = clobs[i];
15791
15792 return insn;
15793}
15794
89e7058f
AH
15795/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15796 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15797
15798static void
f676971a 15799emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15800 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15801{
15802 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15803 rtx replacea, replaceb;
15804
15805 int_rtx = GEN_INT (offset);
15806
15807 /* Some cases that need register indexed addressing. */
15808 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15809 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15810 || (TARGET_SPE_ABI
15811 && SPE_VECTOR_MODE (mode)
15812 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15813 {
15814 /* Whomever calls us must make sure r11 is available in the
c4ad648e 15815 flow path of instructions in the prologue. */
89e7058f
AH
15816 offset_rtx = gen_rtx_REG (Pmode, 11);
15817 emit_move_insn (offset_rtx, int_rtx);
15818
15819 replacea = offset_rtx;
15820 replaceb = int_rtx;
15821 }
15822 else
15823 {
15824 offset_rtx = int_rtx;
15825 replacea = NULL_RTX;
15826 replaceb = NULL_RTX;
15827 }
15828
15829 reg = gen_rtx_REG (mode, regno);
15830 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15831 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15832
15833 insn = emit_move_insn (mem, reg);
15834
15835 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15836}
15837
a3170dc6
AH
15838/* Emit an offset memory reference suitable for a frame store, while
15839 converting to a valid addressing mode. */
15840
15841static rtx
a2369ed3 15842gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15843{
15844 rtx int_rtx, offset_rtx;
15845
15846 int_rtx = GEN_INT (offset);
15847
4d4cbc0e 15848 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15849 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15850 {
15851 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15852 emit_move_insn (offset_rtx, int_rtx);
15853 }
15854 else
15855 offset_rtx = int_rtx;
15856
0be76840 15857 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15858}
15859
6d0a8091
DJ
15860/* Look for user-defined global regs. We should not save and restore these,
15861 and cannot use stmw/lmw if there are any in the range being saved. */
15862
15863static bool
f78c3290 15864no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15865{
15866 int i;
e1ece9f1 15867 for (i = first; i < gpr ? 32 : 64 ; i++)
f78c3290 15868 if (global_regs[i])
6d0a8091
DJ
15869 return false;
15870 return true;
15871}
15872
699c914a
MS
15873#ifndef TARGET_FIX_AND_CONTINUE
15874#define TARGET_FIX_AND_CONTINUE 0
15875#endif
15876
f78c3290
NF
15877/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15878#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15879#define LAST_SAVRES_REGISTER 31
15880#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15881
15882static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15883
15884/* Return the symbol for an out-of-line register save/restore routine.
15885 We are saving/restoring GPRs if GPR is true. */
15886
15887static rtx
15888rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15889{
15890 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15891 rtx sym;
15892 int select = ((savep ? 1 : 0) << 2
15893 | (gpr
15894 /* On the SPE, we never have any FPRs, but we do have
15895 32/64-bit versions of the routines. */
15896 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15897 : 0) << 1
15898 | (exitp ? 1: 0));
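  /* select packs three flags into an index: bit 2 is save vs. restore,
     bit 1 the SPE 64-bit-GPR flavour, bit 0 the "exit" variant -- the
     eight combinations index the last dimension of savres_routine_syms.  */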
15899
15900 /* Don't generate bogus routine names. */
15901 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15902
15903 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15904
15905 if (sym == NULL)
15906 {
15907 char name[30];
15908 const char *action;
15909 const char *regkind;
15910 const char *exit_suffix;
15911
15912 action = savep ? "save" : "rest";
15913
15914 /* SPE has slightly different names for its routines depending on
15915 whether we are saving 32-bit or 64-bit registers. */
15916 if (TARGET_SPE_ABI)
15917 {
15918 /* No floating point saves on the SPE. */
15919 gcc_assert (gpr);
15920
15921 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15922 }
15923 else
15924 regkind = gpr ? "gpr" : "fpr";
15925
15926 exit_suffix = exitp ? "_x" : "";
15927
15928 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
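      /* e.g. restoring GPRs from r28 upward gives "_restgpr_28", or
	 "_restgpr_28_x" for the exit variant.  */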
15929
15930 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15931 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15932 }
15933
15934 return sym;
15935}
15936
15937/* Emit a sequence of insns, including a stack tie if needed, for
15938 resetting the stack pointer. If SAVRES is true, then don't reset the
15939 stack pointer, but move the base of the frame into r11 for use by
15940 out-of-line register restore routines. */
15941
15942static void
15943rs6000_emit_stack_reset (rs6000_stack_t *info,
15944 rtx sp_reg_rtx, rtx frame_reg_rtx,
15945 int sp_offset, bool savres)
15946{
15947 /* This blockage is needed so that sched doesn't decide to move
15948 the sp change before the register restores. */
15949 if (frame_reg_rtx != sp_reg_rtx
15950 || (TARGET_SPE_ABI
15951 && info->spe_64bit_regs_used != 0
15952 && info->first_gp_reg_save != 32))
15953 rs6000_emit_stack_tie ();
15954
15955 if (frame_reg_rtx != sp_reg_rtx)
15956 {
f78c3290
NF
15957 if (sp_offset != 0)
15958 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15959 GEN_INT (sp_offset)));
15960 else if (!savres)
15961 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15962 }
15963 else if (sp_offset != 0)
15964 {
15965 /* If we are restoring registers out-of-line, we will be using the
15966 "exit" variants of the restore routines, which will reset the
15967 stack for us. But we do need to point r11 into the right place
15968 for those routines. */
15969 rtx dest_reg = (savres
15970 ? gen_rtx_REG (Pmode, 11)
15971 : sp_reg_rtx);
15972
15973 emit_insn (TARGET_32BIT
15974 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15975 GEN_INT (sp_offset))
15976 : gen_adddi3 (dest_reg, sp_reg_rtx,
15977 GEN_INT (sp_offset)));
15978 }
15979}
15980
15981/* Construct a parallel rtx describing the effect of a call to an
15982 out-of-line register save/restore routine. */
15983
15984static rtx
15985rs6000_make_savres_rtx (rs6000_stack_t *info,
15986 rtx frame_reg_rtx, int save_area_offset,
15987 enum machine_mode reg_mode,
15988 bool savep, bool gpr, bool exitp)
15989{
15990 int i;
15991 int offset, start_reg, end_reg, n_regs;
15992 int reg_size = GET_MODE_SIZE (reg_mode);
15993 rtx sym;
15994 rtvec p;
15995
15996 offset = 0;
15997 start_reg = (gpr
15998 ? info->first_gp_reg_save
15999 : info->first_fp_reg_save);
16000 end_reg = gpr ? 32 : 64;
16001 n_regs = end_reg - start_reg;
16002 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
16003
16004 /* If we're saving registers, then we should never say we're exiting. */
16005 gcc_assert ((savep && !exitp) || !savep);
16006
16007 if (exitp)
16008 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
16009
16010 RTVEC_ELT (p, offset++)
16011 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
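  /* The out-of-line routine is reached with a branch-and-link, so LR
     (hard register 65) is clobbered.  */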
16012
16013 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
16014 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
16015 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
16016
16017 for (i = 0; i < end_reg - start_reg; i++)
16018 {
16019 rtx addr, reg, mem;
16020 reg = gen_rtx_REG (reg_mode, start_reg + i);
16021 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16022 GEN_INT (save_area_offset + reg_size*i));
16023 mem = gen_frame_mem (reg_mode, addr);
16024
16025 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
16026 savep ? mem : reg,
16027 savep ? reg : mem);
16028 }
16029
16030 return gen_rtx_PARALLEL (VOIDmode, p);
16031}
16032
52ff33d0
NF
16033/* Determine whether the gp REG is really used. */
16034
16035static bool
16036rs6000_reg_live_or_pic_offset_p (int reg)
16037{
6fb5fa3c 16038 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
16039 && (!call_used_regs[reg]
16040 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16041 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16042 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16043 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16044 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16045}
16046
f78c3290
NF
16047enum {
16048 SAVRES_MULTIPLE = 0x1,
16049 SAVRES_INLINE_FPRS = 0x2,
16050 SAVRES_INLINE_GPRS = 0x4
16051};
16052
16053/* Determine the strategy for saving/restoring registers. */
16054
16055static int
16056rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16057 int using_static_chain_p, int sibcall)
16058{
16059 bool using_multiple_p;
16060 bool common;
16061 bool savres_fprs_inline;
16062 bool savres_gprs_inline;
16063 bool noclobber_global_gprs
16064 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16065
16066 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16067 && (!TARGET_SPE_ABI
16068 || info->spe_64bit_regs_used == 0)
16069 && info->first_gp_reg_save < 31
16070 && noclobber_global_gprs);
16071 /* Don't bother to try to save things out-of-line if r11 is occupied
16072 by the static chain. It would require too much fiddling and the
16073 static chain is rarely used anyway. */
16074 common = (using_static_chain_p
16075 || sibcall
16076 || crtl->calls_eh_return
16077 || !info->lr_save_p
16078 || cfun->machine->ra_need_lr
16079 || info->total_size > 32767);
16080 savres_fprs_inline = (common
16081 || info->first_fp_reg_save == 64
16082 || !no_global_regs_above (info->first_fp_reg_save,
16083 /*gpr=*/false)
16084 || FP_SAVE_INLINE (info->first_fp_reg_save));
16085 savres_gprs_inline = (common
16086 /* Saving CR interferes with the exit routines
16087 used on the SPE, so just punt here. */
16088 || (!savep
16089 && TARGET_SPE_ABI
16090 && info->spe_64bit_regs_used != 0
16091 && info->cr_save_p != 0)
16092 || info->first_gp_reg_save == 32
16093 || !noclobber_global_gprs
16094 || GP_SAVE_INLINE (info->first_gp_reg_save));
16095
16096 if (savep)
16097 /* If we are going to use store multiple, then don't even bother
16098 with the out-of-line routines, since the store-multiple instruction
16099 will always be smaller. */
16100 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16101 else
16102 {
16103 /* The situation is more complicated with load multiple. We'd
16104 prefer to use the out-of-line routines for restores, since the
16105 "exit" out-of-line routines can handle the restore of LR and
16106 the frame teardown. But we can only use the out-of-line
16107 routines if we know that we've used store multiple or
16108 out-of-line routines in the prologue, i.e. if we've saved all
16109 the registers from first_gp_reg_save. Otherwise, we risk
16110 loading garbage from the stack. Furthermore, we can only use
16111 the "exit" out-of-line gpr restore if we haven't saved any
16112 fprs. */
16113 bool saved_all = !savres_gprs_inline || using_multiple_p;
16114
16115 if (saved_all && info->first_fp_reg_save != 64)
16116 /* We can't use the exit routine; use load multiple if it's
16117 available. */
16118 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16119 }
16120
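  /* Pack the three answers into the SAVRES_* bits defined above.  */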
16121 return (using_multiple_p
16122 | (savres_fprs_inline << 1)
16123 | (savres_gprs_inline << 2));
16124}
16125
9ebbca7d
GK
16126/* Emit function prologue as insns. */
16127
9878760c 16128void
863d938c 16129rs6000_emit_prologue (void)
9878760c 16130{
4697a36c 16131 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16132 enum machine_mode reg_mode = Pmode;
327e5343 16133 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16134 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16135 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16136 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16137 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16138 rtx insn;
f78c3290 16139 int strategy;
9ebbca7d 16140 int saving_FPRs_inline;
f78c3290 16141 int saving_GPRs_inline;
9ebbca7d 16142 int using_store_multiple;
f78c3290
NF
16143 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16144 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
e1ece9f1 16145 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16146 HOST_WIDE_INT sp_offset = 0;
f676971a 16147
699c914a
MS
16148 if (TARGET_FIX_AND_CONTINUE)
16149 {
16150 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16151 address by modifying the first 5 instructions of the function
699c914a
MS
16152 to branch to the overriding function. This is necessary to
16153 permit function pointers that point to the old function to
16154 actually forward to the new function. */
16155 emit_insn (gen_nop ());
16156 emit_insn (gen_nop ());
de2ab0ca 16157 emit_insn (gen_nop ());
699c914a
MS
16158 emit_insn (gen_nop ());
16159 emit_insn (gen_nop ());
16160 }
16161
16162 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16163 {
16164 reg_mode = V2SImode;
16165 reg_size = 8;
16166 }
a3170dc6 16167
f78c3290
NF
16168 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16169 /*static_chain_p=*/using_static_chain_p,
16170 /*sibcall=*/0);
16171 using_store_multiple = strategy & SAVRES_MULTIPLE;
16172 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16173 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16174
16175 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16176 if (! WORLD_SAVE_P (info)
16177 && info->push_p
acd0b319 16178 && (DEFAULT_ABI == ABI_V4
e3b5732b 16179 || crtl->calls_eh_return))
9ebbca7d 16180 {
f78c3290
NF
16181 bool need_r11 = (TARGET_SPE
16182 ? (!saving_GPRs_inline
16183 && info->spe_64bit_regs_used == 0)
16184 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16185 if (info->total_size < 32767)
16186 sp_offset = info->total_size;
16187 else
f78c3290
NF
16188 frame_reg_rtx = (need_r11
16189 ? gen_rtx_REG (Pmode, 11)
16190 : frame_ptr_rtx);
f676971a 16191 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16192 (frame_reg_rtx != sp_reg_rtx
16193 && (info->cr_save_p
16194 || info->lr_save_p
16195 || info->first_fp_reg_save < 64
16196 || info->first_gp_reg_save < 32
f78c3290
NF
16197 )),
16198 need_r11);
9ebbca7d
GK
16199 if (frame_reg_rtx != sp_reg_rtx)
16200 rs6000_emit_stack_tie ();
16201 }
16202
d62294f5 16203 /* Handle world saves specially here. */
f57fe068 16204 if (WORLD_SAVE_P (info))
d62294f5
FJ
16205 {
16206 int i, j, sz;
16207 rtx treg;
16208 rtvec p;
22fa69da 16209 rtx reg0;
d62294f5
FJ
16210
16211 /* save_world expects lr in r0. */
22fa69da 16212 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16213 if (info->lr_save_p)
c4ad648e 16214 {
22fa69da 16215 insn = emit_move_insn (reg0,
1de43f85 16216 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16217 RTX_FRAME_RELATED_P (insn) = 1;
16218 }
d62294f5
FJ
16219
16220 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16221 assumptions about the offsets of various bits of the stack
992d08b1 16222 frame. */
37409796
NS
16223 gcc_assert (info->gp_save_offset == -220
16224 && info->fp_save_offset == -144
16225 && info->lr_save_offset == 8
16226 && info->cr_save_offset == 4
16227 && info->push_p
16228 && info->lr_save_p
e3b5732b 16229 && (!crtl->calls_eh_return
37409796
NS
16230 || info->ehrd_offset == -432)
16231 && info->vrsave_save_offset == -224
22fa69da 16232 && info->altivec_save_offset == -416);
d62294f5
FJ
16233
16234 treg = gen_rtx_REG (SImode, 11);
16235 emit_move_insn (treg, GEN_INT (-info->total_size));
16236
16237 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16238 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16239
16240 /* Preserve CR2 for save_world prologues. */
22fa69da 16241 sz = 5;
d62294f5
FJ
16242 sz += 32 - info->first_gp_reg_save;
16243 sz += 64 - info->first_fp_reg_save;
16244 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16245 p = rtvec_alloc (sz);
16246 j = 0;
16247 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16248 gen_rtx_REG (SImode,
1de43f85 16249 LR_REGNO));
d62294f5 16250 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16251 gen_rtx_SYMBOL_REF (Pmode,
16252 "*save_world"));
d62294f5 16253 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16254 properly. */
16255 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16256 {
696e45ba
ME
16257 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16258 ? DFmode : SFmode),
16259 info->first_fp_reg_save + i);
c4ad648e
AM
16260 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16261 GEN_INT (info->fp_save_offset
16262 + sp_offset + 8 * i));
696e45ba
ME
16263 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16264 ? DFmode : SFmode), addr);
c4ad648e
AM
16265
16266 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16267 }
d62294f5 16268 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16269 {
16270 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16271 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16272 GEN_INT (info->altivec_save_offset
16273 + sp_offset + 16 * i));
0be76840 16274 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16275
16276 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16277 }
d62294f5 16278 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16279 {
16280 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16281 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16282 GEN_INT (info->gp_save_offset
16283 + sp_offset + reg_size * i));
0be76840 16284 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16285
16286 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16287 }
16288
16289 {
16290 /* CR register traditionally saved as CR2. */
16291 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16292 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16293 GEN_INT (info->cr_save_offset
16294 + sp_offset));
0be76840 16295 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16296
16297 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16298 }
22fa69da
GK
16299 /* Explain the use of R0. */
16300 if (info->lr_save_p)
16301 {
16302 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16303 GEN_INT (info->lr_save_offset
16304 + sp_offset));
16305 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16306
22fa69da
GK
16307 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16308 }
16309 /* Explain what happens to the stack pointer. */
16310 {
16311 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16312 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16313 }
d62294f5
FJ
16314
16315 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16316 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16317 treg, GEN_INT (-info->total_size));
16318 sp_offset = info->total_size;
d62294f5
FJ
16319 }
16320
9ebbca7d 16321 /* If we use the link register, get it into r0. */
f57fe068 16322 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16323 {
52ff33d0
NF
16324 rtx addr, reg, mem;
16325
f8a57be8 16326 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16327 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16328 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16329
16330 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16331 GEN_INT (info->lr_save_offset + sp_offset));
16332 reg = gen_rtx_REG (Pmode, 0);
16333 mem = gen_rtx_MEM (Pmode, addr);
16334 /* This should not be of rs6000_sr_alias_set, because of
16335 __builtin_return_address. */
16336
16337 insn = emit_move_insn (mem, reg);
16338 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16339 NULL_RTX, NULL_RTX);
f8a57be8 16340 }
9ebbca7d
GK
16341
16342 /* If we need to save CR, put it into r12. */
f57fe068 16343 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16344 {
f8a57be8 16345 rtx set;
f676971a 16346
9ebbca7d 16347 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16348 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16349 RTX_FRAME_RELATED_P (insn) = 1;
16350 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16351 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16352 But that's OK. All we have to do is specify that _one_ condition
16353 code register is saved in this stack slot. The thrower's epilogue
16354 will then restore all the call-saved registers.
16355 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16356 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16357 gen_rtx_REG (SImode, CR2_REGNO));
16358 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16359 set,
16360 REG_NOTES (insn));
16361 }
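      /* The note attached above is, in effect, just
	   (set (reg:SI 12) (reg:SI CR2_REGNO))
	 i.e. "the one saved condition register, nominally CR2, now lives in
	 r12"; the insn itself is the UNSPEC_MOVESI_FROM_CR move that the
	 dwarf2 frame machinery cannot interpret on its own.  */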
16362
16363 /* Do any required saving of fpr's. If only one or two to save, do
16364 it ourselves. Otherwise, call a function. */
f57fe068 16365 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
16366 {
16367 int i;
16368 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16369 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16370 && ! call_used_regs[info->first_fp_reg_save+i]))
16371 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16372 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16373 ? DFmode : SFmode,
16374 info->first_fp_reg_save + i,
16375 info->fp_save_offset + sp_offset + 8 * i,
16376 info->total_size);
9ebbca7d 16377 }
f57fe068 16378 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
16379 {
16380 rtx par;
16381
16382 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16383 info->fp_save_offset + sp_offset,
16384 DFmode,
16385 /*savep=*/true, /*gpr=*/false,
16386 /*exitp=*/false);
16387 insn = emit_insn (par);
16388 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16389 NULL_RTX, NULL_RTX);
16390 }
16391
16392 /* Save GPRs. This is done as a PARALLEL if we are using
16393 the store-multiple instructions. */
16394 if (!WORLD_SAVE_P (info)
16395 && TARGET_SPE_ABI
16396 && info->spe_64bit_regs_used != 0
16397 && info->first_gp_reg_save != 32)
16398 {
16399 int i;
16400 rtx spe_save_area_ptr;
16401
16402 /* Determine whether we can address all of the registers that need
16403 to be saved with an offset from the stack pointer that fits in
16404 the small const field for SPE memory instructions. */
16405 int spe_regs_addressable_via_sp
16406 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16407 + (32 - info->first_gp_reg_save - 1) * reg_size)
16408 && saving_GPRs_inline);
16409 int spe_offset;
16410
16411 if (spe_regs_addressable_via_sp)
16412 {
16413 spe_save_area_ptr = frame_reg_rtx;
16414 spe_offset = info->spe_gp_save_offset + sp_offset;
16415 }
16416 else
16417 {
16418 /* Make r11 point to the start of the SPE save area. We need
16419 to be careful here if r11 is holding the static chain. If
16420 it is, then temporarily save it in r0. We would use r0 as
16421 our base register here, but using r0 as a base register in
16422 loads and stores means something different from what we
16423 would like. */
16424 int ool_adjust = (saving_GPRs_inline
16425 ? 0
16426 : (info->first_gp_reg_save
16427 - (FIRST_SAVRES_REGISTER+1))*8);
16428 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16429 + sp_offset - ool_adjust);
16430
16431 if (using_static_chain_p)
16432 {
16433 rtx r0 = gen_rtx_REG (Pmode, 0);
16434 gcc_assert (info->first_gp_reg_save > 11);
16435
16436 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16437 }
16438
16439 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16440 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16441 frame_reg_rtx,
16442 GEN_INT (offset)));
16443 /* We need to make sure the move to r11 gets noted for
16444 properly outputting unwind information. */
16445 if (!saving_GPRs_inline)
16446 rs6000_frame_related (insn, frame_reg_rtx, offset,
16447 NULL_RTX, NULL_RTX);
16448 spe_offset = 0;
16449 }
16450
16451 if (saving_GPRs_inline)
16452 {
16453 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16454 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16455 {
16456 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16457 rtx offset, addr, mem;
f676971a 16458
16459 /* We're doing all this to ensure that the offset fits into
16460 the immediate offset of 'evstdd'. */
16461 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16462
16463 offset = GEN_INT (reg_size * i + spe_offset);
16464 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16465 mem = gen_rtx_MEM (V2SImode, addr);
16466
16467 insn = emit_move_insn (mem, reg);
16468
16469 rs6000_frame_related (insn, spe_save_area_ptr,
16470 info->spe_gp_save_offset
16471 + sp_offset + reg_size * i,
16472 offset, const0_rtx);
16473 }
16474 }
16475 else
9ebbca7d 16476 {
f78c3290 16477 rtx par;
9ebbca7d 16478
16479 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16480 0, reg_mode,
16481 /*savep=*/true, /*gpr=*/true,
16482 /*exitp=*/false);
16483 insn = emit_insn (par);
16484 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16485 NULL_RTX, NULL_RTX);
9ebbca7d 16486 }
16487
16488
16489 /* Move the static chain pointer back. */
16490 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16491 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16492 }
16493 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16494 {
16495 rtx par;
16496
16497 /* Need to adjust r11 if we saved any FPRs. */
16498 if (info->first_fp_reg_save != 64)
16499 {
16500 rtx r11 = gen_rtx_REG (reg_mode, 11);
16501 rtx offset = GEN_INT (info->total_size
16502 + (-8 * (64-info->first_fp_reg_save)));
16503 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16504 ? sp_reg_rtx : r11);
16505
16506 emit_insn (TARGET_32BIT
16507 ? gen_addsi3 (r11, ptr_reg, offset)
16508 : gen_adddi3 (r11, ptr_reg, offset));
16509 }
16510
16511 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16512 info->gp_save_offset + sp_offset,
16513 reg_mode,
16514 /*savep=*/true, /*gpr=*/true,
16515 /*exitp=*/false);
16516 insn = emit_insn (par);
f676971a 16517 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16518 NULL_RTX, NULL_RTX);
16519 }
f78c3290 16520 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16521 {
308c142a 16522 rtvec p;
16523 int i;
16524 p = rtvec_alloc (32 - info->first_gp_reg_save);
16525 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16526 {
16527 rtx addr, reg, mem;
16528 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16529 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16530 GEN_INT (info->gp_save_offset
16531 + sp_offset
9ebbca7d 16532 + reg_size * i));
0be76840 16533 mem = gen_frame_mem (reg_mode, addr);
16534
16535 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16536 }
16537 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16538 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16539 NULL_RTX, NULL_RTX);
b6c9286a 16540 }
f57fe068 16541 else if (!WORLD_SAVE_P (info))
b6c9286a 16542 {
16543 int i;
16544 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16545 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16546 {
16547 rtx addr, reg, mem;
16548 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16549
16550 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16551 GEN_INT (info->gp_save_offset
16552 + sp_offset
16553 + reg_size * i));
16554 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16555
16556 insn = emit_move_insn (mem, reg);
16557 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16558 NULL_RTX, NULL_RTX);
16559 }
16560 }
16561
16562 /* ??? There's no need to emit actual instructions here, but it's the
16563 easiest way to get the frame unwind information emitted. */
e3b5732b 16564 if (crtl->calls_eh_return)
83720594 16565 {
16566 unsigned int i, regno;
16567
16568 /* In AIX ABI we need to pretend we save r2 here. */
16569 if (TARGET_AIX)
16570 {
16571 rtx addr, reg, mem;
16572
16573 reg = gen_rtx_REG (reg_mode, 2);
16574 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16575 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16576 mem = gen_frame_mem (reg_mode, addr);
16577
16578 insn = emit_move_insn (mem, reg);
f676971a 16579 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16580 NULL_RTX, NULL_RTX);
16581 PATTERN (insn) = gen_blockage ();
16582 }
16583
16584 for (i = 0; ; ++i)
16585 {
16586 regno = EH_RETURN_DATA_REGNO (i);
16587 if (regno == INVALID_REGNUM)
16588 break;
16589
16590 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16591 info->ehrd_offset + sp_offset
16592 + reg_size * (int) i,
16593 info->total_size);
16594 }
16595 }
16596
9ebbca7d 16597 /* Save CR if we use any that must be preserved. */
f57fe068 16598 if (!WORLD_SAVE_P (info) && info->cr_save_p)
16599 {
16600 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16601 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16602 rtx mem = gen_frame_mem (SImode, addr);
16603 /* See the large comment above about why CR2_REGNO is used. */
16604 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16605
16606 /* If r12 was used to hold the original sp, copy cr into r0 now
16607 that it's free. */
16608 if (REGNO (frame_reg_rtx) == 12)
16609 {
16610 rtx set;
16611
9ebbca7d 16612 cr_save_rtx = gen_rtx_REG (SImode, 0);
16613 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16614 RTX_FRAME_RELATED_P (insn) = 1;
16615 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16616 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16617 set,
16618 REG_NOTES (insn));
f676971a 16619
16620 }
16621 insn = emit_move_insn (mem, cr_save_rtx);
16622
f676971a 16623 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16624 NULL_RTX, NULL_RTX);
16625 }
16626
f676971a 16627 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16628 for which it was done previously. */
f57fe068 16629 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16630 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16631 {
bcb2d701 16632 if (info->total_size < 32767)
2b2c2fe5 16633 sp_offset = info->total_size;
16634 else
16635 frame_reg_rtx = frame_ptr_rtx;
16636 rs6000_emit_allocate_stack (info->total_size,
16637 (frame_reg_rtx != sp_reg_rtx
16638 && ((info->altivec_size != 0)
16639 || (info->vrsave_mask != 0)
16640 )),
16641 FALSE);
16642 if (frame_reg_rtx != sp_reg_rtx)
16643 rs6000_emit_stack_tie ();
2b2c2fe5 16644 }
16645
16646 /* Set frame pointer, if needed. */
16647 if (frame_pointer_needed)
16648 {
7d5175e1 16649 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
16650 sp_reg_rtx);
16651 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16652 }
9878760c 16653
16654 /* Save AltiVec registers if needed. Save here because the red zone does
16655 not include AltiVec registers. */
16656 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16657 {
16658 int i;
16659
16660 /* There should be a non-inline version of this, for when we
16661 are saving lots of vector registers. */
16662 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16663 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16664 {
16665 rtx areg, savereg, mem;
16666 int offset;
16667
16668 offset = info->altivec_save_offset + sp_offset
16669 + 16 * (i - info->first_altivec_reg_save);
16670
16671 savereg = gen_rtx_REG (V4SImode, i);
16672
16673 areg = gen_rtx_REG (Pmode, 0);
16674 emit_move_insn (areg, GEN_INT (offset));
16675
16676 /* AltiVec addressing mode is [reg+reg]. */
16677 mem = gen_frame_mem (V4SImode,
16678 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16679
16680 insn = emit_move_insn (mem, savereg);
16681
16682 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16683 areg, GEN_INT (offset));
16684 }
16685 }
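      /* Vector loads and stores only have the indexed [reg+reg] form, so the
	 byte offset is first materialized in r0 (AREG) and the V4SImode move
	 above is typically emitted as an stvx; there is no reg+displacement
	 encoding that could be used here instead.  */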
16686
16687 /* VRSAVE is a bit vector representing which AltiVec registers
16688 are used. The OS uses this to determine which vector
16689 registers to save on a context switch. We need to save
16690 VRSAVE on the stack frame, add whatever AltiVec registers we
16691 used in this function, and do the corresponding magic in the
16692 epilogue. */
16693
16694 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16695 && info->vrsave_mask != 0)
16696 {
16697 rtx reg, mem, vrsave;
16698 int offset;
16699
16700 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16701 as frame_reg_rtx and r11 as the static chain pointer for
16702 nested functions. */
16703 reg = gen_rtx_REG (SImode, 0);
16704 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16705 if (TARGET_MACHO)
16706 emit_insn (gen_get_vrsave_internal (reg));
16707 else
16708 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16709
16710 if (!WORLD_SAVE_P (info))
16711 {
16712 /* Save VRSAVE. */
16713 offset = info->vrsave_save_offset + sp_offset;
16714 mem = gen_frame_mem (SImode,
16715 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16716 GEN_INT (offset)));
16717 insn = emit_move_insn (mem, reg);
16718 }
16719
16720 /* Include the registers in the mask. */
16721 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16722
16723 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16724 }
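  /* Roughly, the block above amounts to:
       old = VRSAVE;
       if (!WORLD_SAVE_P (info))
	 *(frame + vrsave_save_offset) = old;
       VRSAVE = old | mask of AltiVec registers used in this function;
     so the stack keeps the caller's mask and the epilogue can restore it.  */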
16725
1db02437 16726 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16727 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
16728 || (DEFAULT_ABI == ABI_V4
16729 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16730 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
16731 {
16732 /* If emit_load_toc_table will use the link register, we need to save
16733 it. We use R12 for this purpose because emit_load_toc_table
16734 can use register 0. This allows us to use a plain 'blr' to return
16735 from the procedure more often. */
16736 int save_LR_around_toc_setup = (TARGET_ELF
16737 && DEFAULT_ABI != ABI_AIX
16738 && flag_pic
16739 && ! info->lr_save_p
16740 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16741 if (save_LR_around_toc_setup)
16742 {
1de43f85 16743 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16744
c4ad648e 16745 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16746 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16747
c4ad648e 16748 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16749
c4ad648e 16750 insn = emit_move_insn (lr, frame_ptr_rtx);
16751 RTX_FRAME_RELATED_P (insn) = 1;
16752 }
16753 else
16754 rs6000_emit_load_toc_table (TRUE);
16755 }
ee890fe2 16756
fcce224d 16757#if TARGET_MACHO
ee890fe2 16758 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16759 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16760 {
1de43f85 16761 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 16762 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 16763
16764 /* Save and restore LR locally around this call (in R0). */
16765 if (!info->lr_save_p)
6fb5fa3c 16766 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16767
6fb5fa3c 16768 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16769
16770 emit_move_insn (gen_rtx_REG (Pmode,
16771 RS6000_PIC_OFFSET_TABLE_REGNUM),
16772 lr);
16773
16774 if (!info->lr_save_p)
6fb5fa3c 16775 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16776 }
fcce224d 16777#endif
16778}
16779
9ebbca7d 16780/* Write function prologue. */
a4f6c312 16781
08c148a8 16782static void
f676971a 16783rs6000_output_function_prologue (FILE *file,
a2369ed3 16784 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
16785{
16786 rs6000_stack_t *info = rs6000_stack_info ();
16787
16788 if (TARGET_DEBUG_STACK)
16789 debug_stack_info (info);
9878760c 16790
16791 /* Write .extern for any function we will call to save and restore
16792 fp values. */
16793 if (info->first_fp_reg_save < 64
16794 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16795 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16796 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16797 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16798
16799 /* Write .extern for AIX common mode routines, if needed. */
16800 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16801 {
16802 fputs ("\t.extern __mulh\n", file);
16803 fputs ("\t.extern __mull\n", file);
16804 fputs ("\t.extern __divss\n", file);
16805 fputs ("\t.extern __divus\n", file);
16806 fputs ("\t.extern __quoss\n", file);
16807 fputs ("\t.extern __quous\n", file);
16808 common_mode_defined = 1;
16809 }
9878760c 16810
9ebbca7d 16811 if (! HAVE_prologue)
979721f8 16812 {
9ebbca7d 16813 start_sequence ();
9dda4cc8 16814
16815 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16816 the "toplevel" insn chain. */
2e040219 16817 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16818 rs6000_emit_prologue ();
2e040219 16819 emit_note (NOTE_INSN_DELETED);
178c3eff 16820
a3c9585f 16821 /* Expand INSN_ADDRESSES so final() doesn't crash. */
16822 {
16823 rtx insn;
16824 unsigned addr = 0;
16825 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16826 {
16827 INSN_ADDRESSES_NEW (insn, addr);
16828 addr += 4;
16829 }
16830 }
9dda4cc8 16831
9ebbca7d 16832 if (TARGET_DEBUG_STACK)
a4f6c312 16833 debug_rtx_list (get_insns (), 100);
c9d691e9 16834 final (get_insns (), file, FALSE);
9ebbca7d 16835 end_sequence ();
16836 }
16837
16838 rs6000_pic_labelno++;
16839}
f676971a 16840
16841/* Non-zero if vmx regs are restored before the frame pop, zero if
16842 we restore after the pop when possible. */
16843#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
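/* Defining this to 1 would force the AltiVec register and VRSAVE reloads to
   be emitted while the old frame is still allocated (before the stack
   pointer is popped); with 0 they are deferred until after the pop whenever
   the save area is still addressable at that point, which is what the
   offset checks against -220/-288 below decide.  */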
16844
16845/* Reload CR from REG. */
16846
16847static void
16848rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16849{
16850 int count = 0;
16851 int i;
16852
16853 if (using_mfcr_multiple)
16854 {
16855 for (i = 0; i < 8; i++)
16856 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16857 count++;
16858 gcc_assert (count);
16859 }
16860
16861 if (using_mfcr_multiple && count > 1)
16862 {
16863 rtvec p;
16864 int ndx;
16865
16866 p = rtvec_alloc (count);
16867
16868 ndx = 0;
16869 for (i = 0; i < 8; i++)
16870 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16871 {
16872 rtvec r = rtvec_alloc (2);
16873 RTVEC_ELT (r, 0) = reg;
16874 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16875 RTVEC_ELT (p, ndx) =
16876 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16877 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16878 ndx++;
16879 }
16880 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16881 gcc_assert (ndx == count);
16882 }
16883 else
16884 for (i = 0; i < 8; i++)
16885 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16886 {
16887 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16888 CR0_REGNO+i),
16889 reg));
16890 }
16891}
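/* Usage sketch: with USING_MFCR_MULTIPLE and more than one live CR field,
   the routine above builds a single PARALLEL in which each such field is set
   from (unspec [REG, 1 << (7 - i)] UNSPEC_MOVESI_TO_CR), i.e. the per-field
   masks of one combined mtcrf-style restore; otherwise it falls back to one
   gen_movsi_to_cr_one insn per live field.  */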
16892
9ebbca7d 16893/* Emit function epilogue as insns.
9878760c 16894
16895 At present, dwarf2out_frame_debug_expr doesn't understand
16896 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16897 anywhere in the epilogue. Most of the insns below would in any case
16898 need special notes to explain where r11 is in relation to the stack. */
9878760c 16899
9ebbca7d 16900void
a2369ed3 16901rs6000_emit_epilogue (int sibcall)
16902{
16903 rs6000_stack_t *info;
f78c3290 16904 int restoring_GPRs_inline;
16905 int restoring_FPRs_inline;
16906 int using_load_multiple;
d296e02e 16907 int using_mtcr_multiple;
9ebbca7d 16908 int use_backchain_to_restore_sp;
16909 int restore_lr;
16910 int strategy;
16911 int sp_offset = 0;
16912 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16913 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16914 enum machine_mode reg_mode = Pmode;
327e5343 16915 int reg_size = TARGET_32BIT ? 4 : 8;
16916 int i;
16917
16918 info = rs6000_stack_info ();
16919
16920 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16921 {
16922 reg_mode = V2SImode;
16923 reg_size = 8;
16924 }
16925
16926 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16927 /*static_chain_p=*/0, sibcall);
16928 using_load_multiple = strategy & SAVRES_MULTIPLE;
16929 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16930 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16931 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
16932 || rs6000_cpu == PROCESSOR_PPC603
16933 || rs6000_cpu == PROCESSOR_PPC750
16934 || optimize_size);
16935 /* Restore via the backchain when we have a large frame, since this
16936 is more efficient than an addis, addi pair. The second condition
16937 here will not trigger at the moment; we don't actually need a
16938 frame pointer for alloca, but the generic parts of the compiler
16939 give us one anyway. */
16940 use_backchain_to_restore_sp = (info->total_size > 32767
16941 || info->total_size
16942 + (info->lr_save_p ? info->lr_save_offset : 0)
16943 > 32767
16944 || (cfun->calls_alloca
16945 && !frame_pointer_needed));
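  /* 32767 is the largest value that fits the signed 16-bit displacement of a
     d-form load/store (or addi), so a frame larger than that cannot be
     reached from the stack pointer with a single immediate offset and the
     backchain word is used instead.  */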
16946 restore_lr = (info->lr_save_p
16947 && restoring_GPRs_inline
16948 && restoring_FPRs_inline);
9ebbca7d 16949
f57fe068 16950 if (WORLD_SAVE_P (info))
16951 {
16952 int i, j;
16953 char rname[30];
16954 const char *alloc_rname;
16955 rtvec p;
16956
16957 /* eh_rest_world_r10 will return to the location saved in the LR
16958 stack slot (which is not likely to be our caller.)
16959 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16960 rest_world is similar, except any R10 parameter is ignored.
16961 The exception-handling stuff that was here in 2.95 is no
16962 longer necessary. */
16963
16964 p = rtvec_alloc (9
16965 + 1
f676971a 16966 + 32 - info->first_gp_reg_save
16967 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16968 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16969
e3b5732b 16970 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16971 "*eh_rest_world_r10" : "*rest_world"));
16972 alloc_rname = ggc_strdup (rname);
16973
16974 j = 0;
16975 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16976 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16977 gen_rtx_REG (Pmode,
1de43f85 16978 LR_REGNO));
d62294f5 16979 RTVEC_ELT (p, j++)
c4ad648e 16980 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16981 /* The instruction pattern requires a clobber here;
c4ad648e 16982 it is shared with the restVEC helper. */
d62294f5 16983 RTVEC_ELT (p, j++)
c4ad648e 16984 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
16985
16986 {
16987 /* CR register traditionally saved as CR2. */
16988 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16989 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16990 GEN_INT (info->cr_save_offset));
0be76840 16991 rtx mem = gen_frame_mem (reg_mode, addr);
16992
16993 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16994 }
16995
16996 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16997 {
16998 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16999 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17000 GEN_INT (info->gp_save_offset
17001 + reg_size * i));
0be76840 17002 rtx mem = gen_frame_mem (reg_mode, addr);
17003
17004 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17005 }
d62294f5 17006 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
17007 {
17008 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
17009 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17010 GEN_INT (info->altivec_save_offset
17011 + 16 * i));
0be76840 17012 rtx mem = gen_frame_mem (V4SImode, addr);
17013
17014 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17015 }
d62294f5 17016 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 17017 {
17018 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17019 ? DFmode : SFmode),
17020 info->first_fp_reg_save + i);
17021 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17022 GEN_INT (info->fp_save_offset
17023 + 8 * i));
17024 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17025 ? DFmode : SFmode), addr);
17026
17027 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17028 }
d62294f5 17029 RTVEC_ELT (p, j++)
c4ad648e 17030 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 17031 RTVEC_ELT (p, j++)
c4ad648e 17032 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 17033 RTVEC_ELT (p, j++)
c4ad648e 17034 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 17035 RTVEC_ELT (p, j++)
c4ad648e 17036 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 17037 RTVEC_ELT (p, j++)
c4ad648e 17038 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
17039 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17040
17041 return;
17042 }
17043
17044 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17045 if (info->push_p)
2b2c2fe5 17046 sp_offset = info->total_size;
f676971a 17047
17048 /* Restore AltiVec registers if we must do so before adjusting the
17049 stack. */
17050 if (TARGET_ALTIVEC_ABI
17051 && info->altivec_size != 0
17052 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17053 || (DEFAULT_ABI != ABI_V4
17054 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
17055 {
17056 int i;
17057
17058 if (use_backchain_to_restore_sp)
17059 {
17060 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17061 emit_move_insn (frame_reg_rtx,
17062 gen_rtx_MEM (Pmode, sp_reg_rtx));
17063 sp_offset = 0;
17064 }
17065 else if (frame_pointer_needed)
17066 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17067
17068 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17069 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17070 {
17071 rtx addr, areg, mem;
17072
17073 areg = gen_rtx_REG (Pmode, 0);
17074 emit_move_insn
17075 (areg, GEN_INT (info->altivec_save_offset
17076 + sp_offset
17077 + 16 * (i - info->first_altivec_reg_save)));
17078
17079 /* AltiVec addressing mode is [reg+reg]. */
17080 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17081 mem = gen_frame_mem (V4SImode, addr);
17082
17083 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17084 }
17085 }
17086
17087 /* Restore VRSAVE if we must do so before adjusting the stack. */
17088 if (TARGET_ALTIVEC
17089 && TARGET_ALTIVEC_VRSAVE
17090 && info->vrsave_mask != 0
17091 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17092 || (DEFAULT_ABI != ABI_V4
17093 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
17094 {
17095 rtx addr, mem, reg;
17096
1c9c5e43 17097 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17098 {
17099 if (use_backchain_to_restore_sp)
17100 {
17101 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17102 emit_move_insn (frame_reg_rtx,
17103 gen_rtx_MEM (Pmode, sp_reg_rtx));
17104 sp_offset = 0;
17105 }
17106 else if (frame_pointer_needed)
17107 frame_reg_rtx = hard_frame_pointer_rtx;
17108 }
17109
17110 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17111 GEN_INT (info->vrsave_save_offset + sp_offset));
17112 mem = gen_frame_mem (SImode, addr);
17113 reg = gen_rtx_REG (SImode, 12);
17114 emit_move_insn (reg, mem);
17115
17116 emit_insn (generate_set_vrsave (reg, info, 1));
17117 }
17118
17119 /* If we have a large stack frame, restore the old stack pointer
17120 using the backchain. */
17121 if (use_backchain_to_restore_sp)
17122 {
1c9c5e43 17123 if (frame_reg_rtx == sp_reg_rtx)
17124 {
17125 /* Under V.4, don't reset the stack pointer until after we're done
17126 loading the saved registers. */
17127 if (DEFAULT_ABI == ABI_V4)
17128 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17129
17130 emit_move_insn (frame_reg_rtx,
17131 gen_rtx_MEM (Pmode, sp_reg_rtx));
17132 sp_offset = 0;
17133 }
17134 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17135 && DEFAULT_ABI == ABI_V4)
17136 /* frame_reg_rtx has been set up by the altivec restore. */
17137 ;
17138 else
17139 {
17140 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
17141 frame_reg_rtx = sp_reg_rtx;
17142 }
17143 }
17144 /* If we have a frame pointer, we can restore the old stack pointer
17145 from it. */
17146 else if (frame_pointer_needed)
17147 {
17148 frame_reg_rtx = sp_reg_rtx;
17149 if (DEFAULT_ABI == ABI_V4)
17150 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17151
17152 emit_insn (TARGET_32BIT
17153 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17154 GEN_INT (info->total_size))
17155 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17156 GEN_INT (info->total_size)));
17157 sp_offset = 0;
2b2c2fe5 17158 }
17159 else if (info->push_p
17160 && DEFAULT_ABI != ABI_V4
e3b5732b 17161 && !crtl->calls_eh_return)
2b2c2fe5 17162 {
17163 emit_insn (TARGET_32BIT
17164 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
17165 GEN_INT (info->total_size))
17166 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
17167 GEN_INT (info->total_size)));
17168 sp_offset = 0;
17169 }
17170
e6477eaa 17171 /* Restore AltiVec registers if we have not done so already. */
17172 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17173 && TARGET_ALTIVEC_ABI
17174 && info->altivec_size != 0
17175 && (DEFAULT_ABI == ABI_V4
17176 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
17177 {
17178 int i;
17179
17180 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17181 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17182 {
17183 rtx addr, areg, mem;
17184
17185 areg = gen_rtx_REG (Pmode, 0);
17186 emit_move_insn
17187 (areg, GEN_INT (info->altivec_save_offset
17188 + sp_offset
17189 + 16 * (i - info->first_altivec_reg_save)));
17190
17191 /* AltiVec addressing mode is [reg+reg]. */
17192 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17193 mem = gen_frame_mem (V4SImode, addr);
17194
17195 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17196 }
17197 }
17198
17199 /* Restore VRSAVE if we have not done so already. */
17200 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17201 && TARGET_ALTIVEC
17202 && TARGET_ALTIVEC_VRSAVE
17203 && info->vrsave_mask != 0
17204 && (DEFAULT_ABI == ABI_V4
17205 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
17206 {
17207 rtx addr, mem, reg;
17208
17209 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17210 GEN_INT (info->vrsave_save_offset + sp_offset));
17211 mem = gen_frame_mem (SImode, addr);
17212 reg = gen_rtx_REG (SImode, 12);
17213 emit_move_insn (reg, mem);
17214
17215 emit_insn (generate_set_vrsave (reg, info, 1));
17216 }
17217
17218 /* Get the old lr if we saved it. If we are restoring registers
17219 out-of-line, then the out-of-line routines can do this for us. */
17220 if (restore_lr)
b6c9286a 17221 {
17222 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17223 info->lr_save_offset + sp_offset);
ba4828e0 17224
9ebbca7d 17225 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17226 }
f676971a 17227
17228 /* Get the old cr if we saved it. */
17229 if (info->cr_save_p)
17230 {
17231 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17232 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17233 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17234
17235 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17236 }
f676971a 17237
9ebbca7d 17238 /* Set LR here to try to overlap restores below. */
f78c3290 17239 if (restore_lr)
1de43f85 17240 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17241 gen_rtx_REG (Pmode, 0));
f676971a 17242
83720594 17243 /* Load exception handler data registers, if needed. */
e3b5732b 17244 if (crtl->calls_eh_return)
83720594 17245 {
17246 unsigned int i, regno;
17247
17248 if (TARGET_AIX)
17249 {
17250 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17251 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17252 rtx mem = gen_frame_mem (reg_mode, addr);
17253
17254 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17255 }
17256
17257 for (i = 0; ; ++i)
17258 {
a3170dc6 17259 rtx mem;
17260
17261 regno = EH_RETURN_DATA_REGNO (i);
17262 if (regno == INVALID_REGNUM)
17263 break;
17264
17265 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17266 info->ehrd_offset + sp_offset
17267 + reg_size * (int) i);
17268
17269 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17270 }
17271 }
f676971a 17272
17273 /* Restore GPRs. This is done as a PARALLEL if we are using
17274 the load-multiple instructions. */
17275 if (TARGET_SPE_ABI
17276 && info->spe_64bit_regs_used != 0
17277 && info->first_gp_reg_save != 32)
52ff33d0 17278 {
17279 /* Determine whether we can address all of the registers that need
17280 to be saved with an offset from the stack pointer that fits in
17281 the small const field for SPE memory instructions. */
17282 int spe_regs_addressable_via_sp
17283 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17284 + (32 - info->first_gp_reg_save - 1) * reg_size)
17285 && restoring_GPRs_inline);
17286 int spe_offset;
17287
17288 if (spe_regs_addressable_via_sp)
45b194f8 17289 spe_offset = info->spe_gp_save_offset + sp_offset;
17290 else
17291 {
45b194f8 17292 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17293 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17294 not clobbering it when we were saving registers in the prologue.
17295 There's no need to worry here because the static chain is passed
17296 anew to every function. */
17297 int ool_adjust = (restoring_GPRs_inline
17298 ? 0
17299 : (info->first_gp_reg_save
17300 - (FIRST_SAVRES_REGISTER+1))*8);
17301
17302 if (frame_reg_rtx == sp_reg_rtx)
17303 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17304 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17305 GEN_INT (info->spe_gp_save_offset
17306 + sp_offset
17307 - ool_adjust)));
17308 /* Keep the invariant that frame_reg_rtx + sp_offset points
17309 at the top of the stack frame. */
17310 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17311
17312 spe_offset = 0;
17313 }
17314
17315 if (restoring_GPRs_inline)
17316 {
17317 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17318 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17319 {
17320 rtx offset, addr, mem;
52ff33d0 17321
17322 /* We're doing all this to ensure that the immediate offset
17323 fits into the immediate field of 'evldd'. */
17324 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17325
17326 offset = GEN_INT (spe_offset + reg_size * i);
17327 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17328 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17329
17330 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17331 mem);
17332 }
17333 }
17334 else
17335 {
17336 rtx par;
17337
17338 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17339 0, reg_mode,
17340 /*savep=*/false, /*gpr=*/true,
17341 /*exitp=*/true);
17342 emit_jump_insn (par);
17343
17344 /* We don't want anybody else emitting things after we jumped
17345 back. */
17346 return;
17347 }
52ff33d0 17348 }
17349 else if (!restoring_GPRs_inline)
17350 {
17351 /* We are jumping to an out-of-line function. */
17352 bool can_use_exit = info->first_fp_reg_save == 64;
17353 rtx par;
17354
17355 /* Emit stack reset code if we need it. */
17356 if (can_use_exit)
17357 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17358 sp_offset, can_use_exit);
17359 else
17360 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17361 sp_reg_rtx,
17362 GEN_INT (sp_offset - info->fp_size)));
17363
17364 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17365 info->gp_save_offset, reg_mode,
17366 /*savep=*/false, /*gpr=*/true,
17367 /*exitp=*/can_use_exit);
17368
17369 if (can_use_exit)
17370 {
17371 if (info->cr_save_p)
17372 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17373 using_mtcr_multiple);
17374
17375 emit_jump_insn (par);
17376
17377 /* We don't want anybody else emitting things after we jumped
17378 back. */
17379 return;
17380 }
17381 else
17382 emit_insn (par);
17383 }
17384 else if (using_load_multiple)
17385 {
17386 rtvec p;
17387 p = rtvec_alloc (32 - info->first_gp_reg_save);
17388 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17389 {
17390 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17391 GEN_INT (info->gp_save_offset
17392 + sp_offset
9ebbca7d 17393 + reg_size * i));
0be76840 17394 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17395
17396 RTVEC_ELT (p, i) =
17397 gen_rtx_SET (VOIDmode,
17398 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17399 mem);
9ebbca7d 17400 }
17401 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17402 }
17403 else
17404 {
17405 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17406 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17407 {
17408 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17409 GEN_INT (info->gp_save_offset
17410 + sp_offset
17411 + reg_size * i));
17412 rtx mem = gen_frame_mem (reg_mode, addr);
17413
17414 emit_move_insn (gen_rtx_REG (reg_mode,
17415 info->first_gp_reg_save + i), mem);
17416 }
17417 }
9878760c 17418
17419 /* Restore fpr's if we need to do it without calling a function. */
17420 if (restoring_FPRs_inline)
17421 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17422 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
17423 && ! call_used_regs[info->first_fp_reg_save+i]))
17424 {
17425 rtx addr, mem;
17426 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17427 GEN_INT (info->fp_save_offset
17428 + sp_offset
a4f6c312 17429 + 8 * i));
17430 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17431 ? DFmode : SFmode), addr);
9ebbca7d 17432
17433 emit_move_insn (gen_rtx_REG (((TARGET_HARD_FLOAT
17434 && TARGET_DOUBLE_FLOAT)
17435 ? DFmode : SFmode),
17436 info->first_fp_reg_save + i),
17437 mem);
17438 }
8d30c4ee 17439
17440 /* If we saved cr, restore it here. Just those that were used. */
17441 if (info->cr_save_p)
f78c3290 17442 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17443
9ebbca7d 17444 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17445 have been done. */
17446 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17447 sp_offset, !restoring_FPRs_inline);
b6c9286a 17448
e3b5732b 17449 if (crtl->calls_eh_return)
17450 {
17451 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17452 emit_insn (TARGET_32BIT
17453 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17454 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17455 }
17456
17457 if (!sibcall)
17458 {
17459 rtvec p;
17460 if (! restoring_FPRs_inline)
f78c3290 17461 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
17462 else
17463 p = rtvec_alloc (2);
b6c9286a 17464
e35b9579 17465 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
17466 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17467 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17468 : gen_rtx_CLOBBER (VOIDmode,
17469 gen_rtx_REG (Pmode, 65)));
17470
17471 /* If we have to restore more than two FP registers, branch to the
17472 restore function. It will return to our caller. */
17473 if (! restoring_FPRs_inline)
17474 {
17475 int i;
17476 rtx sym;
17477
17478 sym = rs6000_savres_routine_sym (info,
17479 /*savep=*/false,
17480 /*gpr=*/false,
17481 /*exitp=*/true);
17482 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17483 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17484 gen_rtx_REG (Pmode, 11));
17485 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17486 {
17487 rtx addr, mem;
17488 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17489 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17490 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17491
f78c3290 17492 RTVEC_ELT (p, i+4) =
17493 gen_rtx_SET (VOIDmode,
17494 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17495 mem);
17496 }
17497 }
f676971a 17498
9ebbca7d 17499 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17500 }
17501}
17502
17503/* Write function epilogue. */
17504
08c148a8 17505static void
f676971a 17506rs6000_output_function_epilogue (FILE *file,
a2369ed3 17507 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17508{
9ebbca7d 17509 if (! HAVE_epilogue)
9878760c 17510 {
17511 rtx insn = get_last_insn ();
17512 /* If the last insn was a BARRIER, we don't have to write anything except
17513 the trace table. */
17514 if (GET_CODE (insn) == NOTE)
17515 insn = prev_nonnote_insn (insn);
17516 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17517 {
17518 /* This is slightly ugly, but at least we don't have two
17519 copies of the epilogue-emitting code. */
17520 start_sequence ();
17521
17522 /* A NOTE_INSN_DELETED is supposed to be at the start
17523 and end of the "toplevel" insn chain. */
2e040219 17524 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17525 rs6000_emit_epilogue (FALSE);
2e040219 17526 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17527
a3c9585f 17528 /* Expand INSN_ADDRESSES so final() doesn't crash. */
17529 {
17530 rtx insn;
17531 unsigned addr = 0;
17532 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17533 {
17534 INSN_ADDRESSES_NEW (insn, addr);
17535 addr += 4;
17536 }
17537 }
17538
9ebbca7d 17539 if (TARGET_DEBUG_STACK)
a4f6c312 17540 debug_rtx_list (get_insns (), 100);
c9d691e9 17541 final (get_insns (), file, FALSE);
9ebbca7d 17542 end_sequence ();
4697a36c 17543 }
9878760c 17544 }
b4ac57ab 17545
17546#if TARGET_MACHO
17547 macho_branch_islands ();
17548 /* Mach-O doesn't support labels at the end of objects, so if
17549 it looks like we might want one, insert a NOP. */
17550 {
17551 rtx insn = get_last_insn ();
17552 while (insn
17553 && NOTE_P (insn)
a38e7aa5 17554 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17555 insn = PREV_INSN (insn);
17556 if (insn
17557 && (LABEL_P (insn)
0e5da0be 17558 || (NOTE_P (insn)
a38e7aa5 17559 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
17560 fputs ("\tnop\n", file);
17561 }
17562#endif
17563
9b30bae2 17564 /* Output a traceback table here. See /usr/include/sys/debug.h for info
17565 on its format.
17566
17567 We don't output a traceback table if -finhibit-size-directive was
17568 used. The documentation for -finhibit-size-directive reads
17569 ``don't output a @code{.size} assembler directive, or anything
17570 else that would cause trouble if the function is split in the
17571 middle, and the two halves are placed at locations far apart in
17572 memory.'' The traceback table has this property, since it
17573 includes the offset from the start of the function to the
17574 traceback table itself.
17575
17576 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 17577 different traceback table. */
57ac7be9 17578 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
3c072c6b 17579 && rs6000_traceback != traceback_none && !cfun->is_thunk)
9b30bae2 17580 {
69c75916 17581 const char *fname = NULL;
3ac88239 17582 const char *language_string = lang_hooks.name;
6041bf2f 17583 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17584 int i;
57ac7be9 17585 int optional_tbtab;
8097c268 17586 rs6000_stack_t *info = rs6000_stack_info ();
17587
17588 if (rs6000_traceback == traceback_full)
17589 optional_tbtab = 1;
17590 else if (rs6000_traceback == traceback_part)
17591 optional_tbtab = 0;
17592 else
17593 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17594
17595 if (optional_tbtab)
17596 {
17597 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17598 while (*fname == '.') /* V.4 encodes . in the name */
17599 fname++;
17600
17601 /* Need label immediately before tbtab, so we can compute
17602 its offset from the function start. */
17603 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17604 ASM_OUTPUT_LABEL (file, fname);
17605 }
17606
17607 /* The .tbtab pseudo-op can only be used for the first eight
17608 expressions, since it can't handle the possibly variable
17609 length fields that follow. However, if you omit the optional
17610 fields, the assembler outputs zeros for all optional fields
17611 anyway, giving each variable-length field its minimum length
17612 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17613 pseudo-op at all. */
17614
17615 /* An all-zero word flags the start of the tbtab, for debuggers
17616 that have to find it by searching forward from the entry
17617 point or from the current pc. */
19d2d16f 17618 fputs ("\t.long 0\n", file);
17619
17620 /* Tbtab format type. Use format type 0. */
19d2d16f 17621 fputs ("\t.byte 0,", file);
314fc5a9 17622
17623 /* Language type. Unfortunately, there does not seem to be any
17624 official way to discover the language being compiled, so we
17625 use language_string.
17626 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
17627 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17628 a number, so for now use 9. */
5fc921c1 17629 if (! strcmp (language_string, "GNU C"))
314fc5a9 17630 i = 0;
6de9cd9a 17631 else if (! strcmp (language_string, "GNU F77")
7f62878c 17632 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17633 i = 1;
8b83775b 17634 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17635 i = 2;
17636 else if (! strcmp (language_string, "GNU Ada"))
17637 i = 3;
17638 else if (! strcmp (language_string, "GNU C++")
17639 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17640 i = 9;
17641 else if (! strcmp (language_string, "GNU Java"))
17642 i = 13;
17643 else if (! strcmp (language_string, "GNU Objective-C"))
17644 i = 14;
314fc5a9 17645 else
37409796 17646 gcc_unreachable ();
17647 fprintf (file, "%d,", i);
17648
17649 /* 8 single bit fields: global linkage (not set for C extern linkage,
17650 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17651 from start of procedure stored in tbtab, internal function, function
17652 has controlled storage, function has no toc, function uses fp,
17653 function logs/aborts fp operations. */
17654 /* Assume that fp operations are used if any fp reg must be saved. */
17655 fprintf (file, "%d,",
17656 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
17657
17658 /* 6 bitfields: function is interrupt handler, name present in
17659 proc table, function calls alloca, on condition directives
17660 (controls stack walks, 3 bits), saves condition reg, saves
17661 link reg. */
17662 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17663 set up as a frame pointer, even when there is no alloca call. */
17664 fprintf (file, "%d,",
17665 ((optional_tbtab << 6)
17666 | ((optional_tbtab & frame_pointer_needed) << 5)
17667 | (info->cr_save_p << 1)
17668 | (info->lr_save_p)));
314fc5a9 17669
6041bf2f 17670 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
17671 (6 bits). */
17672 fprintf (file, "%d,",
4697a36c 17673 (info->push_p << 7) | (64 - info->first_fp_reg_save));
17674
17675 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17676 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17677
17678 if (optional_tbtab)
17679 {
17680 /* Compute the parameter info from the function decl argument
17681 list. */
17682 tree decl;
17683 int next_parm_info_bit = 31;
314fc5a9 17684
17685 for (decl = DECL_ARGUMENTS (current_function_decl);
17686 decl; decl = TREE_CHAIN (decl))
17687 {
17688 rtx parameter = DECL_INCOMING_RTL (decl);
17689 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17690
17691 if (GET_CODE (parameter) == REG)
17692 {
ebb109ad 17693 if (SCALAR_FLOAT_MODE_P (mode))
17694 {
17695 int bits;
17696
17697 float_parms++;
17698
17699 switch (mode)
17700 {
17701 case SFmode:
e41b2a33 17702 case SDmode:
17703 bits = 0x2;
17704 break;
17705
17706 case DFmode:
7393f7f8 17707 case DDmode:
37409796 17708 case TFmode:
7393f7f8 17709 case TDmode:
17710 bits = 0x3;
17711 break;
17712
17713 default:
17714 gcc_unreachable ();
17715 }
17716
17717 /* If only one bit will fit, don't or in this entry. */
17718 if (next_parm_info_bit > 0)
17719 parm_info |= (bits << (next_parm_info_bit - 1));
17720 next_parm_info_bit -= 2;
17721 }
17722 else
17723 {
17724 fixed_parms += ((GET_MODE_SIZE (mode)
17725 + (UNITS_PER_WORD - 1))
17726 / UNITS_PER_WORD);
17727 next_parm_info_bit -= 1;
17728 }
17729 }
17730 }
17731 }
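	  /* A worked example of the encoding above, for a hypothetical
	     f (int a, double b) whose arguments both arrive in registers:
	     the int leaves bit 31 at 0 (fixed) and the double stores 0b11
	     into bits 30-29, so parm_info becomes 0x60000000 while
	     fixed_parms == 1 and float_parms == 1.  */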
17732
17733 /* Number of fixed point parameters. */
17734 /* This is actually the number of words of fixed point parameters; thus
17735 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17736 fprintf (file, "%d,", fixed_parms);
17737
17738 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17739 all on stack. */
17740 /* This is actually the number of fp registers that hold parameters;
17741 and thus the maximum value is 13. */
17742 /* Set parameters on stack bit if parameters are not in their original
17743 registers, regardless of whether they are on the stack? Xlc
17744 seems to set the bit when not optimizing. */
17745 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17746
17747 if (! optional_tbtab)
17748 return;
17749
17750 /* Optional fields follow. Some are variable length. */
17751
17752 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17753 11 double float. */
17754 /* There is an entry for each parameter in a register, in the order that
17755 they occur in the parameter list. Any intervening arguments on the
17756 stack are ignored. If the list overflows a long (max possible length
17757 34 bits) then completely leave off all elements that don't fit. */
17758 /* Only emit this long if there was at least one parameter. */
17759 if (fixed_parms || float_parms)
17760 fprintf (file, "\t.long %d\n", parm_info);
17761
17762 /* Offset from start of code to tb table. */
19d2d16f 17763 fputs ("\t.long ", file);
314fc5a9 17764 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17765 if (TARGET_AIX)
17766 RS6000_OUTPUT_BASENAME (file, fname);
17767 else
17768 assemble_name (file, fname);
17769 putc ('-', file);
17770 rs6000_output_function_entry (file, fname);
19d2d16f 17771 putc ('\n', file);
17772
17773 /* Interrupt handler mask. */
17774 /* Omit this long, since we never set the interrupt handler bit
17775 above. */
17776
17777 /* Number of CTL (controlled storage) anchors. */
17778 /* Omit this long, since the has_ctl bit is never set above. */
17779
17780 /* Displacement into stack of each CTL anchor. */
17781 /* Omit this list of longs, because there are no CTL anchors. */
17782
17783 /* Length of function name. */
17784 if (*fname == '*')
17785 ++fname;
296b8152 17786 fprintf (file, "\t.short %d\n", (int) strlen (fname));
17787
17788 /* Function name. */
17789 assemble_string (fname, strlen (fname));
17790
17791 /* Register for alloca automatic storage; this is always reg 31.
17792 Only emit this if the alloca bit was set above. */
17793 if (frame_pointer_needed)
19d2d16f 17794 fputs ("\t.byte 31\n", file);
17795
17796 fputs ("\t.align 2\n", file);
9b30bae2 17797 }
9878760c 17798}
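/* For reference, the directives written above amount to roughly: an internal
   "LT" label, ".long 0" as the table marker, ".byte 0," plus the language
   code and several flag/count bytes, and then -- only when optional_tbtab --
   ".long parm_info" (if there were any parameters), the offset of the "LT"
   label from the function entry, ".short <name length>", the function name,
   ".byte 31" when r31 is used as a frame pointer, and finally ".align 2".  */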
17167fd8 17799\f
17800/* A C compound statement that outputs the assembler code for a thunk
17801 function, used to implement C++ virtual function calls with
17802 multiple inheritance. The thunk acts as a wrapper around a virtual
17803 function, adjusting the implicit object parameter before handing
17804 control off to the real function.
17805
17806 First, emit code to add the integer DELTA to the location that
17807 contains the incoming first argument. Assume that this argument
17808 contains a pointer, and is the one used to pass the `this' pointer
17809 in C++. This is the incoming argument *before* the function
17810 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17811 values of all other incoming arguments.
17812
17813 After the addition, emit code to jump to FUNCTION, which is a
17814 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17815 not touch the return address. Hence returning from FUNCTION will
17816 return to whoever called the current `thunk'.
17167fd8 17817
17818 The effect must be as if FUNCTION had been called directly with the
17819 adjusted first argument. This macro is responsible for emitting
17820 all of the code for a thunk function; output_function_prologue()
17821 and output_function_epilogue() are not invoked.
17167fd8 17822
17823 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17824 been extracted from it.) It might possibly be useful on some
17825 targets, but probably not.
17167fd8 17826
17827 If you do not define this macro, the target-independent code in the
17828 C++ frontend will generate a less efficient heavyweight thunk that
17829 calls FUNCTION instead of jumping to it. The generic approach does
17830 not support varargs. */
17167fd8 17831
3961e8fe 17832static void
17833rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17834 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17835 tree function)
17167fd8 17836{
0a2aaacc 17837 rtx this_rtx, insn, funexp;
17167fd8 17838
5b71a4e7 17839 reload_completed = 1;
fe3ad572 17840 epilogue_completed = 1;
56a7189a 17841
5b71a4e7 17842 /* Mark the end of the (empty) prologue. */
2e040219 17843 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17844
17845 /* Find the "this" pointer. If the function returns a structure,
17846 the structure return pointer is in r3. */
61f71b34 17847 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17848 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17849 else
0a2aaacc 17850 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17851
17852 /* Apply the constant offset, if required. */
17853 if (delta)
17854 {
17855 rtx delta_rtx = GEN_INT (delta);
17856 emit_insn (TARGET_32BIT
17857 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17858 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17859 }
17860
17861 /* Apply the offset from the vtable, if required. */
17862 if (vcall_offset)
17167fd8 17863 {
17864 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17865 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17866
0a2aaacc 17867 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
17868 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17869 {
17870 emit_insn (TARGET_32BIT
17871 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17872 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17873 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17874 }
17875 else
17876 {
17877 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17878
17879 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17880 }
5b71a4e7 17881 emit_insn (TARGET_32BIT
17882 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17883 : gen_adddi3 (this_rtx, this_rtx, tmp));
17884 }
17885
17886 /* Generate a tail call to the target function. */
17887 if (!TREE_USED (function))
17888 {
17889 assemble_external (function);
17890 TREE_USED (function) = 1;
17891 }
17892 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17893 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
17894
17895#if TARGET_MACHO
ab82a49f 17896 if (MACHOPIC_INDIRECT)
5b71a4e7 17897 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17898#endif
17899
17900 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17901 generate sibcall RTL explicitly. */
17902 insn = emit_call_insn (
17903 gen_rtx_PARALLEL (VOIDmode,
17904 gen_rtvec (4,
17905 gen_rtx_CALL (VOIDmode,
17906 funexp, const0_rtx),
17907 gen_rtx_USE (VOIDmode, const0_rtx),
17908 gen_rtx_USE (VOIDmode,
17909 gen_rtx_REG (SImode,
1de43f85 17910 LR_REGNO)),
5b71a4e7
DE
17911 gen_rtx_RETURN (VOIDmode))));
17912 SIBLING_CALL_P (insn) = 1;
17913 emit_barrier ();
17914
17915 /* Run just enough of rest_of_compilation to get the insns emitted.
17916 There's not really enough bulk here to make other passes such as
 17917 instruction scheduling worthwhile. Note that use_thunk calls
17918 assemble_start_function and assemble_end_function. */
17919 insn = get_insns ();
55e092c4 17920 insn_locators_alloc ();
5b71a4e7
DE
17921 shorten_branches (insn);
17922 final_start_function (insn, file, 1);
c9d691e9 17923 final (insn, file, 1);
5b71a4e7 17924 final_end_function ();
d7087dd2 17925 free_after_compilation (cfun);
5b71a4e7
DE
17926
17927 reload_completed = 0;
fe3ad572 17928 epilogue_completed = 0;
9ebbca7d 17929}
9ebbca7d
GK
17930\f
17931/* A quick summary of the various types of 'constant-pool tables'
17932 under PowerPC:
17933
f676971a 17934 Target Flags Name One table per
9ebbca7d
GK
17935 AIX (none) AIX TOC object file
17936 AIX -mfull-toc AIX TOC object file
17937 AIX -mminimal-toc AIX minimal TOC translation unit
17938 SVR4/EABI (none) SVR4 SDATA object file
17939 SVR4/EABI -fpic SVR4 pic object file
17940 SVR4/EABI -fPIC SVR4 PIC translation unit
17941 SVR4/EABI -mrelocatable EABI TOC function
17942 SVR4/EABI -maix AIX TOC object file
f676971a 17943 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17944 AIX minimal TOC translation unit
17945
17946 Name Reg. Set by entries contains:
17947 made by addrs? fp? sum?
17948
17949 AIX TOC 2 crt0 as Y option option
17950 AIX minimal TOC 30 prolog gcc Y Y option
17951 SVR4 SDATA 13 crt0 gcc N Y N
17952 SVR4 pic 30 prolog ld Y not yet N
17953 SVR4 PIC 30 prolog gcc Y option option
17954 EABI TOC 30 prolog gcc Y option option
17955
17956*/
17957
9ebbca7d
GK
17958/* Hash functions for the hash table. */
17959
17960static unsigned
a2369ed3 17961rs6000_hash_constant (rtx k)
9ebbca7d 17962{
46b33600
RH
17963 enum rtx_code code = GET_CODE (k);
17964 enum machine_mode mode = GET_MODE (k);
17965 unsigned result = (code << 3) ^ mode;
17966 const char *format;
17967 int flen, fidx;
f676971a 17968
46b33600
RH
17969 format = GET_RTX_FORMAT (code);
17970 flen = strlen (format);
17971 fidx = 0;
9ebbca7d 17972
46b33600
RH
17973 switch (code)
17974 {
17975 case LABEL_REF:
17976 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17977
17978 case CONST_DOUBLE:
17979 if (mode != VOIDmode)
17980 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17981 flen = 2;
17982 break;
17983
17984 case CODE_LABEL:
17985 fidx = 3;
17986 break;
17987
17988 default:
17989 break;
17990 }
9ebbca7d
GK
17991
17992 for (; fidx < flen; fidx++)
17993 switch (format[fidx])
17994 {
17995 case 's':
17996 {
17997 unsigned i, len;
17998 const char *str = XSTR (k, fidx);
17999 len = strlen (str);
18000 result = result * 613 + len;
18001 for (i = 0; i < len; i++)
18002 result = result * 613 + (unsigned) str[i];
17167fd8
MM
18003 break;
18004 }
9ebbca7d
GK
18005 case 'u':
18006 case 'e':
18007 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
18008 break;
18009 case 'i':
18010 case 'n':
18011 result = result * 613 + (unsigned) XINT (k, fidx);
18012 break;
18013 case 'w':
18014 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
18015 result = result * 613 + (unsigned) XWINT (k, fidx);
18016 else
18017 {
18018 size_t i;
9390387d 18019 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
18020 result = result * 613 + (unsigned) (XWINT (k, fidx)
18021 >> CHAR_BIT * i);
18022 }
18023 break;
09501938
DE
18024 case '0':
18025 break;
9ebbca7d 18026 default:
37409796 18027 gcc_unreachable ();
9ebbca7d 18028 }
46b33600 18029
9ebbca7d
GK
18030 return result;
18031}
18032
18033static unsigned
a2369ed3 18034toc_hash_function (const void *hash_entry)
9ebbca7d 18035{
f676971a 18036 const struct toc_hash_struct *thc =
a9098fd0
GK
18037 (const struct toc_hash_struct *) hash_entry;
18038 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18039}
18040
18041/* Compare H1 and H2 for equivalence. */
18042
18043static int
a2369ed3 18044toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18045{
18046 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18047 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18048
a9098fd0
GK
18049 if (((const struct toc_hash_struct *) h1)->key_mode
18050 != ((const struct toc_hash_struct *) h2)->key_mode)
18051 return 0;
18052
5692c7bc 18053 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18054}
18055
28e510bd
MM
18056/* These are the names given by the C++ front-end to vtables, and
18057 vtable-like objects. Ideally, this logic should not be here;
18058 instead, there should be some programmatic way of inquiring as
18059 to whether or not an object is a vtable. */
18060
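/* Note: the expansion below tests the enclosing function's local
   variable `name', not the NAME macro argument itself.  */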
18061#define VTABLE_NAME_P(NAME) \
9390387d 18062 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18063 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18064 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18065 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18066 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd 18067
ee06c6a5
DE
18068#ifdef NO_DOLLAR_IN_LABEL
18069/* Return a GGC-allocated character string translating dollar signs in
18070 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
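/* For example, "foo$bar" becomes "foo_bar"; names with no '$', or with
   '$' as the first character, are returned unchanged.  */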
18071
18072const char *
18073rs6000_xcoff_strip_dollar (const char *name)
18074{
18075 char *strip, *p;
18076 int len;
18077
18078 p = strchr (name, '$');
18079
18080 if (p == 0 || p == name)
18081 return name;
18082
18083 len = strlen (name);
18084 strip = (char *) alloca (len + 1);
18085 strcpy (strip, name);
18086 p = strchr (strip, '$');
18087 while (p)
18088 {
18089 *p = '_';
18090 p = strchr (p + 1, '$');
18091 }
18092
18093 return ggc_alloc_string (strip, len);
18094}
18095#endif
18096
28e510bd 18097void
a2369ed3 18098rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18099{
18100 /* Currently C++ toc references to vtables can be emitted before it
18101 is decided whether the vtable is public or private. If this is
18102 the case, then the linker will eventually complain that there is
f676971a 18103 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18104 we emit the TOC reference to reference the symbol and not the
18105 section. */
18106 const char *name = XSTR (x, 0);
54ee9799 18107
f676971a 18108 if (VTABLE_NAME_P (name))
54ee9799
DE
18109 {
18110 RS6000_OUTPUT_BASENAME (file, name);
18111 }
18112 else
18113 assemble_name (file, name);
28e510bd
MM
18114}
18115
a4f6c312
SS
18116/* Output a TOC entry. We derive the entry name from what is being
18117 written. */
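/* As an illustration only (the label syntax is target-dependent), a
   DFmode constant of 1.0 on a 64-bit TOC target without -mminimal-toc
   is emitted by the code below roughly as

	LC..42:
		.tc FD_3ff00000_0[TC],0x3ff0000000000000

   following the FD_%lx_%lx[TC] format used in the DFmode case.  */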
9878760c
RK
18118
18119void
a2369ed3 18120output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18121{
18122 char buf[256];
3cce094d 18123 const char *name = buf;
9878760c 18124 rtx base = x;
16fdeb48 18125 HOST_WIDE_INT offset = 0;
9878760c 18126
37409796 18127 gcc_assert (!TARGET_NO_TOC);
4697a36c 18128
9ebbca7d
GK
18129 /* When the linker won't eliminate them, don't output duplicate
18130 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18131 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18132 CODE_LABELs. */
18133 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18134 {
18135 struct toc_hash_struct *h;
18136 void * * found;
f676971a 18137
17211ab5 18138 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18139 time because GGC is not initialized at that point. */
17211ab5 18140 if (toc_hash_table == NULL)
f676971a 18141 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18142 toc_hash_eq, NULL);
18143
5ead67f6 18144 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18145 h->key = x;
a9098fd0 18146 h->key_mode = mode;
9ebbca7d 18147 h->labelno = labelno;
f676971a 18148
9ebbca7d
GK
18149 found = htab_find_slot (toc_hash_table, h, 1);
18150 if (*found == NULL)
18151 *found = h;
f676971a 18152 else /* This is indeed a duplicate.
9ebbca7d
GK
18153 Set this label equal to that label. */
18154 {
18155 fputs ("\t.set ", file);
18156 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18157 fprintf (file, "%d,", labelno);
18158 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18159 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18160 found)->labelno));
18161 return;
18162 }
18163 }
18164
18165 /* If we're going to put a double constant in the TOC, make sure it's
18166 aligned properly when strict alignment is on. */
ff1720ed
RK
18167 if (GET_CODE (x) == CONST_DOUBLE
18168 && STRICT_ALIGNMENT
a9098fd0 18169 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18170 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18171 ASM_OUTPUT_ALIGN (file, 3);
18172 }
18173
4977bab6 18174 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18175
37c37a57
RK
18176 /* Handle FP constants specially. Note that if we have a minimal
18177 TOC, things we put here aren't actually in the TOC, so we can allow
18178 FP constants. */
00b79d54
BE
18179 if (GET_CODE (x) == CONST_DOUBLE &&
18180 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18181 {
18182 REAL_VALUE_TYPE rv;
18183 long k[4];
18184
18185 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18186 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18187 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18188 else
18189 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18190
18191 if (TARGET_64BIT)
18192 {
18193 if (TARGET_MINIMAL_TOC)
18194 fputs (DOUBLE_INT_ASM_OP, file);
18195 else
18196 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18197 k[0] & 0xffffffff, k[1] & 0xffffffff,
18198 k[2] & 0xffffffff, k[3] & 0xffffffff);
18199 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18200 k[0] & 0xffffffff, k[1] & 0xffffffff,
18201 k[2] & 0xffffffff, k[3] & 0xffffffff);
18202 return;
18203 }
18204 else
18205 {
18206 if (TARGET_MINIMAL_TOC)
18207 fputs ("\t.long ", file);
18208 else
18209 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18210 k[0] & 0xffffffff, k[1] & 0xffffffff,
18211 k[2] & 0xffffffff, k[3] & 0xffffffff);
18212 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18213 k[0] & 0xffffffff, k[1] & 0xffffffff,
18214 k[2] & 0xffffffff, k[3] & 0xffffffff);
18215 return;
18216 }
18217 }
00b79d54
BE
18218 else if (GET_CODE (x) == CONST_DOUBLE &&
18219 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18220 {
042259f2
DE
18221 REAL_VALUE_TYPE rv;
18222 long k[2];
0adc764e 18223
042259f2 18224 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18225
18226 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18227 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18228 else
18229 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18230
13ded975
DE
18231 if (TARGET_64BIT)
18232 {
18233 if (TARGET_MINIMAL_TOC)
2bfcf297 18234 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18235 else
2f0552b6
AM
18236 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18237 k[0] & 0xffffffff, k[1] & 0xffffffff);
18238 fprintf (file, "0x%lx%08lx\n",
18239 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18240 return;
18241 }
1875cc88 18242 else
13ded975
DE
18243 {
18244 if (TARGET_MINIMAL_TOC)
2bfcf297 18245 fputs ("\t.long ", file);
13ded975 18246 else
2f0552b6
AM
18247 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18248 k[0] & 0xffffffff, k[1] & 0xffffffff);
18249 fprintf (file, "0x%lx,0x%lx\n",
18250 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18251 return;
18252 }
9878760c 18253 }
00b79d54
BE
18254 else if (GET_CODE (x) == CONST_DOUBLE &&
18255 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18256 {
042259f2
DE
18257 REAL_VALUE_TYPE rv;
18258 long l;
9878760c 18259
042259f2 18260 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18261 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18262 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18263 else
18264 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18265
31bfaa0b
DE
18266 if (TARGET_64BIT)
18267 {
18268 if (TARGET_MINIMAL_TOC)
2bfcf297 18269 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18270 else
2f0552b6
AM
18271 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18272 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18273 return;
18274 }
042259f2 18275 else
31bfaa0b
DE
18276 {
18277 if (TARGET_MINIMAL_TOC)
2bfcf297 18278 fputs ("\t.long ", file);
31bfaa0b 18279 else
2f0552b6
AM
18280 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18281 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18282 return;
18283 }
042259f2 18284 }
f176e826 18285 else if (GET_MODE (x) == VOIDmode
a9098fd0 18286 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18287 {
e2c953b6 18288 unsigned HOST_WIDE_INT low;
042259f2
DE
18289 HOST_WIDE_INT high;
18290
18291 if (GET_CODE (x) == CONST_DOUBLE)
18292 {
18293 low = CONST_DOUBLE_LOW (x);
18294 high = CONST_DOUBLE_HIGH (x);
18295 }
18296 else
18297#if HOST_BITS_PER_WIDE_INT == 32
18298 {
18299 low = INTVAL (x);
0858c623 18300 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18301 }
18302#else
18303 {
c4ad648e
AM
18304 low = INTVAL (x) & 0xffffffff;
18305 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18306 }
18307#endif
9878760c 18308
a9098fd0
GK
 18309 /* TOC entries are always Pmode-sized, but since this
 18310 is a big-endian machine, smaller integer constants put
 18311 in the TOC have to be padded.
18312 (This is still a win over putting the constants in
18313 a separate constant pool, because then we'd have
02a4ec28
FS
18314 to have both a TOC entry _and_ the actual constant.)
18315
18316 For a 32-bit target, CONST_INT values are loaded and shifted
18317 entirely within `low' and can be stored in one TOC entry. */
18318
37409796
NS
18319 /* It would be easy to make this work, but it doesn't now. */
18320 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18321
18322 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18323 {
18324#if HOST_BITS_PER_WIDE_INT == 32
18325 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18326 POINTER_SIZE, &low, &high, 0);
18327#else
18328 low |= high << 32;
18329 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18330 high = (HOST_WIDE_INT) low >> 32;
18331 low &= 0xffffffff;
18332#endif
18333 }
a9098fd0 18334
13ded975
DE
18335 if (TARGET_64BIT)
18336 {
18337 if (TARGET_MINIMAL_TOC)
2bfcf297 18338 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18339 else
2f0552b6
AM
18340 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18341 (long) high & 0xffffffff, (long) low & 0xffffffff);
18342 fprintf (file, "0x%lx%08lx\n",
18343 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18344 return;
18345 }
1875cc88 18346 else
13ded975 18347 {
02a4ec28
FS
18348 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18349 {
18350 if (TARGET_MINIMAL_TOC)
2bfcf297 18351 fputs ("\t.long ", file);
02a4ec28 18352 else
2bfcf297 18353 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18354 (long) high & 0xffffffff, (long) low & 0xffffffff);
18355 fprintf (file, "0x%lx,0x%lx\n",
18356 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18357 }
13ded975 18358 else
02a4ec28
FS
18359 {
18360 if (TARGET_MINIMAL_TOC)
2bfcf297 18361 fputs ("\t.long ", file);
02a4ec28 18362 else
2f0552b6
AM
18363 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18364 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18365 }
13ded975
DE
18366 return;
18367 }
9878760c
RK
18368 }
18369
18370 if (GET_CODE (x) == CONST)
18371 {
37409796 18372 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18373
9878760c
RK
18374 base = XEXP (XEXP (x, 0), 0);
18375 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18376 }
f676971a 18377
37409796
NS
18378 switch (GET_CODE (base))
18379 {
18380 case SYMBOL_REF:
18381 name = XSTR (base, 0);
18382 break;
18383
18384 case LABEL_REF:
18385 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18386 CODE_LABEL_NUMBER (XEXP (base, 0)));
18387 break;
18388
18389 case CODE_LABEL:
18390 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18391 break;
18392
18393 default:
18394 gcc_unreachable ();
18395 }
9878760c 18396
1875cc88 18397 if (TARGET_MINIMAL_TOC)
2bfcf297 18398 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18399 else
18400 {
5773a50f
DE
18401 fputs ("\t.tc ", file);
18402 RS6000_OUTPUT_BASENAME (file, name);
9878760c 18403
1875cc88 18404 if (offset < 0)
16fdeb48 18405 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18406 else if (offset)
16fdeb48 18407 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18408
19d2d16f 18409 fputs ("[TC],", file);
1875cc88 18410 }
581bc4de
MM
18411
18412 /* Currently C++ toc references to vtables can be emitted before it
18413 is decided whether the vtable is public or private. If this is
18414 the case, then the linker will eventually complain that there is
18415 a TOC reference to an unknown section. Thus, for vtables only,
18416 we emit the TOC reference to reference the symbol and not the
18417 section. */
28e510bd 18418 if (VTABLE_NAME_P (name))
581bc4de 18419 {
54ee9799 18420 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18421 if (offset < 0)
16fdeb48 18422 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18423 else if (offset > 0)
16fdeb48 18424 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18425 }
18426 else
18427 output_addr_const (file, x);
19d2d16f 18428 putc ('\n', file);
9878760c
RK
18429}
18430\f
18431/* Output an assembler pseudo-op to write an ASCII string of N characters
18432 starting at P to FILE.
18433
18434 On the RS/6000, we have to do this using the .byte operation and
18435 write out special characters outside the quoted string.
18436 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18437 so we must artificially break them up early. */
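/* For example, output_ascii (file, "ok\n", 3) produces

	.byte "ok"
	.byte 10

   printable characters go into quoted runs; anything else is written
   as a decimal .byte value; and runs are closed after 512 characters
   to keep the assembler happy.  */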
9878760c
RK
18438
18439void
a2369ed3 18440output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18441{
18442 char c;
18443 int i, count_string;
d330fd93
KG
18444 const char *for_string = "\t.byte \"";
18445 const char *for_decimal = "\t.byte ";
18446 const char *to_close = NULL;
9878760c
RK
18447
18448 count_string = 0;
18449 for (i = 0; i < n; i++)
18450 {
18451 c = *p++;
18452 if (c >= ' ' && c < 0177)
18453 {
18454 if (for_string)
18455 fputs (for_string, file);
18456 putc (c, file);
18457
18458 /* Write two quotes to get one. */
18459 if (c == '"')
18460 {
18461 putc (c, file);
18462 ++count_string;
18463 }
18464
18465 for_string = NULL;
18466 for_decimal = "\"\n\t.byte ";
18467 to_close = "\"\n";
18468 ++count_string;
18469
18470 if (count_string >= 512)
18471 {
18472 fputs (to_close, file);
18473
18474 for_string = "\t.byte \"";
18475 for_decimal = "\t.byte ";
18476 to_close = NULL;
18477 count_string = 0;
18478 }
18479 }
18480 else
18481 {
18482 if (for_decimal)
18483 fputs (for_decimal, file);
18484 fprintf (file, "%d", c);
18485
18486 for_string = "\n\t.byte \"";
18487 for_decimal = ", ";
18488 to_close = "\n";
18489 count_string = 0;
18490 }
18491 }
18492
18493 /* Now close the string if we have written one. Then end the line. */
18494 if (to_close)
9ebbca7d 18495 fputs (to_close, file);
9878760c
RK
18496}
18497\f
18498/* Generate a unique section name for FILENAME for a section type
18499 represented by SECTION_DESC. Output goes into BUF.
18500
18501 SECTION_DESC can be any string, as long as it is different for each
18502 possible section type.
18503
18504 We name the section in the same manner as xlc. The name begins with an
18505 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18506 names) with the last period replaced by the string SECTION_DESC. If
18507 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18508 the name. */
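/* For example, FILENAME "dir/foo.c" together with SECTION_DESC ".rw_"
   yields the section name "_foo.rw_".  */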
9878760c
RK
18509
18510void
f676971a 18511rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18512 const char *section_desc)
9878760c 18513{
9ebbca7d 18514 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18515 char *p;
18516 int len;
9878760c
RK
18517
18518 after_last_slash = filename;
18519 for (q = filename; *q; q++)
11e5fe42
RK
18520 {
18521 if (*q == '/')
18522 after_last_slash = q + 1;
18523 else if (*q == '.')
18524 last_period = q;
18525 }
9878760c 18526
11e5fe42 18527 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18528 *buf = (char *) xmalloc (len);
9878760c
RK
18529
18530 p = *buf;
18531 *p++ = '_';
18532
18533 for (q = after_last_slash; *q; q++)
18534 {
11e5fe42 18535 if (q == last_period)
c4ad648e 18536 {
9878760c
RK
18537 strcpy (p, section_desc);
18538 p += strlen (section_desc);
e3981aab 18539 break;
c4ad648e 18540 }
9878760c 18541
e9a780ec 18542 else if (ISALNUM (*q))
c4ad648e 18543 *p++ = *q;
9878760c
RK
18544 }
18545
11e5fe42 18546 if (last_period == 0)
9878760c
RK
18547 strcpy (p, section_desc);
18548 else
18549 *p = '\0';
18550}
e165f3f0 18551\f
a4f6c312 18552/* Emit profile function. */
411707f4 18553
411707f4 18554void
a2369ed3 18555output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18556{
858081ad
AH
18557 /* Non-standard profiling for kernels, which just saves LR then calls
18558 _mcount without worrying about arg saves. The idea is to change
18559 the function prologue as little as possible as it isn't easy to
18560 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18561 if (TARGET_PROFILE_KERNEL)
18562 return;
18563
8480e480
CC
18564 if (DEFAULT_ABI == ABI_AIX)
18565 {
9739c90c
JJ
18566#ifndef NO_PROFILE_COUNTERS
18567# define NO_PROFILE_COUNTERS 0
18568#endif
f676971a 18569 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
18570 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18571 else
18572 {
18573 char buf[30];
18574 const char *label_name;
18575 rtx fun;
411707f4 18576
9739c90c
JJ
18577 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18578 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18579 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18580
9739c90c
JJ
18581 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18582 fun, Pmode);
18583 }
8480e480 18584 }
ee890fe2
SS
18585 else if (DEFAULT_ABI == ABI_DARWIN)
18586 {
d5fa86ba 18587 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18588 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18589
18590 /* Be conservative and always set this, at least for now. */
e3b5732b 18591 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18592
18593#if TARGET_MACHO
18594 /* For PIC code, set up a stub and collect the caller's address
18595 from r0, which is where the prologue puts it. */
11abc112 18596 if (MACHOPIC_INDIRECT
e3b5732b 18597 && crtl->uses_pic_offset_table)
11abc112 18598 caller_addr_regno = 0;
ee890fe2
SS
18599#endif
18600 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18601 0, VOIDmode, 1,
18602 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18603 }
411707f4
CC
18604}
18605
a4f6c312 18606/* Write function profiler code. */
e165f3f0
RK
18607
18608void
a2369ed3 18609output_function_profiler (FILE *file, int labelno)
e165f3f0 18610{
3daf36a4 18611 char buf[100];
e165f3f0 18612
38c1f2d7 18613 switch (DEFAULT_ABI)
3daf36a4 18614 {
38c1f2d7 18615 default:
37409796 18616 gcc_unreachable ();
38c1f2d7
MM
18617
18618 case ABI_V4:
09eeeacb
AM
18619 if (!TARGET_32BIT)
18620 {
d4ee4d25 18621 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18622 return;
18623 }
ffcfcb5f 18624 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18625 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18626 if (NO_PROFILE_COUNTERS)
18627 {
18628 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18629 reg_names[0], reg_names[1]);
18630 }
18631 else if (TARGET_SECURE_PLT && flag_pic)
18632 {
18633 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18634 reg_names[0], reg_names[1]);
18635 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18636 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18637 reg_names[12], reg_names[12]);
18638 assemble_name (file, buf);
18639 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18640 assemble_name (file, buf);
18641 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18642 }
18643 else if (flag_pic == 1)
38c1f2d7 18644 {
dfdfa60f 18645 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18646 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18647 reg_names[0], reg_names[1]);
17167fd8 18648 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18649 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18650 assemble_name (file, buf);
17167fd8 18651 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18652 }
9ebbca7d 18653 else if (flag_pic > 1)
38c1f2d7 18654 {
71625f3d
AM
18655 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18656 reg_names[0], reg_names[1]);
9ebbca7d 18657 /* Now, we need to get the address of the label. */
71625f3d 18658 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18659 assemble_name (file, buf);
9ebbca7d
GK
18660 fputs ("-.\n1:", file);
18661 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18662 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18663 reg_names[0], reg_names[11]);
18664 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18665 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18666 }
38c1f2d7
MM
18667 else
18668 {
17167fd8 18669 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18670 assemble_name (file, buf);
dfdfa60f 18671 fputs ("@ha\n", file);
71625f3d
AM
18672 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18673 reg_names[0], reg_names[1]);
a260abc9 18674 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18675 assemble_name (file, buf);
17167fd8 18676 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18677 }
18678
50d440bc 18679 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18680 fprintf (file, "\tbl %s%s\n",
18681 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18682 break;
18683
18684 case ABI_AIX:
ee890fe2 18685 case ABI_DARWIN:
ffcfcb5f
AM
18686 if (!TARGET_PROFILE_KERNEL)
18687 {
a3c9585f 18688 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18689 }
18690 else
18691 {
37409796 18692 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18693
18694 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18695 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18696
6de9cd9a 18697 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18698 {
18699 asm_fprintf (file, "\tstd %s,24(%s)\n",
18700 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18701 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18702 asm_fprintf (file, "\tld %s,24(%s)\n",
18703 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18704 }
18705 else
18706 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18707 }
38c1f2d7
MM
18708 break;
18709 }
e165f3f0 18710}
a251ffd0 18711
b54cf83a 18712\f
44cd321e
PS
18713
 18714/* The following variable holds the last issued insn. */
18715
18716static rtx last_scheduled_insn;
18717
18718/* The following variable helps to balance issuing of load and
 18719 store instructions.  */
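/* Negative values mean stores have been issued in the current cycle,
   positive values mean loads; see rs6000_sched_reorder2.  */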
18720
18721static int load_store_pendulum;
18722
b54cf83a
DE
18723/* Power4 load update and store update instructions are cracked into a
18724 load or store and an integer insn which are executed in the same cycle.
18725 Branches have their own dispatch slot which does not count against the
18726 GCC issue rate, but it changes the program flow so there are no other
18727 instructions to issue in this cycle. */
18728
18729static int
f676971a
EC
18730rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18731 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18732 rtx insn, int more)
b54cf83a 18733{
44cd321e 18734 last_scheduled_insn = insn;
b54cf83a
DE
18735 if (GET_CODE (PATTERN (insn)) == USE
18736 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18737 {
18738 cached_can_issue_more = more;
18739 return cached_can_issue_more;
18740 }
18741
18742 if (insn_terminates_group_p (insn, current_group))
18743 {
18744 cached_can_issue_more = 0;
18745 return cached_can_issue_more;
18746 }
b54cf83a 18747
d296e02e
AP
 18748 /* If the insn has no reservation but we reach here anyway, leave the issue count unchanged. */
18749 if (recog_memoized (insn) < 0)
18750 return more;
18751
ec507f2d 18752 if (rs6000_sched_groups)
b54cf83a 18753 {
cbe26ab8 18754 if (is_microcoded_insn (insn))
44cd321e 18755 cached_can_issue_more = 0;
cbe26ab8 18756 else if (is_cracked_insn (insn))
44cd321e
PS
18757 cached_can_issue_more = more > 2 ? more - 2 : 0;
18758 else
18759 cached_can_issue_more = more - 1;
18760
18761 return cached_can_issue_more;
b54cf83a 18762 }
165b263e 18763
d296e02e
AP
18764 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18765 return 0;
18766
44cd321e
PS
18767 cached_can_issue_more = more - 1;
18768 return cached_can_issue_more;
b54cf83a
DE
18769}
18770
a251ffd0
TG
18771/* Adjust the cost of a scheduling dependency. Return the new cost of
18772 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
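/* For example, in the POWER6 TYPE_STORE handling below, a store whose
   dependence on a preceding integer insn is through the address (so the
   store-data bypass does not apply) has its cost raised to 3 cycles.  */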
18773
c237e94a 18774static int
0a4f0294 18775rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18776{
44cd321e 18777 enum attr_type attr_type;
a251ffd0 18778
44cd321e 18779 if (! recog_memoized (insn))
a251ffd0
TG
18780 return 0;
18781
44cd321e 18782 switch (REG_NOTE_KIND (link))
a251ffd0 18783 {
44cd321e
PS
18784 case REG_DEP_TRUE:
18785 {
18786 /* Data dependency; DEP_INSN writes a register that INSN reads
18787 some cycles later. */
18788
18789 /* Separate a load from a narrower, dependent store. */
18790 if (rs6000_sched_groups
18791 && GET_CODE (PATTERN (insn)) == SET
18792 && GET_CODE (PATTERN (dep_insn)) == SET
18793 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18794 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18795 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18796 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18797 return cost + 14;
18798
18799 attr_type = get_attr_type (insn);
18800
18801 switch (attr_type)
18802 {
18803 case TYPE_JMPREG:
18804 /* Tell the first scheduling pass about the latency between
18805 a mtctr and bctr (and mtlr and br/blr). The first
18806 scheduling pass will not know about this latency since
18807 the mtctr instruction, which has the latency associated
 18808 with it, will be generated by reload. */
18809 return TARGET_POWER ? 5 : 4;
18810 case TYPE_BRANCH:
18811 /* Leave some extra cycles between a compare and its
18812 dependent branch, to inhibit expensive mispredicts. */
18813 if ((rs6000_cpu_attr == CPU_PPC603
18814 || rs6000_cpu_attr == CPU_PPC604
18815 || rs6000_cpu_attr == CPU_PPC604E
18816 || rs6000_cpu_attr == CPU_PPC620
18817 || rs6000_cpu_attr == CPU_PPC630
18818 || rs6000_cpu_attr == CPU_PPC750
18819 || rs6000_cpu_attr == CPU_PPC7400
18820 || rs6000_cpu_attr == CPU_PPC7450
18821 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18822 || rs6000_cpu_attr == CPU_POWER5
18823 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18824 && recog_memoized (dep_insn)
18825 && (INSN_CODE (dep_insn) >= 0))
982afe02 18826
44cd321e
PS
18827 switch (get_attr_type (dep_insn))
18828 {
18829 case TYPE_CMP:
18830 case TYPE_COMPARE:
18831 case TYPE_DELAYED_COMPARE:
18832 case TYPE_IMUL_COMPARE:
18833 case TYPE_LMUL_COMPARE:
18834 case TYPE_FPCOMPARE:
18835 case TYPE_CR_LOGICAL:
18836 case TYPE_DELAYED_CR:
18837 return cost + 2;
18838 default:
18839 break;
18840 }
18841 break;
18842
18843 case TYPE_STORE:
18844 case TYPE_STORE_U:
18845 case TYPE_STORE_UX:
18846 case TYPE_FPSTORE:
18847 case TYPE_FPSTORE_U:
18848 case TYPE_FPSTORE_UX:
18849 if ((rs6000_cpu == PROCESSOR_POWER6)
18850 && recog_memoized (dep_insn)
18851 && (INSN_CODE (dep_insn) >= 0))
18852 {
18853
18854 if (GET_CODE (PATTERN (insn)) != SET)
18855 /* If this happens, we have to extend this to schedule
18856 optimally. Return default for now. */
18857 return cost;
18858
18859 /* Adjust the cost for the case where the value written
18860 by a fixed point operation is used as the address
18861 gen value on a store. */
18862 switch (get_attr_type (dep_insn))
18863 {
18864 case TYPE_LOAD:
18865 case TYPE_LOAD_U:
18866 case TYPE_LOAD_UX:
18867 case TYPE_CNTLZ:
18868 {
18869 if (! store_data_bypass_p (dep_insn, insn))
18870 return 4;
18871 break;
18872 }
18873 case TYPE_LOAD_EXT:
18874 case TYPE_LOAD_EXT_U:
18875 case TYPE_LOAD_EXT_UX:
18876 case TYPE_VAR_SHIFT_ROTATE:
18877 case TYPE_VAR_DELAYED_COMPARE:
18878 {
18879 if (! store_data_bypass_p (dep_insn, insn))
18880 return 6;
18881 break;
18882 }
18883 case TYPE_INTEGER:
18884 case TYPE_COMPARE:
18885 case TYPE_FAST_COMPARE:
18886 case TYPE_EXTS:
18887 case TYPE_SHIFT:
18888 case TYPE_INSERT_WORD:
18889 case TYPE_INSERT_DWORD:
18890 case TYPE_FPLOAD_U:
18891 case TYPE_FPLOAD_UX:
18892 case TYPE_STORE_U:
18893 case TYPE_STORE_UX:
18894 case TYPE_FPSTORE_U:
18895 case TYPE_FPSTORE_UX:
18896 {
18897 if (! store_data_bypass_p (dep_insn, insn))
18898 return 3;
18899 break;
18900 }
18901 case TYPE_IMUL:
18902 case TYPE_IMUL2:
18903 case TYPE_IMUL3:
18904 case TYPE_LMUL:
18905 case TYPE_IMUL_COMPARE:
18906 case TYPE_LMUL_COMPARE:
18907 {
18908 if (! store_data_bypass_p (dep_insn, insn))
18909 return 17;
18910 break;
18911 }
18912 case TYPE_IDIV:
18913 {
18914 if (! store_data_bypass_p (dep_insn, insn))
18915 return 45;
18916 break;
18917 }
18918 case TYPE_LDIV:
18919 {
18920 if (! store_data_bypass_p (dep_insn, insn))
18921 return 57;
18922 break;
18923 }
18924 default:
18925 break;
18926 }
18927 }
18928 break;
18929
18930 case TYPE_LOAD:
18931 case TYPE_LOAD_U:
18932 case TYPE_LOAD_UX:
18933 case TYPE_LOAD_EXT:
18934 case TYPE_LOAD_EXT_U:
18935 case TYPE_LOAD_EXT_UX:
18936 if ((rs6000_cpu == PROCESSOR_POWER6)
18937 && recog_memoized (dep_insn)
18938 && (INSN_CODE (dep_insn) >= 0))
18939 {
18940
18941 /* Adjust the cost for the case where the value written
18942 by a fixed point instruction is used within the address
18943 gen portion of a subsequent load(u)(x) */
18944 switch (get_attr_type (dep_insn))
18945 {
18946 case TYPE_LOAD:
18947 case TYPE_LOAD_U:
18948 case TYPE_LOAD_UX:
18949 case TYPE_CNTLZ:
18950 {
18951 if (set_to_load_agen (dep_insn, insn))
18952 return 4;
18953 break;
18954 }
18955 case TYPE_LOAD_EXT:
18956 case TYPE_LOAD_EXT_U:
18957 case TYPE_LOAD_EXT_UX:
18958 case TYPE_VAR_SHIFT_ROTATE:
18959 case TYPE_VAR_DELAYED_COMPARE:
18960 {
18961 if (set_to_load_agen (dep_insn, insn))
18962 return 6;
18963 break;
18964 }
18965 case TYPE_INTEGER:
18966 case TYPE_COMPARE:
18967 case TYPE_FAST_COMPARE:
18968 case TYPE_EXTS:
18969 case TYPE_SHIFT:
18970 case TYPE_INSERT_WORD:
18971 case TYPE_INSERT_DWORD:
18972 case TYPE_FPLOAD_U:
18973 case TYPE_FPLOAD_UX:
18974 case TYPE_STORE_U:
18975 case TYPE_STORE_UX:
18976 case TYPE_FPSTORE_U:
18977 case TYPE_FPSTORE_UX:
18978 {
18979 if (set_to_load_agen (dep_insn, insn))
18980 return 3;
18981 break;
18982 }
18983 case TYPE_IMUL:
18984 case TYPE_IMUL2:
18985 case TYPE_IMUL3:
18986 case TYPE_LMUL:
18987 case TYPE_IMUL_COMPARE:
18988 case TYPE_LMUL_COMPARE:
18989 {
18990 if (set_to_load_agen (dep_insn, insn))
18991 return 17;
18992 break;
18993 }
18994 case TYPE_IDIV:
18995 {
18996 if (set_to_load_agen (dep_insn, insn))
18997 return 45;
18998 break;
18999 }
19000 case TYPE_LDIV:
19001 {
19002 if (set_to_load_agen (dep_insn, insn))
19003 return 57;
19004 break;
19005 }
19006 default:
19007 break;
19008 }
19009 }
19010 break;
19011
19012 case TYPE_FPLOAD:
19013 if ((rs6000_cpu == PROCESSOR_POWER6)
19014 && recog_memoized (dep_insn)
19015 && (INSN_CODE (dep_insn) >= 0)
19016 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
19017 return 2;
19018
19019 default:
19020 break;
19021 }
c9dbf840 19022
a251ffd0 19023 /* Fall out to return default cost. */
44cd321e
PS
19024 }
19025 break;
19026
19027 case REG_DEP_OUTPUT:
19028 /* Output dependency; DEP_INSN writes a register that INSN writes some
19029 cycles later. */
19030 if ((rs6000_cpu == PROCESSOR_POWER6)
19031 && recog_memoized (dep_insn)
19032 && (INSN_CODE (dep_insn) >= 0))
19033 {
19034 attr_type = get_attr_type (insn);
19035
19036 switch (attr_type)
19037 {
19038 case TYPE_FP:
19039 if (get_attr_type (dep_insn) == TYPE_FP)
19040 return 1;
19041 break;
19042 case TYPE_FPLOAD:
19043 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19044 return 2;
19045 break;
19046 default:
19047 break;
19048 }
19049 }
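      /* Fall through: an output dependence not handled above costs
	 nothing here, just like an anti dependence.  */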
19050 case REG_DEP_ANTI:
19051 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19052 cycles later. */
19053 return 0;
19054
19055 default:
19056 gcc_unreachable ();
a251ffd0
TG
19057 }
19058
19059 return cost;
19060}
b6c9286a 19061
cbe26ab8 19062/* The function returns true if INSN is microcoded.
839a4992 19063 Return false otherwise. */
cbe26ab8
DN
19064
19065static bool
19066is_microcoded_insn (rtx insn)
19067{
19068 if (!insn || !INSN_P (insn)
19069 || GET_CODE (PATTERN (insn)) == USE
19070 || GET_CODE (PATTERN (insn)) == CLOBBER)
19071 return false;
19072
d296e02e
AP
19073 if (rs6000_cpu_attr == CPU_CELL)
19074 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19075
ec507f2d 19076 if (rs6000_sched_groups)
cbe26ab8
DN
19077 {
19078 enum attr_type type = get_attr_type (insn);
19079 if (type == TYPE_LOAD_EXT_U
19080 || type == TYPE_LOAD_EXT_UX
19081 || type == TYPE_LOAD_UX
19082 || type == TYPE_STORE_UX
19083 || type == TYPE_MFCR)
c4ad648e 19084 return true;
cbe26ab8
DN
19085 }
19086
19087 return false;
19088}
19089
cbe26ab8
DN
19090/* The function returns true if INSN is cracked into 2 instructions
19091 by the processor (and therefore occupies 2 issue slots). */
19092
19093static bool
19094is_cracked_insn (rtx insn)
19095{
19096 if (!insn || !INSN_P (insn)
19097 || GET_CODE (PATTERN (insn)) == USE
19098 || GET_CODE (PATTERN (insn)) == CLOBBER)
19099 return false;
19100
ec507f2d 19101 if (rs6000_sched_groups)
cbe26ab8
DN
19102 {
19103 enum attr_type type = get_attr_type (insn);
19104 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19105 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19106 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19107 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19108 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19109 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19110 || type == TYPE_IDIV || type == TYPE_LDIV
19111 || type == TYPE_INSERT_WORD)
19112 return true;
cbe26ab8
DN
19113 }
19114
19115 return false;
19116}
19117
19118/* The function returns true if INSN can be issued only from
a3c9585f 19119 the branch slot. */
cbe26ab8
DN
19120
19121static bool
19122is_branch_slot_insn (rtx insn)
19123{
19124 if (!insn || !INSN_P (insn)
19125 || GET_CODE (PATTERN (insn)) == USE
19126 || GET_CODE (PATTERN (insn)) == CLOBBER)
19127 return false;
19128
ec507f2d 19129 if (rs6000_sched_groups)
cbe26ab8
DN
19130 {
19131 enum attr_type type = get_attr_type (insn);
19132 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19133 return true;
cbe26ab8
DN
19134 return false;
19135 }
19136
19137 return false;
19138}
79ae11c4 19139
44cd321e
PS
 19140/* The function returns true if out_insn sets a value that is
 19141 used in the address generation computation of in_insn.  */
19142static bool
19143set_to_load_agen (rtx out_insn, rtx in_insn)
19144{
19145 rtx out_set, in_set;
19146
19147 /* For performance reasons, only handle the simple case where
19148 both loads are a single_set. */
19149 out_set = single_set (out_insn);
19150 if (out_set)
19151 {
19152 in_set = single_set (in_insn);
19153 if (in_set)
19154 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19155 }
19156
19157 return false;
19158}
19159
 19160/* Return true if the target storage location of out_insn is
 19161 adjacent to the target storage location of in_insn (i.e. the
 19162 two memory locations are adjacent).  */
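/* For example, a 4-byte store to 0(r9) followed by a store to 4(r9)
   passes this test: the base registers match and the address difference
   equals the first store's MEM_SIZE.  */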
19163
19164static bool
19165adjacent_mem_locations (rtx insn1, rtx insn2)
19166{
19167
e3a0e200
PB
19168 rtx a = get_store_dest (PATTERN (insn1));
19169 rtx b = get_store_dest (PATTERN (insn2));
19170
44cd321e
PS
19171 if ((GET_CODE (XEXP (a, 0)) == REG
19172 || (GET_CODE (XEXP (a, 0)) == PLUS
19173 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19174 && (GET_CODE (XEXP (b, 0)) == REG
19175 || (GET_CODE (XEXP (b, 0)) == PLUS
19176 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19177 {
f98e8938 19178 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19179 rtx reg0, reg1;
44cd321e
PS
19180
19181 if (GET_CODE (XEXP (a, 0)) == PLUS)
19182 {
19183 reg0 = XEXP (XEXP (a, 0), 0);
19184 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19185 }
19186 else
19187 reg0 = XEXP (a, 0);
19188
19189 if (GET_CODE (XEXP (b, 0)) == PLUS)
19190 {
19191 reg1 = XEXP (XEXP (b, 0), 0);
19192 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19193 }
19194 else
19195 reg1 = XEXP (b, 0);
19196
19197 val_diff = val1 - val0;
19198
19199 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19200 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19201 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19202 }
19203
19204 return false;
19205}
19206
a4f6c312 19207/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
19208 priority INSN_PRIORITY (INSN). Increase the priority to execute the
 19209 INSN earlier; reduce the priority to execute INSN later. Do not
a4f6c312
SS
19210 define this macro if you do not need to adjust the scheduling
19211 priorities of insns. */
bef84347 19212
c237e94a 19213static int
a2369ed3 19214rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19215{
a4f6c312
SS
19216 /* On machines (like the 750) which have asymmetric integer units,
19217 where one integer unit can do multiply and divides and the other
19218 can't, reduce the priority of multiply/divide so it is scheduled
19219 before other integer operations. */
bef84347
VM
19220
19221#if 0
2c3c49de 19222 if (! INSN_P (insn))
bef84347
VM
19223 return priority;
19224
19225 if (GET_CODE (PATTERN (insn)) == USE)
19226 return priority;
19227
19228 switch (rs6000_cpu_attr) {
19229 case CPU_PPC750:
19230 switch (get_attr_type (insn))
19231 {
19232 default:
19233 break;
19234
19235 case TYPE_IMUL:
19236 case TYPE_IDIV:
3cb999d8
DE
19237 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19238 priority, priority);
bef84347
VM
19239 if (priority >= 0 && priority < 0x01000000)
19240 priority >>= 3;
19241 break;
19242 }
19243 }
19244#endif
19245
44cd321e 19246 if (insn_must_be_first_in_group (insn)
79ae11c4 19247 && reload_completed
f676971a 19248 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19249 && rs6000_sched_restricted_insns_priority)
19250 {
19251
c4ad648e
AM
19252 /* Prioritize insns that can be dispatched only in the first
19253 dispatch slot. */
79ae11c4 19254 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19255 /* Attach highest priority to insn. This means that in
19256 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19257 precede 'priority' (critical path) considerations. */
f676971a 19258 return current_sched_info->sched_max_insns_priority;
79ae11c4 19259 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19260 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19261 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19262 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19263 return (priority + 1);
19264 }
79ae11c4 19265
44cd321e
PS
19266 if (rs6000_cpu == PROCESSOR_POWER6
19267 && ((load_store_pendulum == -2 && is_load_insn (insn))
19268 || (load_store_pendulum == 2 && is_store_insn (insn))))
19269 /* Attach highest priority to insn if the scheduler has just issued two
19270 stores and this instruction is a load, or two loads and this instruction
19271 is a store. Power6 wants loads and stores scheduled alternately
 19272 when possible.  */
19273 return current_sched_info->sched_max_insns_priority;
19274
bef84347
VM
19275 return priority;
19276}
19277
d296e02e
AP
19278/* Return true if the instruction is nonpipelined on the Cell. */
19279static bool
19280is_nonpipeline_insn (rtx insn)
19281{
19282 enum attr_type type;
19283 if (!insn || !INSN_P (insn)
19284 || GET_CODE (PATTERN (insn)) == USE
19285 || GET_CODE (PATTERN (insn)) == CLOBBER)
19286 return false;
19287
19288 type = get_attr_type (insn);
19289 if (type == TYPE_IMUL
19290 || type == TYPE_IMUL2
19291 || type == TYPE_IMUL3
19292 || type == TYPE_LMUL
19293 || type == TYPE_IDIV
19294 || type == TYPE_LDIV
19295 || type == TYPE_SDIV
19296 || type == TYPE_DDIV
19297 || type == TYPE_SSQRT
19298 || type == TYPE_DSQRT
19299 || type == TYPE_MFCR
19300 || type == TYPE_MFCRF
19301 || type == TYPE_MFJMPR)
19302 {
19303 return true;
19304 }
19305 return false;
19306}
19307
19308
a4f6c312
SS
19309/* Return how many instructions the machine can issue per cycle. */
19310
c237e94a 19311static int
863d938c 19312rs6000_issue_rate (void)
b6c9286a 19313{
3317bab1
DE
19314 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19315 if (!reload_completed)
19316 return 1;
19317
b6c9286a 19318 switch (rs6000_cpu_attr) {
3cb999d8
DE
19319 case CPU_RIOS1: /* ? */
19320 case CPU_RS64A:
19321 case CPU_PPC601: /* ? */
ed947a96 19322 case CPU_PPC7450:
3cb999d8 19323 return 3;
b54cf83a 19324 case CPU_PPC440:
b6c9286a 19325 case CPU_PPC603:
bef84347 19326 case CPU_PPC750:
ed947a96 19327 case CPU_PPC7400:
be12c2b0 19328 case CPU_PPC8540:
d296e02e 19329 case CPU_CELL:
fa41c305
EW
19330 case CPU_PPCE300C2:
19331 case CPU_PPCE300C3:
edae5fe3 19332 case CPU_PPCE500MC:
f676971a 19333 return 2;
3cb999d8 19334 case CPU_RIOS2:
b6c9286a 19335 case CPU_PPC604:
19684119 19336 case CPU_PPC604E:
b6c9286a 19337 case CPU_PPC620:
3cb999d8 19338 case CPU_PPC630:
b6c9286a 19339 return 4;
cbe26ab8 19340 case CPU_POWER4:
ec507f2d 19341 case CPU_POWER5:
44cd321e 19342 case CPU_POWER6:
cbe26ab8 19343 return 5;
b6c9286a
MM
19344 default:
19345 return 1;
19346 }
19347}
19348
be12c2b0
VM
19349/* Return how many instructions to look ahead for better insn
19350 scheduling. */
19351
19352static int
863d938c 19353rs6000_use_sched_lookahead (void)
be12c2b0
VM
19354{
19355 if (rs6000_cpu_attr == CPU_PPC8540)
19356 return 4;
d296e02e
AP
19357 if (rs6000_cpu_attr == CPU_CELL)
19358 return (reload_completed ? 8 : 0);
be12c2b0
VM
19359 return 0;
19360}
19361
d296e02e
AP
19362/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19363static int
19364rs6000_use_sched_lookahead_guard (rtx insn)
19365{
19366 if (rs6000_cpu_attr != CPU_CELL)
19367 return 1;
19368
19369 if (insn == NULL_RTX || !INSN_P (insn))
19370 abort ();
982afe02 19371
d296e02e
AP
19372 if (!reload_completed
19373 || is_nonpipeline_insn (insn)
19374 || is_microcoded_insn (insn))
19375 return 0;
19376
19377 return 1;
19378}
19379
569fa502
DN
 19380/* Determine if PAT refers to memory.  */
19381
19382static bool
19383is_mem_ref (rtx pat)
19384{
19385 const char * fmt;
19386 int i, j;
19387 bool ret = false;
19388
1de59bbd
DE
19389 /* stack_tie does not produce any real memory traffic. */
19390 if (GET_CODE (pat) == UNSPEC
19391 && XINT (pat, 1) == UNSPEC_TIE)
19392 return false;
19393
569fa502
DN
19394 if (GET_CODE (pat) == MEM)
19395 return true;
19396
19397 /* Recursively process the pattern. */
19398 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19399
19400 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19401 {
19402 if (fmt[i] == 'e')
19403 ret |= is_mem_ref (XEXP (pat, i));
19404 else if (fmt[i] == 'E')
19405 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19406 ret |= is_mem_ref (XVECEXP (pat, i, j));
19407 }
19408
19409 return ret;
19410}
19411
19412/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19413
569fa502
DN
19414static bool
19415is_load_insn1 (rtx pat)
19416{
19417 if (!pat || pat == NULL_RTX)
19418 return false;
19419
19420 if (GET_CODE (pat) == SET)
19421 return is_mem_ref (SET_SRC (pat));
19422
19423 if (GET_CODE (pat) == PARALLEL)
19424 {
19425 int i;
19426
19427 for (i = 0; i < XVECLEN (pat, 0); i++)
19428 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19429 return true;
19430 }
19431
19432 return false;
19433}
19434
19435/* Determine if INSN loads from memory. */
19436
19437static bool
19438is_load_insn (rtx insn)
19439{
19440 if (!insn || !INSN_P (insn))
19441 return false;
19442
19443 if (GET_CODE (insn) == CALL_INSN)
19444 return false;
19445
19446 return is_load_insn1 (PATTERN (insn));
19447}
19448
19449/* Determine if PAT is a PATTERN of a store insn. */
19450
19451static bool
19452is_store_insn1 (rtx pat)
19453{
19454 if (!pat || pat == NULL_RTX)
19455 return false;
19456
19457 if (GET_CODE (pat) == SET)
19458 return is_mem_ref (SET_DEST (pat));
19459
19460 if (GET_CODE (pat) == PARALLEL)
19461 {
19462 int i;
19463
19464 for (i = 0; i < XVECLEN (pat, 0); i++)
19465 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19466 return true;
19467 }
19468
19469 return false;
19470}
19471
19472/* Determine if INSN stores to memory. */
19473
19474static bool
19475is_store_insn (rtx insn)
19476{
19477 if (!insn || !INSN_P (insn))
19478 return false;
19479
19480 return is_store_insn1 (PATTERN (insn));
19481}
19482
e3a0e200
PB
19483/* Return the dest of a store insn. */
19484
19485static rtx
19486get_store_dest (rtx pat)
19487{
19488 gcc_assert (is_store_insn1 (pat));
19489
19490 if (GET_CODE (pat) == SET)
19491 return SET_DEST (pat);
19492 else if (GET_CODE (pat) == PARALLEL)
19493 {
19494 int i;
19495
19496 for (i = 0; i < XVECLEN (pat, 0); i++)
19497 {
19498 rtx inner_pat = XVECEXP (pat, 0, i);
19499 if (GET_CODE (inner_pat) == SET
19500 && is_mem_ref (SET_DEST (inner_pat)))
19501 return inner_pat;
19502 }
19503 }
19504 /* We shouldn't get here, because we should have either a simple
19505 store insn or a store with update which are covered above. */
19506 gcc_unreachable();
19507}
19508
569fa502
DN
19509/* Returns whether the dependence between INSN and NEXT is considered
19510 costly by the given target. */
19511
19512static bool
b198261f 19513rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19514{
b198261f
MK
19515 rtx insn;
19516 rtx next;
19517
aabcd309 19518 /* If the flag is not enabled, no dependence is considered costly;
f676971a 19519 allow all dependent insns in the same group.
569fa502
DN
19520 This is the most aggressive option. */
19521 if (rs6000_sched_costly_dep == no_dep_costly)
19522 return false;
19523
f676971a 19524 /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
19525 do not allow dependent instructions in the same group.
19526 This is the most conservative option. */
19527 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19528 return true;
569fa502 19529
b198261f
MK
19530 insn = DEP_PRO (dep);
19531 next = DEP_CON (dep);
19532
f676971a
EC
19533 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19534 && is_load_insn (next)
569fa502
DN
19535 && is_store_insn (insn))
19536 /* Prevent load after store in the same group. */
19537 return true;
19538
19539 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19540 && is_load_insn (next)
569fa502 19541 && is_store_insn (insn)
e2f6ff94 19542 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19543 /* Prevent load after store in the same group if it is a true
19544 dependence. */
569fa502 19545 return true;
f676971a
EC
19546
19547 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19548 and will not be scheduled in the same group. */
19549 if (rs6000_sched_costly_dep <= max_dep_latency
19550 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19551 return true;
19552
19553 return false;
19554}
19555
f676971a 19556/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19557 skipping any "non-active" insns - insns that will not actually occupy
19558 an issue slot. Return NULL_RTX if such an insn is not found. */
19559
19560static rtx
19561get_next_active_insn (rtx insn, rtx tail)
19562{
f489aff8 19563 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19564 return NULL_RTX;
19565
f489aff8 19566 while (1)
cbe26ab8 19567 {
f489aff8
AM
19568 insn = NEXT_INSN (insn);
19569 if (insn == NULL_RTX || insn == tail)
19570 return NULL_RTX;
cbe26ab8 19571
f489aff8
AM
19572 if (CALL_P (insn)
19573 || JUMP_P (insn)
19574 || (NONJUMP_INSN_P (insn)
19575 && GET_CODE (PATTERN (insn)) != USE
19576 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19577 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19578 break;
19579 }
19580 return insn;
cbe26ab8
DN
19581}
19582
44cd321e
PS
19583/* We are about to begin issuing insns for this clock cycle. */
19584
19585static int
19586rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19587 rtx *ready ATTRIBUTE_UNUSED,
19588 int *pn_ready ATTRIBUTE_UNUSED,
19589 int clock_var ATTRIBUTE_UNUSED)
19590{
d296e02e
AP
19591 int n_ready = *pn_ready;
19592
44cd321e
PS
19593 if (sched_verbose)
19594 fprintf (dump, "// rs6000_sched_reorder :\n");
19595
d296e02e
AP
 19596 /* Reorder the ready list if the second to last ready insn
 19597 is a nonpipeline insn.  */
19598 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19599 {
19600 if (is_nonpipeline_insn (ready[n_ready - 1])
19601 && (recog_memoized (ready[n_ready - 2]) > 0))
19602 /* Simply swap first two insns. */
19603 {
19604 rtx tmp = ready[n_ready - 1];
19605 ready[n_ready - 1] = ready[n_ready - 2];
19606 ready[n_ready - 2] = tmp;
19607 }
19608 }
19609
44cd321e
PS
19610 if (rs6000_cpu == PROCESSOR_POWER6)
19611 load_store_pendulum = 0;
19612
19613 return rs6000_issue_rate ();
19614}
19615
19616/* Like rs6000_sched_reorder, but called after issuing each insn. */
19617
19618static int
19619rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19620 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19621{
19622 if (sched_verbose)
19623 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19624
 19625 /* For Power6, we need to handle some special cases to try to keep the
19626 store queue from overflowing and triggering expensive flushes.
19627
19628 This code monitors how load and store instructions are being issued
19629 and skews the ready list one way or the other to increase the likelihood
19630 that a desired instruction is issued at the proper time.
19631
19632 A couple of things are done. First, we maintain a "load_store_pendulum"
19633 to track the current state of load/store issue.
19634
19635 - If the pendulum is at zero, then no loads or stores have been
19636 issued in the current cycle so we do nothing.
19637
19638 - If the pendulum is 1, then a single load has been issued in this
19639 cycle and we attempt to locate another load in the ready list to
19640 issue with it.
19641
2f8e468b 19642 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19643 issued in this cycle, so we increase the priority of the first load
 19645 in the ready list to increase its likelihood of being chosen first
19645 in the next cycle.
19646
19647 - If the pendulum is -1, then a single store has been issued in this
19648 cycle and we attempt to locate another store in the ready list to
19649 issue with it, preferring a store to an adjacent memory location to
19650 facilitate store pairing in the store queue.
19651
19652 - If the pendulum is 2, then two loads have already been
19653 issued in this cycle, so we increase the priority of the first store
 19654 in the ready list to increase its likelihood of being chosen first
19655 in the next cycle.
19656
19657 - If the pendulum < -2 or > 2, then do nothing.
19658
19659 Note: This code covers the most common scenarios. There exist non
19660 load/store instructions which make use of the LSU and which
19661 would need to be accounted for to strictly model the behavior
19662 of the machine. Those instructions are currently unaccounted
19663 for to help minimize compile time overhead of this code.
19664 */
19665 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19666 {
19667 int pos;
19668 int i;
19669 rtx tmp;
19670
19671 if (is_store_insn (last_scheduled_insn))
19672 /* Issuing a store, swing the load_store_pendulum to the left */
19673 load_store_pendulum--;
19674 else if (is_load_insn (last_scheduled_insn))
19675 /* Issuing a load, swing the load_store_pendulum to the right */
19676 load_store_pendulum++;
19677 else
19678 return cached_can_issue_more;
19679
19680 /* If the pendulum is balanced, or there is only one instruction on
19681 the ready list, then all is well, so return. */
19682 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19683 return cached_can_issue_more;
19684
19685 if (load_store_pendulum == 1)
19686 {
19687 /* A load has been issued in this cycle. Scan the ready list
19688 for another load to issue with it */
19689 pos = *pn_ready-1;
19690
19691 while (pos >= 0)
19692 {
19693 if (is_load_insn (ready[pos]))
19694 {
19695 /* Found a load. Move it to the head of the ready list,
 19696 and adjust its priority so that it is more likely to
 19697 stay there.  */
19698 tmp = ready[pos];
19699 for (i=pos; i<*pn_ready-1; i++)
19700 ready[i] = ready[i + 1];
19701 ready[*pn_ready-1] = tmp;
e855c69d
AB
19702
19703 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19704 INSN_PRIORITY (tmp)++;
19705 break;
19706 }
19707 pos--;
19708 }
19709 }
19710 else if (load_store_pendulum == -2)
19711 {
19712 /* Two stores have been issued in this cycle. Increase the
19713 priority of the first load in the ready list to favor it for
19714 issuing in the next cycle. */
19715 pos = *pn_ready-1;
19716
19717 while (pos >= 0)
19718 {
19719 if (is_load_insn (ready[pos])
e855c69d
AB
19720 && !sel_sched_p ()
19721 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19722 {
19723 INSN_PRIORITY (ready[pos])++;
19724
19725 /* Adjust the pendulum to account for the fact that a load
19726 was found and increased in priority. This is to prevent
19727 increasing the priority of multiple loads */
19728 load_store_pendulum--;
19729
19730 break;
19731 }
19732 pos--;
19733 }
19734 }
19735 else if (load_store_pendulum == -1)
19736 {
19737 /* A store has been issued in this cycle. Scan the ready list for
19738 another store to issue with it, preferring a store to an adjacent
19739 memory location */
19740 int first_store_pos = -1;
19741
19742 pos = *pn_ready-1;
19743
19744 while (pos >= 0)
19745 {
19746 if (is_store_insn (ready[pos]))
19747 {
19748 /* Maintain the index of the first store found on the
19749 list */
19750 if (first_store_pos == -1)
19751 first_store_pos = pos;
19752
19753 if (is_store_insn (last_scheduled_insn)
19754 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19755 {
19756 /* Found an adjacent store. Move it to the head of the
19757 ready list, and adjust its priority so that it is
19758 more likely to stay there */
19759 tmp = ready[pos];
19760 for (i=pos; i<*pn_ready-1; i++)
19761 ready[i] = ready[i + 1];
19762 ready[*pn_ready-1] = tmp;
e855c69d
AB
19763
19764 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19765 INSN_PRIORITY (tmp)++;
e855c69d 19766
44cd321e
PS
19767 first_store_pos = -1;
19768
19769 break;
19770 }
19771 }
19772 pos--;
19773 }
19774
19775 if (first_store_pos >= 0)
19776 {
19777 /* An adjacent store wasn't found, but a non-adjacent store was,
19778 so move the non-adjacent store to the front of the ready
19779 list, and adjust its priority so that it is more likely to
19780 stay there. */
19781 tmp = ready[first_store_pos];
19782 for (i=first_store_pos; i<*pn_ready-1; i++)
19783 ready[i] = ready[i + 1];
19784 ready[*pn_ready-1] = tmp;
e855c69d 19785 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19786 INSN_PRIORITY (tmp)++;
19787 }
19788 }
19789 else if (load_store_pendulum == 2)
19790 {
19791 /* Two loads have been issued in this cycle. Increase the priority
19792 of the first store in the ready list to favor it for issuing in
19793 the next cycle. */
19794 pos = *pn_ready-1;
19795
19796 while (pos >= 0)
19797 {
19798 if (is_store_insn (ready[pos])
e855c69d
AB
19799 && !sel_sched_p ()
19800 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19801 {
19802 INSN_PRIORITY (ready[pos])++;
19803
19804 /* Adjust the pendulum to account for the fact that a store
19805 was found and increased in priority. This is to prevent
19806 increasing the priority of multiple stores */
19807 load_store_pendulum++;
19808
19809 break;
19810 }
19811 pos--;
19812 }
19813 }
19814 }
19815
19816 return cached_can_issue_more;
19817}
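/* Illustrative sketch (not part of the port): the pendulum bookkeeping done
   by rs6000_sched_reorder2 above can be summarized as a tiny state machine.
   The helper below is hypothetical and only shows the update rule; the real
   code also acts on the ready list to keep the useful range within [-2, 2],
   and rs6000_sched_reorder resets the counter to zero at the start of each
   cycle on POWER6.  */
#if 0
static int
model_pendulum_after_issue (int pendulum, int issued_load, int issued_store)
{
  if (issued_store)
    return pendulum - 1;   /* swing left: one more store issued this cycle */
  if (issued_load)
    return pendulum + 1;   /* swing right: one more load issued this cycle */
  return pendulum;         /* other insn types leave the pendulum unchanged */
}
#endif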
19818
839a4992 19819/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19820 of group WHICH_GROUP.
19821
19822 If WHICH_GROUP == current_group, this function will return true if INSN
19823 causes the termination of the current group (i.e., the dispatch group to
19824 which INSN belongs). This means that INSN will be the last insn in the
19825 group it belongs to.
19826
19827 If WHICH_GROUP == previous_group, this function will return true if INSN
19828 causes the termination of the previous group (i.e., the dispatch group that
19829 precedes the group to which INSN belongs). This means that INSN will be
19830 the first insn in the group it belongs to. */
19831
19832static bool
19833insn_terminates_group_p (rtx insn, enum group_termination which_group)
19834{
44cd321e 19835 bool first, last;
cbe26ab8
DN
19836
19837 if (! insn)
19838 return false;
569fa502 19839
44cd321e
PS
19840 first = insn_must_be_first_in_group (insn);
19841 last = insn_must_be_last_in_group (insn);
cbe26ab8 19842
44cd321e 19843 if (first && last)
cbe26ab8
DN
19844 return true;
19845
19846 if (which_group == current_group)
44cd321e 19847 return last;
cbe26ab8 19848 else if (which_group == previous_group)
44cd321e
PS
19849 return first;
19850
19851 return false;
19852}
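/* Worked example (illustrative): with dispatch grouping active on
   POWER4/POWER5, a TYPE_SYNC insn must be first in its group, so the
   previous_group query above returns true for it; a branch-slot insn must
   be last, so the current_group query returns true for it instead; an
   ordinary add is neither, and both queries return false.  */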
19853
19854
19855static bool
19856insn_must_be_first_in_group (rtx insn)
19857{
19858 enum attr_type type;
19859
19860 if (!insn
19861 || insn == NULL_RTX
19862 || GET_CODE (insn) == NOTE
19863 || GET_CODE (PATTERN (insn)) == USE
19864 || GET_CODE (PATTERN (insn)) == CLOBBER)
19865 return false;
19866
19867 switch (rs6000_cpu)
cbe26ab8 19868 {
44cd321e
PS
19869 case PROCESSOR_POWER5:
19870 if (is_cracked_insn (insn))
19871 return true;
19872 case PROCESSOR_POWER4:
19873 if (is_microcoded_insn (insn))
19874 return true;
19875
19876 if (!rs6000_sched_groups)
19877 return false;
19878
19879 type = get_attr_type (insn);
19880
19881 switch (type)
19882 {
19883 case TYPE_MFCR:
19884 case TYPE_MFCRF:
19885 case TYPE_MTCR:
19886 case TYPE_DELAYED_CR:
19887 case TYPE_CR_LOGICAL:
19888 case TYPE_MTJMPR:
19889 case TYPE_MFJMPR:
19890 case TYPE_IDIV:
19891 case TYPE_LDIV:
19892 case TYPE_LOAD_L:
19893 case TYPE_STORE_C:
19894 case TYPE_ISYNC:
19895 case TYPE_SYNC:
19896 return true;
19897 default:
19898 break;
19899 }
19900 break;
19901 case PROCESSOR_POWER6:
19902 type = get_attr_type (insn);
19903
19904 switch (type)
19905 {
19906 case TYPE_INSERT_DWORD:
19907 case TYPE_EXTS:
19908 case TYPE_CNTLZ:
19909 case TYPE_SHIFT:
19910 case TYPE_VAR_SHIFT_ROTATE:
19911 case TYPE_TRAP:
19912 case TYPE_IMUL:
19913 case TYPE_IMUL2:
19914 case TYPE_IMUL3:
19915 case TYPE_LMUL:
19916 case TYPE_IDIV:
19917 case TYPE_INSERT_WORD:
19918 case TYPE_DELAYED_COMPARE:
19919 case TYPE_IMUL_COMPARE:
19920 case TYPE_LMUL_COMPARE:
19921 case TYPE_FPCOMPARE:
19922 case TYPE_MFCR:
19923 case TYPE_MTCR:
19924 case TYPE_MFJMPR:
19925 case TYPE_MTJMPR:
19926 case TYPE_ISYNC:
19927 case TYPE_SYNC:
19928 case TYPE_LOAD_L:
19929 case TYPE_STORE_C:
19930 case TYPE_LOAD_U:
19931 case TYPE_LOAD_UX:
19932 case TYPE_LOAD_EXT_UX:
19933 case TYPE_STORE_U:
19934 case TYPE_STORE_UX:
19935 case TYPE_FPLOAD_U:
19936 case TYPE_FPLOAD_UX:
19937 case TYPE_FPSTORE_U:
19938 case TYPE_FPSTORE_UX:
19939 return true;
19940 default:
19941 break;
19942 }
19943 break;
19944 default:
19945 break;
19946 }
19947
19948 return false;
19949}
19950
19951static bool
19952insn_must_be_last_in_group (rtx insn)
19953{
19954 enum attr_type type;
19955
19956 if (!insn
19957 || insn == NULL_RTX
19958 || GET_CODE (insn) == NOTE
19959 || GET_CODE (PATTERN (insn)) == USE
19960 || GET_CODE (PATTERN (insn)) == CLOBBER)
19961 return false;
19962
19963 switch (rs6000_cpu) {
19964 case PROCESSOR_POWER4:
19965 case PROCESSOR_POWER5:
19966 if (is_microcoded_insn (insn))
19967 return true;
19968
19969 if (is_branch_slot_insn (insn))
19970 return true;
19971
19972 break;
19973 case PROCESSOR_POWER6:
19974 type = get_attr_type (insn);
19975
19976 switch (type)
19977 {
19978 case TYPE_EXTS:
19979 case TYPE_CNTLZ:
19980 case TYPE_SHIFT:
19981 case TYPE_VAR_SHIFT_ROTATE:
19982 case TYPE_TRAP:
19983 case TYPE_IMUL:
19984 case TYPE_IMUL2:
19985 case TYPE_IMUL3:
19986 case TYPE_LMUL:
19987 case TYPE_IDIV:
19988 case TYPE_DELAYED_COMPARE:
19989 case TYPE_IMUL_COMPARE:
19990 case TYPE_LMUL_COMPARE:
19991 case TYPE_FPCOMPARE:
19992 case TYPE_MFCR:
19993 case TYPE_MTCR:
19994 case TYPE_MFJMPR:
19995 case TYPE_MTJMPR:
19996 case TYPE_ISYNC:
19997 case TYPE_SYNC:
19998 case TYPE_LOAD_L:
19999 case TYPE_STORE_C:
20000 return true;
20001 default:
20002 break;
cbe26ab8 20003 }
44cd321e
PS
20004 break;
20005 default:
20006 break;
20007 }
cbe26ab8
DN
20008
20009 return false;
20010}
20011
839a4992 20012/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
20013 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
20014
20015static bool
20016is_costly_group (rtx *group_insns, rtx next_insn)
20017{
20018 int i;
cbe26ab8
DN
20019 int issue_rate = rs6000_issue_rate ();
20020
20021 for (i = 0; i < issue_rate; i++)
20022 {
e2f6ff94
MK
20023 sd_iterator_def sd_it;
20024 dep_t dep;
cbe26ab8 20025 rtx insn = group_insns[i];
b198261f 20026
cbe26ab8 20027 if (!insn)
c4ad648e 20028 continue;
b198261f 20029
e2f6ff94 20030 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 20031 {
b198261f
MK
20032 rtx next = DEP_CON (dep);
20033
20034 if (next == next_insn
20035 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20036 return true;
c4ad648e 20037 }
cbe26ab8
DN
20038 }
20039
20040 return false;
20041}
20042
f676971a 20043/* Utility of the function redefine_groups.
cbe26ab8
DN
20044 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20045 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20046 to keep it "far" (in a separate group) from GROUP_INSNS, following
20047 one of the following schemes, depending on the value of the flag
20048 -minsert-sched-nops = X:
20049 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 20050 in order to force NEXT_INSN into a separate group.
f676971a
EC
20051 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20052 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
20053 insertion (whether a group has just ended, how many vacant issue slots remain
20054 in the last group, and how many dispatch groups were encountered so far). */
20055
f676971a 20056static int
c4ad648e
AM
20057force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20058 rtx next_insn, bool *group_end, int can_issue_more,
20059 int *group_count)
cbe26ab8
DN
20060{
20061 rtx nop;
20062 bool force;
20063 int issue_rate = rs6000_issue_rate ();
20064 bool end = *group_end;
20065 int i;
20066
20067 if (next_insn == NULL_RTX)
20068 return can_issue_more;
20069
20070 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20071 return can_issue_more;
20072
20073 force = is_costly_group (group_insns, next_insn);
20074 if (!force)
20075 return can_issue_more;
20076
20077 if (sched_verbose > 6)
20078 fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 20079 *group_count, can_issue_more);
cbe26ab8
DN
20080
20081 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20082 {
20083 if (*group_end)
c4ad648e 20084 can_issue_more = 0;
cbe26ab8
DN
20085
20086 /* Since only a branch can be issued in the last issue_slot, it is
20087 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20088 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20089 in this case the last nop will start a new group and the branch
20090 will be forced to the new group. */
cbe26ab8 20091 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20092 can_issue_more--;
cbe26ab8
DN
20093
20094 while (can_issue_more > 0)
c4ad648e 20095 {
9390387d 20096 nop = gen_nop ();
c4ad648e
AM
20097 emit_insn_before (nop, next_insn);
20098 can_issue_more--;
20099 }
cbe26ab8
DN
20100
20101 *group_end = true;
20102 return 0;
f676971a 20103 }
cbe26ab8
DN
20104
20105 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20106 {
20107 int n_nops = rs6000_sched_insert_nops;
20108
f676971a 20109 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20110 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20111 if (can_issue_more == 0)
c4ad648e 20112 can_issue_more = issue_rate;
cbe26ab8
DN
20113 can_issue_more--;
20114 if (can_issue_more == 0)
c4ad648e
AM
20115 {
20116 can_issue_more = issue_rate - 1;
20117 (*group_count)++;
20118 end = true;
20119 for (i = 0; i < issue_rate; i++)
20120 {
20121 group_insns[i] = 0;
20122 }
20123 }
cbe26ab8
DN
20124
20125 while (n_nops > 0)
c4ad648e
AM
20126 {
20127 nop = gen_nop ();
20128 emit_insn_before (nop, next_insn);
20129 if (can_issue_more == issue_rate - 1) /* new group begins */
20130 end = false;
20131 can_issue_more--;
20132 if (can_issue_more == 0)
20133 {
20134 can_issue_more = issue_rate - 1;
20135 (*group_count)++;
20136 end = true;
20137 for (i = 0; i < issue_rate; i++)
20138 {
20139 group_insns[i] = 0;
20140 }
20141 }
20142 n_nops--;
20143 }
cbe26ab8
DN
20144
20145 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20146 can_issue_more++;
cbe26ab8 20147
c4ad648e
AM
20148 /* Is next_insn going to start a new group? */
20149 *group_end
20150 = (end
cbe26ab8
DN
20151 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20152 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20153 || (can_issue_more < issue_rate &&
c4ad648e 20154 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20155 if (*group_end && end)
c4ad648e 20156 (*group_count)--;
cbe26ab8
DN
20157
20158 if (sched_verbose > 6)
c4ad648e
AM
20159 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20160 *group_count, can_issue_more);
f676971a
EC
20161 return can_issue_more;
20162 }
cbe26ab8
DN
20163
20164 return can_issue_more;
20165}
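/* Worked example (illustrative): under sched_finish_regroup_exact, if a
   costly dependence is detected while 3 issue slots are still vacant and
   NEXT_INSN is not a branch, 2 nops are emitted and the branch slot is left
   alone; if NEXT_INSN is a branch, all 3 slots are filled so that the last
   nop opens a new group and the branch is forced into it.  */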
20166
20167/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20168 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
20169 form in practice. It tries to achieve this synchronization by forcing the
20170 estimated processor grouping on the compiler (as opposed to the function
20171 'pad_groups' which tries to force the scheduler's grouping on the processor).
20172
20173 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20174 examines the (estimated) dispatch groups that will be formed by the processor
20175 dispatcher. It marks these group boundaries to reflect the estimated
20176 processor grouping, overriding the grouping that the scheduler had marked.
20177 Depending on the value of the flag '-minsert-sched-nops' this function can
20178 force certain insns into separate groups or force a certain distance between
20179 them by inserting nops, for example, if there exists a "costly dependence"
20180 between the insns.
20181
20182 The function estimates the group boundaries that the processor will form as
0fa2e4df 20183 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
20184 each insn. A subsequent insn will start a new group if one of the following
20185 4 cases applies:
20186 - no more vacant issue slots remain in the current dispatch group.
20187 - only the last issue slot, which is the branch slot, is vacant, but the next
20188 insn is not a branch.
20189 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
20190 which means that a cracked insn (which occupies two issue slots) can't be
20191 issued in this group.
f676971a 20192 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
20193 start a new group. */
20194
20195static int
20196redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20197{
20198 rtx insn, next_insn;
20199 int issue_rate;
20200 int can_issue_more;
20201 int slot, i;
20202 bool group_end;
20203 int group_count = 0;
20204 rtx *group_insns;
20205
20206 /* Initialize. */
20207 issue_rate = rs6000_issue_rate ();
5ead67f6 20208 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20209 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20210 {
20211 group_insns[i] = 0;
20212 }
20213 can_issue_more = issue_rate;
20214 slot = 0;
20215 insn = get_next_active_insn (prev_head_insn, tail);
20216 group_end = false;
20217
20218 while (insn != NULL_RTX)
20219 {
20220 slot = (issue_rate - can_issue_more);
20221 group_insns[slot] = insn;
20222 can_issue_more =
c4ad648e 20223 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20224 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20225 can_issue_more = 0;
cbe26ab8
DN
20226
20227 next_insn = get_next_active_insn (insn, tail);
20228 if (next_insn == NULL_RTX)
c4ad648e 20229 return group_count + 1;
cbe26ab8 20230
c4ad648e
AM
20231 /* Is next_insn going to start a new group? */
20232 group_end
20233 = (can_issue_more == 0
20234 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20235 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20236 || (can_issue_more < issue_rate &&
20237 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20238
f676971a 20239 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20240 next_insn, &group_end, can_issue_more,
20241 &group_count);
cbe26ab8
DN
20242
20243 if (group_end)
c4ad648e
AM
20244 {
20245 group_count++;
20246 can_issue_more = 0;
20247 for (i = 0; i < issue_rate; i++)
20248 {
20249 group_insns[i] = 0;
20250 }
20251 }
cbe26ab8
DN
20252
20253 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20254 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20255 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20256 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20257
20258 insn = next_insn;
20259 if (can_issue_more == 0)
c4ad648e
AM
20260 can_issue_more = issue_rate;
20261 } /* while */
cbe26ab8
DN
20262
20263 return group_count;
20264}
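/* Note (added for clarity): the grouping computed above is recorded in the
   insn stream itself -- an insn left with mode TImode starts a new dispatch
   group, while VOIDmode means it issues within the current one.  pad_groups
   below relies on the same convention when it tests
   GET_MODE (next_insn) == TImode to detect scheduler-marked boundaries.  */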
20265
20266/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20267 dispatch group boundaries that the scheduler had marked. Pad with nops
20268 any dispatch groups which have vacant issue slots, in order to force the
20269 scheduler's grouping on the processor dispatcher. The function
20270 returns the number of dispatch groups found. */
20271
20272static int
20273pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20274{
20275 rtx insn, next_insn;
20276 rtx nop;
20277 int issue_rate;
20278 int can_issue_more;
20279 int group_end;
20280 int group_count = 0;
20281
20282 /* Initialize issue_rate. */
20283 issue_rate = rs6000_issue_rate ();
20284 can_issue_more = issue_rate;
20285
20286 insn = get_next_active_insn (prev_head_insn, tail);
20287 next_insn = get_next_active_insn (insn, tail);
20288
20289 while (insn != NULL_RTX)
20290 {
20291 can_issue_more =
20292 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20293
20294 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20295
20296 if (next_insn == NULL_RTX)
c4ad648e 20297 break;
cbe26ab8
DN
20298
20299 if (group_end)
c4ad648e
AM
20300 {
20301 /* If the scheduler had marked group termination at this location
e855c69d 20302 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20303 force group termination, pad the group with nops to force group
20304 termination. */
20305 if (can_issue_more
20306 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20307 && !insn_terminates_group_p (insn, current_group)
20308 && !insn_terminates_group_p (next_insn, previous_group))
20309 {
9390387d 20310 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20311 can_issue_more--;
20312
20313 while (can_issue_more)
20314 {
20315 nop = gen_nop ();
20316 emit_insn_before (nop, next_insn);
20317 can_issue_more--;
20318 }
20319 }
20320
20321 can_issue_more = issue_rate;
20322 group_count++;
20323 }
cbe26ab8
DN
20324
20325 insn = next_insn;
20326 next_insn = get_next_active_insn (insn, tail);
20327 }
20328
20329 return group_count;
20330}
20331
44cd321e
PS
20332/* We're beginning a new block. Initialize data structures as necessary. */
20333
20334static void
20335rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20336 int sched_verbose ATTRIBUTE_UNUSED,
20337 int max_ready ATTRIBUTE_UNUSED)
982afe02 20338{
44cd321e
PS
20339 last_scheduled_insn = NULL_RTX;
20340 load_store_pendulum = 0;
20341}
20342
cbe26ab8
DN
20343/* The following function is called at the end of scheduling BB.
20344 After reload, it inserts nops to enforce insn group bundling. */
20345
20346static void
38f391a5 20347rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20348{
20349 int n_groups;
20350
20351 if (sched_verbose)
20352 fprintf (dump, "=== Finishing schedule.\n");
20353
ec507f2d 20354 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20355 {
e855c69d
AB
20356 /* Do not run the sched_finish hook when selective scheduling is enabled. */
20357 if (sel_sched_p ())
20358 return;
20359
cbe26ab8 20360 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20361 return;
cbe26ab8
DN
20362
20363 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20364 n_groups = pad_groups (dump, sched_verbose,
20365 current_sched_info->prev_head,
20366 current_sched_info->next_tail);
cbe26ab8 20367 else
c4ad648e
AM
20368 n_groups = redefine_groups (dump, sched_verbose,
20369 current_sched_info->prev_head,
20370 current_sched_info->next_tail);
cbe26ab8
DN
20371
20372 if (sched_verbose >= 6)
20373 {
20374 fprintf (dump, "ngroups = %d\n", n_groups);
20375 print_rtl (dump, current_sched_info->prev_head);
20376 fprintf (dump, "Done finish_sched\n");
20377 }
20378 }
20379}
e855c69d
AB
20380
20381struct _rs6000_sched_context
20382{
20383 short cached_can_issue_more;
20384 rtx last_scheduled_insn;
20385 int load_store_pendulum;
20386};
20387
20388typedef struct _rs6000_sched_context rs6000_sched_context_def;
20389typedef rs6000_sched_context_def *rs6000_sched_context_t;
20390
20391/* Allocate store for new scheduling context. */
20392static void *
20393rs6000_alloc_sched_context (void)
20394{
20395 return xmalloc (sizeof (rs6000_sched_context_def));
20396}
20397
20398/* If CLEAN_P is true, initialize _SC with clean data;
20399 otherwise, initialize it from the global context. */
20400static void
20401rs6000_init_sched_context (void *_sc, bool clean_p)
20402{
20403 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20404
20405 if (clean_p)
20406 {
20407 sc->cached_can_issue_more = 0;
20408 sc->last_scheduled_insn = NULL_RTX;
20409 sc->load_store_pendulum = 0;
20410 }
20411 else
20412 {
20413 sc->cached_can_issue_more = cached_can_issue_more;
20414 sc->last_scheduled_insn = last_scheduled_insn;
20415 sc->load_store_pendulum = load_store_pendulum;
20416 }
20417}
20418
20419/* Sets the global scheduling context to the one pointed to by _SC. */
20420static void
20421rs6000_set_sched_context (void *_sc)
20422{
20423 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20424
20425 gcc_assert (sc != NULL);
20426
20427 cached_can_issue_more = sc->cached_can_issue_more;
20428 last_scheduled_insn = sc->last_scheduled_insn;
20429 load_store_pendulum = sc->load_store_pendulum;
20430}
20431
20432/* Free _SC. */
20433static void
20434rs6000_free_sched_context (void *_sc)
20435{
20436 gcc_assert (_sc != NULL);
20437
20438 free (_sc);
20439}
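/* Illustrative sketch (hypothetical caller, not how the hooks are actually
   wired up -- they are installed through the target vector): the selective
   scheduler is expected to drive the four hooks above roughly in this order,
   so that the per-region state (cached_can_issue_more, last_scheduled_insn,
   load_store_pendulum) can be saved and restored.  */
#if 0
  {
    void *ctx = rs6000_alloc_sched_context ();
    rs6000_init_sched_context (ctx, true);  /* start from a clean state */
    rs6000_set_sched_context (ctx);         /* make it the live global state */
    rs6000_free_sched_context (ctx);
  }
#endif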
20440
b6c9286a 20441\f
b6c9286a
MM
20442/* Length in units (bytes) of the trampoline for entering a nested function. */
20443
20444int
863d938c 20445rs6000_trampoline_size (void)
b6c9286a
MM
20446{
20447 int ret = 0;
20448
20449 switch (DEFAULT_ABI)
20450 {
20451 default:
37409796 20452 gcc_unreachable ();
b6c9286a
MM
20453
20454 case ABI_AIX:
8f802bfb 20455 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20456 break;
20457
4dabc42d 20458 case ABI_DARWIN:
b6c9286a 20459 case ABI_V4:
03a7e1a5 20460 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20461 break;
b6c9286a
MM
20462 }
20463
20464 return ret;
20465}
20466
20467/* Emit RTL insns to initialize the variable parts of a trampoline.
20468 FNADDR is an RTX for the address of the function's pure code.
20469 CXT is an RTX for the static chain value for the function. */
20470
20471void
a2369ed3 20472rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20473{
8bd04c56 20474 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20475 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20476
20477 switch (DEFAULT_ABI)
20478 {
20479 default:
37409796 20480 gcc_unreachable ();
b6c9286a 20481
8bd04c56 20482/* Macros to shorten the code expansions below. */
9613eaff 20483#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20484#define MEM_PLUS(addr,offset) \
9613eaff 20485 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20486
b6c9286a
MM
20487 /* Under AIX, just build the 3-word function descriptor. */
20488 case ABI_AIX:
8bd04c56 20489 {
9613eaff
SH
20490 rtx fn_reg = gen_reg_rtx (Pmode);
20491 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20492 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20493 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20494 emit_move_insn (MEM_DEREF (addr), fn_reg);
20495 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20496 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20497 }
b6c9286a
MM
20498 break;
20499
4dabc42d
TC
20500 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20501 case ABI_DARWIN:
b6c9286a 20502 case ABI_V4:
9613eaff 20503 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 20504 FALSE, VOIDmode, 4,
9613eaff 20505 addr, Pmode,
eaf1bcf1 20506 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20507 fnaddr, Pmode,
20508 ctx_reg, Pmode);
b6c9286a 20509 break;
b6c9286a
MM
20510 }
20511
20512 return;
20513}
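/* Layout sketch (for clarity; REGSIZE is 4 with -m32 and 8 with -m64): the
   AIX trampoline initialized above is itself a 3-word function descriptor:

     offset 0            entry address, copied from FNADDR's descriptor
     offset REGSIZE      TOC pointer,   copied from FNADDR's descriptor
     offset 2*REGSIZE    static chain value (CXT)
*/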
7509c759
MM
20514
20515\f
91d231cb 20516/* Table of valid machine attributes. */
a4f6c312 20517
91d231cb 20518const struct attribute_spec rs6000_attribute_table[] =
7509c759 20519{
91d231cb 20520 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20521 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20522 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20523 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20524 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20525 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20526#ifdef SUBTARGET_ATTRIBUTE_TABLE
20527 SUBTARGET_ATTRIBUTE_TABLE,
20528#endif
a5c76ee6 20529 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20530};
7509c759 20531
8bb418a3
ZL
20532/* Handle the "altivec" attribute. The attribute may have
20533 arguments as follows:
f676971a 20534
8bb418a3
ZL
20535 __attribute__((altivec(vector__)))
20536 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20537 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20538
20539 and may appear more than once (e.g., 'vector bool char') in a
20540 given declaration. */
20541
20542static tree
f90ac3f0
UP
20543rs6000_handle_altivec_attribute (tree *node,
20544 tree name ATTRIBUTE_UNUSED,
20545 tree args,
8bb418a3
ZL
20546 int flags ATTRIBUTE_UNUSED,
20547 bool *no_add_attrs)
20548{
20549 tree type = *node, result = NULL_TREE;
20550 enum machine_mode mode;
20551 int unsigned_p;
20552 char altivec_type
20553 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20554 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20555 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20556 : '?');
8bb418a3
ZL
20557
20558 while (POINTER_TYPE_P (type)
20559 || TREE_CODE (type) == FUNCTION_TYPE
20560 || TREE_CODE (type) == METHOD_TYPE
20561 || TREE_CODE (type) == ARRAY_TYPE)
20562 type = TREE_TYPE (type);
20563
20564 mode = TYPE_MODE (type);
20565
f90ac3f0
UP
20566 /* Check for invalid AltiVec type qualifiers. */
20567 if (type == long_unsigned_type_node || type == long_integer_type_node)
20568 {
20569 if (TARGET_64BIT)
20570 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20571 else if (rs6000_warn_altivec_long)
d4ee4d25 20572 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20573 }
20574 else if (type == long_long_unsigned_type_node
20575 || type == long_long_integer_type_node)
20576 error ("use of %<long long%> in AltiVec types is invalid");
20577 else if (type == double_type_node)
20578 error ("use of %<double%> in AltiVec types is invalid");
20579 else if (type == long_double_type_node)
20580 error ("use of %<long double%> in AltiVec types is invalid");
20581 else if (type == boolean_type_node)
20582 error ("use of boolean types in AltiVec types is invalid");
20583 else if (TREE_CODE (type) == COMPLEX_TYPE)
20584 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20585 else if (DECIMAL_FLOAT_MODE_P (mode))
20586 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20587
20588 switch (altivec_type)
20589 {
20590 case 'v':
8df83eae 20591 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20592 switch (mode)
20593 {
c4ad648e
AM
20594 case SImode:
20595 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20596 break;
20597 case HImode:
20598 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20599 break;
20600 case QImode:
20601 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20602 break;
20603 case SFmode: result = V4SF_type_node; break;
20604 /* If the user says 'vector int bool', we may be handed the 'bool'
20605 attribute _before_ the 'vector' attribute, and so select the
20606 proper type in the 'b' case below. */
20607 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20608 result = type;
20609 default: break;
8bb418a3
ZL
20610 }
20611 break;
20612 case 'b':
20613 switch (mode)
20614 {
c4ad648e
AM
20615 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20616 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20617 case QImode: case V16QImode: result = bool_V16QI_type_node;
20618 default: break;
8bb418a3
ZL
20619 }
20620 break;
20621 case 'p':
20622 switch (mode)
20623 {
c4ad648e
AM
20624 case V8HImode: result = pixel_V8HI_type_node;
20625 default: break;
8bb418a3
ZL
20626 }
20627 default: break;
20628 }
20629
4f538d42
UW
20630 /* Propagate qualifiers attached to the element type
20631 onto the vector type. */
20632 if (result && result != type && TYPE_QUALS (type))
20633 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20634
8bb418a3
ZL
20635 *no_add_attrs = true; /* No need to hang on to the attribute. */
20636
f90ac3f0 20637 if (result)
5dc11954 20638 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20639
20640 return NULL_TREE;
20641}
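/* Illustrative usage (a sketch -- in practice these attributes come from the
   front end's AltiVec keyword support rather than being written by hand):

     typedef int my_v4si __attribute__ ((altivec (vector__)));

   The handler above turns the SImode element type into the V4SImode vector
   type, i.e. the same layout as 'vector int'; the bool__ and pixel__ forms
   are combined with vector__ when the user writes 'vector bool int' or
   'vector pixel'.  */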
20642
f18eca82
ZL
20643/* AltiVec defines four built-in scalar types that serve as vector
20644 elements; we must teach the compiler how to mangle them. */
20645
20646static const char *
3101faab 20647rs6000_mangle_type (const_tree type)
f18eca82 20648{
608063c3
JB
20649 type = TYPE_MAIN_VARIANT (type);
20650
20651 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20652 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20653 return NULL;
20654
f18eca82
ZL
20655 if (type == bool_char_type_node) return "U6__boolc";
20656 if (type == bool_short_type_node) return "U6__bools";
20657 if (type == pixel_type_node) return "u7__pixel";
20658 if (type == bool_int_type_node) return "U6__booli";
20659
337bde91
DE
20660 /* Mangle IBM extended float long double as `g' (__float128) on
20661 powerpc*-linux where long-double-64 previously was the default. */
20662 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20663 && TARGET_ELF
20664 && TARGET_LONG_DOUBLE_128
20665 && !TARGET_IEEEQUAD)
20666 return "g";
20667
f18eca82
ZL
20668 /* For all other types, use normal C++ mangling. */
20669 return NULL;
20670}
20671
a5c76ee6
ZW
20672/* Handle a "longcall" or "shortcall" attribute; arguments as in
20673 struct attribute_spec.handler. */
a4f6c312 20674
91d231cb 20675static tree
f676971a
EC
20676rs6000_handle_longcall_attribute (tree *node, tree name,
20677 tree args ATTRIBUTE_UNUSED,
20678 int flags ATTRIBUTE_UNUSED,
a2369ed3 20679 bool *no_add_attrs)
91d231cb
JM
20680{
20681 if (TREE_CODE (*node) != FUNCTION_TYPE
20682 && TREE_CODE (*node) != FIELD_DECL
20683 && TREE_CODE (*node) != TYPE_DECL)
20684 {
5c498b10 20685 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20686 IDENTIFIER_POINTER (name));
20687 *no_add_attrs = true;
20688 }
6a4cee5f 20689
91d231cb 20690 return NULL_TREE;
7509c759
MM
20691}
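/* Illustrative usage (a sketch of the documented attributes):

     void far_func (void) __attribute__ ((longcall));    -- always call through a pointer
     void near_func (void) __attribute__ ((shortcall));  -- use an ordinary direct call

   With -mlongcall (rs6000_default_long_calls) longcall becomes the default,
   and shortcall can be used to override it for individual declarations.  */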
20692
a5c76ee6
ZW
20693/* Set longcall attributes on all functions declared when
20694 rs6000_default_long_calls is true. */
20695static void
a2369ed3 20696rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20697{
20698 if (rs6000_default_long_calls
20699 && (TREE_CODE (type) == FUNCTION_TYPE
20700 || TREE_CODE (type) == METHOD_TYPE))
20701 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20702 NULL_TREE,
20703 TYPE_ATTRIBUTES (type));
16d6f994
EC
20704
20705#if TARGET_MACHO
20706 darwin_set_default_type_attributes (type);
20707#endif
a5c76ee6
ZW
20708}
20709
3cb999d8
DE
20710/* Return a reference suitable for calling a function with the
20711 longcall attribute. */
a4f6c312 20712
9390387d 20713rtx
a2369ed3 20714rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20715{
d330fd93 20716 const char *call_name;
6a4cee5f
MM
20717 tree node;
20718
20719 if (GET_CODE (call_ref) != SYMBOL_REF)
20720 return call_ref;
20721
20722 /* System V adds '.' to the internal name, so skip them. */
20723 call_name = XSTR (call_ref, 0);
20724 if (*call_name == '.')
20725 {
20726 while (*call_name == '.')
20727 call_name++;
20728
20729 node = get_identifier (call_name);
39403d82 20730 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20731 }
20732
20733 return force_reg (Pmode, call_ref);
20734}
7509c759 20735\f
77ccdfed
EC
20736#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20737#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20738#endif
20739
20740/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20741 struct attribute_spec.handler. */
20742static tree
20743rs6000_handle_struct_attribute (tree *node, tree name,
20744 tree args ATTRIBUTE_UNUSED,
20745 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20746{
20747 tree *type = NULL;
20748 if (DECL_P (*node))
20749 {
20750 if (TREE_CODE (*node) == TYPE_DECL)
20751 type = &TREE_TYPE (*node);
20752 }
20753 else
20754 type = node;
20755
20756 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20757 || TREE_CODE (*type) == UNION_TYPE)))
20758 {
20759 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20760 *no_add_attrs = true;
20761 }
20762
20763 else if ((is_attribute_p ("ms_struct", name)
20764 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20765 || ((is_attribute_p ("gcc_struct", name)
20766 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20767 {
20768 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20769 IDENTIFIER_POINTER (name));
20770 *no_add_attrs = true;
20771 }
20772
20773 return NULL_TREE;
20774}
20775
20776static bool
3101faab 20777rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20778{
20779 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20780 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20781 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20782}
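/* Illustrative usage (a sketch of the documented attributes):

     struct __attribute__ ((ms_struct))  M { char c; int i : 8; };
     struct __attribute__ ((gcc_struct)) G { char c; int i : 8; };

   ms_struct lays the record out with the Microsoft bitfield rules (the
   default when TARGET_USE_MS_BITFIELD_LAYOUT is set), while gcc_struct
   forces the native GCC layout.  */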
20783\f
b64a1b53
RH
20784#ifdef USING_ELFOS_H
20785
d6b5193b 20786/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20787
d6b5193b
RS
20788static void
20789rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20790{
20791 if (DEFAULT_ABI == ABI_AIX
20792 && TARGET_MINIMAL_TOC
20793 && !TARGET_RELOCATABLE)
20794 {
20795 if (!toc_initialized)
20796 {
20797 toc_initialized = 1;
20798 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20799 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20800 fprintf (asm_out_file, "\t.tc ");
20801 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20802 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20803 fprintf (asm_out_file, "\n");
20804
20805 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20806 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20807 fprintf (asm_out_file, " = .+32768\n");
20808 }
20809 else
20810 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20811 }
20812 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20813 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20814 else
20815 {
20816 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20817 if (!toc_initialized)
20818 {
20819 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20820 fprintf (asm_out_file, " = .+32768\n");
20821 toc_initialized = 1;
20822 }
20823 }
20824}
20825
20826/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20827
b64a1b53 20828static void
d6b5193b
RS
20829rs6000_elf_asm_init_sections (void)
20830{
20831 toc_section
20832 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20833
20834 sdata2_section
20835 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20836 SDATA2_SECTION_ASM_OP);
20837}
20838
20839/* Implement TARGET_SELECT_RTX_SECTION. */
20840
20841static section *
f676971a 20842rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20843 unsigned HOST_WIDE_INT align)
7509c759 20844{
a9098fd0 20845 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20846 return toc_section;
7509c759 20847 else
d6b5193b 20848 return default_elf_select_rtx_section (mode, x, align);
7509c759 20849}
d9407988 20850\f
d1908feb
JJ
20851/* For a SYMBOL_REF, set generic flags and then perform some
20852 target-specific processing.
20853
d1908feb
JJ
20854 When the AIX ABI is requested on a non-AIX system, replace the
20855 function name with the real name (with a leading .) rather than the
20856 function descriptor name. This saves a lot of overriding code to
20857 read the prefixes. */
d9407988 20858
fb49053f 20859static void
a2369ed3 20860rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20861{
d1908feb 20862 default_encode_section_info (decl, rtl, first);
b2003250 20863
d1908feb
JJ
20864 if (first
20865 && TREE_CODE (decl) == FUNCTION_DECL
20866 && !TARGET_AIX
20867 && DEFAULT_ABI == ABI_AIX)
d9407988 20868 {
c6a2438a 20869 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20870 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20871 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20872 str[0] = '.';
20873 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20874 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20875 }
d9407988
MM
20876}
20877
21d9bb3f 20878static inline bool
0a2aaacc 20879compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20880{
20881 int len;
20882
0a2aaacc
KG
20883 len = strlen (templ);
20884 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20885 && (section[len] == 0 || section[len] == '.'));
20886}
20887
c1b7d95a 20888bool
3101faab 20889rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20890{
20891 if (rs6000_sdata == SDATA_NONE)
20892 return false;
20893
7482ad25
AF
20894 /* We want to merge strings, so we never consider them small data. */
20895 if (TREE_CODE (decl) == STRING_CST)
20896 return false;
20897
20898 /* Functions are never in the small data area. */
20899 if (TREE_CODE (decl) == FUNCTION_DECL)
20900 return false;
20901
0e5dbd9b
DE
20902 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20903 {
20904 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20905 if (compare_section_name (section, ".sdata")
20906 || compare_section_name (section, ".sdata2")
20907 || compare_section_name (section, ".gnu.linkonce.s")
20908 || compare_section_name (section, ".sbss")
20909 || compare_section_name (section, ".sbss2")
20910 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20911 || strcmp (section, ".PPC.EMB.sdata0") == 0
20912 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20913 return true;
20914 }
20915 else
20916 {
20917 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20918
20919 if (size > 0
307b599c 20920 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20921 /* If it's not public, and we're not going to reference it there,
20922 there's no need to put it in the small data section. */
0e5dbd9b
DE
20923 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20924 return true;
20925 }
20926
20927 return false;
20928}
20929
b91da81f 20930#endif /* USING_ELFOS_H */
aacd3885
RS
20931\f
20932/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20933
aacd3885 20934static bool
3101faab 20935rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20936{
20937 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20938}
a6c2a102 20939\f
000034eb 20940/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20941 ADDR can be effectively incremented by incrementing REG.
20942
20943 r0 is special and we must not select it as an address
20944 register by this routine since our caller will try to
20945 increment the returned register via an "la" instruction. */
000034eb 20946
9390387d 20947rtx
a2369ed3 20948find_addr_reg (rtx addr)
000034eb
DE
20949{
20950 while (GET_CODE (addr) == PLUS)
20951 {
02441cd6
JL
20952 if (GET_CODE (XEXP (addr, 0)) == REG
20953 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20954 addr = XEXP (addr, 0);
02441cd6
JL
20955 else if (GET_CODE (XEXP (addr, 1)) == REG
20956 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20957 addr = XEXP (addr, 1);
20958 else if (CONSTANT_P (XEXP (addr, 0)))
20959 addr = XEXP (addr, 1);
20960 else if (CONSTANT_P (XEXP (addr, 1)))
20961 addr = XEXP (addr, 0);
20962 else
37409796 20963 gcc_unreachable ();
000034eb 20964 }
37409796
NS
20965 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20966 return addr;
000034eb
DE
20967}
20968
a6c2a102 20969void
a2369ed3 20970rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20971{
20972 fatal_insn ("bad address", op);
20973}
c8023011 20974
ee890fe2
SS
20975#if TARGET_MACHO
20976
efdba735 20977static tree branch_island_list = 0;
ee890fe2 20978
efdba735
SH
20979/* Remember to generate a branch island for far calls to the given
20980 function. */
ee890fe2 20981
f676971a 20982static void
c4ad648e
AM
20983add_compiler_branch_island (tree label_name, tree function_name,
20984 int line_number)
ee890fe2 20985{
efdba735 20986 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20987 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20988 TREE_CHAIN (branch_island) = branch_island_list;
20989 branch_island_list = branch_island;
ee890fe2
SS
20990}
20991
efdba735
SH
20992#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20993#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20994#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20995 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20996
efdba735
SH
20997/* Generate far-jump branch islands for everything on the
20998 branch_island_list. Invoked immediately after the last instruction
20999 of the epilogue has been emitted; the branch-islands must be
21000 appended to, and contiguous with, the function body. Mach-O stubs
21001 are generated in machopic_output_stub(). */
ee890fe2 21002
efdba735
SH
21003static void
21004macho_branch_islands (void)
21005{
21006 char tmp_buf[512];
21007 tree branch_island;
21008
21009 for (branch_island = branch_island_list;
21010 branch_island;
21011 branch_island = TREE_CHAIN (branch_island))
21012 {
21013 const char *label =
21014 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
21015 const char *name =
11abc112 21016 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
21017 char name_buf[512];
21018 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
21019 if (name[0] == '*' || name[0] == '&')
21020 strcpy (name_buf, name+1);
21021 else
21022 {
21023 name_buf[0] = '_';
21024 strcpy (name_buf+1, name);
21025 }
21026 strcpy (tmp_buf, "\n");
21027 strcat (tmp_buf, label);
ee890fe2 21028#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21029 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21030 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21031#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
21032 if (flag_pic)
21033 {
21034 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21035 strcat (tmp_buf, label);
21036 strcat (tmp_buf, "_pic\n");
21037 strcat (tmp_buf, label);
21038 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 21039
efdba735
SH
21040 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21041 strcat (tmp_buf, name_buf);
21042 strcat (tmp_buf, " - ");
21043 strcat (tmp_buf, label);
21044 strcat (tmp_buf, "_pic)\n");
f676971a 21045
efdba735 21046 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 21047
efdba735
SH
21048 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21049 strcat (tmp_buf, name_buf);
21050 strcat (tmp_buf, " - ");
21051 strcat (tmp_buf, label);
21052 strcat (tmp_buf, "_pic)\n");
f676971a 21053
efdba735
SH
21054 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21055 }
21056 else
21057 {
21058 strcat (tmp_buf, ":\nlis r12,hi16(");
21059 strcat (tmp_buf, name_buf);
21060 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21061 strcat (tmp_buf, name_buf);
21062 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21063 }
21064 output_asm_insn (tmp_buf, 0);
ee890fe2 21065#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21066 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21067 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21068#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21069 }
ee890fe2 21070
efdba735 21071 branch_island_list = 0;
ee890fe2
SS
21072}
21073
21074/* NO_PREVIOUS_DEF checks the branch-island list to see whether the given
21075 function name is already there. */
21076
efdba735 21077static int
a2369ed3 21078no_previous_def (tree function_name)
ee890fe2 21079{
efdba735
SH
21080 tree branch_island;
21081 for (branch_island = branch_island_list;
21082 branch_island;
21083 branch_island = TREE_CHAIN (branch_island))
21084 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21085 return 0;
21086 return 1;
21087}
21088
21089/* GET_PREV_LABEL gets the label name from the previous definition of
21090 the function. */
21091
efdba735 21092static tree
a2369ed3 21093get_prev_label (tree function_name)
ee890fe2 21094{
efdba735
SH
21095 tree branch_island;
21096 for (branch_island = branch_island_list;
21097 branch_island;
21098 branch_island = TREE_CHAIN (branch_island))
21099 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21100 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21101 return 0;
21102}
21103
75b1b789
MS
21104#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21105#define DARWIN_LINKER_GENERATES_ISLANDS 0
21106#endif
21107
21108/* KEXTs still need branch islands. */
21109#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21110 || flag_mkernel || flag_apple_kext)
21111
ee890fe2 21112/* INSN is either a function call or a millicode call. It may have an
f676971a 21113 unconditional jump in its delay slot.
ee890fe2
SS
21114
21115 CALL_DEST is the routine we are calling. */
21116
21117char *
c4ad648e
AM
21118output_call (rtx insn, rtx *operands, int dest_operand_number,
21119 int cookie_operand_number)
ee890fe2
SS
21120{
21121 static char buf[256];
75b1b789
MS
21122 if (DARWIN_GENERATE_ISLANDS
21123 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21124 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21125 {
21126 tree labelname;
efdba735 21127 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21128
ee890fe2
SS
21129 if (no_previous_def (funname))
21130 {
ee890fe2
SS
21131 rtx label_rtx = gen_label_rtx ();
21132 char *label_buf, temp_buf[256];
21133 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21134 CODE_LABEL_NUMBER (label_rtx));
21135 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21136 labelname = get_identifier (label_buf);
a38e7aa5 21137 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21138 }
21139 else
21140 labelname = get_prev_label (funname);
21141
efdba735
SH
21142 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21143 instruction will reach 'foo', otherwise link as 'bl L42'".
21144 "L42" should be a 'branch island', that will do a far jump to
21145 'foo'. Branch islands are generated in
21146 macho_branch_islands(). */
ee890fe2 21147 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21148 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21149 }
21150 else
efdba735
SH
21151 sprintf (buf, "bl %%z%d", dest_operand_number);
21152 return buf;
ee890fe2
SS
21153}
21154
ee890fe2
SS
21155/* Generate PIC and indirect symbol stubs. */
21156
21157void
a2369ed3 21158machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21159{
21160 unsigned int length;
a4f6c312
SS
21161 char *symbol_name, *lazy_ptr_name;
21162 char *local_label_0;
ee890fe2
SS
21163 static int label = 0;
21164
df56a27f 21165 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21166 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21167
ee890fe2 21168
ee890fe2 21169 length = strlen (symb);
5ead67f6 21170 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21171 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21172
5ead67f6 21173 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21174 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21175
ee890fe2 21176 if (flag_pic == 2)
56c779bc 21177 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21178 else
56c779bc 21179 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21180
21181 if (flag_pic == 2)
21182 {
d974312d
DJ
21183 fprintf (file, "\t.align 5\n");
21184
21185 fprintf (file, "%s:\n", stub);
21186 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21187
876455fa 21188 label++;
5ead67f6 21189 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21190 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21191
ee890fe2
SS
21192 fprintf (file, "\tmflr r0\n");
21193 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21194 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21195 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21196 lazy_ptr_name, local_label_0);
21197 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21198 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21199 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21200 lazy_ptr_name, local_label_0);
21201 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21202 fprintf (file, "\tbctr\n");
21203 }
21204 else
d974312d
DJ
21205 {
21206 fprintf (file, "\t.align 4\n");
21207
21208 fprintf (file, "%s:\n", stub);
21209 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21210
21211 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21212 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21213 (TARGET_64BIT ? "ldu" : "lwzu"),
21214 lazy_ptr_name);
d974312d
DJ
21215 fprintf (file, "\tmtctr r12\n");
21216 fprintf (file, "\tbctr\n");
21217 }
f676971a 21218
56c779bc 21219 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21220 fprintf (file, "%s:\n", lazy_ptr_name);
21221 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21222 fprintf (file, "%sdyld_stub_binding_helper\n",
21223 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21224}
21225
21226/* Legitimize PIC addresses. If the address is already
21227 position-independent, we return ORIG. Newly generated
21228 position-independent addresses go into a reg. This is REG if non
21229 zero, otherwise we allocate register(s) as necessary. */
21230
4fbbe694 21231#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
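/* Worked example: SMALL_INT accepts exactly the signed 16-bit constants.
   In the unsigned arithmetic of UINTVAL, adding 0x8000 maps the range
   [-0x8000, 0x7fff] onto [0, 0xffff], so -0x8000 and 0x7fff pass while
   0x8000 (which becomes 0x10000) does not.  */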
ee890fe2
SS
21232
21233rtx
f676971a 21234rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21235 rtx reg)
ee890fe2
SS
21236{
21237 rtx base, offset;
21238
21239 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21240 reg = gen_reg_rtx (Pmode);
21241
21242 if (GET_CODE (orig) == CONST)
21243 {
37409796
NS
21244 rtx reg_temp;
21245
ee890fe2
SS
21246 if (GET_CODE (XEXP (orig, 0)) == PLUS
21247 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21248 return orig;
21249
37409796 21250 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21251
37409796
NS
21252 /* Use a different reg for the intermediate value, as
21253 it will be marked UNCHANGING. */
b3a13419 21254 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21255 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21256 Pmode, reg_temp);
21257 offset =
21258 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21259 Pmode, reg);
bb8df8a6 21260
ee890fe2
SS
21261 if (GET_CODE (offset) == CONST_INT)
21262 {
21263 if (SMALL_INT (offset))
ed8908e7 21264 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21265 else if (! reload_in_progress && ! reload_completed)
21266 offset = force_reg (Pmode, offset);
21267 else
c859cda6
DJ
21268 {
21269 rtx mem = force_const_mem (Pmode, orig);
21270 return machopic_legitimize_pic_address (mem, Pmode, reg);
21271 }
ee890fe2 21272 }
f1c25d3b 21273 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21274 }
21275
21276 /* Fall back on generic machopic code. */
21277 return machopic_legitimize_pic_address (orig, mode, reg);
21278}
21279
c4e18b1c
GK
21280/* Output a .machine directive for the Darwin assembler, and call
21281 the generic start_file routine. */
21282
21283static void
21284rs6000_darwin_file_start (void)
21285{
94ff898d 21286 static const struct
c4e18b1c
GK
21287 {
21288 const char *arg;
21289 const char *name;
21290 int if_set;
21291 } mapping[] = {
55dbfb48 21292 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21293 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21294 { "power4", "ppc970", 0 },
21295 { "G5", "ppc970", 0 },
21296 { "7450", "ppc7450", 0 },
21297 { "7400", "ppc7400", MASK_ALTIVEC },
21298 { "G4", "ppc7400", 0 },
21299 { "750", "ppc750", 0 },
21300 { "740", "ppc750", 0 },
21301 { "G3", "ppc750", 0 },
21302 { "604e", "ppc604e", 0 },
21303 { "604", "ppc604", 0 },
21304 { "603e", "ppc603", 0 },
21305 { "603", "ppc603", 0 },
21306 { "601", "ppc601", 0 },
21307 { NULL, "ppc", 0 } };
21308 const char *cpu_id = "";
21309 size_t i;
94ff898d 21310
9390387d 21311 rs6000_file_start ();
192d0f89 21312 darwin_file_start ();
c4e18b1c
GK
21313
21314 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21315 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21316 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21317 && rs6000_select[i].string[0] != '\0')
21318 cpu_id = rs6000_select[i].string;
21319
21320 /* Look through the mapping array. Pick the first name that either
21321 matches the argument, has a bit set in IF_SET that is also set
21322 in the target flags, or has a NULL name. */
21323
21324 i = 0;
21325 while (mapping[i].arg != NULL
21326 && strcmp (mapping[i].arg, cpu_id) != 0
21327 && (mapping[i].if_set & target_flags) == 0)
21328 i++;
21329
21330 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21331}
21332
ee890fe2 21333#endif /* TARGET_MACHO */
7c262518
RH
21334
21335#if TARGET_ELF
9b580a0b
RH
21336static int
21337rs6000_elf_reloc_rw_mask (void)
7c262518 21338{
9b580a0b
RH
21339 if (flag_pic)
21340 return 3;
21341 else if (DEFAULT_ABI == ABI_AIX)
21342 return 2;
21343 else
21344 return 0;
7c262518 21345}
d9f6800d
RH
21346
21347/* Record an element in the table of global constructors. SYMBOL is
21348 a SYMBOL_REF of the function to be called; PRIORITY is a number
21349 between 0 and MAX_INIT_PRIORITY.
21350
21351 This differs from default_named_section_asm_out_constructor in
21352 that we have special handling for -mrelocatable. */
21353
21354static void
a2369ed3 21355rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21356{
21357 const char *section = ".ctors";
21358 char buf[16];
21359
21360 if (priority != DEFAULT_INIT_PRIORITY)
21361 {
21362 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21363 /* Invert the numbering so the linker puts us in the proper
21364 order; constructors are run from right to left, and the
21365 linker sorts in increasing order. */
21366 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21367 section = buf;
21368 }
21369
d6b5193b 21370 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21371 assemble_align (POINTER_SIZE);
d9f6800d
RH
21372
21373 if (TARGET_RELOCATABLE)
21374 {
21375 fputs ("\t.long (", asm_out_file);
21376 output_addr_const (asm_out_file, symbol);
21377 fputs (")@fixup\n", asm_out_file);
21378 }
21379 else
c8af3574 21380 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21381}
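/* Worked example of the inversion above (assuming the usual
   MAX_INIT_PRIORITY of 65535): a function declared as

     void __attribute__ ((constructor (101))) setup (void);

   lands in section ".ctors.65434" (65535 - 101).  Entries with smaller
   priority numbers, which must run first, therefore sort later in the
   combined .ctors, where the right-to-left startup walk reaches them
   first.  */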
21382
21383static void
a2369ed3 21384rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21385{
21386 const char *section = ".dtors";
21387 char buf[16];
21388
21389 if (priority != DEFAULT_INIT_PRIORITY)
21390 {
21391 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21392 /* Invert the numbering so the linker puts us in the proper
21393 order; constructors are run from right to left, and the
21394 linker sorts in increasing order. */
21395 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21396 section = buf;
21397 }
21398
d6b5193b 21399 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21400 assemble_align (POINTER_SIZE);
d9f6800d
RH
21401
21402 if (TARGET_RELOCATABLE)
21403 {
21404 fputs ("\t.long (", asm_out_file);
21405 output_addr_const (asm_out_file, symbol);
21406 fputs (")@fixup\n", asm_out_file);
21407 }
21408 else
c8af3574 21409 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21410}
9739c90c
JJ
21411
21412void
a2369ed3 21413rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21414{
21415 if (TARGET_64BIT)
21416 {
21417 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21418 ASM_OUTPUT_LABEL (file, name);
21419 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21420 rs6000_output_function_entry (file, name);
21421 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21422 if (DOT_SYMBOLS)
9739c90c 21423 {
85b776df 21424 fputs ("\t.size\t", file);
9739c90c 21425 assemble_name (file, name);
85b776df
AM
21426 fputs (",24\n\t.type\t.", file);
21427 assemble_name (file, name);
21428 fputs (",@function\n", file);
21429 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21430 {
21431 fputs ("\t.globl\t.", file);
21432 assemble_name (file, name);
21433 putc ('\n', file);
21434 }
9739c90c 21435 }
85b776df
AM
21436 else
21437 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21438 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21439 rs6000_output_function_entry (file, name);
21440 fputs (":\n", file);
9739c90c
JJ
21441 return;
21442 }
21443
21444 if (TARGET_RELOCATABLE
7f970b70 21445 && !TARGET_SECURE_PLT
e3b5732b 21446 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21447 && uses_TOC ())
9739c90c
JJ
21448 {
21449 char buf[256];
21450
21451 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21452
21453 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21454 fprintf (file, "\t.long ");
21455 assemble_name (file, buf);
21456 putc ('-', file);
21457 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21458 assemble_name (file, buf);
21459 putc ('\n', file);
21460 }
21461
21462 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21463 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21464
21465 if (DEFAULT_ABI == ABI_AIX)
21466 {
21467 const char *desc_name, *orig_name;
21468
21469 orig_name = (*targetm.strip_name_encoding) (name);
21470 desc_name = orig_name;
21471 while (*desc_name == '.')
21472 desc_name++;
21473
21474 if (TREE_PUBLIC (decl))
21475 fprintf (file, "\t.globl %s\n", desc_name);
21476
21477 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21478 fprintf (file, "%s:\n", desc_name);
21479 fprintf (file, "\t.long %s\n", orig_name);
21480 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21481 if (DEFAULT_ABI == ABI_AIX)
21482 fputs ("\t.long 0\n", file);
21483 fprintf (file, "\t.previous\n");
21484 }
21485 ASM_OUTPUT_LABEL (file, name);
21486}
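/* For a 64-bit exported function "foo" built with dot symbols, the
   code above produces roughly the following (a sketch pieced together
   from the fputs calls, not captured assembler output):

       .section ".opd","aw"
       .align 3
   foo:
       .quad .foo,.TOC.@tocbase,0
       .previous
       .size foo,24
       .type .foo,@function
       .globl .foo
   .foo:

   so "foo" labels the three-doubleword function descriptor and ".foo"
   labels the actual code entry point.  */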
1334b570
AM
21487
21488static void
21489rs6000_elf_end_indicate_exec_stack (void)
21490{
21491 if (TARGET_32BIT)
21492 file_end_indicate_exec_stack ();
21493}
7c262518
RH
21494#endif
21495
cbaaba19 21496#if TARGET_XCOFF
0d5817b2
DE
21497static void
21498rs6000_xcoff_asm_output_anchor (rtx symbol)
21499{
21500 char buffer[100];
21501
21502 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21503 SYMBOL_REF_BLOCK_OFFSET (symbol));
21504 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21505}
21506
7c262518 21507static void
a2369ed3 21508rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21509{
21510 fputs (GLOBAL_ASM_OP, stream);
21511 RS6000_OUTPUT_BASENAME (stream, name);
21512 putc ('\n', stream);
21513}
21514
d6b5193b
RS
 21515/* A get_unnamed_section callback, used for read-only sections.  DIRECTIVE
21516 points to the section string variable. */
21517
21518static void
21519rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21520{
890f9edf
OH
21521 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21522 *(const char *const *) directive,
21523 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21524}
21525
21526/* Likewise for read-write sections. */
21527
21528static void
21529rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21530{
890f9edf
OH
21531 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21532 *(const char *const *) directive,
21533 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21534}
21535
21536/* A get_unnamed_section callback, used for switching to toc_section. */
21537
21538static void
21539rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21540{
21541 if (TARGET_MINIMAL_TOC)
21542 {
21543 /* toc_section is always selected at least once from
21544 rs6000_xcoff_file_start, so this is guaranteed to
21545 always be defined once and only once in each file. */
21546 if (!toc_initialized)
21547 {
21548 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21549 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21550 toc_initialized = 1;
21551 }
21552 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21553 (TARGET_32BIT ? "" : ",3"));
21554 }
21555 else
21556 fputs ("\t.toc\n", asm_out_file);
21557}
21558
21559/* Implement TARGET_ASM_INIT_SECTIONS. */
21560
21561static void
21562rs6000_xcoff_asm_init_sections (void)
21563{
21564 read_only_data_section
21565 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21566 &xcoff_read_only_section_name);
21567
21568 private_data_section
21569 = get_unnamed_section (SECTION_WRITE,
21570 rs6000_xcoff_output_readwrite_section_asm_op,
21571 &xcoff_private_data_section_name);
21572
21573 read_only_private_data_section
21574 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21575 &xcoff_private_data_section_name);
21576
21577 toc_section
21578 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21579
21580 readonly_data_section = read_only_data_section;
21581 exception_section = data_section;
21582}
21583
9b580a0b
RH
21584static int
21585rs6000_xcoff_reloc_rw_mask (void)
21586{
21587 return 3;
21588}
21589
b275d088 21590static void
c18a5b6c
MM
21591rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21592 tree decl ATTRIBUTE_UNUSED)
7c262518 21593{
0e5dbd9b
DE
21594 int smclass;
21595 static const char * const suffix[3] = { "PR", "RO", "RW" };
21596
21597 if (flags & SECTION_CODE)
21598 smclass = 0;
21599 else if (flags & SECTION_WRITE)
21600 smclass = 2;
21601 else
21602 smclass = 1;
21603
5b5198f7 21604 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21605 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21606 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21607}
ae46c4e0 21608
d6b5193b 21609static section *
f676971a 21610rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21611 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21612{
9b580a0b 21613 if (decl_readonly_section (decl, reloc))
ae46c4e0 21614 {
0e5dbd9b 21615 if (TREE_PUBLIC (decl))
d6b5193b 21616 return read_only_data_section;
ae46c4e0 21617 else
d6b5193b 21618 return read_only_private_data_section;
ae46c4e0
RH
21619 }
21620 else
21621 {
0e5dbd9b 21622 if (TREE_PUBLIC (decl))
d6b5193b 21623 return data_section;
ae46c4e0 21624 else
d6b5193b 21625 return private_data_section;
ae46c4e0
RH
21626 }
21627}
21628
21629static void
a2369ed3 21630rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21631{
21632 const char *name;
ae46c4e0 21633
5b5198f7
DE
21634 /* Use select_section for private and uninitialized data. */
21635 if (!TREE_PUBLIC (decl)
21636 || DECL_COMMON (decl)
0e5dbd9b
DE
21637 || DECL_INITIAL (decl) == NULL_TREE
21638 || DECL_INITIAL (decl) == error_mark_node
21639 || (flag_zero_initialized_in_bss
21640 && initializer_zerop (DECL_INITIAL (decl))))
21641 return;
21642
21643 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21644 name = (*targetm.strip_name_encoding) (name);
21645 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21646}
b64a1b53 21647
fb49053f
RH
21648/* Select section for constant in constant pool.
21649
21650 On RS/6000, all constants are in the private read-only data area.
21651 However, if this is being placed in the TOC it must be output as a
21652 toc entry. */
21653
d6b5193b 21654static section *
f676971a 21655rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21656 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21657{
21658 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21659 return toc_section;
b64a1b53 21660 else
d6b5193b 21661 return read_only_private_data_section;
b64a1b53 21662}
772c5265
RH
21663
21664/* Remove any trailing [DS] or the like from the symbol name. */
21665
21666static const char *
a2369ed3 21667rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21668{
21669 size_t len;
21670 if (*name == '*')
21671 name++;
21672 len = strlen (name);
21673 if (name[len - 1] == ']')
21674 return ggc_alloc_string (name, len - 4);
21675 else
21676 return name;
21677}
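/* Examples of the stripping above: a leading '*' is dropped, and
   "bar[DS]" becomes "bar", since a trailing ']' is taken to close a
   four-character XCOFF mapping-class suffix such as "[DS]" or
   "[RO]".  */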
21678
5add3202
DE
21679/* Section attributes. AIX is always PIC. */
21680
21681static unsigned int
a2369ed3 21682rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21683{
5b5198f7 21684 unsigned int align;
9b580a0b 21685 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21686
21687 /* Align to at least UNIT size. */
21688 if (flags & SECTION_CODE)
21689 align = MIN_UNITS_PER_WORD;
21690 else
21691 /* Increase alignment of large objects if not already stricter. */
21692 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21693 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21694 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21695
21696 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21697}
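/* The low SECTION_ENTSIZE bits are reused here to carry log2 of the
   section alignment, which rs6000_xcoff_asm_named_section above then
   prints as the trailing operand of .csect; for instance, a
   16-byte-aligned writable object "obj" would come out as
   ".csect obj[RW],4".  */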
a5fe455b 21698
1bc7c5b6
ZW
21699/* Output at beginning of assembler file.
21700
21701 Initialize the section names for the RS/6000 at this point.
21702
21703 Specify filename, including full path, to assembler.
21704
21705 We want to go into the TOC section so at least one .toc will be emitted.
21706 Also, in order to output proper .bs/.es pairs, we need at least one static
21707 [RW] section emitted.
21708
21709 Finally, declare mcount when profiling to make the assembler happy. */
21710
21711static void
863d938c 21712rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21713{
21714 rs6000_gen_section_name (&xcoff_bss_section_name,
21715 main_input_filename, ".bss_");
21716 rs6000_gen_section_name (&xcoff_private_data_section_name,
21717 main_input_filename, ".rw_");
21718 rs6000_gen_section_name (&xcoff_read_only_section_name,
21719 main_input_filename, ".ro_");
21720
21721 fputs ("\t.file\t", asm_out_file);
21722 output_quoted_string (asm_out_file, main_input_filename);
21723 fputc ('\n', asm_out_file);
1bc7c5b6 21724 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21725 switch_to_section (private_data_section);
21726 switch_to_section (text_section);
1bc7c5b6
ZW
21727 if (profile_flag)
21728 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21729 rs6000_file_start ();
21730}
21731
a5fe455b
ZW
21732/* Output at end of assembler file.
21733 On the RS/6000, referencing data should automatically pull in text. */
21734
21735static void
863d938c 21736rs6000_xcoff_file_end (void)
a5fe455b 21737{
d6b5193b 21738 switch_to_section (text_section);
a5fe455b 21739 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21740 switch_to_section (data_section);
a5fe455b
ZW
21741 fputs (TARGET_32BIT
21742 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21743 asm_out_file);
21744}
f1384257 21745#endif /* TARGET_XCOFF */
0e5dbd9b 21746
3c50106f
RH
21747/* Compute a (partial) cost for rtx X. Return true if the complete
21748 cost has been computed, and false if subexpressions should be
21749 scanned. In either case, *TOTAL contains the cost result. */
21750
21751static bool
f40751dd
JH
21752rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21753 bool speed)
3c50106f 21754{
f0517163
RS
21755 enum machine_mode mode = GET_MODE (x);
21756
3c50106f
RH
21757 switch (code)
21758 {
30a555d9 21759 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21760 case CONST_INT:
066cd967
DE
21761 if (((outer_code == SET
21762 || outer_code == PLUS
21763 || outer_code == MINUS)
279bb624
DE
21764 && (satisfies_constraint_I (x)
21765 || satisfies_constraint_L (x)))
066cd967 21766 || (outer_code == AND
279bb624
DE
21767 && (satisfies_constraint_K (x)
21768 || (mode == SImode
21769 ? satisfies_constraint_L (x)
21770 : satisfies_constraint_J (x))
1990cd79
AM
21771 || mask_operand (x, mode)
21772 || (mode == DImode
21773 && mask64_operand (x, DImode))))
22e54023 21774 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21775 && (satisfies_constraint_K (x)
21776 || (mode == SImode
21777 ? satisfies_constraint_L (x)
21778 : satisfies_constraint_J (x))))
066cd967
DE
21779 || outer_code == ASHIFT
21780 || outer_code == ASHIFTRT
21781 || outer_code == LSHIFTRT
21782 || outer_code == ROTATE
21783 || outer_code == ROTATERT
d5861a7a 21784 || outer_code == ZERO_EXTRACT
066cd967 21785 || (outer_code == MULT
279bb624 21786 && satisfies_constraint_I (x))
22e54023
DE
21787 || ((outer_code == DIV || outer_code == UDIV
21788 || outer_code == MOD || outer_code == UMOD)
21789 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21790 || (outer_code == COMPARE
279bb624
DE
21791 && (satisfies_constraint_I (x)
21792 || satisfies_constraint_K (x)))
22e54023 21793 || (outer_code == EQ
279bb624
DE
21794 && (satisfies_constraint_I (x)
21795 || satisfies_constraint_K (x)
21796 || (mode == SImode
21797 ? satisfies_constraint_L (x)
21798 : satisfies_constraint_J (x))))
22e54023 21799 || (outer_code == GTU
279bb624 21800 && satisfies_constraint_I (x))
22e54023 21801 || (outer_code == LTU
279bb624 21802 && satisfies_constraint_P (x)))
066cd967
DE
21803 {
21804 *total = 0;
21805 return true;
21806 }
21807 else if ((outer_code == PLUS
4ae234b0 21808 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21809 || (outer_code == MINUS
4ae234b0 21810 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21811 || ((outer_code == SET
21812 || outer_code == IOR
21813 || outer_code == XOR)
21814 && (INTVAL (x)
21815 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21816 {
21817 *total = COSTS_N_INSNS (1);
21818 return true;
21819 }
21820 /* FALLTHRU */
21821
21822 case CONST_DOUBLE:
f6fe3a22 21823 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21824 {
f6fe3a22
DE
21825 if ((outer_code == IOR || outer_code == XOR)
21826 && CONST_DOUBLE_HIGH (x) == 0
21827 && (CONST_DOUBLE_LOW (x)
21828 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21829 {
21830 *total = 0;
21831 return true;
21832 }
21833 else if ((outer_code == AND && and64_2_operand (x, DImode))
21834 || ((outer_code == SET
21835 || outer_code == IOR
21836 || outer_code == XOR)
21837 && CONST_DOUBLE_HIGH (x) == 0))
21838 {
21839 *total = COSTS_N_INSNS (1);
21840 return true;
21841 }
066cd967
DE
21842 }
21843 /* FALLTHRU */
21844
3c50106f 21845 case CONST:
066cd967 21846 case HIGH:
3c50106f 21847 case SYMBOL_REF:
066cd967
DE
21848 case MEM:
21849 /* When optimizing for size, MEM should be slightly more expensive
 21850 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 21851 L1 cache latency is about two instructions. */
f40751dd 21852 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21853 return true;
21854
30a555d9
DE
21855 case LABEL_REF:
21856 *total = 0;
21857 return true;
21858
3c50106f 21859 case PLUS:
f0517163 21860 if (mode == DFmode)
066cd967
DE
21861 {
21862 if (GET_CODE (XEXP (x, 0)) == MULT)
21863 {
21864 /* FNMA accounted in outer NEG. */
21865 if (outer_code == NEG)
21866 *total = rs6000_cost->dmul - rs6000_cost->fp;
21867 else
21868 *total = rs6000_cost->dmul;
21869 }
21870 else
21871 *total = rs6000_cost->fp;
21872 }
f0517163 21873 else if (mode == SFmode)
066cd967
DE
21874 {
21875 /* FNMA accounted in outer NEG. */
21876 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21877 *total = 0;
21878 else
21879 *total = rs6000_cost->fp;
21880 }
f0517163 21881 else
066cd967
DE
21882 *total = COSTS_N_INSNS (1);
21883 return false;
3c50106f 21884
52190329 21885 case MINUS:
f0517163 21886 if (mode == DFmode)
066cd967 21887 {
762c919f
JM
21888 if (GET_CODE (XEXP (x, 0)) == MULT
21889 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21890 {
21891 /* FNMA accounted in outer NEG. */
21892 if (outer_code == NEG)
762c919f 21893 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21894 else
21895 *total = rs6000_cost->dmul;
21896 }
21897 else
21898 *total = rs6000_cost->fp;
21899 }
f0517163 21900 else if (mode == SFmode)
066cd967
DE
21901 {
21902 /* FNMA accounted in outer NEG. */
21903 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21904 *total = 0;
21905 else
21906 *total = rs6000_cost->fp;
21907 }
f0517163 21908 else
c4ad648e 21909 *total = COSTS_N_INSNS (1);
066cd967 21910 return false;
3c50106f
RH
21911
21912 case MULT:
c9dbf840 21913 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21914 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21915 {
8b897cfa
RS
21916 if (INTVAL (XEXP (x, 1)) >= -256
21917 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21918 *total = rs6000_cost->mulsi_const9;
8b897cfa 21919 else
06a67bdd 21920 *total = rs6000_cost->mulsi_const;
3c50106f 21921 }
066cd967
DE
21922 /* FMA accounted in outer PLUS/MINUS. */
21923 else if ((mode == DFmode || mode == SFmode)
21924 && (outer_code == PLUS || outer_code == MINUS))
21925 *total = 0;
f0517163 21926 else if (mode == DFmode)
06a67bdd 21927 *total = rs6000_cost->dmul;
f0517163 21928 else if (mode == SFmode)
06a67bdd 21929 *total = rs6000_cost->fp;
f0517163 21930 else if (mode == DImode)
06a67bdd 21931 *total = rs6000_cost->muldi;
8b897cfa 21932 else
06a67bdd 21933 *total = rs6000_cost->mulsi;
066cd967 21934 return false;
3c50106f
RH
21935
21936 case DIV:
21937 case MOD:
f0517163
RS
21938 if (FLOAT_MODE_P (mode))
21939 {
06a67bdd
RS
21940 *total = mode == DFmode ? rs6000_cost->ddiv
21941 : rs6000_cost->sdiv;
066cd967 21942 return false;
f0517163 21943 }
5efb1046 21944 /* FALLTHRU */
3c50106f
RH
21945
21946 case UDIV:
21947 case UMOD:
627b6fe2
DJ
21948 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21949 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21950 {
21951 if (code == DIV || code == MOD)
21952 /* Shift, addze */
21953 *total = COSTS_N_INSNS (2);
21954 else
21955 /* Shift */
21956 *total = COSTS_N_INSNS (1);
21957 }
c4ad648e 21958 else
627b6fe2
DJ
21959 {
21960 if (GET_MODE (XEXP (x, 1)) == DImode)
21961 *total = rs6000_cost->divdi;
21962 else
21963 *total = rs6000_cost->divsi;
21964 }
21965 /* Add in shift and subtract for MOD. */
21966 if (code == MOD || code == UMOD)
21967 *total += COSTS_N_INSNS (2);
066cd967 21968 return false;
3c50106f 21969
32f56aad 21970 case CTZ:
3c50106f
RH
21971 case FFS:
21972 *total = COSTS_N_INSNS (4);
066cd967 21973 return false;
3c50106f 21974
32f56aad
DE
21975 case POPCOUNT:
21976 *total = COSTS_N_INSNS (6);
21977 return false;
21978
06a67bdd 21979 case NOT:
066cd967
DE
21980 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21981 {
21982 *total = 0;
21983 return false;
21984 }
21985 /* FALLTHRU */
21986
21987 case AND:
32f56aad 21988 case CLZ:
066cd967
DE
21989 case IOR:
21990 case XOR:
d5861a7a
DE
21991 case ZERO_EXTRACT:
21992 *total = COSTS_N_INSNS (1);
21993 return false;
21994
066cd967
DE
21995 case ASHIFT:
21996 case ASHIFTRT:
21997 case LSHIFTRT:
21998 case ROTATE:
21999 case ROTATERT:
d5861a7a 22000 /* Handle mul_highpart. */
066cd967
DE
22001 if (outer_code == TRUNCATE
22002 && GET_CODE (XEXP (x, 0)) == MULT)
22003 {
22004 if (mode == DImode)
22005 *total = rs6000_cost->muldi;
22006 else
22007 *total = rs6000_cost->mulsi;
22008 return true;
22009 }
d5861a7a
DE
22010 else if (outer_code == AND)
22011 *total = 0;
22012 else
22013 *total = COSTS_N_INSNS (1);
22014 return false;
22015
22016 case SIGN_EXTEND:
22017 case ZERO_EXTEND:
22018 if (GET_CODE (XEXP (x, 0)) == MEM)
22019 *total = 0;
22020 else
22021 *total = COSTS_N_INSNS (1);
066cd967 22022 return false;
06a67bdd 22023
066cd967
DE
22024 case COMPARE:
22025 case NEG:
22026 case ABS:
22027 if (!FLOAT_MODE_P (mode))
22028 {
22029 *total = COSTS_N_INSNS (1);
22030 return false;
22031 }
22032 /* FALLTHRU */
22033
22034 case FLOAT:
22035 case UNSIGNED_FLOAT:
22036 case FIX:
22037 case UNSIGNED_FIX:
06a67bdd
RS
22038 case FLOAT_TRUNCATE:
22039 *total = rs6000_cost->fp;
066cd967 22040 return false;
06a67bdd 22041
a2af5043
DJ
22042 case FLOAT_EXTEND:
22043 if (mode == DFmode)
22044 *total = 0;
22045 else
22046 *total = rs6000_cost->fp;
22047 return false;
22048
06a67bdd
RS
22049 case UNSPEC:
22050 switch (XINT (x, 1))
22051 {
22052 case UNSPEC_FRSP:
22053 *total = rs6000_cost->fp;
22054 return true;
22055
22056 default:
22057 break;
22058 }
22059 break;
22060
22061 case CALL:
22062 case IF_THEN_ELSE:
f40751dd 22063 if (!speed)
06a67bdd
RS
22064 {
22065 *total = COSTS_N_INSNS (1);
22066 return true;
22067 }
066cd967
DE
22068 else if (FLOAT_MODE_P (mode)
22069 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22070 {
22071 *total = rs6000_cost->fp;
22072 return false;
22073 }
06a67bdd
RS
22074 break;
22075
c0600ecd
DE
22076 case EQ:
22077 case GTU:
22078 case LTU:
22e54023
DE
22079 /* Carry bit requires mode == Pmode.
22080 NEG or PLUS already counted so only add one. */
22081 if (mode == Pmode
22082 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22083 {
22e54023
DE
22084 *total = COSTS_N_INSNS (1);
22085 return true;
22086 }
22087 if (outer_code == SET)
22088 {
22089 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22090 {
22e54023 22091 *total = COSTS_N_INSNS (2);
c0600ecd 22092 return true;
c0600ecd 22093 }
22e54023
DE
22094 else if (mode == Pmode)
22095 {
22096 *total = COSTS_N_INSNS (3);
22097 return false;
22098 }
22099 }
22100 /* FALLTHRU */
22101
22102 case GT:
22103 case LT:
22104 case UNORDERED:
22105 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22106 {
22107 *total = COSTS_N_INSNS (2);
22108 return true;
c0600ecd 22109 }
22e54023
DE
22110 /* CC COMPARE. */
22111 if (outer_code == COMPARE)
22112 {
22113 *total = 0;
22114 return true;
22115 }
22116 break;
c0600ecd 22117
3c50106f 22118 default:
06a67bdd 22119 break;
3c50106f 22120 }
06a67bdd
RS
22121
22122 return false;
3c50106f
RH
22123}
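/* To make the CONST_INT handling above concrete: the constant 42 as
   the second operand of a PLUS satisfies constraint "I" (a signed
   16-bit immediate) and is costed as free, because it folds into a
   single addi; a constant such as 0x12345 under an outer SET satisfies
   neither "I" nor "L" and is charged COSTS_N_INSNS (1) by the second
   arm instead.  */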
22124
34bb030a
DE
22125/* A C expression returning the cost of moving data from a register of class
22126 CLASS1 to one of CLASS2. */
22127
22128int
f676971a 22129rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22130 enum reg_class from, enum reg_class to)
34bb030a
DE
22131{
22132 /* Moves from/to GENERAL_REGS. */
22133 if (reg_classes_intersect_p (to, GENERAL_REGS)
22134 || reg_classes_intersect_p (from, GENERAL_REGS))
22135 {
22136 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22137 from = to;
22138
22139 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22140 return (rs6000_memory_move_cost (mode, from, 0)
22141 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22142
c4ad648e
AM
22143 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22144 shift. */
34bb030a
DE
22145 else if (from == CR_REGS)
22146 return 4;
22147
aafc759a
PH
22148 /* Power6 has slower LR/CTR moves so make them more expensive than
 22149 memory in order to bias spills to memory.  */
22150 else if (rs6000_cpu == PROCESSOR_POWER6
22151 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22152 return 6 * hard_regno_nregs[0][mode];
22153
34bb030a 22154 else
c4ad648e 22155 /* A move will cost one instruction per GPR moved. */
c8b622ff 22156 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22157 }
22158
c4ad648e 22159 /* Moving between two similar registers is just one instruction. */
34bb030a 22160 else if (reg_classes_intersect_p (to, from))
7393f7f8 22161 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22162
c4ad648e 22163 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22164 else
f676971a 22165 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22166 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22167}
22168
 22169/* A C expression returning the cost of moving data of MODE from a register to
22170 or from memory. */
22171
22172int
0a2aaacc 22173rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22174 int in ATTRIBUTE_UNUSED)
34bb030a 22175{
0a2aaacc 22176 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22177 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22178 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22179 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22180 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22181 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22182 else
0a2aaacc 22183 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22184}
22185
9c78b944
DE
22186/* Returns a code for a target-specific builtin that implements
 22187 the reciprocal of the function, or NULL_TREE if not available.  */
22188
22189static tree
22190rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22191 bool sqrt ATTRIBUTE_UNUSED)
22192{
22193 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22194 && flag_finite_math_only && !flag_trapping_math
22195 && flag_unsafe_math_optimizations))
22196 return NULL_TREE;
22197
22198 if (md_fn)
22199 return NULL_TREE;
22200 else
22201 switch (fn)
22202 {
22203 case BUILT_IN_SQRTF:
22204 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22205
22206 default:
22207 return NULL_TREE;
22208 }
22209}
22210
ef765ea9
DE
22211/* Newton-Raphson approximation of single-precision floating point divide n/d.
22212 Assumes no trapping math and finite arguments. */
22213
22214void
9c78b944 22215rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22216{
22217 rtx x0, e0, e1, y1, u0, v0, one;
22218
22219 x0 = gen_reg_rtx (SFmode);
22220 e0 = gen_reg_rtx (SFmode);
22221 e1 = gen_reg_rtx (SFmode);
22222 y1 = gen_reg_rtx (SFmode);
22223 u0 = gen_reg_rtx (SFmode);
22224 v0 = gen_reg_rtx (SFmode);
22225 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22226
22227 /* x0 = 1./d estimate */
22228 emit_insn (gen_rtx_SET (VOIDmode, x0,
22229 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22230 UNSPEC_FRES)));
22231 /* e0 = 1. - d * x0 */
22232 emit_insn (gen_rtx_SET (VOIDmode, e0,
22233 gen_rtx_MINUS (SFmode, one,
22234 gen_rtx_MULT (SFmode, d, x0))));
22235 /* e1 = e0 + e0 * e0 */
22236 emit_insn (gen_rtx_SET (VOIDmode, e1,
22237 gen_rtx_PLUS (SFmode,
22238 gen_rtx_MULT (SFmode, e0, e0), e0)));
22239 /* y1 = x0 + e1 * x0 */
22240 emit_insn (gen_rtx_SET (VOIDmode, y1,
22241 gen_rtx_PLUS (SFmode,
22242 gen_rtx_MULT (SFmode, e1, x0), x0)));
22243 /* u0 = n * y1 */
22244 emit_insn (gen_rtx_SET (VOIDmode, u0,
22245 gen_rtx_MULT (SFmode, n, y1)));
22246 /* v0 = n - d * u0 */
22247 emit_insn (gen_rtx_SET (VOIDmode, v0,
22248 gen_rtx_MINUS (SFmode, n,
22249 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22250 /* dst = u0 + v0 * y1 */
22251 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22252 gen_rtx_PLUS (SFmode,
22253 gen_rtx_MULT (SFmode, v0, y1), u0)));
22254}
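/* A host-side sketch of the sequence emitted above, with plain C
   arithmetic standing in for the generated insns and an exact divide
   standing in for the hardware reciprocal estimate that seeds x0:

     float swdiv_sketch (float n, float d)
     {
       float x0 = 1.0f / d;         /* reciprocal estimate (fres)  */
       float e0 = 1.0f - d * x0;    /* error of the estimate       */
       float e1 = e0 + e0 * e0;     /* second-order error term     */
       float y1 = x0 + e1 * x0;     /* improved 1/d                */
       float u0 = n * y1;           /* quotient estimate           */
       float v0 = n - d * u0;       /* residual                    */
       return u0 + v0 * y1;         /* corrected quotient          */
     }
*/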
22255
22256/* Newton-Raphson approximation of double-precision floating point divide n/d.
22257 Assumes no trapping math and finite arguments. */
22258
22259void
9c78b944 22260rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22261{
22262 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22263
22264 x0 = gen_reg_rtx (DFmode);
22265 e0 = gen_reg_rtx (DFmode);
22266 e1 = gen_reg_rtx (DFmode);
22267 e2 = gen_reg_rtx (DFmode);
22268 y1 = gen_reg_rtx (DFmode);
22269 y2 = gen_reg_rtx (DFmode);
22270 y3 = gen_reg_rtx (DFmode);
22271 u0 = gen_reg_rtx (DFmode);
22272 v0 = gen_reg_rtx (DFmode);
22273 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22274
22275 /* x0 = 1./d estimate */
22276 emit_insn (gen_rtx_SET (VOIDmode, x0,
22277 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22278 UNSPEC_FRES)));
22279 /* e0 = 1. - d * x0 */
22280 emit_insn (gen_rtx_SET (VOIDmode, e0,
22281 gen_rtx_MINUS (DFmode, one,
 22282 gen_rtx_MULT (DFmode, d, x0))));
22283 /* y1 = x0 + e0 * x0 */
22284 emit_insn (gen_rtx_SET (VOIDmode, y1,
22285 gen_rtx_PLUS (DFmode,
22286 gen_rtx_MULT (DFmode, e0, x0), x0)));
22287 /* e1 = e0 * e0 */
22288 emit_insn (gen_rtx_SET (VOIDmode, e1,
22289 gen_rtx_MULT (DFmode, e0, e0)));
22290 /* y2 = y1 + e1 * y1 */
22291 emit_insn (gen_rtx_SET (VOIDmode, y2,
22292 gen_rtx_PLUS (DFmode,
22293 gen_rtx_MULT (DFmode, e1, y1), y1)));
22294 /* e2 = e1 * e1 */
22295 emit_insn (gen_rtx_SET (VOIDmode, e2,
22296 gen_rtx_MULT (DFmode, e1, e1)));
22297 /* y3 = y2 + e2 * y2 */
22298 emit_insn (gen_rtx_SET (VOIDmode, y3,
22299 gen_rtx_PLUS (DFmode,
22300 gen_rtx_MULT (DFmode, e2, y2), y2)));
22301 /* u0 = n * y3 */
22302 emit_insn (gen_rtx_SET (VOIDmode, u0,
22303 gen_rtx_MULT (DFmode, n, y3)));
22304 /* v0 = n - d * u0 */
22305 emit_insn (gen_rtx_SET (VOIDmode, v0,
22306 gen_rtx_MINUS (DFmode, n,
22307 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22308 /* dst = u0 + v0 * y3 */
22309 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22310 gen_rtx_PLUS (DFmode,
22311 gen_rtx_MULT (DFmode, v0, y3), u0)));
22312}
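/* Error analysis behind the two divide routines: let e = 1 - d*y be
   the residual of a reciprocal approximation y.  A refinement
   y' = y + e*y gives 1 - d*y' = 1 - (1 - e)*(1 + e) = e*e, so every
   step squares the residual.  Hence this DFmode routine squares the
   error term twice (e1 = e0^2, e2 = e1^2) and refines y three times,
   while the SFmode routine above makes do with the single combined
   correction e1 = e0 + e0*e0.  */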
22313
565ef4ba 22314
9c78b944
DE
22315/* Newton-Raphson approximation of single-precision floating point rsqrt.
22316 Assumes no trapping math and finite arguments. */
22317
22318void
22319rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22320{
22321 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22322 half, one, halfthree, c1, cond, label;
22323
22324 x0 = gen_reg_rtx (SFmode);
22325 x1 = gen_reg_rtx (SFmode);
22326 x2 = gen_reg_rtx (SFmode);
22327 y1 = gen_reg_rtx (SFmode);
22328 u0 = gen_reg_rtx (SFmode);
22329 u1 = gen_reg_rtx (SFmode);
22330 u2 = gen_reg_rtx (SFmode);
22331 v0 = gen_reg_rtx (SFmode);
22332 v1 = gen_reg_rtx (SFmode);
22333 v2 = gen_reg_rtx (SFmode);
22334 t0 = gen_reg_rtx (SFmode);
22335 halfthree = gen_reg_rtx (SFmode);
22336 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22337 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22338
 22339 /* Check for 0.0, 1.0, NaN and Inf by testing src * src == src.  */
22340 emit_insn (gen_rtx_SET (VOIDmode, t0,
22341 gen_rtx_MULT (SFmode, src, src)));
22342
22343 emit_insn (gen_rtx_SET (VOIDmode, cond,
22344 gen_rtx_COMPARE (CCFPmode, t0, src)));
22345 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22346 emit_unlikely_jump (c1, label);
22347
22348 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22349 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22350
22351 /* halfthree = 1.5 = 1.0 + 0.5 */
22352 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22353 gen_rtx_PLUS (SFmode, one, half)));
22354
22355 /* x0 = rsqrt estimate */
22356 emit_insn (gen_rtx_SET (VOIDmode, x0,
22357 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22358 UNSPEC_RSQRT)));
22359
22360 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22361 emit_insn (gen_rtx_SET (VOIDmode, y1,
22362 gen_rtx_MINUS (SFmode,
22363 gen_rtx_MULT (SFmode, src, halfthree),
22364 src)));
22365
22366 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22367 emit_insn (gen_rtx_SET (VOIDmode, u0,
22368 gen_rtx_MULT (SFmode, x0, x0)));
22369 emit_insn (gen_rtx_SET (VOIDmode, v0,
22370 gen_rtx_MINUS (SFmode,
22371 halfthree,
22372 gen_rtx_MULT (SFmode, y1, u0))));
22373 emit_insn (gen_rtx_SET (VOIDmode, x1,
22374 gen_rtx_MULT (SFmode, x0, v0)));
22375
22376 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22377 emit_insn (gen_rtx_SET (VOIDmode, u1,
22378 gen_rtx_MULT (SFmode, x1, x1)));
22379 emit_insn (gen_rtx_SET (VOIDmode, v1,
22380 gen_rtx_MINUS (SFmode,
22381 halfthree,
22382 gen_rtx_MULT (SFmode, y1, u1))));
22383 emit_insn (gen_rtx_SET (VOIDmode, x2,
22384 gen_rtx_MULT (SFmode, x1, v1)));
22385
22386 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22387 emit_insn (gen_rtx_SET (VOIDmode, u2,
22388 gen_rtx_MULT (SFmode, x2, x2)));
22389 emit_insn (gen_rtx_SET (VOIDmode, v2,
22390 gen_rtx_MINUS (SFmode,
22391 halfthree,
22392 gen_rtx_MULT (SFmode, y1, u2))));
22393 emit_insn (gen_rtx_SET (VOIDmode, dst,
22394 gen_rtx_MULT (SFmode, x2, v2)));
22395
22396 emit_label (XEXP (label, 0));
22397}
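/* The refinement used above is the Newton step for f(x) = 1/(x*x) - s,
   namely x' = x*(3 - s*x*x)/2, rewritten as x' = x*(1.5 - y1*x*x) with
   y1 = 0.5*s computed once up front (as 1.5*s - s, reusing the
   halfthree constant).  Three such steps are applied to the hardware
   reciprocal square root estimate (UNSPEC_RSQRT), and the early-out
   branch skips them entirely when src*src compares equal to src.  */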
22398
565ef4ba
RS
22399/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22400 target, and SRC is the argument operand. */
22401
22402void
22403rs6000_emit_popcount (rtx dst, rtx src)
22404{
22405 enum machine_mode mode = GET_MODE (dst);
22406 rtx tmp1, tmp2;
22407
22408 tmp1 = gen_reg_rtx (mode);
22409
22410 if (mode == SImode)
22411 {
22412 emit_insn (gen_popcntbsi2 (tmp1, src));
22413 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22414 NULL_RTX, 0);
22415 tmp2 = force_reg (SImode, tmp2);
22416 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22417 }
22418 else
22419 {
22420 emit_insn (gen_popcntbdi2 (tmp1, src));
22421 tmp2 = expand_mult (DImode, tmp1,
22422 GEN_INT ((HOST_WIDE_INT)
22423 0x01010101 << 32 | 0x01010101),
22424 NULL_RTX, 0);
22425 tmp2 = force_reg (DImode, tmp2);
22426 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22427 }
22428}
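/* What the SImode path above computes, in plain C terms (with
   __builtin_popcount standing in for the per-byte counts that popcntb
   delivers):

     unsigned int popcount_sketch (unsigned int x)
     {
       unsigned int bytes = 0, i;
       for (i = 0; i < 4; i++)      /* popcntb: a count per byte  */
         bytes |= (unsigned int)
                  __builtin_popcount ((x >> (8 * i)) & 0xff) << (8 * i);
       /* Multiplying by 0x01010101 sums all four byte counts into the
          most significant byte; the shift extracts that sum.  */
       return (bytes * 0x01010101u) >> 24;
     }
*/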
22429
22430
22431/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22432 target, and SRC is the argument operand. */
22433
22434void
22435rs6000_emit_parity (rtx dst, rtx src)
22436{
22437 enum machine_mode mode = GET_MODE (dst);
22438 rtx tmp;
22439
22440 tmp = gen_reg_rtx (mode);
22441 if (mode == SImode)
22442 {
22443 /* Is mult+shift >= shift+xor+shift+xor? */
22444 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22445 {
22446 rtx tmp1, tmp2, tmp3, tmp4;
22447
22448 tmp1 = gen_reg_rtx (SImode);
22449 emit_insn (gen_popcntbsi2 (tmp1, src));
22450
22451 tmp2 = gen_reg_rtx (SImode);
22452 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22453 tmp3 = gen_reg_rtx (SImode);
22454 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22455
22456 tmp4 = gen_reg_rtx (SImode);
22457 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22458 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22459 }
22460 else
22461 rs6000_emit_popcount (tmp, src);
22462 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22463 }
22464 else
22465 {
22466 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22467 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22468 {
22469 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22470
22471 tmp1 = gen_reg_rtx (DImode);
22472 emit_insn (gen_popcntbdi2 (tmp1, src));
22473
22474 tmp2 = gen_reg_rtx (DImode);
22475 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22476 tmp3 = gen_reg_rtx (DImode);
22477 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22478
22479 tmp4 = gen_reg_rtx (DImode);
22480 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22481 tmp5 = gen_reg_rtx (DImode);
22482 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22483
22484 tmp6 = gen_reg_rtx (DImode);
22485 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22486 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22487 }
22488 else
22489 rs6000_emit_popcount (tmp, src);
22490 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22491 }
22492}
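/* When the multiply is costly, the SImode path above instead folds the
   per-byte popcntb counts with two shift/xor steps; xor adds the low
   bits modulo two, so after

     bytes ^= bytes >> 16;   byte0 ^= byte2, byte1 ^= byte3
     bytes ^= bytes >> 8;    byte0 ^= byte1

   the low bit of the low byte is the parity of the whole word, which
   the final AND with 1 extracts.  */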
22493
ded9bf77
AH
22494/* Return an RTX representing where to find the function value of a
22495 function returning MODE. */
22496static rtx
22497rs6000_complex_function_value (enum machine_mode mode)
22498{
22499 unsigned int regno;
22500 rtx r1, r2;
22501 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22502 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22503
18f63bfa
AH
22504 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22505 regno = FP_ARG_RETURN;
354ed18f
AH
22506 else
22507 {
18f63bfa 22508 regno = GP_ARG_RETURN;
ded9bf77 22509
18f63bfa
AH
22510 /* 32-bit is OK since it'll go in r3/r4. */
22511 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22512 return gen_rtx_REG (mode, regno);
22513 }
22514
18f63bfa
AH
22515 if (inner_bytes >= 8)
22516 return gen_rtx_REG (mode, regno);
22517
ded9bf77
AH
22518 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22519 const0_rtx);
22520 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22521 GEN_INT (inner_bytes));
ded9bf77
AH
22522 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22523}
22524
a6ebc39a
AH
22525/* Define how to find the value returned by a function.
22526 VALTYPE is the data type of the value (as a tree).
22527 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22528 otherwise, FUNC is 0.
22529
22530 On the SPE, both FPs and vectors are returned in r3.
22531
22532 On RS/6000 an integer value is in r3 and a floating-point value is in
22533 fp1, unless -msoft-float. */
22534
22535rtx
586de218 22536rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22537{
22538 enum machine_mode mode;
2a8fa26c 22539 unsigned int regno;
a6ebc39a 22540
594a51fe
SS
22541 /* Special handling for structs in darwin64. */
22542 if (rs6000_darwin64_abi
22543 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22544 && TREE_CODE (valtype) == RECORD_TYPE
22545 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22546 {
22547 CUMULATIVE_ARGS valcum;
22548 rtx valret;
22549
0b5383eb 22550 valcum.words = 0;
594a51fe
SS
22551 valcum.fregno = FP_ARG_MIN_REG;
22552 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22553 /* Do a trial code generation as if this were going to be passed as
22554 an argument; if any part goes in memory, we return NULL. */
22555 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22556 if (valret)
22557 return valret;
22558 /* Otherwise fall through to standard ABI rules. */
22559 }
22560
0e67400a
FJ
22561 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22562 {
 22563 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64.  */
22564 return gen_rtx_PARALLEL (DImode,
22565 gen_rtvec (2,
22566 gen_rtx_EXPR_LIST (VOIDmode,
22567 gen_rtx_REG (SImode, GP_ARG_RETURN),
22568 const0_rtx),
22569 gen_rtx_EXPR_LIST (VOIDmode,
22570 gen_rtx_REG (SImode,
22571 GP_ARG_RETURN + 1),
22572 GEN_INT (4))));
22573 }
0f086e42
FJ
22574 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22575 {
22576 return gen_rtx_PARALLEL (DCmode,
22577 gen_rtvec (4,
22578 gen_rtx_EXPR_LIST (VOIDmode,
22579 gen_rtx_REG (SImode, GP_ARG_RETURN),
22580 const0_rtx),
22581 gen_rtx_EXPR_LIST (VOIDmode,
22582 gen_rtx_REG (SImode,
22583 GP_ARG_RETURN + 1),
22584 GEN_INT (4)),
22585 gen_rtx_EXPR_LIST (VOIDmode,
22586 gen_rtx_REG (SImode,
22587 GP_ARG_RETURN + 2),
22588 GEN_INT (8)),
22589 gen_rtx_EXPR_LIST (VOIDmode,
22590 gen_rtx_REG (SImode,
22591 GP_ARG_RETURN + 3),
22592 GEN_INT (12))));
22593 }
602ea4d3 22594
7348aa7f
FXC
22595 mode = TYPE_MODE (valtype);
22596 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22597 || POINTER_TYPE_P (valtype))
b78d48dd 22598 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22599
e41b2a33
PB
22600 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22601 /* _Decimal128 must use an even/odd register pair. */
22602 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
2c5cac98
ME
22603 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS
22604 && ((TARGET_SINGLE_FLOAT && (mode == SFmode)) || TARGET_DOUBLE_FLOAT))
2a8fa26c 22605 regno = FP_ARG_RETURN;
ded9bf77 22606 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22607 && targetm.calls.split_complex_arg)
ded9bf77 22608 return rs6000_complex_function_value (mode);
44688022 22609 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22610 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22611 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22612 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22613 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22614 && (mode == DFmode || mode == DCmode
22615 || mode == TFmode || mode == TCmode))
18f63bfa 22616 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22617 else
22618 regno = GP_ARG_RETURN;
22619
22620 return gen_rtx_REG (mode, regno);
22621}
22622
ded9bf77
AH
22623/* Define how to find the value returned by a library function
22624 assuming the value has mode MODE. */
22625rtx
22626rs6000_libcall_value (enum machine_mode mode)
22627{
22628 unsigned int regno;
22629
2e6c9641
FJ
22630 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22631 {
 22632 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64.  */
22633 return gen_rtx_PARALLEL (DImode,
22634 gen_rtvec (2,
22635 gen_rtx_EXPR_LIST (VOIDmode,
22636 gen_rtx_REG (SImode, GP_ARG_RETURN),
22637 const0_rtx),
22638 gen_rtx_EXPR_LIST (VOIDmode,
22639 gen_rtx_REG (SImode,
22640 GP_ARG_RETURN + 1),
22641 GEN_INT (4))));
22642 }
22643
e41b2a33
PB
22644 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22645 /* _Decimal128 must use an even/odd register pair. */
22646 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22647 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
22648 && TARGET_HARD_FLOAT && TARGET_FPRS)
22649 regno = FP_ARG_RETURN;
44688022
AM
22650 else if (ALTIVEC_VECTOR_MODE (mode)
22651 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22652 regno = ALTIVEC_ARG_RETURN;
42ba5130 22653 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22654 return rs6000_complex_function_value (mode);
18f63bfa 22655 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22656 && (mode == DFmode || mode == DCmode
22657 || mode == TFmode || mode == TCmode))
18f63bfa 22658 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22659 else
22660 regno = GP_ARG_RETURN;
22661
22662 return gen_rtx_REG (mode, regno);
22663}
22664
d1d0c603
JJ
22665/* Define the offset between two registers, FROM to be eliminated and its
22666 replacement TO, at the start of a routine. */
22667HOST_WIDE_INT
22668rs6000_initial_elimination_offset (int from, int to)
22669{
22670 rs6000_stack_t *info = rs6000_stack_info ();
22671 HOST_WIDE_INT offset;
22672
7d5175e1 22673 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22674 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22675 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22676 {
22677 offset = info->push_p ? 0 : -info->total_size;
22678 if (FRAME_GROWS_DOWNWARD)
5b667039 22679 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22680 }
22681 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22682 offset = FRAME_GROWS_DOWNWARD
5b667039 22683 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22684 : 0;
22685 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22686 offset = info->total_size;
22687 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22688 offset = info->push_p ? info->total_size : 0;
22689 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22690 offset = 0;
22691 else
37409796 22692 gcc_unreachable ();
d1d0c603
JJ
22693
22694 return offset;
22695}
22696
58646b77 22697/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 22698
c8e4f0e9 22699static bool
3101faab 22700rs6000_is_opaque_type (const_tree type)
62e1dfcf 22701{
58646b77 22702 return (type == opaque_V2SI_type_node
2abe3e28 22703 || type == opaque_V2SF_type_node
58646b77 22704 || type == opaque_V4SI_type_node);
62e1dfcf
NC
22705}
22706
96714395 22707static rtx
a2369ed3 22708rs6000_dwarf_register_span (rtx reg)
96714395 22709{
6cd1d2e2
DJ
22710 rtx parts[8];
22711 int i, words;
22712 unsigned regno = REGNO (reg);
22713 enum machine_mode mode = GET_MODE (reg);
96714395 22714
4d4cbc0e 22715 if (TARGET_SPE
6cd1d2e2 22716 && regno < 32
4d4cbc0e 22717 && (SPE_VECTOR_MODE (GET_MODE (reg))
6cd1d2e2
DJ
22718 || (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode)
22719 && mode != SFmode && mode != SDmode && mode != SCmode)))
4d4cbc0e
AH
22720 ;
22721 else
96714395
AH
22722 return NULL_RTX;
22723
22724 regno = REGNO (reg);
22725
22726 /* The duality of the SPE register size wreaks all kinds of havoc.
22727 This is a way of distinguishing r0 in 32-bits from r0 in
22728 64-bits. */
6cd1d2e2
DJ
22729 words = (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
22730 gcc_assert (words <= 4);
22731 for (i = 0; i < words; i++, regno++)
22732 {
22733 if (BYTES_BIG_ENDIAN)
22734 {
22735 parts[2 * i] = gen_rtx_REG (SImode, regno + 1200);
22736 parts[2 * i + 1] = gen_rtx_REG (SImode, regno);
22737 }
22738 else
22739 {
22740 parts[2 * i] = gen_rtx_REG (SImode, regno);
22741 parts[2 * i + 1] = gen_rtx_REG (SImode, regno + 1200);
22742 }
22743 }
22744
22745 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (words * 2, parts));
96714395
AH
22746}
22747
37ea0b7e
JM
22748/* Fill in sizes for SPE register high parts in table used by unwinder. */
22749
22750static void
22751rs6000_init_dwarf_reg_sizes_extra (tree address)
22752{
22753 if (TARGET_SPE)
22754 {
22755 int i;
22756 enum machine_mode mode = TYPE_MODE (char_type_node);
22757 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
22758 rtx mem = gen_rtx_MEM (BLKmode, addr);
22759 rtx value = gen_int_mode (4, mode);
22760
22761 for (i = 1201; i < 1232; i++)
22762 {
22763 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22764 HOST_WIDE_INT offset
22765 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22766
22767 emit_move_insn (adjust_address (mem, mode, offset), value);
22768 }
22769 }
22770}
22771
93c9d1ba
AM
22772/* Map internal gcc register numbers to DWARF2 register numbers. */
22773
22774unsigned int
22775rs6000_dbx_register_number (unsigned int regno)
22776{
22777 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22778 return regno;
22779 if (regno == MQ_REGNO)
22780 return 100;
1de43f85 22781 if (regno == LR_REGNO)
93c9d1ba 22782 return 108;
1de43f85 22783 if (regno == CTR_REGNO)
93c9d1ba
AM
22784 return 109;
22785 if (CR_REGNO_P (regno))
22786 return regno - CR0_REGNO + 86;
22787 if (regno == XER_REGNO)
22788 return 101;
22789 if (ALTIVEC_REGNO_P (regno))
22790 return regno - FIRST_ALTIVEC_REGNO + 1124;
22791 if (regno == VRSAVE_REGNO)
22792 return 356;
22793 if (regno == VSCR_REGNO)
22794 return 67;
22795 if (regno == SPE_ACC_REGNO)
22796 return 99;
22797 if (regno == SPEFSCR_REGNO)
22798 return 612;
22799 /* SPE high reg number. We get these values of regno from
22800 rs6000_dwarf_register_span. */
37409796
NS
22801 gcc_assert (regno >= 1200 && regno < 1232);
22802 return regno;
93c9d1ba
AM
22803}
22804
93f90be6 22805/* Target hook for eh_return_filter_mode.  */
f676971a 22806static enum machine_mode
93f90be6
FJ
22807rs6000_eh_return_filter_mode (void)
22808{
22809 return TARGET_32BIT ? SImode : word_mode;
22810}
22811
00b79d54
BE
22812/* Target hook for scalar_mode_supported_p. */
22813static bool
22814rs6000_scalar_mode_supported_p (enum machine_mode mode)
22815{
22816 if (DECIMAL_FLOAT_MODE_P (mode))
22817 return true;
22818 else
22819 return default_scalar_mode_supported_p (mode);
22820}
22821
f676971a
EC
22822/* Target hook for vector_mode_supported_p. */
22823static bool
22824rs6000_vector_mode_supported_p (enum machine_mode mode)
22825{
22826
96038623
DE
22827 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22828 return true;
22829
f676971a
EC
22830 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22831 return true;
22832
22833 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22834 return true;
22835
22836 else
22837 return false;
22838}
22839
bb8df8a6
EC
22840/* Target hook for invalid_arg_for_unprototyped_fn. */
22841static const char *
3101faab 22842invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22843{
22844 return (!rs6000_darwin64_abi
22845 && typelist == 0
22846 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22847 && (funcdecl == NULL_TREE
22848 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22849 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22850 ? N_("AltiVec argument passed to unprototyped function")
22851 : NULL;
22852}
22853
3aebbe5f
JJ
22854/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22855 setup by using __stack_chk_fail_local hidden function instead of
22856 calling __stack_chk_fail directly. Otherwise it is better to call
22857 __stack_chk_fail directly. */
22858
22859static tree
22860rs6000_stack_protect_fail (void)
22861{
22862 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22863 ? default_hidden_stack_protect_fail ()
22864 : default_external_stack_protect_fail ();
22865}
22866
c921bad8
AP
22867void
22868rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
22869 int num_operands ATTRIBUTE_UNUSED)
22870{
22871 if (rs6000_warn_cell_microcode)
22872 {
22873 const char *temp;
22874 int insn_code_number = recog_memoized (insn);
22875 location_t location = locator_location (INSN_LOCATOR (insn));
22876
22877 /* Punt on insns we cannot recognize. */
22878 if (insn_code_number < 0)
22879 return;
22880
22881 temp = get_insn_template (insn_code_number, insn);
22882
22883 if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
22884 warning_at (location, OPT_mwarn_cell_microcode,
22885 "emitting microcode insn %s\t[%s] #%d",
22886 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22887 else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
22888 warning_at (location, OPT_mwarn_cell_microcode,
22889 "emitting conditional microcode insn %s\t[%s] #%d",
22890 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22891 }
22892}
22893
17211ab5 22894#include "gt-rs6000.h"